From 4cfc0c402376cde0346e66fe7efc23091fd6c740 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Tue, 8 Apr 2025 15:56:10 +0530 Subject: [PATCH 001/884] this is for testing purpose --- mmv1/products/compute/Interconnect.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index e5c34860a1e1..561464471a34 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -411,3 +411,10 @@ properties: output: true item_type: type: String + - name: 'testing' + type: String + description: | + This is for learning purposes. + output: true + item_type: + type: String From 573771dbab419195b588e8b6eb685662ef28b343 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Tue, 15 Apr 2025 11:13:38 +0530 Subject: [PATCH 002/884] Revert not required changes. --- mmv1/products/compute/Interconnect.yaml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index 561464471a34..e5c34860a1e1 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -411,10 +411,3 @@ properties: output: true item_type: type: String - - name: 'testing' - type: String - description: | - This is for learning purposes. 
- output: true - item_type: - type: String From 3a19d4c1228d1ebd9418e56e71d60286cc6eca68 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:41:07 +0530 Subject: [PATCH 003/884] testing changes --- mmv1/products/compute/CrossSiteNetwork.yaml | 53 ++++++++++++ .../compute_cross_site_network_basic.tf.tmpl | 13 +++ ...ce_compute_cross_site_network_test.go.tmpl | 86 +++++++++++++++++++ 3 files changed, 152 insertions(+) create mode 100644 mmv1/products/compute/CrossSiteNetwork.yaml create mode 100644 mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml new file mode 100644 index 000000000000..d3ac47e10c11 --- /dev/null +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -0,0 +1,53 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'CrossSiteNetwork' +kind: 'compute/crossSiteNetwork' +description: | + Represents a cross-site-network resource.A CrossSiteNetwork is used to establish L2 connectivity between groups of Interconnects. 
+min_version: beta +references: + guides: + 'Create a Cross-Site Interconnect': 'https://cloud.google.com/network-connectivity/docs/interconnect/how-to/cross-site/create-network' + api: 'https://cloud.google.com/compute/docs/reference/rest/beta/crossSiteNetworks' +docs: +base_url: 'projects/{project}/global/crossSiteNetworks' +self_link: 'projects/{project}/global/crossSiteNetworks/{crossSiteNetwork}' +update_verb: 'PATCH' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +examples: + - name: 'compute_cross_site_network_basic' + primary_resource_id: 'example-cross-site-network' + vars: + name: 'test-cross-site-network' + description: 'Example cross site network' + min_version: 'beta' + test_env_vars: + project: 'PROJECT_NAME' +parameters: +properties: + - name: 'description' + type: String + description: | + An optional description of this resource. Provide this property when you create the resource. 
diff --git a/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl new file mode 100644 index 000000000000..a2e1064a19a4 --- /dev/null +++ b/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl @@ -0,0 +1,13 @@ +locals { + project = "{{index $.Vars "project"}}" # Google Cloud Platform Project ID +} + +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_cross_site_network" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "name"}}" + description = "{{index $.Vars "description"}}" + provider = google-beta +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl new file mode 100644 index 000000000000..423b0f1f6fe8 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl @@ -0,0 +1,86 @@ +package compute_test +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccComputeCrossSiteNetwork_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeCrossSiteNetworkDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeCrossSiteNetwork_basic(context), + }, + { + ResourceName: 
"google_compute_cross_site_network.example-cross-site-network", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeCrossSiteNetwork_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_cross_site_network.example-cross-site-network", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_cross_site_network.example-cross-site-network", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeCrossSiteNetwork_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +locals { + project = "" # Google Cloud Platform Project ID +} + +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-test-cross-site-network%{random_suffix}" + description = "Example cross site network%{random_suffix}" + provider = google-beta +} +`, context) +} + +func testAccComputeCrossSiteNetwork_update(context map[string]interface{}) string { + return acctest.Nprintf(` +locals { + project = "" # Google Cloud Platform Project ID +} + +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-test-cross-site-network%{random_suffix}" + description = "Example cross site network updated%{random_suffix}" + provider = google-beta +} +`, context) +} +{{- end }} \ No newline at end of file From 34f0e81af0c4f170a1b2b07aa2a09772f5fae127 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Thu, 24 Apr 2025 11:42:55 +0530 Subject: [PATCH 004/884] Testing changes --- mmv1/products/compute/CrossSiteNetwork.yaml | 21 ++++++-- .../cross_site_network.go.tmpl | 23 +++++++++ .../custom_delete/cross_site_network.go.tmpl | 48 +++++++++++++++++++ 
...ce_compute_cross_site_network_test.go.tmpl | 15 +++--- 4 files changed, 95 insertions(+), 12 deletions(-) create mode 100644 mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl create mode 100644 mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml index d3ac47e10c11..21bed5e7ca4d 100644 --- a/mmv1/products/compute/CrossSiteNetwork.yaml +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -13,7 +13,7 @@ --- name: 'CrossSiteNetwork' -kind: 'compute/crossSiteNetwork' +# kind: 'compute/crossSiteNetwork' description: | Represents a cross-site-network resource.A CrossSiteNetwork is used to establish L2 connectivity between groups of Interconnects. min_version: beta @@ -22,9 +22,12 @@ references: 'Create a Cross-Site Interconnect': 'https://cloud.google.com/network-connectivity/docs/interconnect/how-to/cross-site/create-network' api: 'https://cloud.google.com/compute/docs/reference/rest/beta/crossSiteNetworks' docs: -base_url: 'projects/{project}/global/crossSiteNetworks' -self_link: 'projects/{project}/global/crossSiteNetworks/{crossSiteNetwork}' +base_url: 'projects/{{project}}/global/crossSiteNetworks' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}' update_verb: 'PATCH' +custom_code: + custom_delete: templates/terraform/custom_delete/cross_site_network.go.tmpl + test_check_destroy: templates/terraform/custom_check_destroy/cross_site_network.go.tmpl timeouts: insert_minutes: 20 update_minutes: 20 @@ -47,6 +50,18 @@ examples: project: 'PROJECT_NAME' parameters: properties: + - name: 'name' + type: String + description: | + Name of the resource. Provided by the client when the resource is created. The name must be + 1-63 characters long, and comply with RFC1035. 
Specifically, the name must be 1-63 characters + long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + character must be a lowercase letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, which cannot be a dash. + required: true + immutable: true + validation: + regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - name: 'description' type: String description: | diff --git a/mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl new file mode 100644 index 000000000000..901b6a90bc0d --- /dev/null +++ b/mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl @@ -0,0 +1,23 @@ + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/global/crossSiteNetworks") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("ComputeCrossSiteNetwork still exists at %s", url) + } \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl b/mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl new file mode 100644 index 000000000000..6ae8ec5fdc84 --- /dev/null +++ b/mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl @@ -0,0 +1,48 @@ + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for CrossSiteNetwork: %s", err) + } + billingProject = project + + url, err := tpgresource.ReplaceVars(d, config, 
"{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/global/crossSiteNetworks/{{"{{"}}crossSiteNetworks{{"}}"}}") + if err != nil { + return err + } + + var obj map[string]interface{} + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + + log.Printf("[DEBUG] Deleting CrossSiteNetwork %q", d.Id()) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutDelete), + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "CrossSiteNetwork") + } + + err = ComputeOperationWaitTime( + config, res, project, "Deleting CrossSiteNetwork", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return err + } + + log.Printf("[DEBUG] Finished deleting CrossSiteNetwork %q: %#v", d.Id(), res) + return nil \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl index 423b0f1f6fe8..b97c62c32559 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl @@ -4,7 +4,6 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" @@ -33,11 +32,9 @@ func TestAccComputeCrossSiteNetwork_update(t *testing.T) { }, { Config: testAccComputeCrossSiteNetwork_update(context), - ConfigPlanChecks: 
resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_cross_site_network.example-cross-site-network", plancheck.ResourceActionUpdate), - }, - }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_cross_site_network.example-cross-site-network", "description", "Example cross site network updated"+context["random_suffix"].(string)), + ), }, { ResourceName: "google_compute_cross_site_network.example-cross-site-network", @@ -59,7 +56,7 @@ data "google_project" "project" { } resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-test-cross-site-network%{random_suffix}" + name = "tf-test-test-cross-site-network%{random_suffix}" description = "Example cross site network%{random_suffix}" provider = google-beta } @@ -77,8 +74,8 @@ data "google_project" "project" { } resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-test-cross-site-network%{random_suffix}" - description = "Example cross site network updated%{random_suffix}" +name = "tf-test-test-cross-site-network%{random_suffix}" +description = "Example cross site network updated%{random_suffix}" provider = google-beta } `, context) From 15dcea3765884f940d909da005a2b3f5e23504d2 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Fri, 25 Apr 2025 14:13:02 +0530 Subject: [PATCH 005/884] Cross-Site Network --- mmv1/products/compute/CrossSiteNetwork.yaml | 5 +---- .../examples/compute_cross_site_network_basic.tf.tmpl | 4 ---- .../resource_compute_cross_site_network_test.go.tmpl | 8 -------- 3 files changed, 1 insertion(+), 16 deletions(-) diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml index 21bed5e7ca4d..57a4b758a5d3 100644 --- a/mmv1/products/compute/CrossSiteNetwork.yaml +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -23,11 +23,8 @@ 
references: api: 'https://cloud.google.com/compute/docs/reference/rest/beta/crossSiteNetworks' docs: base_url: 'projects/{{project}}/global/crossSiteNetworks' -self_link: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{name}}' update_verb: 'PATCH' -custom_code: - custom_delete: templates/terraform/custom_delete/cross_site_network.go.tmpl - test_check_destroy: templates/terraform/custom_check_destroy/cross_site_network.go.tmpl timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl index a2e1064a19a4..6860582a71c4 100644 --- a/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_cross_site_network_basic.tf.tmpl @@ -1,7 +1,3 @@ -locals { - project = "{{index $.Vars "project"}}" # Google Cloud Platform Project ID -} - data "google_project" "project" { provider = google-beta } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl index b97c62c32559..f2f79da4fcb0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_cross_site_network_test.go.tmpl @@ -47,10 +47,6 @@ func TestAccComputeCrossSiteNetwork_update(t *testing.T) { func testAccComputeCrossSiteNetwork_basic(context map[string]interface{}) string { return acctest.Nprintf(` -locals { - project = "" # Google Cloud Platform Project ID -} - data "google_project" "project" { provider = google-beta } @@ -65,10 +61,6 @@ resource "google_compute_cross_site_network" "example-cross-site-network" { func testAccComputeCrossSiteNetwork_update(context 
map[string]interface{}) string { return acctest.Nprintf(` -locals { - project = "" # Google Cloud Platform Project ID -} - data "google_project" "project" { provider = google-beta } From 8cbc84e44250b530c59b336210dc30222bae8c50 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Mon, 5 May 2025 09:41:36 +0530 Subject: [PATCH 006/884] Nit Changes. --- mmv1/products/compute/CrossSiteNetwork.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml index 57a4b758a5d3..169acbf30d85 100644 --- a/mmv1/products/compute/CrossSiteNetwork.yaml +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -15,7 +15,7 @@ name: 'CrossSiteNetwork' # kind: 'compute/crossSiteNetwork' description: | - Represents a cross-site-network resource.A CrossSiteNetwork is used to establish L2 connectivity between groups of Interconnects. + Represents a cross-site-network resource. A CrossSiteNetwork is used to establish L2 connectivity between groups of Interconnects. min_version: beta references: guides: @@ -62,4 +62,4 @@ properties: - name: 'description' type: String description: | - An optional description of this resource. Provide this property when you create the resource. + An optional description of this resource. From da83ea7502529cfb0c09a481a6d1d87bf81b5d98 Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Wed, 14 May 2025 04:07:44 +0000 Subject: [PATCH 007/884] Add Terraform Support for WireGroups. 
--- mmv1/products/compute/WireGroups.yaml | 178 ++++++++++++++++++ .../examples/compute_wire_group_basic.tf.tmpl | 15 ++ .../resource_compute_wire_group_test.go.tmpl | 106 +++++++++++ 3 files changed, 299 insertions(+) create mode 100644 mmv1/products/compute/WireGroups.yaml create mode 100644 mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl diff --git a/mmv1/products/compute/WireGroups.yaml b/mmv1/products/compute/WireGroups.yaml new file mode 100644 index 000000000000..c9a7cdbcaacd --- /dev/null +++ b/mmv1/products/compute/WireGroups.yaml @@ -0,0 +1,178 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WireGroup' +kind: 'compute#wireGroup' +description: | + The WireGroup resource represents a group of redundant wires between interconnects in two different metros. Each WireGroup belongs to a CrossSiteNetwork. A wire group defines endpoints and the wires which exist between them. 
+ +references: + guides: + 'Create a WireGroup': 'https://cloud.google.com/network-connectivity/docs/interconnect/how-to/cross-site/modify-network#add-wire-group' + api: 'https://cloud.google.com/compute/docs/reference/rest/beta/wireGroups' +min_version: beta +docs: +id_format: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups/{{name}}' +base_url: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups/{{name}}' +update_verb: 'PATCH' +import_format: + - 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups/{{name}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +examples: + - name: 'compute_wire_group_basic' + primary_resource_id: 'example-test-wire-group' + vars: + name: 'test-wire-group' + description: 'Example Wire Group' + min_version: 'beta' + test_env_vars: + project: 'PROJECT_NAME' + cross_site_network: 'test-cross-site-network' +parameters: + - name: 'crossSiteNetwork' + type: ResourceRef + description: Required cross site network to which wire group belongs. + required: true + immutable: true + resource: 'CrossSiteNetwork' + imports: 'name' + diff_suppress_func: 'tpgresource.CompareResourceNames' + custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' +properties: + - name: 'description' + type: String + description: | + An optional description of this resource. Provide this property when you create the resource. + - name: 'creationTimestamp' + type: Time + description: | + Creation timestamp in RFC3339 text format. + output: true + - name: 'name' + type: String + description: | + Name of the resource. Provided by the client when the resource is created. 
The name must be + 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters + long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + character must be a lowercase letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, which cannot be a dash. + required: true + immutable: + validation: + regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' + - name: endpoints + type: KeyValuePairs + description: | + Endpoints grouped by location, each mapping to interconnect configurations. + properties: + - name: interconnects + type: KeyValuePairs + description: | + Map of interconnect details. + properties: + - name: interconnect + type: string + - name: vlan_tags + type: integer + - name: admin_enabled + type: boolean + description: | + Indicates whether the wire group is administratively enabled. + - name: wire_group_properties + type: object + description: | + Properties specific to the wire group. + properties: + - name: type + type: enum + description: | + Type of wire group (enum). + WIRE: a single pseudowire over two Interconnect connections with no redundancy. + REDUNDANT: two pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. + BOX_AND_CROSS: four pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. + enum_values: + - 'WIRE' + - 'REDUNDANT' + - 'BOX_AND_CROSS' + - name: wire_properties + type: object + description: | + Default properties for wires within the group. + properties: + - name: bandwidth_unmetered + type: string + description: | + The unmetered bandwidth setting. + - name: fault_response + type: enum + description: | + Response when a fault is detected in a pseudowire: + NONE: default. + DISABLE_PORT: set the port line protocol down when inline probes detect a fault. 
This setting is only permitted on port mode pseudowires. + enum_values: + - 'NONE' + - 'DISABLE_PORT' + - name: wires + type: NestedObject + description: | + The single/redundant wire(s) managed by the wire group. + properties: + - name: label + type: string + - name: endpoints + type: NestedObject + description: | + 'Wire endpoints are specific Interconnect connections.' + properties: + - name: interconnect + type: string + - name: vlan_tag + type: integer + - name: wire_properties + type: object + output: true + properties: + - name: bandwidth_unmetered + type: string + - name: fault_response + type: enum + description: 'Abc' + enum_values: + - 'NONE' + - 'DISABLE_PORT' + - name: admin_enabled + type: boolean + - name: topology + type: NestedObject + description: | + Topology details for the wire group configuration. + properties: + - name: endpoints + type: NestedObject + properties: + - name: label + type: string + - name: city + type: string diff --git a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl new file mode 100644 index 000000000000..32f5d6f35c60 --- /dev/null +++ b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl @@ -0,0 +1,15 @@ +data "google_project" "project" { +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-cross-site-network{{index $.Vars "random_suffix"}}" + description = "Example cross site network{{index $.Vars "random_suffix"}}" + provider = google-beta +} + +resource "google_compute_wire_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "name"}}" + description = "{{index $.Vars "description"}}" + cross_site_network = "${google_compute_cross_site_network.example-cross-site-network.name}" + provider = google-beta +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl new file mode 100644 index 000000000000..23f66ef204a2 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -0,0 +1,106 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccComputeWireGroup_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeWireGroupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeWireGroup_basic(context), + }, + { + ResourceName: "google_compute_wire_group.example-test-wire-group", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"crossSiteNetwork"}, + }, + { + Config: testAccComputeWireGroup_update(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", 
"description", "Example Wire Group Updated"+context["random_suffix"].(string)), + ), + }, + { + ResourceName: "google_compute_wire_group.example-test-wire-group", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"crossSiteNetwork"}, + }, + }, + }) +} + +func testAccComputeWireGroup_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-cross-site-network" + description = "Example cross site network" + provider = google-beta +} + +resource "google_compute_wire_group" "example-test-wire-group" { + name = "tf-test-test-wire-group%{random_suffix}" + description = "Example Wire Group%{random_suffix}" + crossSiteNetwork = google_compute_cross_site_network.example-cross-site-network.name + provider = google-beta +} +`, context) +} + + +func testAccComputeWireGroup_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-cross-site-network" + description = "Example cross site network" + provider = google-beta +} + +resource "google_compute_wire_group" "example-test-wire-group" { + name = "tf-test-test-wire-group%{random_suffix}" + description = "Example Wire Group Updated%{random_suffix}" + crossSiteNetwork = google_compute_cross_site_network.example-cross-site-network.name + provider = google-beta +} +`, context) +} From 93708f23288f8be1619dde4f0b4ec63b2ca81119 Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Fri, 16 May 2025 06:05:54 +0000 Subject: [PATCH 008/884] Nit Changes --- .../{WireGroups.yaml => WireGroup.yaml} | 28 +++-- .../examples/compute_wire_group_basic.tf.tmpl | 11 +- .../resource_compute_wire_group_test.go.tmpl | 100 +++++++++++------- 3 files changed, 87 insertions(+), 52 deletions(-) rename 
mmv1/products/compute/{WireGroups.yaml => WireGroup.yaml} (92%) diff --git a/mmv1/products/compute/WireGroups.yaml b/mmv1/products/compute/WireGroup.yaml similarity index 92% rename from mmv1/products/compute/WireGroups.yaml rename to mmv1/products/compute/WireGroup.yaml index c9a7cdbcaacd..ac63b725f890 100644 --- a/mmv1/products/compute/WireGroups.yaml +++ b/mmv1/products/compute/WireGroup.yaml @@ -23,12 +23,12 @@ references: api: 'https://cloud.google.com/compute/docs/reference/rest/beta/wireGroups' min_version: beta docs: -id_format: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups/{{name}}' -base_url: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups' -self_link: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups/{{name}}' +id_format: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' +base_url: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' update_verb: 'PATCH' import_format: - - 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}/wireGroups/{{name}}' + - 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' timeouts: insert_minutes: 20 update_minutes: 20 @@ -46,10 +46,10 @@ examples: vars: name: 'test-wire-group' description: 'Example Wire Group' + cross_site_network: 'test-cross-site-network' min_version: 'beta' test_env_vars: project: 'PROJECT_NAME' - cross_site_network: 'test-cross-site-network' parameters: - name: 'crossSiteNetwork' type: ResourceRef @@ -95,13 +95,17 @@ properties: - name: interconnect type: string - name: vlan_tags - type: integer + type: Array + description: | + VLAN tags for the interconnect. 
+ item_type: + type: integer - name: admin_enabled type: boolean description: | Indicates whether the wire group is administratively enabled. - name: wire_group_properties - type: object + type: NestedObject description: | Properties specific to the wire group. properties: @@ -117,12 +121,12 @@ properties: - 'REDUNDANT' - 'BOX_AND_CROSS' - name: wire_properties - type: object + type: NestedObject description: | Default properties for wires within the group. properties: - name: bandwidth_unmetered - type: string + type: String description: | The unmetered bandwidth setting. - name: fault_response @@ -138,6 +142,7 @@ properties: type: NestedObject description: | The single/redundant wire(s) managed by the wire group. + output: true properties: - name: label type: string @@ -151,11 +156,11 @@ properties: - name: vlan_tag type: integer - name: wire_properties - type: object + type: NestedObject output: true properties: - name: bandwidth_unmetered - type: string + type: Integer - name: fault_response type: enum description: 'Abc' @@ -168,6 +173,7 @@ properties: type: NestedObject description: | Topology details for the wire group configuration. 
+ output: true properties: - name: endpoints type: NestedObject diff --git a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl index 32f5d6f35c60..21653c610dba 100644 --- a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl @@ -10,6 +10,15 @@ resource "google_compute_cross_site_network" "example-cross-site-network" { resource "google_compute_wire_group" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "name"}}" description = "{{index $.Vars "description"}}" - cross_site_network = "${google_compute_cross_site_network.example-cross-site-network.name}" + cross_site_network = google_compute_cross_site_network.example-cross-site-network.name provider = google-beta + depends_on = [ + google_compute_cross_site_network.example-cross-site-test-1 + ] + wire_properties { + bandwidth_unmetered = 1000 + } + wire_group_properties { + type = "REDUNDANT" + } } \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl index 23f66ef204a2..ea9a35cbcd82 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -18,54 +18,55 @@ package compute_test import ( - "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeWireGroup_update(t *testing.T) { - 
t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeWireGroupDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeWireGroup_basic(context), - }, + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeWireGroupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeWireGroup_basic(context), + }, + { + ResourceName: "google_compute_wire_group.example-test-wire-group", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cross_site_network"}, + }, { - ResourceName: "google_compute_wire_group.example-test-wire-group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"crossSiteNetwork"}, - }, - { - Config: testAccComputeWireGroup_update(context), - Check: resource.ComposeTestCheckFunc( + Config: testAccComputeWireGroup_update(context), + Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), ), - }, - { - ResourceName: "google_compute_wire_group.example-test-wire-group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"crossSiteNetwork"}, - }, - }, - }) + }, + { + ResourceName: "google_compute_wire_group.example-test-wire-group", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"cross_site_network"}, + }, + }, + }) } func testAccComputeWireGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` data "google_project" "project" { } @@ -78,15 +79,24 @@ resource "google_compute_cross_site_network" "example-cross-site-network" { resource "google_compute_wire_group" "example-test-wire-group" { name = "tf-test-test-wire-group%{random_suffix}" description = "Example Wire Group%{random_suffix}" - crossSiteNetwork = google_compute_cross_site_network.example-cross-site-network.name + cross_site_network = google_compute_cross_site_network.example-cross-site-network.name provider = google-beta + depends_on = [ + google_compute_cross_site_network.example-cross-site-network + ] + wire_properties { + bandwidth_unmetered = 1000 + } + wire_group_properties { + type = "REDUNDANT" + } } `, context) } func testAccComputeWireGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` data "google_project" "project" { } @@ -99,8 +109,18 @@ resource "google_compute_cross_site_network" "example-cross-site-network" { resource "google_compute_wire_group" "example-test-wire-group" { name = "tf-test-test-wire-group%{random_suffix}" description = "Example Wire Group Updated%{random_suffix}" - crossSiteNetwork = google_compute_cross_site_network.example-cross-site-network.name + cross_site_network = google_compute_cross_site_network.example-cross-site-network.name provider = google-beta + depends_on = [ + google_compute_cross_site_network.example-cross-site-network + ] + wire_properties { + bandwidth_unmetered = 1000 + } + wire_group_properties { + type = "REDUNDANT" + } } `, context) } + From 03e8416f11a0ceabd26e426bfcd5efbb1e86c65a Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Fri, 16 May 2025 06:13:29 +0000 Subject: [PATCH 009/884] Nit Changes1 --- mmv1/products/compute/WireGroup.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml index ac63b725f890..87f709b44cf7 100644 --- a/mmv1/products/compute/WireGroup.yaml +++ b/mmv1/products/compute/WireGroup.yaml @@ -27,7 +27,7 @@ id_format: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}} base_url: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups' self_link: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' update_verb: 'PATCH' -import_format: +import_format: - 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' timeouts: insert_minutes: 20 @@ -114,7 +114,7 @@ properties: description: | Type of wire group (enum). WIRE: a single pseudowire over two Interconnect connections with no redundancy. - REDUNDANT: two pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. + REDUNDANT: two pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. BOX_AND_CROSS: four pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. 
enum_values: - 'WIRE' From b581f9c972062d38281426941632c7fbaf9c8a85 Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Fri, 16 May 2025 06:15:52 +0000 Subject: [PATCH 010/884] Removing the .tmpl extension from this File --- ...st.go.tmpl => resource_compute_wire_group_test.go} | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) rename mmv1/third_party/terraform/services/compute/{resource_compute_wire_group_test.go.tmpl => resource_compute_wire_group_test.go} (91%) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go similarity index 91% rename from mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go index ea9a35cbcd82..4ca497dfae1b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go @@ -18,7 +18,6 @@ package compute_test import ( - "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -49,11 +48,11 @@ func TestAccComputeWireGroup_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"cross_site_network"}, }, - { + { Config: testAccComputeWireGroup_update(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), - ), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), + ), }, { ResourceName: "google_compute_wire_group.example-test-wire-group", @@ -94,7 +93,6 @@ resource "google_compute_wire_group" 
"example-test-wire-group" { `, context) } - func testAccComputeWireGroup_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { @@ -123,4 +121,3 @@ resource "google_compute_wire_group" "example-test-wire-group" { } `, context) } - From bdadc945c6e1a1be72cc5d94856ebbd45fb72dcc Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Mon, 19 May 2025 04:23:52 +0000 Subject: [PATCH 011/884] Change Type of bandwidth_unmetered to Integer --- mmv1/products/compute/WireGroup.yaml | 3 +-- .../examples/compute_wire_group_basic.tf.tmpl | 9 ++++---- ... resource_compute_wire_group_test.go.tmpl} | 22 ++++--------------- 3 files changed, 10 insertions(+), 24 deletions(-) rename mmv1/third_party/terraform/services/compute/{resource_compute_wire_group_test.go => resource_compute_wire_group_test.go.tmpl} (84%) diff --git a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml index 87f709b44cf7..3fb5a02032bd 100644 --- a/mmv1/products/compute/WireGroup.yaml +++ b/mmv1/products/compute/WireGroup.yaml @@ -126,7 +126,7 @@ properties: Default properties for wires within the group. properties: - name: bandwidth_unmetered - type: String + type: Integer description: | The unmetered bandwidth setting. 
- name: fault_response @@ -163,7 +163,6 @@ properties: type: Integer - name: fault_response type: enum - description: 'Abc' enum_values: - 'NONE' - 'DISABLE_PORT' diff --git a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl index 21653c610dba..d9c7b8e0439b 100644 --- a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl @@ -1,4 +1,5 @@ data "google_project" "project" { +provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { @@ -10,15 +11,15 @@ resource "google_compute_cross_site_network" "example-cross-site-network" { resource "google_compute_wire_group" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "name"}}" description = "{{index $.Vars "description"}}" - cross_site_network = google_compute_cross_site_network.example-cross-site-network.name + cross_site_network = "{{index $.Vars "cross_site_network"}}" provider = google-beta depends_on = [ - google_compute_cross_site_network.example-cross-site-test-1 + google_compute_cross_site_network.example-cross-site-network ] wire_properties { - bandwidth_unmetered = 1000 + bandwidth_unmetered = 10 } wire_group_properties { - type = "REDUNDANT" + type = "WIRE" } } \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl similarity index 84% rename from mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl index 4ca497dfae1b..52d7ca3540ae 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -1,22 +1,5 @@ 
-// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: MMv1 *** -// -// ---------------------------------------------------------------------------- -// -// This file is automatically generated by Magic Modules and manual -// changes will be clobbered when the file is regenerated. -// -// Please read more about how to change this file in -// .github/CONTRIBUTING.md. -// -// ---------------------------------------------------------------------------- - package compute_test - +{{ if ne $.TargetVersionName `ga` -}} import ( "testing" @@ -67,6 +50,7 @@ func TestAccComputeWireGroup_update(t *testing.T) { func testAccComputeWireGroup_basic(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { +provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { @@ -96,6 +80,7 @@ resource "google_compute_wire_group" "example-test-wire-group" { func testAccComputeWireGroup_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { +provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { @@ -121,3 +106,4 @@ resource "google_compute_wire_group" "example-test-wire-group" { } `, context) } +{{- end }} \ No newline at end of file From dda2a26700c7d0ae19a40f7b045ce0ce990d97df Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Mon, 19 May 2025 04:32:32 +0000 Subject: [PATCH 012/884] Remove random_suffix from templates/terraform/examples/compute_wire_group_basic.tf.tmpl --- .../terraform/examples/compute_wire_group_basic.tf.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl index d9c7b8e0439b..bf4f16732ee8 100644 
--- a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl @@ -3,8 +3,8 @@ provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network{{index $.Vars "random_suffix"}}" - description = "Example cross site network{{index $.Vars "random_suffix"}}" + name = "tf-test-cross-site-network" + description = "Example cross site network" provider = google-beta } From ec9976884fedb039b8b67488501125dc73cf30d4 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal Date: Wed, 21 May 2025 12:28:40 +0530 Subject: [PATCH 013/884] Update mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl Co-authored-by: Nick Elliot --- .../services/compute/resource_compute_wire_group_test.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl index 52d7ca3540ae..57d8724a97ff 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -84,7 +84,7 @@ provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network" + name = "tf-test-cross-site-network%{random_suffix}" description = "Example cross site network" provider = google-beta } From 84d80ce6c3c8f9438ba281b733f97d3447b394f3 Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Wed, 21 May 2025 07:00:07 +0000 Subject: [PATCH 014/884] Change the hard code to reference the vasr --- .../terraform/examples/compute_wire_group_basic.tf.tmpl | 2 +- .../services/compute/resource_compute_wire_group_test.go.tmpl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl index bf4f16732ee8..b4f19b51f3d3 100644 --- a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl @@ -3,7 +3,7 @@ provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network" + name = "{{index $.Vars "cross_site_network"}}" description = "Example cross site network" provider = google-beta } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl index 57d8724a97ff..ab578639cd6c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -54,7 +54,7 @@ provider = google-beta } resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network" + name = "tf-test-cross-site-network%{random_suffix}" description = "Example cross site network" provider = google-beta } From 30605ca42d5f8b5f44353d05a1d89892bfcdd63f Mon Sep 17 00:00:00 2001 From: Nandini Agrawal Date: Wed, 28 May 2025 10:22:11 +0530 Subject: [PATCH 015/884] Update mmv1/products/compute/WireGroup.yaml Co-authored-by: Nick Elliot --- mmv1/products/compute/WireGroup.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml index 3fb5a02032bd..ed10a5c82b52 100644 --- a/mmv1/products/compute/WireGroup.yaml +++ b/mmv1/products/compute/WireGroup.yaml @@ -56,6 +56,7 @@ parameters: description: Required cross site network to which wire group belongs. 
required: true immutable: true + url_param_only: true resource: 'CrossSiteNetwork' imports: 'name' diff_suppress_func: 'tpgresource.CompareResourceNames' From 0702ee9b38e39aaad6a4394fbdc36eae14314389 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Tue, 8 Apr 2025 15:56:10 +0530 Subject: [PATCH 016/884] this is for testing purpose --- mmv1/products/compute/Interconnect.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index f3d1b9ae06b9..84f3414d6cc7 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -412,3 +412,10 @@ properties: output: true item_type: type: String + - name: 'testing' + type: String + description: | + This is for learning purposes. + output: true + item_type: + type: String From 28d387dc86f9ea750c790043e588893fb340d3e5 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Tue, 15 Apr 2025 11:13:38 +0530 Subject: [PATCH 017/884] Revert not required changes. --- mmv1/products/compute/Interconnect.yaml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index 84f3414d6cc7..f3d1b9ae06b9 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -412,10 +412,3 @@ properties: output: true item_type: type: String - - name: 'testing' - type: String - description: | - This is for learning purposes. 
- output: true - item_type: - type: String From 91dca2959855a325fd73d6e1f9e38f0c845a60d4 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Thu, 24 Apr 2025 11:42:55 +0530 Subject: [PATCH 018/884] Testing changes --- mmv1/products/compute/CrossSiteNetwork.yaml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml index 169acbf30d85..d184340f873e 100644 --- a/mmv1/products/compute/CrossSiteNetwork.yaml +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -23,8 +23,11 @@ references: api: 'https://cloud.google.com/compute/docs/reference/rest/beta/crossSiteNetworks' docs: base_url: 'projects/{{project}}/global/crossSiteNetworks' -self_link: 'projects/{{project}}/global/crossSiteNetworks/{{name}}' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}' update_verb: 'PATCH' +custom_code: + custom_delete: templates/terraform/custom_delete/cross_site_network.go.tmpl + test_check_destroy: templates/terraform/custom_check_destroy/cross_site_network.go.tmpl timeouts: insert_minutes: 20 update_minutes: 20 @@ -47,6 +50,18 @@ examples: project: 'PROJECT_NAME' parameters: properties: + - name: 'name' + type: String + description: | + Name of the resource. Provided by the client when the resource is created. The name must be + 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters + long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + character must be a lowercase letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, which cannot be a dash. 
+ required: true + immutable: true + validation: + regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - name: 'name' type: String description: | From 9f6b8515fab12e91be8e599b21d89b330f483118 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal <78158744+NandiniAgrawal15@users.noreply.github.com> Date: Fri, 25 Apr 2025 14:13:02 +0530 Subject: [PATCH 019/884] Cross-Site Network --- mmv1/products/compute/CrossSiteNetwork.yaml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml index d184340f873e..771893952651 100644 --- a/mmv1/products/compute/CrossSiteNetwork.yaml +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -23,11 +23,8 @@ references: api: 'https://cloud.google.com/compute/docs/reference/rest/beta/crossSiteNetworks' docs: base_url: 'projects/{{project}}/global/crossSiteNetworks' -self_link: 'projects/{{project}}/global/crossSiteNetworks/{{crossSiteNetwork}}' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{name}}' update_verb: 'PATCH' -custom_code: - custom_delete: templates/terraform/custom_delete/cross_site_network.go.tmpl - test_check_destroy: templates/terraform/custom_check_destroy/cross_site_network.go.tmpl timeouts: insert_minutes: 20 update_minutes: 20 From 9f33eb24b3a1919b6f0cfb5a06f807fde9037326 Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Thu, 5 Jun 2025 05:17:21 +0000 Subject: [PATCH 020/884] Change the snake_case name of the WireGroup resource to camelCase in the Terraform template and test file. Add Changes to test-file. 
--- mmv1/products/compute/WireGroup.yaml | 85 ++++++++++--------- .../custom_check_destroy/wire_group.go.tmpl | 22 +++++ .../resource_compute_wire_group_test.go.tmpl | 6 ++ 3 files changed, 75 insertions(+), 38 deletions(-) create mode 100644 mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl diff --git a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml index ed10a5c82b52..62416f7773dc 100644 --- a/mmv1/products/compute/WireGroup.yaml +++ b/mmv1/products/compute/WireGroup.yaml @@ -27,6 +27,7 @@ id_format: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}} base_url: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups' self_link: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' update_verb: 'PATCH' +update_mask: true import_format: - 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' timeouts: @@ -40,6 +41,8 @@ async: base_url: '{{op_id}}' result: resource_inside_response: false +custom_code: + test_check_destroy: 'templates/terraform/custom_check_destroy/wire_group.go.tmpl' examples: - name: 'compute_wire_group_basic' primary_resource_id: 'example-test-wire-group' @@ -61,6 +64,7 @@ parameters: imports: 'name' diff_suppress_func: 'tpgresource.CompareResourceNames' custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + min_version: beta properties: - name: 'description' type: String @@ -80,7 +84,6 @@ properties: character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. required: true - immutable: validation: regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - name: endpoints @@ -101,11 +104,11 @@ properties: VLAN tags for the interconnect. 
item_type: type: integer - - name: admin_enabled + - name: adminEnabled type: boolean description: | Indicates whether the wire group is administratively enabled. - - name: wire_group_properties + - name: wireGroupProperties type: NestedObject description: | Properties specific to the wire group. @@ -121,16 +124,16 @@ properties: - 'WIRE' - 'REDUNDANT' - 'BOX_AND_CROSS' - - name: wire_properties + - name: wireProperties type: NestedObject description: | Default properties for wires within the group. properties: - - name: bandwidth_unmetered + - name: bandwidthUnmetered type: Integer description: | The unmetered bandwidth setting. - - name: fault_response + - name: faultResponse type: enum description: | Response when a fault is detected in a pseudowire: @@ -140,35 +143,39 @@ properties: - 'NONE' - 'DISABLE_PORT' - name: wires - type: NestedObject + type: Array description: | The single/redundant wire(s) managed by the wire group. output: true - properties: - - name: label - type: string - - name: endpoints - type: NestedObject - description: | - 'Wire endpoints are specific Interconnect connections.' - properties: - - name: interconnect - type: string - - name: vlan_tag - type: integer - - name: wire_properties - type: NestedObject - output: true - properties: - - name: bandwidth_unmetered - type: Integer - - name: fault_response - type: enum - enum_values: - - 'NONE' - - 'DISABLE_PORT' - - name: admin_enabled - type: boolean + item_type: + type: NestedObject + properties: + - name: label + type: string + - name: endpoints + type: Array + description: | + 'Wire endpoints are specific Interconnect connections.' 
+ item_type: + type: NestedObject + properties: + - name: interconnect + type: string + - name: vlanTag + type: integer + - name: wireProperties + type: NestedObject + output: true # This is redundant if the parent 'wires' is output: true, but harmless + properties: + - name: bandwidthUnmetered + type: Integer + - name: faultResponse + type: enum + enum_values: + - 'NONE' + - 'DISABLE_PORT' + - name: adminEnabled + type: boolean - name: topology type: NestedObject description: | @@ -176,9 +183,11 @@ properties: output: true properties: - name: endpoints - type: NestedObject - properties: - - name: label - type: string - - name: city - type: string + type: Array + item_type: + type: NestedObject + properties: + - name: label + type: string + - name: city + type: string diff --git a/mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl new file mode 100644 index 000000000000..baad2abccd4a --- /dev/null +++ b/mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl @@ -0,0 +1,22 @@ + config := acctest.GoogleProviderConfig(t) + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/global/crossSiteNetworks/{{"{{"}}cross_site_network{{"}}"}}/wireGroups/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("ComputeWireGroup still exists at %s", url) + } \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl index ab578639cd6c..7b9fdc3b965f 
100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -4,6 +4,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" @@ -33,6 +34,11 @@ func TestAccComputeWireGroup_update(t *testing.T) { }, { Config: testAccComputeWireGroup_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_wire_group.example-test-wire-group", plancheck.ResourceActionUpdate), + }, + }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), ), From 64d450ac3d54e98bcfa0defe77750305c90e2ad3 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Mon, 28 Apr 2025 10:01:39 -0400 Subject: [PATCH 021/884] Move flex_start to beta-only (#13789) --- .../terraform/services/container/node_config.go.tmpl | 6 ++++++ .../container/resource_container_cluster_test.go.tmpl | 5 ++++- .../container/resource_container_node_pool_test.go.tmpl | 2 ++ .../website/docs/r/container_cluster.html.markdown | 2 +- 4 files changed, 13 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index fcfd7832752d..858fcea02f2e 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -875,12 +875,14 @@ func schemaNodeConfig() *schema.Schema { ForceNew: true, Description: `The runtime of each node in the node pool in seconds, terminated 
by 's'. Example: "3600s".`, }, +{{ if ne $.TargetVersionName `ga` -}} "flex_start" : { Type: schema.TypeBool, Optional: true, ForceNew: true, Description: `Enables Flex Start provisioning model for the node pool`, }, +{{- end }} }, }, } @@ -1276,9 +1278,11 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.MaxRunDuration = v.(string) } + {{ if ne $.TargetVersionName `ga` -}} if v,ok := nodeConfig["flex_start"]; ok { nc.FlexStart = v.(bool) } + {{- end }} {{ if ne $.TargetVersionName `ga` -}} if v, ok := nodeConfig["host_maintenance_policy"]; ok { @@ -1711,7 +1715,9 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "node_group": c.NodeGroup, "advanced_machine_features": flattenAdvancedMachineFeaturesConfig(c.AdvancedMachineFeatures), "max_run_duration": c.MaxRunDuration, +{{- if ne $.TargetVersionName "ga" }} "flex_start": c.FlexStart, +{{- end }} "sole_tenant_config": flattenSoleTenantConfig(c.SoleTenantConfig), "fast_socket": flattenFastSocket(c.FastSocket), "resource_manager_tags": flattenResourceManagerTags(c.ResourceManagerTags), diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index cf1e7ece8b3e..9d503ddf0f67 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -503,7 +503,7 @@ func TestAccContainerCluster_withMaxRunDuration(t *testing.T) { }) } - +{{ if ne $.TargetVersionName `ga` -}} func TestAccContainerCluster_withFlexStart(t *testing.T) { t.Parallel() @@ -535,6 +535,7 @@ func TestAccContainerCluster_withFlexStart(t *testing.T) { }, }) } +{{- end }} func TestAccContainerCluster_withILBSubsetting(t *testing.T) { t.Parallel() @@ -7154,6 +7155,7 @@ resource "google_container_cluster" "max_run_duration" { `, clusterName, npName, 
duration, networkName, subnetworkName) } +{{ if ne $.TargetVersionName `ga` -}} func testAccContainerCluster_withFlexStart(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` resource "google_container_cluster" "flex_start" { @@ -7199,6 +7201,7 @@ resource "google_container_cluster" "flex_start" { } `, clusterName, npName, networkName, subnetworkName) } +{{- end }} func testAccContainerCluster_withILBSubSetting(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 201c67b7947e..2de098a64a9e 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -4545,6 +4545,7 @@ resource "google_container_node_pool" "np" { `, clusterName, networkName, subnetworkName, np) } +{{ if ne $.TargetVersionName `ga` -}} func TestAccContainerNodePool_withFlexStart(t *testing.T) { t.Parallel() @@ -4618,6 +4619,7 @@ resource "google_container_node_pool" "np" { } `, clusterName, networkName, subnetworkName, np) } +{{- end }} func TestAccContainerNodePool_tpuTopology(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 557b8f7897a1..997ec0cfdd98 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -931,7 +931,7 @@ gvnic { * `max_run_duration` - (Optional) The runtime of each node in the node pool in seconds, terminated by 's'. Example: "3600s". -* `flex_start` - (Optional) Enables Flex Start provisioning model for the node pool. 
+* `flex_start` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html))) Enables Flex Start provisioning model for the node pool. * `local_ssd_count` - (Optional) The amount of local SSD disks that will be attached to each cluster node. Defaults to 0. From 0893760dc3fb1eaeaa3503f179b1cba748d28f32 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 28 Apr 2025 11:53:58 -0400 Subject: [PATCH 022/884] Revert "Add tags to Secret Manager Secret TagsR2401" (#13786) --- mmv1/products/secretmanager/Secret.yaml | 8 -- ...esource_secret_manager_secret_test.go.tmpl | 112 +++++------------- 2 files changed, 27 insertions(+), 93 deletions(-) diff --git a/mmv1/products/secretmanager/Secret.yaml b/mmv1/products/secretmanager/Secret.yaml index 6a7c3cac6683..e1742248fd81 100644 --- a/mmv1/products/secretmanager/Secret.yaml +++ b/mmv1/products/secretmanager/Secret.yaml @@ -250,11 +250,3 @@ properties: description: | The Duration between rotation notifications. Must be in seconds and at least 3600s (1h) and at most 3153600000s (100 years). If rotationPeriod is set, `next_rotation_time` must be set. `next_rotation_time` will be advanced by this period when the service automatically sends rotation notifications. - - name: 'tags' - type: KeyValuePairs - description: | - A map of resource manager tags. - Resource manager tag keys and values have the same definition as resource manager tags. - Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_key_value}. 
- immutable: true - ignore_read: true diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl index 510427e2dc4b..8e48e3a9690c 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl @@ -25,9 +25,9 @@ func TestAccSecretManagerSecret_import(t *testing.T) { Config: testAccSecretManagerSecret_basic(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, }, @@ -54,9 +54,9 @@ func TestAccSecretManagerSecret_cmek(t *testing.T) { Config: testAccSecretMangerSecret_cmek(context1), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, }, @@ -79,27 +79,27 @@ func TestAccSecretManagerSecret_annotationsUpdate(t *testing.T) { Config: testAccSecretManagerSecret_annotationsBasic(context), }, { - ResourceName: "google_secret_manager_secret.secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, }, { Config: testAccSecretManagerSecret_annotationsUpdate(context), }, { - ResourceName: "google_secret_manager_secret.secret-with-annotations", - ImportState: true, - ImportStateVerify: 
true, + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, }, { Config: testAccSecretManagerSecret_annotationsBasic(context), }, { - ResourceName: "google_secret_manager_secret.secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, }, }, @@ -122,36 +122,36 @@ func TestAccSecretManagerSecret_versionAliasesUpdate(t *testing.T) { Config: testAccSecretManagerSecret_basicWithSecretVersions(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, { Config: testAccSecretManagerSecret_versionAliasesBasic(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, { Config: testAccSecretManagerSecret_versionAliasesUpdate(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, { Config: testAccSecretManagerSecret_basicWithSecretVersions(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + 
ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, }, @@ -464,36 +464,6 @@ func TestAccSecretManagerSecret_updateBetweenTtlAndExpireTime(t *testing.T) { }) } -func TestAccSecretManagerSecret_tags(t *testing.T) { - t.Parallel() - - tagKey := acctest.BootstrapSharedTestTagKey(t, "secret_manager_secret-tagkey") - - context := map[string]interface{}{ - "org": envvar.GetTestOrgFromEnv(t), - "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "secret_manager_secret-tagvalue", tagKey), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSecretManagerSecret_tags(context), - }, - { - ResourceName: "google_secret_manager_secret.secret-tags", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "tags"}, - }, - }, - }) -} - func testAccSecretManagerSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_secret" "secret-basic" { @@ -1250,31 +1220,3 @@ resource "google_secret_manager_secret" "secret-basic" { } `, context) } - -func testAccSecretManagerSecret_tags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_secret" "secret-tags" { - secret_id = "tf-test-secret-%{random_suffix}" - - labels = { - label = "my-label" - } - - replication { - user_managed { - replicas { - location = "us-central1" - } - replicas { - location = "us-east1" - } - } - } - - ttl = "3600s" - tags = { - "%{org}/%{tagKey}" = "%{tagValue}" - } -} -`, context) -} From 598730acdbb9d358950b35c0f1b3ed2eaa65946d Mon Sep 17 
00:00:00 2001 From: Nick Elliot Date: Mon, 28 Apr 2025 10:03:30 -0700 Subject: [PATCH 023/884] update references to 7.0.0 for breaking change doc (#13791) Co-authored-by: Cameron Thornton --- docs/content/breaking-changes/make-a-breaking-change.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/content/breaking-changes/make-a-breaking-change.md b/docs/content/breaking-changes/make-a-breaking-change.md index 231572f3c026..6a623305ec07 100644 --- a/docs/content/breaking-changes/make-a-breaking-change.md +++ b/docs/content/breaking-changes/make-a-breaking-change.md @@ -1,6 +1,6 @@ --- -majorVersion: "6.0.0" -upgradeGuide: "version_6_upgrade.html.markdown" +majorVersion: "7.0.0" +upgradeGuide: "version_7_upgrade.html.markdown" title: "Make a breaking change" summary: "Guidance on making a breaking changes" weight: 20 @@ -65,7 +65,7 @@ The general process for contributing a breaking change to the 1. Make the `main` branch forwards-compatible with the major release 2. Add deprecations and warnings to the `main` branch of `magic-modules` -3. Add upgrade guide entries to the `FEATURE-BRANCH-major-release-6.0.0` branch of `magic-modules` +3. Add upgrade guide entries to the `FEATURE-BRANCH-major-release-7.0.0` branch of `magic-modules` 4. Make the breaking change on `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` These are covered in more detail in the following sections. The upgrade guide @@ -200,7 +200,7 @@ with the following changes: 1. Add the upgrade guide entries to [{{< param upgradeGuide >}}](https://github.com/GoogleCloudPlatform/magic-modules/blob/FEATURE-BRANCH-major-release-6.0.0/mmv1/third_party/terraform/website/docs/guides/{{< param upgradeGuide >}}). Entries should focus on the changes that users need to make when upgrading to `{{% param "majorVersion" %}}`, rather than how to write configurations -after upgrading. 
See [Terraform provider for Google Cloud 5.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_5_upgrade) +after upgrading. See [Terraform provider for Google Cloud 6.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_6_upgrade) and other upgrade guides for examples. 1. Remove any deprecation notices and warnings (including in documentation) not already removed by the breaking change. 1. When you create your pull request, From cde744f2fed98edd8fa4a2aa2ef7b5746c5d1663 Mon Sep 17 00:00:00 2001 From: Katrina Mitchell Date: Mon, 28 Apr 2025 19:01:34 -0400 Subject: [PATCH 024/884] Bump disk size in tests to new 12 GB minimum (#13788) --- .../container/resource_container_node_pool_test.go.tmpl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 2de098a64a9e..bd76636bf5d3 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -2951,7 +2951,7 @@ resource "google_container_node_pool" "np_with_management" { node_config { machine_type = "g1-small" - disk_size_gb = 10 + disk_size_gb = 15 oauth_scopes = ["compute-rw", "storage-ro", "logging-write", "monitoring"] } } @@ -2976,7 +2976,7 @@ resource "google_container_node_pool" "np_with_node_config" { initial_node_count = 1 node_config { machine_type = "g1-small" - disk_size_gb = 10 + disk_size_gb = 15 oauth_scopes = [ "https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/devstorage.read_only", @@ -3033,7 +3033,7 @@ resource "google_container_node_pool" "np_with_node_config" { initial_node_count = 1 node_config { machine_type = "g1-small" - disk_size_gb = 10 + 
disk_size_gb = 15 oauth_scopes = [ "https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/devstorage.read_only", @@ -3993,7 +3993,7 @@ resource "google_container_node_pool" "np_with_node_config_scope_alias" { initial_node_count = 1 node_config { machine_type = "g1-small" - disk_size_gb = 10 + disk_size_gb = 15 oauth_scopes = ["compute-rw", "storage-ro", "logging-write", "monitoring"] } } From e053373d2e253f2c7a44d0d61ffc002325bf343d Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 28 Apr 2025 16:37:59 -0700 Subject: [PATCH 025/884] Use commit shas instead of branch names in diff comment (#13732) --- .ci/magician/cmd/generate_comment.go | 33 +++++++++++----- .ci/magician/cmd/generate_comment_test.go | 38 ++++++++++++++----- .ci/magician/cmd/generate_downstream.go | 13 +++++-- .ci/magician/cmd/mock_runner_test.go | 7 +++- .../cmd/templates/DIFF_COMMENT.md.tmpl | 2 +- 5 files changed, 69 insertions(+), 24 deletions(-) diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index 9c526cc51e1b..ac689fdc8db7 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -45,9 +45,11 @@ var ( ) type Diff struct { - Title string - Repo string - ShortStat string + Title string + Repo string + ShortStat string + CommitSHA string + OldCommitSHA string } type BreakingChange struct { @@ -77,7 +79,6 @@ type Errors struct { } type diffCommentData struct { - PrNumber int Diffs []Diff BreakingChanges []BreakingChange MissingServiceLabels []string @@ -214,9 +215,7 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, } // Initialize repos - data := diffCommentData{ - PrNumber: prNumber, - } + data := diffCommentData{} for _, repo := range []*source.Repo{&tpgRepo, &tpgbRepo, &tgcRepo, &tfoicsRepo} { errors[repo.Title] = []string{} repo.Branch = newBranch @@ -262,10 +261,24 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, 
errors[repo.Title] = append(errors[repo.Title], "Failed to compute repo diff shortstats") } if shortStat != "" { + variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo.Name) + oldVariablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo.Name) + commitSHA, err := rnr.ReadFile(variablePath) + if err != nil { + errors[repo.Title] = append(errors[repo.Title], "Failed to read commit sha from file") + continue + } + oldCommitSHA, err := rnr.ReadFile(oldVariablePath) + if err != nil { + errors[repo.Title] = append(errors[repo.Title], "Failed to read old commit sha from file") + continue + } diffs = append(diffs, Diff{ - Title: repo.Title, - Repo: repo.Name, - ShortStat: shortStat, + Title: repo.Title, + Repo: repo.Name, + ShortStat: shortStat, + CommitSHA: commitSHA, + OldCommitSHA: oldCommitSHA, }) repo.ChangedFiles, err = ctlr.DiffNameOnly(repo, oldBranch, newBranch) if err != nil { diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index 25d91879db76..8b08fb59a3b9 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -16,6 +16,7 @@ package cmd import ( + "fmt" "os" "reflect" "testing" @@ -38,6 +39,22 @@ func TestExecGenerateComment(t *testing.T) { "GOPATH": os.Getenv("GOPATH"), "HOME": os.Getenv("HOME"), } + for _, repo := range []string{ + "terraform-provider-google", + "terraform-provider-google-beta", + "terraform-google-conversion", + } { + variablePathOld := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo) + variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo) + err := mr.WriteFile(variablePathOld, "1a2a3a4a") + if err != nil { + t.Errorf("Error writing file: %s", err) + } + err = mr.WriteFile(variablePath, "1a2a3a4b") + if err != nil { + t.Errorf("Error writing file: %s", err) + } + } execGenerateComment( 123456, "*******", @@ -115,7 +132,7 @@ func 
TestExecGenerateComment(t *testing.T) { {"123456", "terraform-provider-breaking-change-test", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-missing-service-labels", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, }, - "PostComment": {{"123456", "Hi there, I'm the Modular magician. I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/auto-pr-123456-old..auto-pr-123456) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, + "PostComment": {{"123456", "Hi there, I'm the Modular magician. 
I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/1a2a3a4a..1a2a3a4b) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, "AddLabels": {{"123456", []string{"service/alloydb"}}}, } { if actualCalls, ok := gh.calledMethods[method]; !ok { @@ -170,24 +187,27 @@ func TestFormatDiffComment(t *testing.T) { }, "diffs are displayed": { data: diffCommentData{ - PrNumber: 1234567890, Diffs: []Diff{ { - Title: "Repo 1", - Repo: "repo-1", - ShortStat: "+1 added, -1 removed", + Title: "Repo 1", + Repo: "repo-1", + ShortStat: "+1 added, -1 removed", + CommitSHA: "1a2a3a4b", + OldCommitSHA: "1a2a3a4a", }, { - Title: "Repo 2", - Repo: "repo-2", - ShortStat: "+2 added, -2 removed", + Title: "Repo 2", + Repo: "repo-2", + ShortStat: "+2 added, -2 removed", + CommitSHA: "1a2a3a4d", + OldCommitSHA: "1a2a3a4c", }, }, }, expectedStrings: []string{ "## Diff report", "generated some diffs", - "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+1 added, -1 removed)\nRepo 2: 
[Diff](https://github.com/modular-magician/repo-2/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+2 added, -2 removed)", + "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/1a2a3a4a..1a2a3a4b) (+1 added, -1 removed)\nRepo 2: [Diff](https://github.com/modular-magician/repo-2/compare/1a2a3a4c..1a2a3a4d) (+2 added, -2 removed)", }, notExpectedStrings: []string{ "hasn't generated any diffs", diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index 402bf704e2dc..9f8713b1a6ed 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -339,7 +339,9 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner } if _, err := rnr.Run("git", []string{"commit", "--signoff", "-m", commitMessage}, nil); err != nil { - return "", err + if !strings.Contains(err.Error(), "nothing to commit") { + return "", err + } } commitSha, err := rnr.Run("git", []string{"rev-parse", "HEAD"}, nil) @@ -352,8 +354,13 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner // auto-pr's use commitSHA_modular-magician__.txt file to communicate commmit hash // across cloudbuild steps. 
Used in test-tpg to execute unit tests for the HEAD commit - if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") && !strings.HasSuffix(scratchRepo.Branch, "-old") { - variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) + if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") { + var variablePath string + if strings.HasSuffix(scratchRepo.Branch, "-old") { + variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", scratchRepo.Name) + } else { + variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) + } fmt.Println("variablePath: ", variablePath) err = rnr.WriteFile(variablePath, commitSha) if err != nil { diff --git a/.ci/magician/cmd/mock_runner_test.go b/.ci/magician/cmd/mock_runner_test.go index 79bc206f2c43..742ca0617dba 100644 --- a/.ci/magician/cmd/mock_runner_test.go +++ b/.ci/magician/cmd/mock_runner_test.go @@ -41,6 +41,7 @@ type mockRunner struct { cwd string dirStack *list.List notifyError bool + fileContents map[string]string } func sortedEnvString(env map[string]string) string { @@ -107,10 +108,14 @@ func (mr *mockRunner) Walk(root string, fn filepath.WalkFunc) error { } func (mr *mockRunner) ReadFile(name string) (string, error) { - return "", nil + return mr.fileContents[name], nil } func (mr *mockRunner) WriteFile(name, data string) error { + if mr.fileContents == nil { + mr.fileContents = make(map[string]string) + } + mr.fileContents[name] = data return nil } diff --git a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl index f50fdee4626a..7f22c8073e3a 100644 --- a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl +++ b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl @@ -7,7 +7,7 @@ Your PR hasn't generated any diffs, but I'll let you know if a future commit doe Your PR generated some diffs in downstreams - here they are. 
{{range .Diffs -}} -{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/auto-pr-{{$.PrNumber}}-old..auto-pr-{{$.PrNumber}}) ({{.ShortStat}}) +{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/{{.OldCommitSHA}}..{{.CommitSHA}}) ({{.ShortStat}}) {{end -}} {{end -}} From 19dd5ee51363d6b405c972f8122dfea54fe4c4a6 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 28 Apr 2025 17:59:02 -0700 Subject: [PATCH 026/884] Revert "Use commit shas instead of branch names in diff comment" (#13812) --- .ci/magician/cmd/generate_comment.go | 33 +++++----------- .ci/magician/cmd/generate_comment_test.go | 38 +++++-------------- .ci/magician/cmd/generate_downstream.go | 13 ++----- .ci/magician/cmd/mock_runner_test.go | 7 +--- .../cmd/templates/DIFF_COMMENT.md.tmpl | 2 +- 5 files changed, 24 insertions(+), 69 deletions(-) diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index ac689fdc8db7..9c526cc51e1b 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -45,11 +45,9 @@ var ( ) type Diff struct { - Title string - Repo string - ShortStat string - CommitSHA string - OldCommitSHA string + Title string + Repo string + ShortStat string } type BreakingChange struct { @@ -79,6 +77,7 @@ type Errors struct { } type diffCommentData struct { + PrNumber int Diffs []Diff BreakingChanges []BreakingChange MissingServiceLabels []string @@ -215,7 +214,9 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, } // Initialize repos - data := diffCommentData{} + data := diffCommentData{ + PrNumber: prNumber, + } for _, repo := range []*source.Repo{&tpgRepo, &tpgbRepo, &tgcRepo, &tfoicsRepo} { errors[repo.Title] = []string{} repo.Branch = newBranch @@ -261,24 +262,10 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, errors[repo.Title] = append(errors[repo.Title], "Failed to compute repo diff shortstats") } if 
shortStat != "" { - variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo.Name) - oldVariablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo.Name) - commitSHA, err := rnr.ReadFile(variablePath) - if err != nil { - errors[repo.Title] = append(errors[repo.Title], "Failed to read commit sha from file") - continue - } - oldCommitSHA, err := rnr.ReadFile(oldVariablePath) - if err != nil { - errors[repo.Title] = append(errors[repo.Title], "Failed to read old commit sha from file") - continue - } diffs = append(diffs, Diff{ - Title: repo.Title, - Repo: repo.Name, - ShortStat: shortStat, - CommitSHA: commitSHA, - OldCommitSHA: oldCommitSHA, + Title: repo.Title, + Repo: repo.Name, + ShortStat: shortStat, }) repo.ChangedFiles, err = ctlr.DiffNameOnly(repo, oldBranch, newBranch) if err != nil { diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index 8b08fb59a3b9..25d91879db76 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -16,7 +16,6 @@ package cmd import ( - "fmt" "os" "reflect" "testing" @@ -39,22 +38,6 @@ func TestExecGenerateComment(t *testing.T) { "GOPATH": os.Getenv("GOPATH"), "HOME": os.Getenv("HOME"), } - for _, repo := range []string{ - "terraform-provider-google", - "terraform-provider-google-beta", - "terraform-google-conversion", - } { - variablePathOld := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo) - variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo) - err := mr.WriteFile(variablePathOld, "1a2a3a4a") - if err != nil { - t.Errorf("Error writing file: %s", err) - } - err = mr.WriteFile(variablePath, "1a2a3a4b") - if err != nil { - t.Errorf("Error writing file: %s", err) - } - } execGenerateComment( 123456, "*******", @@ -132,7 +115,7 @@ func TestExecGenerateComment(t *testing.T) { {"123456", "terraform-provider-breaking-change-test", "success", 
"https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-missing-service-labels", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, }, - "PostComment": {{"123456", "Hi there, I'm the Modular magician. I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/1a2a3a4a..1a2a3a4b) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, + "PostComment": {{"123456", "Hi there, I'm the Modular magician. 
I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/auto-pr-123456-old..auto-pr-123456) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, "AddLabels": {{"123456", []string{"service/alloydb"}}}, } { if actualCalls, ok := gh.calledMethods[method]; !ok { @@ -187,27 +170,24 @@ func TestFormatDiffComment(t *testing.T) { }, "diffs are displayed": { data: diffCommentData{ + PrNumber: 1234567890, Diffs: []Diff{ { - Title: "Repo 1", - Repo: "repo-1", - ShortStat: "+1 added, -1 removed", - CommitSHA: "1a2a3a4b", - OldCommitSHA: "1a2a3a4a", + Title: "Repo 1", + Repo: "repo-1", + ShortStat: "+1 added, -1 removed", }, { - Title: "Repo 2", - Repo: "repo-2", - ShortStat: "+2 added, -2 removed", - CommitSHA: "1a2a3a4d", - OldCommitSHA: "1a2a3a4c", + Title: "Repo 2", + Repo: "repo-2", + ShortStat: "+2 added, -2 removed", }, }, }, expectedStrings: []string{ "## Diff report", "generated some diffs", - "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/1a2a3a4a..1a2a3a4b) (+1 added, -1 removed)\nRepo 2: 
[Diff](https://github.com/modular-magician/repo-2/compare/1a2a3a4c..1a2a3a4d) (+2 added, -2 removed)", + "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+1 added, -1 removed)\nRepo 2: [Diff](https://github.com/modular-magician/repo-2/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+2 added, -2 removed)", }, notExpectedStrings: []string{ "hasn't generated any diffs", diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index 9f8713b1a6ed..402bf704e2dc 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -339,9 +339,7 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner } if _, err := rnr.Run("git", []string{"commit", "--signoff", "-m", commitMessage}, nil); err != nil { - if !strings.Contains(err.Error(), "nothing to commit") { - return "", err - } + return "", err } commitSha, err := rnr.Run("git", []string{"rev-parse", "HEAD"}, nil) @@ -354,13 +352,8 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner // auto-pr's use commitSHA_modular-magician__.txt file to communicate commmit hash // across cloudbuild steps. 
Used in test-tpg to execute unit tests for the HEAD commit - if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") { - var variablePath string - if strings.HasSuffix(scratchRepo.Branch, "-old") { - variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", scratchRepo.Name) - } else { - variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) - } + if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") && !strings.HasSuffix(scratchRepo.Branch, "-old") { + variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) fmt.Println("variablePath: ", variablePath) err = rnr.WriteFile(variablePath, commitSha) if err != nil { diff --git a/.ci/magician/cmd/mock_runner_test.go b/.ci/magician/cmd/mock_runner_test.go index 742ca0617dba..79bc206f2c43 100644 --- a/.ci/magician/cmd/mock_runner_test.go +++ b/.ci/magician/cmd/mock_runner_test.go @@ -41,7 +41,6 @@ type mockRunner struct { cwd string dirStack *list.List notifyError bool - fileContents map[string]string } func sortedEnvString(env map[string]string) string { @@ -108,14 +107,10 @@ func (mr *mockRunner) Walk(root string, fn filepath.WalkFunc) error { } func (mr *mockRunner) ReadFile(name string) (string, error) { - return mr.fileContents[name], nil + return "", nil } func (mr *mockRunner) WriteFile(name, data string) error { - if mr.fileContents == nil { - mr.fileContents = make(map[string]string) - } - mr.fileContents[name] = data return nil } diff --git a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl index 7f22c8073e3a..f50fdee4626a 100644 --- a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl +++ b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl @@ -7,7 +7,7 @@ Your PR hasn't generated any diffs, but I'll let you know if a future commit doe Your PR generated some diffs in downstreams - here they are. 
{{range .Diffs -}} -{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/{{.OldCommitSHA}}..{{.CommitSHA}}) ({{.ShortStat}}) +{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/auto-pr-{{$.PrNumber}}-old..auto-pr-{{$.PrNumber}}) ({{.ShortStat}}) {{end -}} {{end -}} From 40940ee6638cdc05d78e4f6371763483184734ec Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 28 Apr 2025 18:23:38 -0700 Subject: [PATCH 027/884] Deprecate google_tpu_node (#13759) --- mmv1/products/tpu/Node.yaml | 9 ++- .../services/tpu/resource_tpu_node_test.go | 58 ------------------- 2 files changed, 8 insertions(+), 59 deletions(-) delete mode 100644 mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go diff --git a/mmv1/products/tpu/Node.yaml b/mmv1/products/tpu/Node.yaml index 6950ed83085a..6e48ab9ff558 100644 --- a/mmv1/products/tpu/Node.yaml +++ b/mmv1/products/tpu/Node.yaml @@ -19,7 +19,10 @@ references: guides: 'Official Documentation': 'https://cloud.google.com/tpu/docs/' api: 'https://cloud.google.com/tpu/docs/reference/rest/v1/projects.locations.nodes' -docs: +deprecation_message: >- + `google_tpu_node` is deprecated and will be removed in a future major release. + Use `google_tpu_v2_vm` instead. For moving from TPU Node to TPU VM architecture, see + https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. 
base_url: 'projects/{{project}}/locations/{{zone}}/nodes' self_link: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}' create_url: 'projects/{{project}}/locations/{{zone}}/nodes?nodeId={{name}}' @@ -48,6 +51,8 @@ examples: primary_resource_id: 'tpu' vars: node_name: 'test-tpu' + # resource is deprecated + exclude_test: true - name: 'tpu_node_full' primary_resource_id: 'tpu' vars: @@ -62,6 +67,8 @@ examples: network_name: 'tpu-node-network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "vpc-network-1")' + # resource is deprecated + exclude_test: true exclude_docs: true parameters: # TODO: resourceref? diff --git a/mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go b/mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go deleted file mode 100644 index 1b7d5caff451..000000000000 --- a/mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package tpu_test - -import ( - "testing" - - "fmt" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" -) - -func TestAccTPUNode_tpuNodeBUpdateTensorFlowVersion(t *testing.T) { - t.Parallel() - - nodeId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckTPUNodeDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccTpuNode_tpuNodeTensorFlow(nodeId, 0), - }, - { - ResourceName: "google_tpu_node.tpu", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"zone"}, - }, - { - Config: testAccTpuNode_tpuNodeTensorFlow(nodeId, 1), - }, - { - ResourceName: "google_tpu_node.tpu", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"zone"}, - }, - }, - }) -} - -func 
testAccTpuNode_tpuNodeTensorFlow(nodeId string, versionIdx int) string { - return fmt.Sprintf(` -data "google_tpu_tensorflow_versions" "available" { -} - -resource "google_tpu_node" "tpu" { - name = "%s" - zone = "us-central1-b" - - accelerator_type = "v3-8" - tensorflow_version = data.google_tpu_tensorflow_versions.available.versions[%d] -} -`, nodeId, versionIdx) -} From b73f473216f61eb569cef7fe8114ffaf5182bda4 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 28 Apr 2025 18:23:44 -0700 Subject: [PATCH 028/884] Added instance-region to Workbench Instance metadata DFS (#13796) --- mmv1/templates/terraform/constants/workbench_instance.go.tmpl | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index c7356aa02072..51072aef19bd 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -50,6 +50,7 @@ var WorkbenchInstanceProvidedMetadata = []string{ "install-monitoring-agent", "install-nvidia-driver", "installed-extensions", + "instance-region", "last_updated_diagnostics", "notebooks-api", "notebooks-api-version", From 2366b25a9f01fbe7ea09b5f5706e479b8ac2aa94 Mon Sep 17 00:00:00 2001 From: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Date: Mon, 28 Apr 2025 18:57:58 -0700 Subject: [PATCH 029/884] Add manual scaling and service min instances for cloudrun services. (#13761) --- mmv1/products/cloudrun/Service.yaml | 3 + .../resource_cloud_run_service_test.go.tmpl | 171 ++++++++++++++++++ 2 files changed, 174 insertions(+) diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index e54ab3db759e..6f2ef4e9f2b3 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -1106,3 +1106,6 @@ properties: for the Service. 
For example, `"run.googleapis.com/ingress" = "all"`. - `run.googleapis.com/launch-stage` sets the [launch stage](https://cloud.google.com/run/docs/troubleshooting#launch-stage-validation) when a preview feature is used. For example, `"run.googleapis.com/launch-stage": "BETA"` + - `run.googleapis.com/minScale` sets the [minimum number of container instances](https://cloud.google.com/sdk/gcloud/reference/run/deploy#--min) of the Service. + - `run.googleapis.com/scalingMode` sets the type of scaling mode for the service. The supported values for scaling mode are "manual" and "automatic". If not provided, it defaults to "automatic". + - `run.googleapis.com/manualInstanceCount` sets the total instance count for the service in manual scaling mode. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving. diff --git a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl index 4ba8b05e14da..e2370c664c30 100644 --- a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl @@ -1594,6 +1594,177 @@ resource "google_cloud_run_service" "default" { `, name, project) } +func TestAccCloudRunService_cloudRunServiceWithMinInstance(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + name := "tftest-cloudrun-" + acctest.RandString(t, 6) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunService_cloudRunServiceWithMinInstance(name, project), + }, + { + ResourceName: "google_cloud_run_service.default", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, + }, + { + Config: testAccCloudRunService_cloudRunServiceUpdateWithMinInstance(name, project,), + }, + { + ResourceName: "google_cloud_run_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, + }, + }, + }) + } + +func testAccCloudRunService_cloudRunServiceWithMinInstance(name, project string) string { + return fmt.Sprintf(` +resource "google_cloud_run_service" "default" { + name = "%s" + location = "us-central1" + + metadata { + namespace = "%s" + annotations = { + generated-by = "magic-modules" + "run.googleapis.com/minScale": "2" + } + } + + template { + spec { + containers { + image = "gcr.io/cloudrun/hello" + } + } + } +} +`, name, project) +} + +func testAccCloudRunService_cloudRunServiceUpdateWithMinInstance(name, project string) string { + return fmt.Sprintf(` +resource "google_cloud_run_service" "default" { + name = "%s" + location = "us-central1" + + metadata { + namespace = "%s" + annotations = { + generated-by = "magic-modules" + "run.googleapis.com/minScale": "5" + } + } + + template { + spec { + containers { + image = "gcr.io/cloudrun/hello" + } + } + } +} +`, name, project) +} + +func TestAccCloudRunService_cloudRunServiceWithManualScaling(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + name := "tftest-cloudrun-" + acctest.RandString(t, 6) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunService_cloudRunServiceWithManualScaling(name, project), + }, + { + ResourceName: 
"google_cloud_run_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, + }, + { + Config: testAccCloudRunService_cloudRunServiceUpdateWithManualScaling(name, project,), + }, + { + ResourceName: "google_cloud_run_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, + }, + }, + }) + } + +func testAccCloudRunService_cloudRunServiceWithManualScaling(name, project string) string { + return fmt.Sprintf(` +resource "google_cloud_run_service" "default" { + name = "%s" + location = "us-central1" + + metadata { + namespace = "%s" + annotations = { + generated-by = "magic-modules" + "run.googleapis.com/launch-stage": "BETA" + "run.googleapis.com/scalingMode": "manual" + "run.googleapis.com/manualInstanceCount": "2" + } + } + + template { + spec { + containers { + image = "gcr.io/cloudrun/hello" + } + } + } +} +`, name, project) +} + +func testAccCloudRunService_cloudRunServiceUpdateWithManualScaling(name, project string) string { + return fmt.Sprintf(` +resource "google_cloud_run_service" "default" { + name = "%s" + location = "us-central1" + + metadata { + namespace = "%s" + annotations = { + generated-by = "magic-modules" + "run.googleapis.com/launch-stage": "BETA" + "run.googleapis.com/scalingMode": "manual" + "run.googleapis.com/manualInstanceCount": "5" + + } + } + + template { + spec { + containers { + image = "gcr.io/cloudrun/hello" + } + } + } +} +`, name, project) +} + {{ if ne $.TargetVersionName `ga` -}} func TestAccCloudRunService_cloudRunServiceIap_update(t *testing.T) { t.Parallel() From 37b32b002dc492ca4c29871987b2a04b9b2ed384 Mon Sep 17 00:00:00 2001 From: William Yardley Date: Mon, 28 Apr 2025 19:39:48 
-0700 Subject: [PATCH 030/884] compute: Documented empty string for `load_balancing_scheme` (#13489) --- mmv1/products/compute/ForwardingRule.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mmv1/products/compute/ForwardingRule.yaml b/mmv1/products/compute/ForwardingRule.yaml index 114d2a667bcb..7975ee959dc8 100644 --- a/mmv1/products/compute/ForwardingRule.yaml +++ b/mmv1/products/compute/ForwardingRule.yaml @@ -370,6 +370,10 @@ properties: description: | Specifies the forwarding rule type. + Note that an empty string value (`""`) is also supported for some use + cases, for example PSC (private service connection) regional forwarding + rules. + For more information about forwarding rules, refer to [Forwarding rule concepts](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts). default_value: "EXTERNAL" From 7296a6adea504e8d8acc67dbad1e6d2af5ddf846 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 29 Apr 2025 09:21:33 -0700 Subject: [PATCH 031/884] Removed iap brand set_computed_name post_create (#13632) --- mmv1/products/iap/Brand.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/products/iap/Brand.yaml b/mmv1/products/iap/Brand.yaml index 15fc48816b79..70631bc128c0 100644 --- a/mmv1/products/iap/Brand.yaml +++ b/mmv1/products/iap/Brand.yaml @@ -50,7 +50,6 @@ async: identity: - name custom_code: - post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_import: 'templates/terraform/custom_import/iap_brand.go.tmpl' examples: - name: 'iap_brand' From d99125cacdb50598f447dc8e11244c6c542bf2cb Mon Sep 17 00:00:00 2001 From: shantstepanian <17996546+shantstepanian@users.noreply.github.com> Date: Tue, 29 Apr 2025 13:41:03 -0400 Subject: [PATCH 032/884] Bigtable: add NodeScalingFactor support for Clusters (#13391) --- .../bigtable/resource_bigtable_instance.go | 50 +++++++--- ...esource_bigtable_instance_internal_test.go | 28 +++--- .../resource_bigtable_instance_test.go | 95 +++++++++++++++++++ 
.../docs/r/bigtable_instance.html.markdown | 2 + 4 files changed, 151 insertions(+), 24 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index c3afbf79c873..c890e2f3127b 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -152,6 +152,14 @@ func ResourceBigtableInstance() *schema.Resource { Computed: true, Description: `The state of the cluster`, }, + "node_scaling_factor": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Default: "NodeScalingFactor1X", + ValidateFunc: validation.StringInSlice([]string{"NodeScalingFactor1X", "NodeScalingFactor2X"}, false), + Description: `The node scaling factor of this cluster. One of "NodeScalingFactor1X" or "NodeScalingFactor2X". Defaults to "NodeScalingFactor1X".`, + }, }, }, }, @@ -521,13 +529,22 @@ func flattenBigtableCluster(c *bigtable.ClusterInfo) map[string]interface{} { storageType = "HDD" } + var nodeScalingFactor string + switch c.NodeScalingFactor { + case bigtable.NodeScalingFactor1X: + nodeScalingFactor = "NodeScalingFactor1X" + case bigtable.NodeScalingFactor2X: + nodeScalingFactor = "NodeScalingFactor2X" + } + cluster := map[string]interface{}{ - "zone": c.Zone, - "num_nodes": c.ServeNodes, - "cluster_id": c.Name, - "storage_type": storageType, - "kms_key_name": c.KMSKeyName, - "state": c.State, + "zone": c.Zone, + "num_nodes": c.ServeNodes, + "cluster_id": c.Name, + "storage_type": storageType, + "kms_key_name": c.KMSKeyName, + "state": c.State, + "node_scaling_factor": nodeScalingFactor, } if c.AutoscalingConfig != nil { cluster["autoscaling_config"] = make([]map[string]interface{}, 1) @@ -610,12 +627,21 @@ func expandBigtableClusters(clusters []interface{}, instanceID string, config *t storageType = bigtable.HDD } + var nodeScalingFactor 
bigtable.NodeScalingFactor + switch cluster["node_scaling_factor"].(string) { + case "NodeScalingFactor1X": + nodeScalingFactor = bigtable.NodeScalingFactor1X + case "NodeScalingFactor2X": + nodeScalingFactor = bigtable.NodeScalingFactor2X + } + cluster_config := bigtable.ClusterConfig{ - InstanceID: instanceID, - Zone: zone, - ClusterID: cluster["cluster_id"].(string), - StorageType: storageType, - KMSKeyName: cluster["kms_key_name"].(string), + InstanceID: instanceID, + Zone: zone, + ClusterID: cluster["cluster_id"].(string), + StorageType: storageType, + KMSKeyName: cluster["kms_key_name"].(string), + NodeScalingFactor: nodeScalingFactor, } autoscaling_configs := cluster["autoscaling_config"].([]interface{}) if len(autoscaling_configs) > 0 { @@ -754,7 +780,7 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo return err } - // Clusters can't have their zone, storage_type or kms_key_name updated, + // Clusters can't have their zone, storage_type, kms_key_name, or node_scaling_factor updated, // ForceNew if it's changed. This will show a diff with the old state on // the left side and the unmodified new state on the right and the ForceNew // attributed to the _old state index_ even if the diff appears to have moved. 
diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go index 7490932c314b..b692d4aa11e8 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go @@ -167,24 +167,28 @@ func TestUnitBigtable_flattenBigtableCluster(t *testing.T) { "storage_target": 60, }, }, + // unspecified node scaling factor in input will lead to an empty string here + "node_scaling_factor": "", }, }, "HDD manual scaling": { clusterInfo: &bigtable.ClusterInfo{ - StorageType: bigtable.HDD, - Zone: "zone2", - ServeNodes: 7, - Name: "hdd-cluster", - KMSKeyName: "KMS", - State: "READY", + StorageType: bigtable.HDD, + Zone: "zone2", + ServeNodes: 7, + Name: "hdd-cluster", + KMSKeyName: "KMS", + State: "READY", + NodeScalingFactor: bigtable.NodeScalingFactor2X, }, want: map[string]interface{}{ - "zone": "zone2", - "num_nodes": 7, - "cluster_id": "hdd-cluster", - "storage_type": "HDD", - "kms_key_name": "KMS", - "state": "READY", + "zone": "zone2", + "num_nodes": 7, + "cluster_id": "hdd-cluster", + "storage_type": "HDD", + "kms_key_name": "KMS", + "state": "READY", + "node_scaling_factor": "NodeScalingFactor2X", }, }, } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go index 52aef858126e..ecc9023c9e45 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go @@ -546,6 +546,101 @@ func TestAccBigtableInstance_forceDestroyBackups(t *testing.T) { }) } +func TestAccBigtableInstance_createWithNodeScalingFactorDefault(t *testing.T) { + // bigtable instance does not use the shared HTTP client, this 
test creates an instance + acctest.SkipIfVcr(t) + t.Parallel() + + instanceName := fmt.Sprintf("tf-test-nsf-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigtableInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + // Create config with nothing specified for node scaling factor. + // Ensure that we get 1X back. + Config: testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName, 2, ""), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.num_nodes", "2"), + resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.node_scaling_factor", "NodeScalingFactor1X"), + ), + }, + { + ResourceName: "google_bigtable_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back + }, + }, + }) +} + +func TestAccBigtableInstance_createWithNodeScalingFactorThenUpdateViaForceNew(t *testing.T) { + // bigtable instance does not use the shared HTTP client, this test creates an instance + acctest.SkipIfVcr(t) + t.Parallel() + + instanceName := fmt.Sprintf("tf-test-nsf-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigtableInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + // Create config with node scaling factor as 2x. 
+ Config: testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName, 2, "NodeScalingFactor2X"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.num_nodes", "2"), + resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.node_scaling_factor", "NodeScalingFactor2X"), + ), + }, + { + ResourceName: "google_bigtable_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back + }, + { + // Updating the node scaling factor only possible without delete protection, as we need ForceNew + Config: testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName, 2, "NodeScalingFactor1X"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.num_nodes", "2"), + resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.node_scaling_factor", "NodeScalingFactor1X"), + ), + }, + { + ResourceName: "google_bigtable_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back + }, + }, + }) +} + +func testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName string, numNodes int, nodeScalingFactor string) string { + nodeScalingFactorAttribute := "" + if nodeScalingFactor != "" { + nodeScalingFactorAttribute = fmt.Sprintf("node_scaling_factor = \"%s\"", nodeScalingFactor) + } + return fmt.Sprintf(` +resource "google_bigtable_instance" "instance" { + name = "%s" + cluster { + cluster_id = "%s" + zone = "us-central1-b" + num_nodes = %d + storage_type = "SSD" + %s + } + deletion_protection = false +} +`, instanceName, instanceName, numNodes, nodeScalingFactorAttribute) +} + func testAccBigtableInstance_multipleClustersSameID(instanceName string) 
string { return fmt.Sprintf(` resource "google_bigtable_instance" "instance" { diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown index c9b952fdefb7..fbe537ba6579 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown @@ -141,6 +141,8 @@ If no value is set, Cloud Bigtable automatically allocates nodes based on your d * `kms_key_name` - (Optional) Describes the Cloud KMS encryption key that will be used to protect the destination Bigtable cluster. The requirements for this key are: 1) The Cloud Bigtable service account associated with the project that contains this cluster must be granted the `cloudkms.cryptoKeyEncrypterDecrypter` role on the CMEK key. 2) Only regional keys can be used and the region of the CMEK key must match the region of the cluster. +* `node_scaling_factor` - (Optional) The node scaling factor for this cluster. One of `"NodeScalingFactor1X"` or `"NodeScalingFactor2X"`. Defaults to `"NodeScalingFactor1X"`. If `"NodeScalingFactor2X"` is specified, then `num_nodes`, `min_nodes`, and `max_nodes` would need to be specified in increments of 2. This value cannot be updated after the cluster is created. + -> **Note**: Removing the field entirely from the config will cause the provider to default to the backend value. 
!> **Warning:** Modifying the `storage_type`, `zone` or `kms_key_name` of an existing cluster (by From d6e7f2fd773652ffd4f70875ac746a8066b9ad36 Mon Sep 17 00:00:00 2001 From: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Date: Tue, 29 Apr 2025 11:13:45 -0700 Subject: [PATCH 033/884] Add manual scaling for cloudrunv2 services (#13742) --- mmv1/products/cloudrunv2/Service.yaml | 11 +++ ...resource_cloud_run_v2_service_test.go.tmpl | 82 +++++++++++++++++++ 2 files changed, 93 insertions(+) diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 5f62e9c8b3db..237903dc5544 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -340,6 +340,17 @@ properties: type: Integer description: | Minimum number of instances for the service, to be divided among all revisions receiving traffic. + - name: 'scalingMode' + type: Enum + description: | + The [scaling mode](https://cloud.google.com/run/docs/reference/rest/v2/projects.locations.services#scalingmode) for the service. + enum_values: + - 'AUTOMATIC' + - 'MANUAL' + - name: 'manualInstanceCount' + type: Integer + description: | + Total instance count for the service in manual scaling mode. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving. 
- name: 'defaultUriDisabled' type: Boolean description: |- diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index 0cf551030454..2291c5b7e5cb 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -1558,6 +1558,88 @@ resource "google_project_iam_member" "logs_writer" { `, context) } +func TestAccCloudRunV2Service_cloudrunv2ServiceWithManualScaling(t *testing.T) { + t.Parallel() + context := map[string]interface{} { + "random_suffix" : acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase { + PreCheck: func() { acctest.AccTestPreCheck(t)}, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2ServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceWithManualScaling(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceUpdateWithManualScaling(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceWithManualScaling(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-manual-scaling-service%{random_suffix}" + description = "description 
creating" + location = "us-central1" + deletion_protection = false + annotations = { + generated-by = "magic-modules" + } + ingress = "INGRESS_TRAFFIC_ALL" + launch_stage = "BETA" + scaling { + scaling_mode = "MANUAL" + manual_instance_count = 2 + } + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} +`, context) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceUpdateWithManualScaling(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-manual-scaling-service%{random_suffix}" + description = "description creating" + location = "us-central1" + deletion_protection = false + annotations = { + generated-by = "magic-modules" + } + ingress = "INGRESS_TRAFFIC_ALL" + launch_stage = "BETA" + scaling { + scaling_mode = "MANUAL" + manual_instance_count = 10 + } + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} +`, context) +} + {{ if ne $.TargetVersionName `ga` -}} func TestAccCloudRunV2Service_cloudrunv2ServiceIapUpdate(t *testing.T) { t.Parallel() From 1e163ebc111a88b9000a2f99352f3c41ee51d9ab Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 30 Apr 2025 09:14:43 -0700 Subject: [PATCH 034/884] Removed osconfig guestpolicies set_computed_name post_create (#13639) --- mmv1/products/osconfig/GuestPolicies.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/mmv1/products/osconfig/GuestPolicies.yaml b/mmv1/products/osconfig/GuestPolicies.yaml index 1b2119faf71e..e7dc14eeba65 100644 --- a/mmv1/products/osconfig/GuestPolicies.yaml +++ b/mmv1/products/osconfig/GuestPolicies.yaml @@ -38,8 +38,6 @@ timeouts: delete_minutes: 20 identity: - guestPolicyId -custom_code: - post_create: 'templates/terraform/post_create/set_computed_name.tmpl' examples: - name: 'os_config_guest_policies_basic' primary_resource_id: 'guest_policies' From 4d7a8dd3dc60a254201949e60b3267ed7a71f116 Mon Sep 17 
00:00:00 2001 From: Zhenhua Li Date: Wed, 30 Apr 2025 09:23:54 -0700 Subject: [PATCH 035/884] Remove can_ip_forward = false in most compute instance tests (#13822) --- .../resource_compute_instance_test.go.tmpl | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index 4d1eba38721b..5b5ebe1dd96e 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -5971,7 +5971,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -6033,7 +6032,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -6064,7 +6062,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -6483,7 +6480,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] deletion_protection = false @@ -6511,7 +6507,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] deletion_protection = true @@ -8735,7 +8730,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -9153,7 +9147,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = 
"e2-medium" zone = "us-central1-a" - can_ip_forward = false boot_disk { initialize_params { @@ -9884,7 +9877,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] desired_status = "RUNNING" @@ -9921,7 +9913,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] desired_status = "RUNNING" @@ -10088,7 +10079,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "%s" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -10150,7 +10140,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" - can_ip_forward = false tags = ["baz"] boot_disk { @@ -10214,7 +10203,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "c2-standard-4" zone = "us-east4-b" - can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -10242,7 +10230,6 @@ resource "google_compute_instance" "second" { name = "%s-2" machine_type = "c2-standard-4" zone = "us-east4-b" - can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -10289,7 +10276,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-standard-4" zone = "us-east4-b" - can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -10964,7 +10950,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "%s" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -10997,7 +10982,6 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "%s" zone = "us-central1-a" - can_ip_forward = false tags = ["foo", "bar"] 
boot_disk { From dc9f4acc70fff3096c98bc59603757607d230cd8 Mon Sep 17 00:00:00 2001 From: Yanwei Guo Date: Wed, 30 Apr 2025 09:28:16 -0700 Subject: [PATCH 036/884] Add an example for `cloud_run_v2_job` resource using multi-container (#13794) --- mmv1/products/cloudrunv2/Job.yaml | 7 +++++++ .../cloudrunv2_job_multicontainer.tf.tmpl | 18 ++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_job_multicontainer.tf.tmpl diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index 035c6a12c4b5..d0457dccefa3 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -118,6 +118,13 @@ examples: cloud_run_job_name: 'cloudrun-job' ignore_read_extra: - 'deletion_protection' + - name: 'cloudrunv2_job_multicontainer' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-job%s", context["random_suffix"])' + vars: + cloud_run_job_name: 'cloudrun-job' + ignore_read_extra: + - 'deletion_protection' virtual_fields: - name: 'deletion_protection' description: | diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_multicontainer.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_job_multicontainer.tf.tmpl new file mode 100644 index 000000000000..7a63cc5cbca3 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_job_multicontainer.tf.tmpl @@ -0,0 +1,18 @@ +resource "google_cloud_run_v2_job" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_job_name"}}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + name = "job-1" + image = "us-docker.pkg.dev/cloudrun/container/job" + } + containers { + name = "job-2" + image = "us-docker.pkg.dev/cloudrun/container/job" + } + } + } +} From 1d6b2083cbd6fd388a77e25a4a3933a49b29ae25 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 30 Apr 2025 11:09:43 -0700 Subject: [PATCH 037/884] Implement team 
membership check (#13824) --- .ci/magician/github/get.go | 25 +++++++++++++++++++++++++ .ci/magician/github/membership.go | 17 ++++++++--------- 2 files changed, 33 insertions(+), 9 deletions(-) diff --git a/.ci/magician/github/get.go b/.ci/magician/github/get.go index 50fe8ce89b64..e39dfe25ddc3 100644 --- a/.ci/magician/github/get.go +++ b/.ci/magician/github/get.go @@ -145,3 +145,28 @@ func (gh *Client) GetTeamMembers(organization, team string) ([]User, error) { } return members, nil } + +func (gh *Client) IsOrgMember(author, org string) bool { + url := fmt.Sprintf("https://api.github.com/orgs/%s/members/%s", org, author) + err := utils.RequestCallWithRetry(url, "GET", gh.token, nil, nil) + return err == nil +} + +func (gh *Client) IsTeamMember(organization, teamSlug, username string) bool { + type TeamMembership struct { + URL string `json:"url"` + Role string `json:"role"` + State string `json:"state"` + } + + url := fmt.Sprintf("https://api.github.com/orgs/%s/teams/%s/memberships/%s", organization, teamSlug, username) + var membership TeamMembership + err := utils.RequestCallWithRetry(url, "GET", gh.token, &membership, nil) + + if err != nil { + return false + } + + // User is considered a member if state is "active" + return membership.State == "active" +} diff --git a/.ci/magician/github/membership.go b/.ci/magician/github/membership.go index a45b42bb30e5..0347745022bf 100644 --- a/.ci/magician/github/membership.go +++ b/.ci/magician/github/membership.go @@ -50,12 +50,18 @@ func (gh *Client) GetUserType(user string) UserType { return CoreContributorUserType } - if isOrgMember(user, "GoogleCloudPlatform", gh.token) { + if gh.IsTeamMember("GoogleCloudPlatform", "terraform", user) { + fmt.Printf("Debug test --- User '%s' is an active member of the 'terraform' team in 'GoogleCloudPlatform' organization\n", user) + } else { + fmt.Printf("Debug test --- User '%s' is not an active member of the 'terraform' team in 'GoogleCloudPlatform' organization\n", user) + } + 
+ if gh.IsOrgMember(user, "GoogleCloudPlatform") { fmt.Println("User is a GCP org member") return GooglerUserType } - if isOrgMember(user, "googlers", gh.token) { + if gh.IsOrgMember(user, "googlers") { fmt.Println("User is a googlers org member") return GooglerUserType } @@ -74,13 +80,6 @@ func IsCoreReviewer(user string) bool { return isCoreReviewer } -func isOrgMember(author, org, githubToken string) bool { - url := fmt.Sprintf("https://api.github.com/orgs/%s/members/%s", org, author) - err := utils.RequestCallWithRetry(url, "GET", githubToken, nil, nil) - - return err == nil -} - // GetRandomReviewer returns a random available reviewer (optionally excluding some people from the reviewer pool) func GetRandomReviewer(excludedReviewers []string) string { availableReviewers := AvailableReviewers(excludedReviewers) From 646734c003c738848e1da7f3048d3c75d28aed27 Mon Sep 17 00:00:00 2001 From: skysarthak Date: Wed, 30 Apr 2025 13:35:05 -0700 Subject: [PATCH 038/884] Register disk provisioning fields in DatabaseInstance metadata (#13779) Co-authored-by: Sarthak Tandon --- .../services/sql/resource_sql_database_instance.go.tmpl | 3 +-- .../services/sql/resource_sql_database_instance_meta.yaml | 2 ++ 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 0ee23a7ab748..ed0f45de67ab 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -419,7 +419,6 @@ is set to true. Defaults to ZONAL.`, "disk_size": { Type: schema.TypeInt, Optional: true, - // Default is likely 10gb, but it is undocumented and may change. Computed: true, Description: `The size of data disk, in GB. Size of a running instance cannot be reduced but can be increased. 
The minimum value is 10GB for PD_SSD, PD_HDD and 20GB for HYPERDISK_BALANCED.`, }, @@ -429,7 +428,7 @@ is set to true. Defaults to ZONAL.`, Computed: true, ForceNew: true, DiffSuppressFunc: caseDiffDashSuppress, - Description: `The type of supported data disk is tier dependent and can be PD_SSD or PD_HDD or HyperDisk_Balanced `, + Description: `The type of supported data disk is tier dependent and can be PD_SSD or PD_HDD or HYPERDISK_BALANCED.`, }, {{- if ne $.TargetVersionName "ga" }} "data_disk_provisioned_iops": { diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml index 003e20c08d5f..39cca9f8f62b 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml @@ -79,6 +79,8 @@ fields: - field: 'settings.disk_autoresize' - field: 'settings.disk_autoresize_limit' - field: 'settings.disk_size' + - field: 'settings.data_disk_provisioned_iops' + - field: 'settings.data_disk_provisioned_throughput' - field: 'settings.disk_type' - field: 'settings.edition' - field: 'settings.enable_dataplex_integration' From 93285c0e1c3f8df51252d429e55e03d61541fe3b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 30 Apr 2025 14:05:29 -0700 Subject: [PATCH 039/884] Fix the bug when validating subnetwork project for compute instance (#13823) --- .../services/compute/resource_compute_instance.go.tmpl | 5 +++-- .../third_party/terraform/tpgresource/self_link_helpers.go | 2 +- .../terraform/tpgresource/self_link_helpers_test.go | 7 ++++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index d85a74e24e3f..9cee3cedf7ea 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -143,8 +143,9 @@ func ValidateSubnetworkProjectFunc(d tpgresource.TerraformResourceDiff) error { return nil } - if tpgresource.GetProjectFromRegionalSelfLink(subnetwork.(string)) != subnetworkProject.(string) { - return fmt.Errorf("project in subnetwork's self_link %q must match subnetwork_project %q", subnetwork, subnetworkProject) + project := tpgresource.GetProjectFromRegionalSelfLink(subnetwork.(string)) + if project != subnetworkProject.(string) { + return fmt.Errorf("project %s in subnetwork's self_link %q must match subnetwork_project %q", project, subnetwork, subnetworkProject) } } return nil diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 2e1089fb36aa..6891254e3f46 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -189,7 +189,7 @@ func GetRegionFromRegionalSelfLink(selfLink string) string { } func GetProjectFromRegionalSelfLink(selfLink string) string { - re := regexp.MustCompile("projects/([a-zA-Z0-9-:]*)/(?:locations|regions)/[a-zA-Z0-9-:]*") + re := regexp.MustCompile("projects/([a-zA-Z0-9-:.]*)/(?:locations|regions)/[a-zA-Z0-9-:]*") switch { case re.MatchString(selfLink): if res := re.FindStringSubmatch(selfLink); len(res) == 2 && res[1] != "" { diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go index 4dc0c84e0381..df4cedb81bf9 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go @@ -189,9 +189,10 @@ func TestGetRegionFromRegionalSelfLink(t *testing.T) { func TestGetProjectFromRegionalSelfLink(t *testing.T) { cases := 
map[string]string{ - "projects/foo/locations/europe-north1/datasets/bar/operations/foobar": "foo", - "projects/REDACTED/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDACTED", - "projects/REDA:CT-ED09/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDA:CT-ED09", + "projects/foo/locations/europe-north1/datasets/bar/operations/foobar": "foo", + "projects/REDACTED/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDACTED", + "projects/REDA:CT-ED09/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDA:CT-ED09", + "projects/REDA.com:CT-ED09/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDA.com:CT-ED09", } for input, expected := range cases { if result := GetProjectFromRegionalSelfLink(input); result != expected { From 42dadc6c54dfd6374340528afcc0edb222d8e433 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 1 May 2025 00:40:53 +0200 Subject: [PATCH 040/884] feat: Serverless VPC Access Connector update fields without recreating the resource (#12830) Co-authored-by: Scott Suarez --- mmv1/products/vpcaccess/Connector.yaml | 19 +- .../constants/vpc_access_connector.go.tmpl | 8 + .../vpc_access_connector_instances.go.tmpl | 15 ++ .../resource_vpc_access_connector_test.go | 207 ++++++++++++++++++ 4 files changed, 248 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl create mode 100644 mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl diff --git a/mmv1/products/vpcaccess/Connector.yaml b/mmv1/products/vpcaccess/Connector.yaml index 7a881c119d53..964d5b9a727a 100644 --- a/mmv1/products/vpcaccess/Connector.yaml +++ b/mmv1/products/vpcaccess/Connector.yaml @@ -22,7 +22,9 @@ references: docs: base_url: 'projects/{{project}}/locations/{{region}}/connectors' create_url: 'projects/{{project}}/locations/{{region}}/connectors?connectorId={{name}}' -immutable: true +update_url: 
'projects/{{project}}/locations/{{region}}/connectors/{{name}}' +update_verb: 'PATCH' +update_mask: true timeouts: insert_minutes: 20 update_minutes: 20 @@ -39,6 +41,11 @@ custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' decoder: 'templates/terraform/decoders/long_name_to_self_link.go.tmpl' post_create: 'templates/terraform/post_create/sleep.go.tmpl' + constants: 'templates/terraform/constants/vpc_access_connector.go.tmpl' + pre_update: 'templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl' +custom_diff: + - 'customdiff.ForceNewIfChange("min_instances", isInstanceShrinkage)' + - 'customdiff.ForceNewIfChange("max_instances", isInstanceShrinkage)' examples: - name: 'vpc_access_connector' primary_resource_id: 'connector' @@ -69,6 +76,7 @@ properties: The name of the resource (Max 25 characters). required: true custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + immutable: true - name: 'network' type: String description: | @@ -80,12 +88,14 @@ properties: diff_suppress_func: 'tpgresource.CompareResourceNames' custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' custom_expand: 'templates/terraform/custom_expand/resource_from_self_link.go.tmpl' + immutable: true - name: 'ipCidrRange' type: String description: | The range of internal addresses that follows RFC 4632 notation. Example: `10.132.0.0/28`. 
required_with: - 'network' + immutable: true - name: 'state' type: Enum description: | @@ -97,6 +107,7 @@ properties: - 'DELETING' - 'ERROR' - 'UPDATING' + immutable: true - name: 'machineType' type: String description: | @@ -113,6 +124,7 @@ properties: - min_instances validation: function: 'validation.IntBetween(200, 1000)' + immutable: true - name: 'minInstances' type: Integer description: | @@ -121,6 +133,8 @@ properties: default_from_api: true conflicts: - min_throughput + required_with: + - max_instances - name: 'maxInstances' type: Integer description: | @@ -129,6 +143,8 @@ properties: default_from_api: true conflicts: - max_throughput + required_with: + - min_instances - name: 'maxThroughput' type: Integer description: | @@ -140,6 +156,7 @@ properties: - max_instances validation: function: 'validation.IntBetween(200, 1000)' + immutable: true - name: 'selfLink' type: String description: | diff --git a/mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl b/mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl new file mode 100644 index 000000000000..c8bb1c45beaf --- /dev/null +++ b/mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl @@ -0,0 +1,8 @@ +func isInstanceShrinkage(_ context.Context, old, new, _ interface{}) bool { + // max and min instances can only increase in-place, + // so we must create a new resource if it is decreased. 
+ if old == nil || new == nil { + return false + } + return new.(int) < old.(int) +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl b/mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl new file mode 100644 index 000000000000..89a2fc22ea98 --- /dev/null +++ b/mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl @@ -0,0 +1,15 @@ +if d.HasChange("min_instances") && !d.HasChange("max_instances") { + obj["maxInstances"] = d.Get("max_instances").(int) + updateMask = append(updateMask, "maxInstances", "minInstances") +} + +if d.HasChange("max_instances") && !d.HasChange("min_instances") { + obj["minInstances"] = d.Get("min_instances").(int) + updateMask = append(updateMask, "maxInstances", "minInstances") +} + +// Overwrite the previously set mask. +url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) +if err != nil { + return err +} diff --git a/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go b/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go index c7ce3ac9ca7c..544d7c21a4e2 100644 --- a/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go +++ b/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go @@ -1,10 +1,12 @@ package vpcaccess_test import ( + "fmt" "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -32,6 +34,130 @@ func TestAccVPCAccessConnector_vpcAccessConnectorThroughput(t *testing.T) { }) } +func TestAccVPCAccessConnector_vpcAccessConnectorUpdateAllMutableFieldsCauseUpdateInPlace(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + 
acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVPCAccessConnectorDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVPCAccessConnector_vpcAccessConnectorThroughput(context), + }, + { + ResourceName: "google_vpc_access_connector.connector", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccVPCAccessConnector_vpcAccessConnectorUpdateAllMutableFields(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_vpc_access_connector.connector", plancheck.ResourceActionUpdate), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "machine_type", "f1-micro"), + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "min_instances", "3"), + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "max_instances", "5"), + ), + }, + { + ResourceName: "google_vpc_access_connector.connector", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccVPCAccessConnector_vpcAccessConnectorUpdateOnlyMinInstancesCauseUpdateInPlace(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVPCAccessConnectorDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVPCAccessConnector_vpcAccessConnectorThroughput(context), + }, + { + ResourceName: "google_vpc_access_connector.connector", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccVPCAccessConnector_vpcAccessConnectorUpdateOnlyMinInstances(context), + ConfigPlanChecks: 
resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_vpc_access_connector.connector", plancheck.ResourceActionUpdate), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "machine_type", "e2-standard-4"), + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "min_instances", "3"), + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "max_instances", "4"), + ), + }, + { + ResourceName: "google_vpc_access_connector.connector", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccVPCAccessConnector_vpcAccessConnectorUpdateImmutableFieldCauseReplace(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVPCAccessConnectorDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVPCAccessConnector_vpcAccessConnectorThroughput(context), + }, + { + ResourceName: "google_vpc_access_connector.connector", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccVPCAccessConnector_vpcAccessConnectorUpdateImmutableField(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_vpc_access_connector.connector", plancheck.ResourceActionReplace), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "name", fmt.Sprintf("immutable%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_vpc_access_connector.connector", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func 
TestAccVPCAccessConnector_vpcAccessConnectorThroughput_combiningThroughputAndInstancesFields_conflict(t *testing.T) { // Need to skip this test as the expected failure happens before the provider interacts with APIs // In VCR mode this test fails due to lack of cassettes @@ -116,6 +242,87 @@ resource "google_vpc_access_connector" "connector" { } machine_type = "e2-standard-4" min_instances = 2 + max_instances = 4 + region = "us-central1" +} + +resource "google_compute_subnetwork" "custom_test" { + name = "tf-test-vpc-con%{random_suffix}" + ip_cidr_range = "10.2.0.0/28" + region = "us-central1" + network = google_compute_network.custom_test.id +} + +resource "google_compute_network" "custom_test" { + name = "tf-test-vpc-con%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} + +func testAccVPCAccessConnector_vpcAccessConnectorUpdateOnlyMinInstances(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vpc_access_connector" "connector" { + name = "tf-test-vpc-con%{random_suffix}" + subnet { + name = google_compute_subnetwork.custom_test.name + } + machine_type = "e2-standard-4" + min_instances = 3 + max_instances = 4 + region = "us-central1" +} + +resource "google_compute_subnetwork" "custom_test" { + name = "tf-test-vpc-con%{random_suffix}" + ip_cidr_range = "10.2.0.0/28" + region = "us-central1" + network = google_compute_network.custom_test.id +} + +resource "google_compute_network" "custom_test" { + name = "tf-test-vpc-con%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} + +func testAccVPCAccessConnector_vpcAccessConnectorUpdateAllMutableFields(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vpc_access_connector" "connector" { + name = "tf-test-vpc-con%{random_suffix}" + subnet { + name = google_compute_subnetwork.custom_test.name + } + machine_type = "f1-micro" + min_instances = 3 + max_instances = 5 + region = "us-central1" +} + +resource 
"google_compute_subnetwork" "custom_test" { + name = "tf-test-vpc-con%{random_suffix}" + ip_cidr_range = "10.2.0.0/28" + region = "us-central1" + network = google_compute_network.custom_test.id +} + +resource "google_compute_network" "custom_test" { + name = "tf-test-vpc-con%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} + +func testAccVPCAccessConnector_vpcAccessConnectorUpdateImmutableField(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vpc_access_connector" "connector" { + name = "immutable%{random_suffix}" + subnet { + name = google_compute_subnetwork.custom_test.name + } + machine_type = "e2-standard-4" + min_instances = 2 max_instances = 3 region = "us-central1" } From c881ef0b90de52e901450272e7aa4808d1cd049e Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 30 Apr 2025 17:01:37 -0700 Subject: [PATCH 041/884] Modify compute instance tests (#13829) --- .../compute/resource_compute_instance_test.go.tmpl | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index 5b5ebe1dd96e..598723fd8cb2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -2803,9 +2803,13 @@ func TestAccComputeInstance_enableDisplay(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_enableDisplayUpdated(instanceName), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), + { + Config: testAccComputeInstance_enableDisplay(instanceName), Check: resource.ComposeTestCheckFunc( acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - 
testAccComputeInstance_enableDisplayUpdated(instanceName)), + testAccComputeInstance_enableDisplay(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -9612,7 +9616,8 @@ resource "google_compute_instance" "foobar" { %{host_error_timeout_sec} automatic_restart = true } -}`, context) +} +`, context) } {{- end }} @@ -10899,7 +10904,8 @@ resource "google_compute_instance" "foobar" { network_interface { network = "default" } -}`, instance) +} +`, instance) } func testAccComputeInstance_partnerMetadata(instance string) string { From 5fff76fe4fef1880d688e99f7293cdc97e31f7ff Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Wed, 30 Apr 2025 17:19:08 -0700 Subject: [PATCH 042/884] workbench instance - Ignore irrelevant fields in update tests (#13830) Co-authored-by: Stephen Lewis (Burrows) --- ...ench_instance_shielded_config_test.go.tmpl | 20 +++---- .../resource_workbench_instance_test.go.tmpl | 52 +++++++++---------- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl index 61487dfa4716..6a7e7be5c506 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl @@ -30,7 +30,7 @@ func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", 
"request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_true(context), @@ -43,7 +43,7 @@ func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -71,7 +71,7 @@ func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_none(context), @@ -84,7 +84,7 @@ func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -112,7 +112,7 @@ func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { ResourceName: 
"google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_none(context), @@ -125,7 +125,7 @@ func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_false(context), @@ -138,7 +138,7 @@ func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_false(context), @@ -151,7 +151,7 @@ func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", 
"request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_true(context), @@ -164,7 +164,7 @@ func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_shielded_config_true(context), @@ -177,7 +177,7 @@ func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl index bb5f8eff4455..07f66b1a539a 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl @@ -30,7 +30,7 @@ func TestAccWorkbenchInstance_update(t *testing.T) { ResourceName: 
"google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_update(context), @@ -43,7 +43,7 @@ func TestAccWorkbenchInstance_update(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -122,7 +122,7 @@ func TestAccWorkbenchInstance_updateGpu(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateGpu(context), @@ -135,7 +135,7 @@ func TestAccWorkbenchInstance_updateGpu(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", 
"update_time", "health_info", "health_state"}, }, }, }) @@ -210,7 +210,7 @@ func TestAccWorkbenchInstance_removeGpu(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_removeGpu(context), @@ -223,7 +223,7 @@ func TestAccWorkbenchInstance_removeGpu(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -283,7 +283,7 @@ func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateMetadata(context), @@ -296,7 +296,7 @@ func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", 
"request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_basic(context), @@ -309,7 +309,7 @@ func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -337,7 +337,7 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateMetadataKey(context), @@ -350,7 +350,7 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", 
"health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateMetadata(context), @@ -363,7 +363,7 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -434,7 +434,7 @@ func TestAccWorkbenchInstance_updateState(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateState(context), @@ -447,7 +447,7 @@ func TestAccWorkbenchInstance_updateState(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_basic(context), @@ -460,7 +460,7 @@ func TestAccWorkbenchInstance_updateState(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: 
true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -500,7 +500,7 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_empty_accelerator(context), @@ -513,7 +513,7 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_empty_accelerator(context), @@ -526,7 +526,7 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", 
"terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -568,7 +568,7 @@ func TestAccWorkbenchInstance_updateBootDisk(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateBootDisk(context), @@ -581,7 +581,7 @@ func TestAccWorkbenchInstance_updateBootDisk(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -609,7 +609,7 @@ func TestAccWorkbenchInstance_updateDataDisk(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateDataDisk(context), @@ -622,7 +622,7 @@ func TestAccWorkbenchInstance_updateDataDisk(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -650,7 +650,7 @@ func TestAccWorkbenchInstance_updateBothDisks(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateBothDisks(context), @@ -663,7 +663,7 @@ func TestAccWorkbenchInstance_updateBothDisks(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) @@ -804,7 +804,7 @@ func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", 
"health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updatedcustomcontainer(context), @@ -817,7 +817,7 @@ func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, }, }) From b1422d450fdb7bcd906674919c832277d802f51c Mon Sep 17 00:00:00 2001 From: govardhanitallam Date: Thu, 1 May 2025 17:20:21 +0530 Subject: [PATCH 043/884] Terraform Integration for Capability (#13282) --- .../products/resourcemanager3/Capability.yaml | 79 +++++++++++++++++++ mmv1/products/resourcemanager3/product.yaml | 26 ++++++ .../resource_manager_capability.go.tmpl | 16 ++++ .../resource_manager_capability.tf.tmpl | 17 ++++ .../components/inputs/services_beta.kt | 5 ++ .../components/inputs/services_ga.kt | 5 ++ ...e_resource_manager_capability_test.go.tmpl | 64 +++++++++++++++ 7 files changed, 212 insertions(+) create mode 100644 mmv1/products/resourcemanager3/Capability.yaml create mode 100644 mmv1/products/resourcemanager3/product.yaml create mode 100644 mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl create mode 100644 mmv1/templates/terraform/examples/resource_manager_capability.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl diff --git a/mmv1/products/resourcemanager3/Capability.yaml b/mmv1/products/resourcemanager3/Capability.yaml new file mode 100644 index 000000000000..0ddee6611585 --- /dev/null +++ b/mmv1/products/resourcemanager3/Capability.yaml @@ -0,0 +1,79 @@ +# Copyright 2025 Google Inc. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'Capability' +description: + An app-enabled folder is a folder within the Google Cloud resource hierarchy that has been configured for application management. + This folder lets you define and manage App Hub applications. These applications are functional groupings of services and workloads + that span multiple projects within that folder and its descendant projects. +references: + guides: + 'Official Documentation': 'https://cloud.google.com/resource-manager/docs/manage-applications' + api: 'https://cloud.google.com/resource-manager/reference/rest' +min_version: beta + +import_format: + - '{{parent}}/capabilities/{{capability_name}}' + +self_link: '{{parent}}/capabilities/{{capability_name}}' +create_url: '{{parent}}/capabilities/{{capability_name}}?updateMask=value' +update_url: '{{parent}}/capabilities/{{capability_name}}' + +create_verb: 'PATCH' +update_verb: 'PATCH' + +update_mask: true + +exclude_delete: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + +autogen_async: true +async: + actions: ['create', 'update'] + operation: + base_url: '{{op_id}}' +custom_code: + custom_import: 'templates/terraform/custom_import/resource_manager_capability.go.tmpl' +examples: + - name: 'resource_manager_capability' + primary_resource_id: 'capability' + vars: + display_name: 'my-folder' + test_env_vars: + org_id: "ORG_ID" + min_version: beta + external_providers: ["time"] + +parameters: + +properties: + - name: 
'parent' + type: String + description: | + Folder on which Capability needs to be updated in the format folders/folder_id. + required: true + url_param_only: true + - name: 'capability_name' + type: String + description: | + Capability name that should be updated on the folder. + required: true + url_param_only: true + - name: 'value' + type: Boolean + description: | + Capability Value. + required: true diff --git a/mmv1/products/resourcemanager3/product.yaml b/mmv1/products/resourcemanager3/product.yaml new file mode 100644 index 000000000000..b5d3ffad64f5 --- /dev/null +++ b/mmv1/products/resourcemanager3/product.yaml @@ -0,0 +1,26 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: 'ResourceManager3' +legacy_name: 'resource_manager' +display_name: 'Resource Manager' +versions: + - name: 'beta' + base_url: 'https://cloudresourcemanager.googleapis.com/v3/' +scopes: + - 'https://www.googleapis.com/auth/cloud-platform' +async: + type: "OpAsync" + operation: + base_url: '{{op_id}}' diff --git a/mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl b/mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl new file mode 100644 index 000000000000..de9cd27e8566 --- /dev/null +++ b/mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl @@ -0,0 +1,16 @@ + config := meta.(*transport_tpg.Config) + + // current import_formats can't import fields with forward slashes in their value + if err := tpgresource.ParseImportId([]string{ + "(?P.+)/capabilities/(?P.+)", + }, d, config); err != nil { + return nil, err + } + + id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/capabilities/{{"{{"}}capability_name{{"}}"}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/examples/resource_manager_capability.tf.tmpl b/mmv1/templates/terraform/examples/resource_manager_capability.tf.tmpl new file mode 100644 index 000000000000..f1fde1da1528 --- /dev/null +++ b/mmv1/templates/terraform/examples/resource_manager_capability.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_folder" "folder" { + provider = google-beta + display_name = "{{index $.Vars "display_name"}}" + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + deletion_protection = false +} +resource "time_sleep" "wait_60s" { + depends_on = [google_folder.folder] + create_duration = "60s" +} +resource "google_resource_manager_capability" "{{$.PrimaryResourceId}}" { + provider = google-beta + value = true + parent = "${google_folder.folder.name}" + capability_name = "app-management" + depends_on = 
[time_sleep.wait_60s] +} diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 8ffcca156620..da21829d3144 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -671,6 +671,11 @@ var ServicesListBeta = mapOf( "displayName" to "Resourcemanager", "path" to "./google-beta/services/resourcemanager" ), + "resourcemanager3" to mapOf( + "name" to "resourcemanager3", + "displayName" to "Resourcemanager3", + "path" to "./google-beta/services/resourcemanager3" + ), "runtimeconfig" to mapOf( "name" to "runtimeconfig", "displayName" to "Runtimeconfig", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 19cff2fb52f5..375189e3d45a 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -666,6 +666,11 @@ var ServicesListGa = mapOf( "displayName" to "Resourcemanager", "path" to "./google/services/resourcemanager" ), + "resourcemanager3" to mapOf( + "name" to "resourcemanager3", + "displayName" to "Resourcemanager3", + "path" to "./google/services/resourcemanager3" + ), "runtimeconfig" to mapOf( "name" to "runtimeconfig", "displayName" to "Runtimeconfig", diff --git a/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl new file mode 100644 index 000000000000..8277c542e255 --- /dev/null +++ b/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl @@ -0,0 +1,64 @@ +package resourcemanager3_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + 
"testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccResourceManagerCapability_resourceManagerCapabilityExample_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccResourceManagerCapability_resourceManagerCapabilityExample_basic(context), + }, + { + ResourceName: "google_resource_manager_capability.capability", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"capability_name", "parent"}, + }, + }, + }) +} + +func testAccResourceManagerCapability_resourceManagerCapabilityExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_folder" "folder" { + provider = google-beta + display_name = "my-folder%{random_suffix}" + parent = "organizations/%{org_id}" + deletion_protection = false +} +resource "time_sleep" "wait_60s" { + depends_on = [google_folder.folder] + create_duration = "60s" +} +resource "google_resource_manager_capability" "capability" { + provider = google-beta + value = true + parent = "${google_folder.folder.name}" + capability_name = "app-management" + depends_on = [time_sleep.wait_60s] +} +`, context) +} +{{- else }} +// Capability is only in beta version. +{{- end }} From 7fb9125cd8975158d4ac5f5432448d47a4790643 Mon Sep 17 00:00:00 2001 From: xuebaoZ Date: Thu, 1 May 2025 11:40:20 -0700 Subject: [PATCH 044/884] feat: support Cloud SQL\'s new field connection_pool_config. 
(#13497) --- .../resource_sql_database_instance.go.tmpl | 93 +++++++++++++++++++ ...esource_sql_database_instance_test.go.tmpl | 81 ++++++++++++++++ .../r/sql_database_instance.html.markdown | 29 ++++++ 3 files changed, 203 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index ed0f45de67ab..ffbbb540f401 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -73,6 +73,11 @@ var ( "settings.0.backup_configuration.0.transaction_log_retention_days", } + connectionPoolConfigKeys = []string{ + "settings.0.connection_pool_config.0.connection_pooling_enabled", + "settings.0.connection_pool_config.0.flags", + } + ipConfigurationKeys = []string{ "settings.0.ip_configuration.0.authorized_networks", "settings.0.ip_configuration.0.ipv4_enabled", @@ -444,6 +449,28 @@ is set to true. Defaults to ZONAL.`, Description: `Provisioned throughput measured in MiB per second for the data disk. 
This field is only used for HYPERDISK_BALANCED disk types.`, }, {{- end }} + "connection_pool_config": { + Type: schema.TypeSet, + Optional: true, + Computed: true, + Description: `The managed connection pool setting for a Cloud SQL instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "connection_pooling_enabled": { + Type: schema.TypeBool, + Optional: true, + Description: `Whether Managed Connection Pool is enabled for this instance.`, + }, + "flags": { + Type: schema.TypeSet, + Optional: true, + Set: schema.HashResource(sqlDatabaseFlagSchemaElem), + Elem: sqlDatabaseFlagSchemaElem, + Description: `List of connection pool configuration flags`, + }, + }, + }, + }, "ip_configuration": { Type: schema.TypeList, Optional: true, @@ -1426,6 +1453,7 @@ func expandSqlDatabaseInstanceSettings(configured []interface{}, databaseVersion UserLabels: tpgresource.ConvertStringMap(_settings["user_labels"].(map[string]interface{})), BackupConfiguration: expandBackupConfiguration(_settings["backup_configuration"].([]interface{})), DatabaseFlags: expandDatabaseFlags(_settings["database_flags"].(*schema.Set).List()), + ConnectionPoolConfig: expandConnectionPoolConfig(_settings["connection_pool_config"].(*schema.Set).List()), IpConfiguration: expandIpConfiguration(_settings["ip_configuration"].([]interface{}), databaseVersion), LocationPreference: expandLocationPreference(_settings["location_preference"].([]interface{})), MaintenanceWindow: expandMaintenanceWindow(_settings["maintenance_window"].([]interface{})), @@ -1575,6 +1603,35 @@ func expandPscConfig(configured []interface{}) *sqladmin.PscConfig { return nil } +func expandFlags(configured []interface{}) []*sqladmin.ConnectionPoolFlags { + connectionPoolFlags := make([]*sqladmin.ConnectionPoolFlags, 0, len(configured)) + for _, _flag := range configured { + if _flag == nil { + continue + } + _entry := _flag.(map[string]interface{}) + + connectionPoolFlags = append(connectionPoolFlags, 
&sqladmin.ConnectionPoolFlags{ + Name: _entry["name"].(string), + Value: _entry["value"].(string), + }) + } + return connectionPoolFlags +} + +func expandConnectionPoolConfig(configured []interface{}) *sqladmin.ConnectionPoolConfig { + if len(configured) == 0 || configured[0] == nil { + return nil + } + + _connectionPoolConfig := configured[0].(map[string]interface{}) + + return &sqladmin.ConnectionPoolConfig{ + ConnectionPoolingEnabled: _connectionPoolConfig["connection_pooling_enabled"].(bool), + Flags: expandFlags(_connectionPoolConfig["flags"].(*schema.Set).List()), + } +} + func expandAuthorizedNetworks(configured []interface{}) []*sqladmin.AclEntry { an := make([]*sqladmin.AclEntry, 0, len(configured)) for _, _acl := range configured { @@ -2336,6 +2393,10 @@ func flattenSettings(settings *sqladmin.Settings, d *schema.ResourceData) []map[ data["database_flags"] = flattenDatabaseFlags(settings.DatabaseFlags) } + if settings.ConnectionPoolConfig != nil { + data["connection_pool_config"] = flattenConnectionPoolConfig(settings.ConnectionPoolConfig) + } + if settings.IpConfiguration != nil { data["ip_configuration"] = flattenIpConfiguration(settings.IpConfiguration, d) } @@ -2506,6 +2567,38 @@ func flattenReplicationCluster(replicationCluster *sqladmin.ReplicationCluster, return []map[string]interface{}{data} } +func flattenConnectionPoolFlags(connectionPoolFlags []*sqladmin.ConnectionPoolFlags) []interface{} { + if len(connectionPoolFlags) == 0 { // Handles nil or empty slice + return make([]interface{}, 0) // Explicitly return empty slice + } + + mcpflags := make([]interface{}, len(connectionPoolFlags)) // Pre-allocate for efficiency + for i, mcpflag := range connectionPoolFlags { + data := map[string]interface{}{ + "name": mcpflag.Name, + "value": mcpflag.Value, + } + mcpflags[i] = data + } + return mcpflags +} + +func flattenConnectionPoolConfig(connectionPoolConfig *sqladmin.ConnectionPoolConfig) []interface{}{ + if connectionPoolConfig == nil { + return 
[]interface{}{ + map[string]interface{}{ + "connection_pooling_enabled": false, + "flags": make([]interface{}, 0), // Default to empty flags + }, + } + } + data := map[string]interface{}{ + "connection_pooling_enabled": connectionPoolConfig.ConnectionPoolingEnabled, // Corrected key + "flags": flattenConnectionPoolFlags(connectionPoolConfig.Flags), // Corrected key + } + return []interface{}{data} +} + func flattenIpConfiguration(ipConfiguration *sqladmin.IpConfiguration, d *schema.ResourceData) interface{} { data := map[string]interface{}{ "ipv4_enabled": ipConfiguration.Ipv4Enabled, diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 38118d6cb415..4e11fff17e6a 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -851,6 +851,52 @@ func TestAccSqlDatabaseInstance_withPrivateNetwork_withoutAllocatedIpRange(t *te }) } +func TestAccSqlDatabaseInstance_withMCPEnabled(t *testing.T) { + t.Parallel() + + instanceName := "tf-test-" + acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_withMCPEnabled(instanceName), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_withoutMCPEnabled(t *testing.T) { + t.Parallel() + + instanceName := "tf-test-" + acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_withPSCEnabled_withoutAllowedConsumerProjects(t *testing.T) { t.Parallel() @@ -4765,6 +4811,41 @@ func verifyPscAutoConnectionsOperation(resourceName string, isPscConfigExpected } } +func testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "POSTGRES_16" + deletion_protection = false + settings { + tier = "db-perf-optimized-N-2" + } +} +`, instanceName) +} + +func testAccSqlDatabaseInstance_withMCPEnabled(instanceName string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "POSTGRES_16" + deletion_protection = false + settings { + tier = "db-perf-optimized-N-2" + connection_pool_config { + connection_pooling_enabled = true + flags { + name = "max_client_connections" + value = "1980" + } + } + } +} +`, instanceName) +} + func testAccSqlDatabaseInstance_withPSCEnabled_withoutPscAutoConnections(instanceName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "instance" { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 9cbb8c5d91fe..22fac0853045 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown 
@@ -173,6 +173,25 @@ resource "google_sql_database_instance" "main" { } ``` +### Cloud SQL Instance with MCP +```hcl +resource "google_sql_database_instance" "instance" { + name: = "mcp-enabled-main-instance" + region = "us-central1" + database_version = "POSTGRES_16" + settings { + tier = "db-perf-optimized-N-2" + connection_pool_config { + connection_pooling_enabled = true + flags { + name = "max_client_connections" + value = "1980" + } + } + } +} +``` + ### Cloud SQL Instance with PSC connectivity ```hcl @@ -571,6 +590,16 @@ The optional, computed `replication_cluster` block represents a primary instance * `dr_replica`: Read-only field that indicates whether the replica is a DR replica. +The optional `settings.connection_pool_config` subblock supports: + +* `connection_pooling_enabled`: (Optional) True if the manager connection pooling configuration is enabled. + +The optional `settings.connection_pool_config.flags` sublist supports: + +* `name` - (Required) Name of the flag. + +* `value` - (Required) Value of the flag. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are From 43ea70416920078f5e9a40fd93517f34255a2453 Mon Sep 17 00:00:00 2001 From: Michael Turgeman Date: Thu, 1 May 2025 23:14:39 +0300 Subject: [PATCH 045/884] Add directory services config field to Filestore instance in beta (#13727) --- mmv1/products/filestore/Instance.yaml | 47 +++++++++++++ ... 
resource_filestore_instance_test.go.tmpl} | 66 +++++++++++++++++++ 2 files changed, 113 insertions(+) rename mmv1/third_party/terraform/services/filestore/{resource_filestore_instance_test.go => resource_filestore_instance_test.go.tmpl} (87%) diff --git a/mmv1/products/filestore/Instance.yaml b/mmv1/products/filestore/Instance.yaml index a2139d6c22ee..058fec6eac85 100644 --- a/mmv1/products/filestore/Instance.yaml +++ b/mmv1/products/filestore/Instance.yaml @@ -406,3 +406,50 @@ properties: A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z" output: true + - name: 'directoryServices' + type: NestedObject + min_version: beta + description: | + Directory Services configuration. + Should only be set if protocol is "NFS_V4_1". + immutable: true + properties: + - name: 'ldap' + type: NestedObject + description: | + Configuration for LDAP servers. + immutable: true + properties: + - name: 'domain' + type: String + required: true + description: | + The LDAP domain name in the format of `my-domain.com`. + immutable: true + - name: 'servers' + required: true + type: Array + description: | + The servers names are used for specifying the LDAP servers names. + The LDAP servers names can come with two formats: + 1. DNS name, for example: `ldap.example1.com`, `ldap.example2.com`. + 2. IP address, for example: `10.0.0.1`, `10.0.0.2`, `10.0.0.3`. + All servers names must be in the same format: either all DNS names or all + IP addresses. + immutable: true + item_type: + type: String + - name: 'usersOu' + type: String + description: | + The users Organizational Unit (OU) is optional. This parameter is a hint + to allow faster lookup in the LDAP namespace. In case that this parameter + is not provided, Filestore instance will query the whole LDAP namespace. 
+ immutable: true + - name: 'groupsOu' + type: String + description: | + The groups Organizational Unit (OU) is optional. This parameter is a hint + to allow faster lookup in the LDAP namespace. In case that this parameter + is not provided, Filestore instance will query the whole LDAP namespace. + immutable: true diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl similarity index 87% rename from mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go rename to mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl index 1e05c21ecdaf..4c915b76595e 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl @@ -527,3 +527,69 @@ resource "google_filestore_instance" "replica-instance" { } `, context) } + +{{- if ne $.TargetVersionName "ga" }} + +func TestAccFilestoreInstance_directoryServices(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + location := "us-central1" + tier := "REGIONAL" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFilestoreInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFilestoreInstance_ldap(name, location, tier), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.domain", "my-domain.com"), + resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.servers.0", "ldap.example1.com"), + resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.users_ou", 
"users"), + resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.groups_ou", "groups"), + ), + }, + { + ResourceName: "google_filestore_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"zone"}, + }, + }, + }) +} + +func testAccFilestoreInstance_ldap(name, location, tier string) string { + return fmt.Sprintf(` +resource "google_filestore_instance" "instance" { + provider = google-beta + name = "%s" + location = "%s" + tier = "%s" + description = "An instance created during testing." + protocol = "NFS_V4_1" + + file_shares { + capacity_gb = 1024 + name = "share" + } + + networks { + network = "default" + modes = ["MODE_IPV4"] + } + + directory_services { + ldap { + domain = "my-domain.com" + servers = ["ldap.example1.com"] + users_ou = "users" + groups_ou = "groups" + } + } +} +`, name, location, tier) +} +{{- end }} \ No newline at end of file From 1eddb2a1d180c45920cf204fcc34eaa66644ee75 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Fri, 2 May 2025 15:33:17 +0000 Subject: [PATCH 046/884] feat: (storage) added sha512 in data object content (#13764) Co-authored-by: Stephen Lewis (Burrows) --- ...ta_source_storage_bucket_object_content.go | 27 +++++++++++++++++++ ...urce_storage_bucket_object_content_test.go | 2 ++ ...torage_bucket_object_content.html.markdown | 5 ++++ 3 files changed, 34 insertions(+) diff --git a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go index 0596fcb3637b..6662217a3e8a 100644 --- a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go +++ b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go @@ -1,7 +1,9 @@ package storage import ( + "crypto/sha512" "encoding/base64" + "encoding/hex" "fmt" "io/ioutil" "net/http" @@ -30,6 +32,22 @@ func 
DataSourceGoogleStorageBucketObjectContent() *schema.Resource { Required: false, } + dsSchema["content_hexsha512"] = &schema.Schema{ + Type: schema.TypeString, + Description: "Hex encoded SHA512 checksum of object content.", + Computed: true, + Optional: false, + Required: false, + } + + dsSchema["content_base64sha512"] = &schema.Schema{ + Type: schema.TypeString, + Description: "Base64 encoded SHA512 checksum of object content.", + Computed: true, + Optional: false, + Required: false, + } + return &schema.Resource{ Read: dataSourceGoogleStorageBucketObjectContentRead, Schema: dsSchema, @@ -73,6 +91,15 @@ func dataSourceGoogleStorageBucketObjectContentRead(d *schema.ResourceData, meta return fmt.Errorf("Error setting content_base64: %s", err) } + sha512Sum := sha512.Sum512(objectBytes) + if err := d.Set("content_hexsha512", hex.EncodeToString(sha512Sum[:])); err != nil { + return fmt.Errorf("Error setting content_hexsha512: %s", err) + } + + if err := d.Set("content_base64sha512", base64.StdEncoding.EncodeToString(sha512Sum[:])); err != nil { + return fmt.Errorf("Error setting content_base64sha512: %s", err) + } + d.SetId(bucket + "-" + name) return nil } diff --git a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go index e6774cf5fcbf..07dd90d84dad 100644 --- a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go +++ b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go @@ -68,6 +68,8 @@ func TestAccDataSourceStorageBucketObjectContent_FileContentBase64(t *testing.T) Config: testAccDataSourceStorageBucketObjectContent_FileContentBase64(bucket, folderName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.google_storage_bucket_object_content.this", "content_base64"), + 
resource.TestCheckResourceAttrSet("data.google_storage_bucket_object_content.this", "content_hexsha512"), + resource.TestCheckResourceAttrSet("data.google_storage_bucket_object_content.this", "content_base64sha512"), verifyValidZip(), ), }, diff --git a/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown b/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown index a45cdadd011e..ce97d70e1221 100644 --- a/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown @@ -45,3 +45,8 @@ The following attributes are exported: * `content_base64` - (Computed) Base64 encoded version of the object content. Use this when dealing with binary data. + +* `content_hexsha512` - (Computed) Hex encoded SHA512 checksum of file content. + +* `content_base64sha512` - (Computed) Base64 encoded SHA512 checksum of file content. + From 2e976dae7058c570170f187403cccd5210691911 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 2 May 2025 10:16:56 -0700 Subject: [PATCH 047/884] Modify compute tests to get config for tgc testing (#13833) --- .../terraform/acctest/tgc_utils.go | 4 +- .../resource_compute_instance_test.go.tmpl | 102 +++++++++++++++++- .../resource_google_project_test.go | 22 ++++ 3 files changed, 121 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index a3f0bcbda038..90df47f9d02c 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -55,7 +55,6 @@ func GetTestMetadataForTgc(service, address, rawConfig string) resource.TestChec // The acceptance tests names will be also used for the tgc tests. // "service" is logged and will be used to put the tgc tests into specific service packages. 
log.Printf("[DEBUG]TGC Terraform service: %s", service) - log.Printf("[DEBUG]TGC Terraform resource: %s", resourceType) re := regexp.MustCompile(`\"(tf[-_]?test[-_]?.*?)([a-z0-9]+)\"`) rawConfig = re.ReplaceAllString(rawConfig, `"${1}tgc"`) @@ -64,10 +63,11 @@ func GetTestMetadataForTgc(service, address, rawConfig string) resource.TestChec // which is used to get the main resource object by checking the address after parsing raw config. // For example, replace `"google_compute_instance" "foobar"` with `"google_compute_instance" "tf-test-mi3fqaucf8"` n := tpgresource.GetResourceNameFromSelfLink(rState.Primary.ID) + log.Printf("[DEBUG]TGC Terraform resource: %s.%s", resourceType, n) + old := fmt.Sprintf(`"%s" "%s"`, resourceType, resourceName) new := fmt.Sprintf(`"%s" "%s"`, resourceType, n) rawConfig = strings.Replace(rawConfig, old, new, 1) - log.Printf("[DEBUG]TGC raw_config starts %sEnd of TGC raw_config", rawConfig) return nil } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index 598723fd8cb2..611db8d72674 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -635,6 +635,8 @@ func TestAccComputeInstance_internalIPv6PrefixLength(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_internalIpv6PrefixLength("96", instanceName)), ), }, computeInstanceImportStep("us-west2-a", instanceName, []string{"allow_stopping_for_update"}), @@ -670,6 +672,8 @@ func TestAccComputeInstance_PTRRecord(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceAccessConfigHasNatIP(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_ip(ipName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"metadata.baz", "metadata.foo"}), @@ -848,6 +852,8 @@ func TestAccComputeInstance_rsaBootDiskEncryption(t *testing.T) { Config: testAccComputeInstance_rsaBootDiskEncryption(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_rsaBootDiskEncryption(context)), ), }, }, @@ -910,6 +916,8 @@ func TestAccComputeInstance_instanceEncryption(t *testing.T) { Config: testAccComputeInstance_instanceEncryption_SelfLinkServiceAccount(context_3), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_instanceEncryption_SelfLinkServiceAccount(context_3)), ), }, }, @@ -938,6 +946,8 @@ func TestAccComputeInstance_snapshot(t *testing.T) { Config: testAccComputeInstance_snapshot(context), //create from snapshot Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_snapshot(context)), ), }, }, @@ -979,6 +989,8 @@ func TestAccComputeInstance_snapshotEncryption(t *testing.T) { Config: testAccComputeInstance_snapshotEncryption_RsaKey(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_snapshotEncryption_RsaKey(context)), ), }, }, @@ -1020,6 +1032,8 
@@ func TestAccComputeInstance_imageEncryption(t *testing.T) { Config: testAccComputeInstance_imageEncryption_RsaKey(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_imageEncryption_RsaKey(context)), ), }, }, @@ -1045,6 +1059,8 @@ func TestAccComputeInstance_attachedDisk_RSAencryption(t *testing.T) { Config: testAccComputeInstance_attachedDisk_RSAencryption(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_attachedDisk_RSAencryption(context)), ), }, }, @@ -1814,6 +1830,8 @@ func TestAccComputeInstance_schedulingTerminationTime(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_TerminationTimeDeleted(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -2290,6 +2308,8 @@ func TestAccComputeInstance_forceChangeMachineTypeManually(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceUpdateMachineType(t, "google_compute_instance.foobar"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic(instanceName)), ), ExpectNonEmptyPlan: true, }, @@ -2398,6 +2418,8 @@ func TestAccComputeInstance_guestAcceleratorSkip(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceLacksGuestAccelerator(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_guestAccelerator(instanceName, 0)), ), }, }, @@ -2490,6 +2512,8 @@ func TestAccComputeInstance_deletionProtectionExplicitTrueAndUpdateFalse(t *test testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasConfiguredDeletionProtection(&instance, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic_deletionProtectionFalse(instanceName)), ), }, }, @@ -2893,6 +2917,8 @@ func TestAccComputeInstance_desiredStatusSuspendedOnCreation(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, context_2["desired_status"].(string)), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_desiredStatusOnCreation(context_2)), ), }, }, @@ -3025,6 +3051,8 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_allowStoppingForU t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "RUNNING", true)), ), }, }, @@ -3050,6 +3078,8 @@ func TestAccComputeInstance_updateRunning_desiredStatusNotSet_notAllowStoppingFo testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic2(instanceName)), ), }, { @@ -3077,6 +3107,8 @@ func 
TestAccComputeInstance_updateRunning_desiredStatusRunning_notAllowStoppingF testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic2(instanceName)), ), }, { @@ -3147,6 +3179,8 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_notAllowStoppi t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "TERMINATED", false)), ), }, }, @@ -3229,6 +3263,8 @@ func TestAccComputeInstance_updateTerminated_desiredStatusTerminated_allowStoppi t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "TERMINATED", true)), ), }, }, @@ -3351,6 +3387,8 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_allowStoppingF t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "RUNNING", true)), ), }, }, @@ -3391,6 +3429,8 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_notAllowStoppi t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "RUNNING", false)), ), }, }, @@ -3653,6 +3693,8 @@ func TestAccComputeInstance_spotVM_update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_spotVM(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3814,6 +3856,8 @@ func TestAccComputeInstance_standardVM_maxRunDuration_deleteTerminationAction(t t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3849,6 +3893,8 @@ func TestAccComputeInstance_spotVM_maxRunDuration_update(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_spotVM_maxRunDuration(instanceName, "DELETE")), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3914,6 +3960,8 @@ func TestAccComputeInstance_localSsdRecoveryTimeout_update(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceLocalSsdRecoveryTimeout(&instance, expectedLocalSsdRecoveryTimeout), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_localSsdRecoveryTimeout(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3981,6 +4029,8 @@ func TestAccComputeInstance_partnerMetadata_update(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstancePartnerMetadata(&instance, expectedPartnerMetadata), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_partnerMetadata(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{fmt.Sprintf("partner_metadata.%s", namespace)}), @@ -4189,6 +4239,8 @@ func TestAccComputeInstance_creationOnlyAttributionLabelConfiguredOnUpdate(t *te t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLabel(&instance, "user_label", "bar"), testAccCheckComputeInstanceAttributionLabel(&instance, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_attributionLabelUpdate(instanceName, "true", "CREATION_ONLY")), ), }, }, @@ -4224,6 +4276,8 @@ func TestAccComputeInstance_proactiveAttributionLabel(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLabel(&instance, "user_label", "bar"), testAccCheckComputeInstanceAttributionLabel(&instance, true), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_attributionLabelUpdate(instanceName, "true", "PROACTIVE")), ), }, }, @@ -4319,8 +4373,16 @@ func TestAccComputeInstance_keyRevocationActionType(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "key_revocation_action_type", ""), + ), + 
}, + { + Config: testAccComputeInstance_keyRevocationActionType(context_2), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "key_revocation_action_type", "STOP"), acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_keyRevocationActionType(context_3)), + testAccComputeInstance_keyRevocationActionType(context_2)), ), }, }, @@ -4531,6 +4593,8 @@ func TestAccComputeInstance_GracefulShutdownWithoutResetUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.graceful_shutdown.0.max_duration.0.seconds", "100"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_GracefulShutdownUpdate(acceptableByApi_3)), ), }, { @@ -4954,6 +5018,8 @@ func TestAccComputeInstance_NetworkAttachment(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkAttachment(&instance, fmt.Sprintf("https://www.googleapis.com/compute/%s/%s", providerVersion, fullFormNetworkAttachmentName)), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_networkAttachment(context)), ), }, }, @@ -4984,6 +5050,10 @@ func TestAccComputeInstance_NetworkAttachmentUpdate(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_networkAttachmentUpdate(networkAttachmentSelflink1, envRegion, suffix), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_networkAttachmentUpdate(networkAttachmentSelflink1, envRegion, 
suffix)), + ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, @@ -5011,6 +5081,10 @@ func TestAccComputeInstance_NicStackTypeUpdate(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_nicStackTypeUpdate(suffix, envRegion, "IPV4_ONLY", instanceName), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_nicStackTypeUpdate(suffix, envRegion, "IPV4_ONLY", instanceName)), + ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, @@ -5032,6 +5106,10 @@ func TestAccComputeInstance_NicStackType_IPV6(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccComputeInstance_nicStackTypeUpdate_ipv6(context), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_nicStackTypeUpdate_ipv6(context)), + ), }, }, }) @@ -5058,6 +5136,8 @@ func TestAccComputeInstance_guestOsFeatures(t *testing.T) { resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.guest_os_features.1", "VIRTIO_SCSI_MULTIQUEUE"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.guest_os_features.2", "GVNIC"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.guest_os_features.3", "IDPF"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_guestOsFeatures(context_1)), ), }, }, @@ -9587,7 +9667,8 @@ resource "google_compute_instance" "foobar" { values = ["%[1]s"] } } -}`, instanceName) +} +`, instanceName) } {{ if ne $.TargetVersionName `ga` -}} @@ -10266,7 +10347,6 @@ resource "google_compute_resource_policy" "foo" { collocation = "COLLOCATED" } } - `, instance, instance, suffix) } @@ -10941,7 +11021,8 @@ resource 
"google_compute_instance" "foobar" { } }) } -}`, instance) +} +`, instance) } {{- end }} @@ -12054,6 +12135,10 @@ func TestAccComputeInstance_bootDisk_storagePoolSpecified(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv()), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv())), + ), }, { ResourceName: "google_compute_instance.foobar", @@ -12076,6 +12161,10 @@ func TestAccComputeInstance_bootDisk_storagePoolSpecified_nameOnly(t *testing.T) Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv()), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv())), + ), }, { ResourceName: "google_compute_instance.foobar", @@ -12140,6 +12229,8 @@ func TestAccComputeInstance_bootAndAttachedDisk_interface(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "SCSI"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "n2-standard-8"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootAndAttachedDisk_interface(instanceName2, diskName2, envvar.GetTestZoneFromEnv(), "n2-standard-8", "SCSI", true)), ), }, //computeInstanceImportStep("us-central1-a", instanceName2, []string{"desired_status","allow_stopping_for_update"}), @@ -12268,7 +12359,8 @@ resource 
"google_compute_instance" "foobar" { network_interface { network = "default" } -}`, context) +} +`, context) } func testAccComputeInstance_nicStackTypeUpdate_ipv6(context map[string]interface{}) string { diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go index 4ce40fbc7dc0..8de7667f3ba6 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go @@ -63,6 +63,8 @@ func TestAccProject_create(t *testing.T) { Config: testAccProject(pid, org), Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectExists("google_project.acceptance", pid), + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject(pid, org)), ), }, }, @@ -110,6 +112,8 @@ func TestAccProject_billing(t *testing.T) { Config: testAccProject(pid, org), Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectHasBillingAccount(t, "google_project.acceptance", pid, ""), + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject(pid, org)), ), }, }, @@ -179,6 +183,10 @@ func TestAccProject_deleteDefaultNetwork(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccProject_deleteDefaultNetwork(pid, org, billingId), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject_deleteDefaultNetwork(pid, org, billingId)), + ), }, }, }) @@ -235,6 +243,10 @@ func TestAccProject_migrateParent(t *testing.T) { }, { Config: testAccProject_migrateParentFolder(pid, folderDisplayName, org), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject_migrateParentFolder(pid, folderDisplayName, org)), + ), }, { ResourceName: 
"google_project.acceptance", @@ -286,6 +298,10 @@ func TestAccProject_tags(t *testing.T) { }, { Config: testAccProject_tagsAllowDestroy(context), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject_tagsAllowDestroy(context)), + ), }, }, }) @@ -433,6 +449,10 @@ func TestAccProject_noAllowDestroy(t *testing.T) { }, { Config: testAccProject(pid, org), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject(pid, org)), + ), }, }, }) @@ -461,6 +481,8 @@ func TestAccProject_abandon(t *testing.T) { Destroy: true, Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectExists("google_project.acceptance", pid), + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject_abandon(pid, org)), ), }, }, From d47c11cbda030b04db0e4601c54fec103af66022 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 2 May 2025 19:57:22 +0200 Subject: [PATCH 048/884] feat: support for the `enforce` attribute in the `s_sl_info` block for the `google_apigee_target_server` (#13841) --- mmv1/products/apigee/TargetServer.yaml | 4 ++++ .../services/apigee/resource_apigee_target_server_test.go | 2 ++ 2 files changed, 6 insertions(+) diff --git a/mmv1/products/apigee/TargetServer.yaml b/mmv1/products/apigee/TargetServer.yaml index 3e16dc8eeead..7b5f0d779136 100644 --- a/mmv1/products/apigee/TargetServer.yaml +++ b/mmv1/products/apigee/TargetServer.yaml @@ -142,6 +142,10 @@ properties: type: Boolean description: | Indicates whether the cert should be matched against as a wildcard cert. + - name: 'enforce' + type: Boolean + description: | + If true, TLS is strictly enforced. 
- name: 'protocol' type: Enum diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go index 6740c72c3e5b..87552eb2d2c0 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go @@ -353,6 +353,7 @@ resource "google_apigee_target_server" "apigee_target_server"{ key_store = google_apigee_env_keystore.apigee_environment_keystore.name protocols = ["TLSv1.1"] trust_store = google_apigee_env_keystore.apigee_environment_keystore.name + enforce = false common_name{ value = "testCn" wildcard_match = true @@ -468,6 +469,7 @@ resource "google_apigee_target_server" "apigee_target_server"{ key_store = google_apigee_env_keystore.apigee_environment_keystore2.name protocols = ["TLSv1.2", "TLSv1.1"] trust_store = google_apigee_env_keystore.apigee_environment_keystore2.name + enforce = true } depends_on = [ google_apigee_env_keystore.apigee_environment_keystore2, From 92695fcc92a3a1a6dad34ec7928d621ea899ca12 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Fri, 2 May 2025 12:27:52 -0700 Subject: [PATCH 049/884] Promote team membership check to approve PR builds (#13825) Co-authored-by: Stephen Lewis (Burrows) --- .ci/magician/github/membership.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership.go b/.ci/magician/github/membership.go index 0347745022bf..84d67b0e5d17 100644 --- a/.ci/magician/github/membership.go +++ b/.ci/magician/github/membership.go @@ -51,9 +51,10 @@ func (gh *Client) GetUserType(user string) UserType { } if gh.IsTeamMember("GoogleCloudPlatform", "terraform", user) { - fmt.Printf("Debug test --- User '%s' is an active member of the 'terraform' team in 'GoogleCloudPlatform' organization\n", user) + fmt.Println("User is an active member of the 'terraform' team in 
'GoogleCloudPlatform' organization") + return GooglerUserType } else { - fmt.Printf("Debug test --- User '%s' is not an active member of the 'terraform' team in 'GoogleCloudPlatform' organization\n", user) + fmt.Printf("User '%s' is not an active member of the 'terraform' team in 'GoogleCloudPlatform' organization\n", user) } if gh.IsOrgMember(user, "GoogleCloudPlatform") { From 192445d2bbb65339479fdc47672ab3b4cb55cb40 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 2 May 2025 13:04:56 -0700 Subject: [PATCH 050/884] Update make-a-breaking-change.md for missing major version reference (#13835) --- docs/content/breaking-changes/make-a-breaking-change.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/breaking-changes/make-a-breaking-change.md b/docs/content/breaking-changes/make-a-breaking-change.md index 6a623305ec07..e717df96f9eb 100644 --- a/docs/content/breaking-changes/make-a-breaking-change.md +++ b/docs/content/breaking-changes/make-a-breaking-change.md @@ -198,7 +198,7 @@ with the following changes: merged into the major release branch every Monday. 1. Make the breaking change. 1. Add the upgrade guide entries to -[{{< param upgradeGuide >}}](https://github.com/GoogleCloudPlatform/magic-modules/blob/FEATURE-BRANCH-major-release-6.0.0/mmv1/third_party/terraform/website/docs/guides/{{< param upgradeGuide >}}). Entries should focus on the changes that users need to make when upgrading +[{{< param upgradeGuide >}}](https://github.com/GoogleCloudPlatform/magic-modules/blob/FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}/mmv1/third_party/terraform/website/docs/guides/{{< param upgradeGuide >}}). Entries should focus on the changes that users need to make when upgrading to `{{% param "majorVersion" %}}`, rather than how to write configurations after upgrading. 
See [Terraform provider for Google Cloud 6.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_6_upgrade) and other upgrade guides for examples. From a9782a6754a00885bbd6043b0031feb3906f7173 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Fri, 2 May 2025 13:23:37 -0700 Subject: [PATCH 051/884] add cleanup before build go/mm-sync-fix (#13722) Co-authored-by: Stephen Lewis (Burrows) --- .ci/magician/cmd/generate_downstream.go | 2 + .github/actions/build-downstream/action.yml | 1 - .github/workflows/build-downstream.yml | 1 - GNUmakefile | 80 ++++++++++++++++----- docs/content/develop/add-fields.md | 16 ++--- docs/content/develop/add-iam-support.md | 17 +++-- docs/content/develop/add-resource.md | 16 ++--- docs/content/develop/generate-providers.md | 36 +++------- docs/content/develop/promote-to-ga.md | 18 ++--- docs/content/document/add-documentation.md | 16 ++--- docs/content/reference/make-commands.md | 6 +- 11 files changed, 118 insertions(+), 91 deletions(-) diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index 402bf704e2dc..62dc90913570 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -300,9 +300,11 @@ func runMake(downstreamRepo *source.Repo, command string, rnr ExecRunner) error return err } case "terraform": + // --- legacy -- can be cleaned up after go/mm-pull/13722 is submitted if _, err := rnr.Run("make", []string{"clean-provider", "OUTPUT_PATH=" + downstreamRepo.Path}, nil); err != nil { return err } + // ------------------------------------------------------------------- if _, err := rnr.Run("make", []string{"provider", "OUTPUT_PATH=" + downstreamRepo.Path, fmt.Sprintf("VERSION=%s", downstreamRepo.Version)}, nil); err != nil { return err } diff --git a/.github/actions/build-downstream/action.yml b/.github/actions/build-downstream/action.yml index 731810d372c6..4c3a35d57019 100644 --- 
a/.github/actions/build-downstream/action.yml +++ b/.github/actions/build-downstream/action.yml @@ -66,7 +66,6 @@ runs: else export VERSION=beta fi - make clean-provider make provider elif [ "$GH_REPO" == "terraform-google-conversion" ]; then UPSTREAM_OWNER=GoogleCloudPlatform diff --git a/.github/workflows/build-downstream.yml b/.github/workflows/build-downstream.yml index af1d30a3d8e6..3490e1ad9bb9 100644 --- a/.github/workflows/build-downstream.yml +++ b/.github/workflows/build-downstream.yml @@ -71,7 +71,6 @@ jobs: else export VERSION=beta fi - make clean-provider make provider elif [ "$GH_REPO" == "terraform-google-conversion" ]; then UPSTREAM_OWNER=GoogleCloudPlatform diff --git a/GNUmakefile b/GNUmakefile index 5670b031bea0..c29074d563bf 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -53,15 +53,23 @@ endif ifeq ($(FORCE_DCL),) FORCE_DCL=latest endif -terraform build provider: - @make validate_environment; - make mmv1 - make tpgtools + +SHOULD_SKIP_CLEAN := false # Default: do not skip +ifneq ($(SKIP_CLEAN),) + ifneq ($(SKIP_CLEAN),false) + SHOULD_SKIP_CLEAN := true + endif +endif + +terraform build provider: validate_environment clean-provider mmv1 tpgtools + @echo "Provider generation process finished for $(VERSION) in $(OUTPUT_PATH)" + mmv1: - # Chaining these with "&&" is critical so this will exit non-0 if the first - # command fails, since we're not forcing bash and errexit / pipefail here. - cd mmv1;\ + @echo "Executing mmv1 build for $(OUTPUT_PATH)"; + # Chaining these with "&&" is critical so this will exit non-0 if the first + # command fails, since we're not forcing bash and errexit / pipefail here. + @cd mmv1;\ if [ "$(VERSION)" = "ga" ]; then \ go run . --output $(OUTPUT_PATH) --version ga --no-docs $(mmv1_compile) \ && go run . --output $(OUTPUT_PATH) --version beta --no-code $(mmv1_compile); \ @@ -69,15 +77,39 @@ mmv1: go run . 
--output $(OUTPUT_PATH) --version $(VERSION) $(mmv1_compile); \ fi -tpgtools: - make serialize - cd tpgtools;\ +tpgtools: serialize + @echo "Executing tpgtools build for $(OUTPUT_PATH)"; + @cd tpgtools;\ go run . --output $(OUTPUT_PATH) --version $(VERSION) $(tpgtools_compile) -clean-provider: - cd $(OUTPUT_PATH);\ - go mod download;\ - find . -type f -not -wholename "./.git*" -not -wholename "./.changelog*" -not -name ".travis.yml" -not -name ".golangci.yml" -not -name "CHANGELOG.md" -not -name "CHANGELOG_v*.md" -not -name "GNUmakefile" -not -name "docscheck.sh" -not -name "LICENSE" -not -name "CODEOWNERS" -not -name "README.md" -not -wholename "./examples*" -not -name ".go-version" -not -name ".hashibot.hcl" -print0 | xargs -0 git rm > /dev/null +clean-provider: check_safe_build + @if [ -n "$(PRODUCT)" ]; then \ + printf "\n\e[1;33mWARNING:\e[0m Skipping clean-provider step because PRODUCT ('$(PRODUCT)') is set.\n"; \ + printf " Ensure your downstream repository is synchronized with the Magic Modules branch\n"; \ + printf " to avoid potential build inconsistencies.\n"; \ + printf " Downstream repository (OUTPUT_PATH): %s\n\n" "$(OUTPUT_PATH)"; \ + elif [ "$(SHOULD_SKIP_CLEAN)" = "true" ]; then \ + printf "\e[1;33mINFO:\e[0m Skipping clean-provider step because SKIP_CLEAN is set to a non-false value ('$(SKIP_CLEAN)').\n"; \ + else \ + echo "Executing clean-provider in $(OUTPUT_PATH)..."; \ + ( \ + cd $(OUTPUT_PATH) && \ + echo "---> Changing directory to $(OUTPUT_PATH)" && \ + if ! command -v git > /dev/null 2>&1; then \ + printf "\e[1;33mINFO:\e[0m Skipping git-based cleaning because git is not installed.\n"; \ + elif ! git rev-parse --is-inside-work-tree > /dev/null 2>&1; then \ + printf "\e[1;33mINFO:\e[0m Skipping git-based cleaning because $(OUTPUT_PATH) is not a git repository.\n"; \ + else \ + echo "---> Downloading Go module dependencies... 
(Ensures tools like gofmt can find relevant code)" && \ + go mod download && \ + echo "---> Finding tracked files to remove..." && \ + git ls-files | grep -v -E '(^\.git|^\.changelog|^\.travis\.yml$$|^\.golangci\.yml$$|^CHANGELOG\.md$$|^CHANGELOG_v.*\.md$$|^GNUmakefile$$|docscheck\.sh$$|^LICENSE$$|^CODEOWNERS$$|^README\.md$$|^\.go-version$$|^\.hashibot\.hcl$$|^go\.mod$$|^go\.sum$$|^examples)' | xargs -r git rm -f -q && \ + echo "---> Unstaging changes with git reset..." && \ + git reset -q && \ + echo "---> clean-provider actions finished. Changes have been unstaged."; \ + fi \ + ) && echo "clean-provider target finished successfully."; \ + fi clean-tgc: cd $(OUTPUT_PATH);\ @@ -122,13 +154,25 @@ upgrade-dcl: sed ${SED_I} "s!.*declarative-resource-client-library.*!$$MOD_LINE!" go.mod; echo "$$SUM_LINE" >> go.sum -validate_environment: +validate_environment: check_parameters check_safe_build + +check_parameters: # only print doctor script to console if there was a dependency failure detected. @./scripts/doctor 2>&1 > /dev/null || ./scripts/doctor - @[ -d "$(OUTPUT_PATH)" ] || (printf " \e[1;31mdirectory '$(OUTPUT_PATH)' does not exist - ENV variable \033[0mOUTPUT_PATH\e[1;31m should be set to a provider directory. \033[0m \n" && exit 1); - @[ -n "$(VERSION)" ] || (printf " \e[1;31mversion '$(VERSION)' does not exist - ENV variable \033[0mVERSION\e[1;31m should be set to ga or beta \033[0m \n" && exit 1); + @[ -d "$(OUTPUT_PATH)" ] || (printf "\n\e[1;31mERROR: directory '$(OUTPUT_PATH)' does not exist - ENV variable \033[0mOUTPUT_PATH\e[1;31m should be set to a provider directory. 
\033[0m \n\n" && exit 1); + @[ -n "$(VERSION)" ] || (printf "\n\e[1;31mERROR: version '$(VERSION)' does not exist - ENV variable \033[0mVERSION\e[1;31m should be set to ga or beta \033[0m \n\n" && exit 1); + + +check_safe_build: + @([ -f "$(OUTPUT_PATH)/go.mod" ] && head -n 1 "$(OUTPUT_PATH)/go.mod" | grep -q 'terraform') || \ + ( \ + printf "\n\e[1;31mERROR: Validation failed for OUTPUT_PATH '$(OUTPUT_PATH)'.\n" && \ + printf " Either go.mod is missing or the module name within it does not contain 'terraform'.\n" && \ + printf " This is a safety check before cleaning/building. Halting.\033[0m\n\n" && \ + exit 1 \ + ); \ doctor: ./scripts/doctor -.PHONY: mmv1 tpgtools test +.PHONY: mmv1 tpgtools test clean-provider validate_environment serialize doctor diff --git a/docs/content/develop/add-fields.md b/docs/content/develop/add-fields.md index 0bf5985f2855..f1e6bb8c571a 100644 --- a/docs/content/develop/add-fields.md +++ b/docs/content/develop/add-fields.md @@ -19,14 +19,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. [Ensure the resource to which you want to add the fields exists in the provider]({{< ref "/develop/add-resource" >}}). 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ``` - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ```bash + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . 
&& git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . && git pull + ``` ## Add fields diff --git a/docs/content/develop/add-iam-support.md b/docs/content/develop/add-iam-support.md index 575e8950e6f0..9cad46ef01cb 100644 --- a/docs/content/develop/add-iam-support.md +++ b/docs/content/develop/add-iam-support.md @@ -14,15 +14,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ``` - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` - + ```bash + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . 
&& git pull + ``` ## Add IAM support {{< tabs "IAM" >}} diff --git a/docs/content/develop/add-resource.md b/docs/content/develop/add-resource.md index 52c2d1c3d24d..0b32d7ac9d5d 100644 --- a/docs/content/develop/add-resource.md +++ b/docs/content/develop/add-resource.md @@ -34,14 +34,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ``` - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ```bash + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . && git pull + ``` ## Add a resource diff --git a/docs/content/develop/generate-providers.md b/docs/content/develop/generate-providers.md index 129ce81d1361..75d06dbb46bd 100644 --- a/docs/content/develop/generate-providers.md +++ b/docs/content/develop/generate-providers.md @@ -24,6 +24,8 @@ provider changes to the `google` and `google-beta` Terraform providers. + [Adding custom resource code]({{< ref "/develop/custom-code" >}}). + [Promoting a resource to GA]({{< ref "/develop/promote-to-ga" >}}). 
+By default, running a full `make provider` command cleans the output directory (`OUTPUT_PATH`) before generating code to prevent sync issues. This will override and delete any changes to that directory. See the [`make` commands reference]({{< ref "/reference/make-commands" >}}) for details on advanced usage. + ## Generate a provider change 1. Clone the `google` and `google-beta` provider repositories with the following commands: @@ -33,32 +35,15 @@ provider changes to the `google` and `google-beta` Terraform providers. git clone https://github.com/hashicorp/terraform-provider-google-beta.git $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta ``` 1. Generate changes for the `google` provider: - ```bash - make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google" PRODUCT=[PRODUCT_NAME] - ``` - Where `[PRODUCT_NAME]` is one of the folder names in - https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/products. - - For example, if your product is `bigqueryanalyticshub`, the command would be - the following: - - ```bash - make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google" PRODUCT=bigqueryanalyticshub - ``` + ```bash + make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google" + ``` 1. Generate changes for the `google-beta` provider: - ```bash - make provider VERSION=beta OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google-beta" PRODUCT=[PRODUCT_NAME] - ``` + ```bash + make provider VERSION=beta OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google-beta" + ``` - Where `[PRODUCT_NAME]` is one of the folder names in https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/products. 
- - For example, if your product name is `bigqueryanalyticshub`, the command would be the following: - - ```bash - make provider VERSION=beta OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google-beta" PRODUCT=bigqueryanalyticshub - ``` - 1. Confirm that the expected changes were generated: ```bash cd $GOPATH/src/github.com/hashicorp/terraform-provider-google @@ -69,12 +54,9 @@ provider changes to the `google` and `google-beta` Terraform providers. {{< hint info >}} - **Note**: There may be additional changes present due to specifying a - `PRODUCT=` value or due to the `magic-modules` repository being out of sync - with the provider repositories. + **Note**: You might see additional changes in your `git diff` output beyond your own. This can happen if your `magic-modules` repository is out of sync with the provider repositories, causing the generator to also apply any pending updates from `magic-modules`. {{< /hint >}} - ## Troubleshoot ### Too many open files {#too-many-open-files} diff --git a/docs/content/develop/promote-to-ga.md b/docs/content/develop/promote-to-ga.md index bd5f3fd1b2e0..79b1f41d2858 100644 --- a/docs/content/develop/promote-to-ga.md +++ b/docs/content/develop/promote-to-ga.md @@ -14,15 +14,15 @@ For more information about types of resources and the generation process overall ## Before you begin 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. -2. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ``` - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . 
&& git checkout -- . && git pull - ``` +1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. + ```bash + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . && git pull + ``` ## Promote fields and resources diff --git a/docs/content/document/add-documentation.md b/docs/content/document/add-documentation.md index 3bc4c529b536..f959ad587512 100644 --- a/docs/content/document/add-documentation.md +++ b/docs/content/document/add-documentation.md @@ -15,14 +15,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ``` - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ```bash + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . 
&& git pull + ``` ## Add documentation diff --git a/docs/content/reference/make-commands.md b/docs/content/reference/make-commands.md index 6742b44173a8..6acdfc18b1a8 100644 --- a/docs/content/reference/make-commands.md +++ b/docs/content/reference/make-commands.md @@ -8,7 +8,8 @@ weight: 30 ### `make` / `make provider` -Generates the code for the downstream `google` and `google-beta` providers. +Generates the code for the downstream `google` and `google-beta` providers +into the `OUTPUT_PATH`, overriding and deleting any local changes. {{< hint info >}} **Note:** Generation works best if the downstream provider has a commit checked out corresponding to the latest `main` branch commit that is present in your `magic-modules` working branch. This can generally be identified based on matching commit messages. @@ -34,7 +35,8 @@ make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform - `OUTPUT_PATH`: Required. The location you are generating provider code into. - `VERSION`: Required. The version of the provider you are building into. Valid values are `ga` and `beta`. -- `PRODUCT`: Limits generations to the specified folder within `mmv1/products` or `tpgtools/api`. Handwritten files from `mmv1/third_party/terraform` are always generated into the downstream regardless of this setting, so you can provide a non-existent product name to generate only handwritten code. Required if `RESOURCE` is specified. +- `PRODUCT`: Limits generations to the specified folder within `mmv1/products` or `tpgtools/api`. Handwritten files from `mmv1/third_party/terraform` are always generated into the downstream regardless of this setting, so you can provide a non-existent product name to generate only handwritten code. Required if `RESOURCE` is specified. **Using `PRODUCT` skips the pre-generation cleanup step. 
This is considered advanced usage; recommend running a full, clean build (`make provider` without `PRODUCT`) beforehand if repositories may be out of sync.** +- `SKIP_CLEAN`: If set to `true`, skips the default pre-generation cleanup of `OUTPUT_PATH` during a full provider build. Has no effect if `PRODUCT` is specified (as cleanup is already skipped). Example: `make provider VERSION=ga OUTPUT_PATH=... SKIP_CLEAN=true`. - `RESOURCE`: Limits generation to the specified resource within a particular product. For `mmv1` resources, matches the resource's `name` field (set in its configuration file).For `tpgtools` resources, matches the terraform resource name. - `ENGINE`: Modifies `make provider` to only generate code using the specified engine. Valid values are `mmv1` or `tpgtools`. (Providing `tpgtools` will still generate any prerequisite mmv1 files required for tpgtools.) From 5a76bca6e6f5d9bf9fc3daeaebf54a6511254d17 Mon Sep 17 00:00:00 2001 From: jkrish-c <31221535+jkrish-c@users.noreply.github.com> Date: Mon, 5 May 2025 10:00:02 -0700 Subject: [PATCH 052/884] Feature/add grpc_with_tls field for healthcheck resource (#13608) --- mmv1/products/compute/HealthCheck.yaml | 74 +++++++++++++ mmv1/products/compute/RegionHealthCheck.yaml | 76 +++++++++++++ .../terraform/encoders/health_check_type.tmpl | 16 +++ .../health_check_grpc_with_tls.tf.tmpl | 12 +++ .../health_check_grpc_with_tls_full.tf.tmpl | 17 +++ .../region_health_check_grpc_with_tls.tf.tmpl | 12 +++ ...on_health_check_grpc_with_tls_full.tf.tmpl | 17 +++ ...resource_compute_health_check_test.go.tmpl | 93 ++++++++++++++++ ...e_compute_region_health_check_test.go.tmpl | 101 ++++++++++++++++++ 9 files changed, 418 insertions(+) create mode 100644 mmv1/templates/terraform/examples/health_check_grpc_with_tls.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/health_check_grpc_with_tls_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/region_health_check_grpc_with_tls.tf.tmpl create mode 
100644 mmv1/templates/terraform/examples/region_health_check_grpc_with_tls_full.tf.tmpl diff --git a/mmv1/products/compute/HealthCheck.yaml b/mmv1/products/compute/HealthCheck.yaml index f99e6cb340d6..5c51747e8aa0 100644 --- a/mmv1/products/compute/HealthCheck.yaml +++ b/mmv1/products/compute/HealthCheck.yaml @@ -108,6 +108,16 @@ examples: primary_resource_id: 'grpc-health-check' vars: health_check_name: 'grpc-health-check' + - name: 'health_check_grpc_with_tls' + primary_resource_id: 'grpc-with-tls-health-check' + min_version: 'beta' + vars: + health_check_name: 'grpc-with-tls-health-check' + - name: 'health_check_grpc_with_tls_full' + primary_resource_id: 'grpc-with-tls-health-check' + min_version: 'beta' + vars: + health_check_name: 'grpc-with-tls-health-check' - name: 'health_check_with_logging' primary_resource_id: 'health-check-with-logging' min_version: 'beta' @@ -218,6 +228,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -343,6 +354,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -468,6 +480,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -574,6 +587,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -680,6 +694,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -805,6 +820,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' 
properties: - name: 'port' @@ -867,6 +883,64 @@ properties: - 'grpc_health_check.0.port_name' - 'grpc_health_check.0.port_specification' - 'grpc_health_check.0.grpc_service_name' + - name: 'grpcTlsHealthCheck' + min_version: beta + type: NestedObject + exactly_one_of: + - 'http_health_check' + - 'https_health_check' + - 'http2_health_check' + - 'tcp_health_check' + - 'ssl_health_check' + - 'grpc_health_check' + - 'grpc_tls_health_check' + diff_suppress_func: 'portDiffSuppress' + properties: + - name: 'port' + type: Integer + description: | + The port number for the health check request. + Must be specified if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535. + at_least_one_of: + - 'grpc_tls_health_check.0.port' + - 'grpc_tls_health_check.0.port_specification' + - 'grpc_tls_health_check.0.grpc_service_name' + - name: 'portSpecification' + type: Enum + description: | + Specifies how port is selected for health checking, can be one of the + following values: + + * `USE_FIXED_PORT`: The port number in `port` is used for health checking. + + * `USE_NAMED_PORT`: Not supported for GRPC with TLS health checking. + + * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each + network endpoint is used for health checking. For other backends, the + port or named port specified in the Backend Service is used for health + checking. + + If not specified, gRPC with TLS health check follows behavior specified in the `port` field. + at_least_one_of: + - 'grpc_tls_health_check.0.port' + - 'grpc_tls_health_check.0.port_specification' + - 'grpc_tls_health_check.0.grpc_service_name' + enum_values: + - 'USE_FIXED_PORT' + - 'USE_NAMED_PORT' + - 'USE_SERVING_PORT' + - name: 'grpcServiceName' + type: String + description: | + The gRPC service name for the health check. + The value of grpcServiceName has the following meanings by convention: + - Empty serviceName means the overall status of all services at the backend. 
+ - Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service. + The grpcServiceName can only be ASCII. + at_least_one_of: + - 'grpc_tls_health_check.0.port' + - 'grpc_tls_health_check.0.port_specification' + - 'grpc_tls_health_check.0.grpc_service_name' - name: 'logConfig' type: NestedObject description: | diff --git a/mmv1/products/compute/RegionHealthCheck.yaml b/mmv1/products/compute/RegionHealthCheck.yaml index bb952940b13a..4c8ce671bd14 100644 --- a/mmv1/products/compute/RegionHealthCheck.yaml +++ b/mmv1/products/compute/RegionHealthCheck.yaml @@ -112,6 +112,16 @@ examples: primary_resource_id: 'grpc-region-health-check' vars: health_check_name: 'grpc-region-health-check' + - name: 'region_health_check_grpc_with_tls' + primary_resource_id: 'grpc-with-tls-region-health-check' + min_version: 'beta' + vars: + health_check_name: 'grpc-with-tls-region-health-check' + - name: 'region_health_check_grpc_with_tls_full' + primary_resource_id: 'grpc-with-tls-region-health-check' + min_version: 'beta' + vars: + health_check_name: 'grpc-with-tls-region-health-check' parameters: - name: 'region' type: ResourceRef @@ -199,6 +209,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -324,6 +335,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -449,6 +461,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -555,6 +568,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -661,6 +675,7 @@ properties: - 'tcp_health_check' - 
'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -786,6 +801,7 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' + - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'port' @@ -850,6 +866,66 @@ properties: - 'grpc_health_check.0.port_name' - 'grpc_health_check.0.port_specification' - 'grpc_health_check.0.grpc_service_name' + - name: 'grpcTlsHealthCheck' + min_version: 'beta' + type: NestedObject + exactly_one_of: + - 'http_health_check' + - 'https_health_check' + - 'http2_health_check' + - 'tcp_health_check' + - 'ssl_health_check' + - 'grpc_health_check' + - 'grpc_tls_health_check' + diff_suppress_func: 'portDiffSuppress' + properties: + - name: 'port' + type: Integer + description: | + The port number for the health check request. + Must be specified if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535. + at_least_one_of: + - 'grpc_tls_health_check.0.port' + - 'grpc_tls_health_check.0.port_specification' + - 'grpc_tls_health_check.0.grpc_service_name' + - name: 'portSpecification' + type: Enum + description: | + Specifies how port is selected for health checking, can be one of the + following values: + + * `USE_FIXED_PORT`: The port number in `port` is used for health checking. + + * `USE_NAMED_PORT`: Not supported for GRPC with TLS health checking. + + * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each + network endpoint is used for health checking. For other backends, the + port or named port specified in the Backend Service is used for health + checking. + + If not specified, gRPC health check follows behavior specified in the `port` field. 
+ at_least_one_of: + - 'grpc_tls_health_check.0.port' + - 'grpc_tls_health_check.0.port_specification' + - 'grpc_tls_health_check.0.grpc_service_name' + enum_values: + - 'USE_FIXED_PORT' + - 'USE_NAMED_PORT' + - 'USE_SERVING_PORT' + - name: 'grpcServiceName' + type: String + description: | + The gRPC service name for the health check. + The value of grpcServiceName has the following meanings by convention: + + * Empty serviceName means the overall status of all services at the backend. + * Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service. + + The grpcServiceName can only be ASCII. + at_least_one_of: + - 'grpc_tls_health_check.0.port' + - 'grpc_tls_health_check.0.port_specification' + - 'grpc_tls_health_check.0.grpc_service_name' - name: 'logConfig' type: NestedObject description: | diff --git a/mmv1/templates/terraform/encoders/health_check_type.tmpl b/mmv1/templates/terraform/encoders/health_check_type.tmpl index e5d7cdae36b6..143364463b9f 100644 --- a/mmv1/templates/terraform/encoders/health_check_type.tmpl +++ b/mmv1/templates/terraform/encoders/health_check_type.tmpl @@ -96,4 +96,20 @@ if _, ok := d.GetOk("grpc_health_check"); ok { return obj, nil } +{{ if ne $.TargetVersionName `ga` -}} +if _, ok := d.GetOk("grpc_tls_health_check"); ok { + hc := d.Get("grpc_tls_health_check").([]interface{})[0] + ps := hc.(map[string]interface{})["port_specification"] + + if ps == "USE_FIXED_PORT" || ps == "" { + m := obj["grpcTlsHealthCheck"].(map[string]interface{}) + if m["port"] == nil { + return nil, fmt.Errorf("error in HealthCheck %s: `port` must be set for GRPC with TLS health checks`.", d.Get("name").(string)) + } + } + obj["type"] = "GRPC_WITH_TLS" + return obj, nil +} +{{- end }} + return nil, fmt.Errorf("error in HealthCheck %s: No health check block specified.", d.Get("name").(string)) diff --git a/mmv1/templates/terraform/examples/health_check_grpc_with_tls.tf.tmpl 
b/mmv1/templates/terraform/examples/health_check_grpc_with_tls.tf.tmpl new file mode 100644 index 000000000000..219846e6472b --- /dev/null +++ b/mmv1/templates/terraform/examples/health_check_grpc_with_tls.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_compute_health_check" "grpc-with-tls-health-check" { + provider = google-beta + + name = "{{index $.Vars "health_check_name"}}" + + timeout_sec = 1 + check_interval_sec = 1 + + grpc_tls_health_check { + port = "443" + } +} diff --git a/mmv1/templates/terraform/examples/health_check_grpc_with_tls_full.tf.tmpl b/mmv1/templates/terraform/examples/health_check_grpc_with_tls_full.tf.tmpl new file mode 100644 index 000000000000..ba3d0b38dc46 --- /dev/null +++ b/mmv1/templates/terraform/examples/health_check_grpc_with_tls_full.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_compute_health_check" "grpc-with-tls-health-check" { + provider = google-beta + + name = "{{index $.Vars "health_check_name"}}" + description = "Health check via grpc with TLS" + + timeout_sec = 1 + check_interval_sec = 1 + healthy_threshold = 4 + unhealthy_threshold = 5 + + grpc_tls_health_check { + port_specification = "USE_FIXED_PORT" + port = "443" + grpc_service_name = "testservice" + } +} diff --git a/mmv1/templates/terraform/examples/region_health_check_grpc_with_tls.tf.tmpl b/mmv1/templates/terraform/examples/region_health_check_grpc_with_tls.tf.tmpl new file mode 100644 index 000000000000..5b4a8bcc39f9 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_health_check_grpc_with_tls.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_compute_region_health_check" "grpc-with-tls-region-health-check" { + provider = google-beta + + name = "{{index $.Vars "health_check_name"}}" + + timeout_sec = 1 + check_interval_sec = 1 + + grpc_tls_health_check { + port = "443" + } +} diff --git a/mmv1/templates/terraform/examples/region_health_check_grpc_with_tls_full.tf.tmpl b/mmv1/templates/terraform/examples/region_health_check_grpc_with_tls_full.tf.tmpl new file mode 
100644 index 000000000000..bd324a3dfdd0 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_health_check_grpc_with_tls_full.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_compute_region_health_check" "grpc-with-tls-region-health-check" { + provider = google-beta + + name = "{{index $.Vars "health_check_name"}}" + description = "regional health check via GRPC with TLS" + + timeout_sec = 1 + check_interval_sec = 1 + healthy_threshold = 4 + unhealthy_threshold = 5 + + grpc_tls_health_check { + port_specification = "USE_FIXED_PORT" + port = "443" + grpc_service_name = "testservice" + } +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl index 42baf71b8e3c..0b2b8553e518 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl @@ -39,6 +39,62 @@ func TestAccComputeHealthCheck_tcp_update(t *testing.T) { }) } +{{ if ne $.TargetVersionName `ga` -}} +func TestAccComputeHealthCheck_grpcWithTls_create(t *testing.T) { + t.Parallel() + + hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeHealthCheckDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeHealthCheck_grpcWithTls(hckName), + }, + { + ResourceName: "google_compute_health_check.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccComputeHealthCheck_grpcWithTls_update(t *testing.T) { + t.Parallel() + + hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeHealthCheckDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeHealthCheck_grpcWithTls(hckName), + }, + { + ResourceName: "google_compute_health_check.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeHealthCheck_grpcWithTls_update(hckName), + }, + { + ResourceName: "google_compute_health_check.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + func TestAccComputeHealthCheck_ssl_port_spec(t *testing.T) { t.Parallel() @@ -212,6 +268,43 @@ resource "google_compute_health_check" "foobar" { `, hckName) } +{{ if ne $.TargetVersionName `ga` -}} +func testAccComputeHealthCheck_grpcWithTls(hckName string) string { + return fmt.Sprintf(` +resource "google_compute_health_check" "foobar" { + provider = "google-beta" + check_interval_sec = 3 + description = "Resource created for Terraform acceptance testing" + healthy_threshold = 3 + name = "tf-test-health-test-%s" + timeout_sec = 2 + unhealthy_threshold = 3 + grpc_tls_health_check { + port = "443" + } +} +`, hckName) +} +{{- end }} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccComputeHealthCheck_grpcWithTls_update(hckName string) string { + return fmt.Sprintf(` +resource "google_compute_health_check" "foobar" { + provider = "google-beta" + check_interval_sec = 3 + healthy_threshold = 10 + name = "tf-test-health-test-%s" + timeout_sec = 2 + unhealthy_threshold = 10 + grpc_tls_health_check { + port = "8080" + } +} +`, hckName) +} +{{- end }} + func testAccComputeHealthCheck_ssl(hckName string) string { return fmt.Sprintf(` resource "google_compute_health_check" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl index 
a0d12639dd3c..72a72339e772 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl @@ -43,6 +43,70 @@ func TestAccComputeRegionHealthCheck_tcp_update(t *testing.T) { }) } +{{ if ne $.TargetVersionName `ga` -}} +func TestAccComputeRegionHealthCheck_grpcWithTls_create(t *testing.T) { + t.Parallel() + + hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeRegionHealthCheckDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionHealthCheck_grpcWithTls(hckName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet( + "google_compute_region_health_check.foobar", "health_check_id"), + ), + }, + { + ResourceName: "google_compute_region_health_check.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccComputeRegionHealthCheck_grpcWithTls_update(t *testing.T) { + t.Parallel() + + hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeRegionHealthCheckDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionHealthCheck_grpcWithTls(hckName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet( + "google_compute_region_health_check.foobar", "health_check_id"), + ), + }, + { + ResourceName: "google_compute_region_health_check.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: 
testAccComputeRegionHealthCheck_grpcWithTls_update(hckName), + }, + { + ResourceName: "google_compute_region_health_check.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + func TestAccComputeRegionHealthCheck_ssl_port_spec(t *testing.T) { t.Parallel() @@ -238,6 +302,43 @@ resource "google_compute_region_health_check" "foobar" { `, hckName) } +{{ if ne $.TargetVersionName `ga` -}} +func testAccComputeRegionHealthCheck_grpcWithTls(hckName string) string { + return fmt.Sprintf(` +resource "google_compute_region_health_check" "foobar" { + provider = "google-beta" + check_interval_sec = 3 + description = "Resource created for Terraform acceptance testing" + healthy_threshold = 3 + name = "tf-test-health-test-%s" + timeout_sec = 2 + unhealthy_threshold = 3 + grpc_tls_health_check { + port = "443" + } +} +`, hckName) +} +{{- end }} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccComputeRegionHealthCheck_grpcWithTls_update(hckName string) string { + return fmt.Sprintf(` +resource "google_compute_region_health_check" "foobar" { + provider = "google-beta" + check_interval_sec = 3 + healthy_threshold = 10 + name = "tf-test-health-test-%s" + timeout_sec = 2 + unhealthy_threshold = 10 + grpc_tls_health_check { + port = "8080" + } +} +`, hckName) +} +{{- end }} + func testAccComputeRegionHealthCheck_ssl(hckName string) string { return fmt.Sprintf(` resource "google_compute_region_health_check" "foobar" { From d872bb01a7d77993fea9977486f093d32293b6c8 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 5 May 2025 12:45:32 -0700 Subject: [PATCH 053/884] Tweaked resource version guard docs to be clearer (#13852) --- docs/content/develop/add-resource.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/content/develop/add-resource.md b/docs/content/develop/add-resource.md index 0b32d7ac9d5d..45d00d1ebb10 100644 --- a/docs/content/develop/add-resource.md +++ b/docs/content/develop/add-resource.md @@ 
-163,9 +163,8 @@ For more information about types of resources and the generation process overall - Replace all occurrences of `github.com/hashicorp/terraform-provider-google-beta/google-beta` with `github.com/hashicorp/terraform-provider-google/google` - Remove the `Example` suffix from all test function names. - Remove the comments at the top of the file. - - If beta-only fields are being tested, do the following: - - Change the file suffix to `.go.tmpl` - - Wrap each beta-only test in a separate version guard: `{{- if ne $.TargetVersionName "ga" -}}...{{- else }}...{{- end }}` + - If any of the added Go code (including any imports) is beta-only, change the file suffix to `.go.tmpl` and wrap the beta-only code in a version guard: `{{- if ne $.TargetVersionName "ga" -}}...{{- else }}...{{- end }}`. + - If the whole resource is beta-only, wrap everything except package declarations. Otherwise, individually wrap each logically-related block of code in a version guard (field, test, etc) rather than grouping adjacent version-guarded sections - it's easier to read and easier to modify as things move out of beta. 5. Register the resource `handwrittenResources` in [`magic-modules/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl`](https://github.com/GoogleCloudPlatform/magic-modules/blob/main/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl) - Add a version guard for any beta-only resources. 6. Optional: Complete other handwritten tasks that require the MMv1 configuration file. 
From 12fbae98ee653519254b61968a66682dc4c4fca2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wiktor=20Niesiob=C4=99dzki?= Date: Tue, 6 May 2025 18:08:47 +0200 Subject: [PATCH 054/884] Allow in-place update for producer accept/reject lists (#13849) --- mmv1/products/compute/NetworkAttachment.yaml | 4 +- ...ce_compute_network_attachment_test.go.tmpl | 204 ++++++++++++++++++ 2 files changed, 207 insertions(+), 1 deletion(-) create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go.tmpl diff --git a/mmv1/products/compute/NetworkAttachment.yaml b/mmv1/products/compute/NetworkAttachment.yaml index a4d6f97ca575..fce1f0218cd1 100644 --- a/mmv1/products/compute/NetworkAttachment.yaml +++ b/mmv1/products/compute/NetworkAttachment.yaml @@ -22,7 +22,7 @@ references: api: 'https://cloud.google.com/compute/docs/reference/rest/v1/networkAttachments' docs: base_url: 'projects/{{project}}/regions/{{region}}/networkAttachments' -immutable: true +update_verb: 'PATCH' timeouts: insert_minutes: 20 update_minutes: 20 @@ -62,6 +62,7 @@ parameters: description: | Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. required: true + immutable: true - name: 'region' type: ResourceRef description: | @@ -102,6 +103,7 @@ properties: type: Enum description: | The connection preference of service attachment. The value can be set to ACCEPT_AUTOMATIC. An ACCEPT_AUTOMATIC service attachment is one that always accepts the connection from consumer forwarding rules. 
+ immutable: true required: true enum_values: - 'ACCEPT_AUTOMATIC' diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go.tmpl new file mode 100644 index 000000000000..3aa877e8b1c7 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go.tmpl @@ -0,0 +1,204 @@ +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccComputeNetworkAttachment_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeNetworkAttachmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkAttachment_full(context), + }, + { + ResourceName: "google_compute_network_attachment.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + { + Config: testAccComputeNetworkAttachment_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_network_attachment.default", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_network_attachment.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + }, + }) +} + +func 
testAccComputeNetworkAttachment_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network_attachment" "default" { + name = "tf-test-basic-network-attachment%{random_suffix}" + region = "us-central1" + description = "basic network attachment description" + connection_preference = "ACCEPT_MANUAL" + + subnetworks = [ + google_compute_subnetwork.net1.self_link + ] + + producer_accept_lists = [ + google_project.accepted_producer_project1.project_id + ] + + producer_reject_lists = [ + google_project.rejected_producer_project1.project_id + ] +} + +resource "google_compute_network" "default" { + name = "tf-test-basic-network%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "net1" { + name = "tf-test-basic-subnetwork1-%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_subnetwork" "net2" { + name = "tf-test-basic-subnetwork2-%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.1.0.0/16" +} + +resource "google_project" "rejected_producer_project1" { + project_id = "tf-test-prj-reject1-%{random_suffix}" + name = "tf-test-prj-reject1-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "rejected_producer_project2" { + project_id = "tf-test-prj-reject2-%{random_suffix}" + name = "tf-test-prj-reject2-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "accepted_producer_project1" { + project_id = "tf-test-prj-accept1-%{random_suffix}" + name = "tf-test-prj-accept1-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "accepted_producer_project2" { + project_id = 
"tf-test-prj-accept2-%{random_suffix}" + name = "tf-test-prj-accept2-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} +`, context) +} + +func testAccComputeNetworkAttachment_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network_attachment" "default" { + name = "tf-test-basic-network-attachment%{random_suffix}" + region = "us-central1" + description = "basic network attachment description" + connection_preference = "ACCEPT_MANUAL" + + subnetworks = [ + google_compute_subnetwork.net2.self_link + ] + + producer_accept_lists = [ + google_project.accepted_producer_project2.project_id + ] + + producer_reject_lists = [ + google_project.rejected_producer_project2.project_id + ] +} + +resource "google_compute_network" "default" { + name = "tf-test-basic-network%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "net1" { + name = "tf-test-basic-subnetwork1-%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_subnetwork" "net2" { + name = "tf-test-basic-subnetwork2-%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.1.0.0/16" +} + +resource "google_project" "rejected_producer_project1" { + project_id = "tf-test-prj-reject1-%{random_suffix}" + name = "tf-test-prj-reject1-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "rejected_producer_project2" { + project_id = "tf-test-prj-reject2-%{random_suffix}" + name = "tf-test-prj-reject2-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "accepted_producer_project1" { + project_id = "tf-test-prj-accept1-%{random_suffix}" + name = 
"tf-test-prj-accept1-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "accepted_producer_project2" { + project_id = "tf-test-prj-accept2-%{random_suffix}" + name = "tf-test-prj-accept2-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} +`, context) +} From 0b31077126fe8b51512b201ff4379b1735d82df6 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Tue, 6 May 2025 09:16:08 -0700 Subject: [PATCH 055/884] Workbench: Metadata diff suppress checks the exact metadata key (#13855) --- .../terraform/constants/workbench_instance.go.tmpl | 7 +++++-- .../workbench/resource_workbench_instance_test.go.tmpl | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index 51072aef19bd..41c2c3438a78 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -84,9 +84,12 @@ var WorkbenchInstanceProvidedMetadata = []string{ } func WorkbenchInstanceMetadataDiffSuppress(k, old, new string, d *schema.ResourceData) bool { - // Suppress diffs for the Metadata + // Extract the actual metadata key from the full key path + parts := strings.Split(k, ".") + key := parts[len(parts)-1] + for _, metadata := range WorkbenchInstanceProvidedMetadata { - if strings.Contains(k, metadata) { + if key == metadata { return true } } diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl index 07f66b1a539a..04fa6a2f27fc 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl @@ -401,6 +401,7 @@ resource "google_workbench_instance" "instance" { terraform = "true", "idle-timeout-seconds" = "10800", "image-url" = "fake-value", + "container-custom-params" = "test-params", } } From dfb1cc6b83195a9da29a2f03e9c31cd33a94f2f2 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 6 May 2025 11:21:11 -0700 Subject: [PATCH 056/884] Update enrolled_teams.yml (#13858) --- tools/issue-labeler/labeler/enrolled_teams.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index 480d1e3eb292..aecdeda85fe0 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -440,8 +440,7 @@ service/iam-serviceaccount: service/iam-wlid: resources: - google_iam_access_boundary_policy - - google_iam_workload_identity_pool - - google_iam_workload_identity_pool_provider + - google_iam_workload_identity_pool.* service/iam-workforce: resources: - google_iam_workforce_pool.* From bf61e6228f0436ba88abb5930e7d4271f4f8fd77 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 6 May 2025 13:45:20 -0700 Subject: [PATCH 057/884] modify tests to get config for tgc testing (#13865) --- .../compute/resource_compute_instance_test.go.tmpl | 12 ++++++++++-- .../resourcemanager/resource_google_project_test.go | 4 ++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index 611db8d72674..c9f02ac96dc6 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -2096,11 +2096,19 @@ func 
TestAccComputeInstance_hostErrorTimeoutSecconds(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.host_error_timeout_seconds", "0"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_hostErrorTimeoutSeconds(context_4)), ), }, computeInstanceImportStep(context_4["zone"].(string), context_4["instance_name"].(string), []string{}), + { + Config: testAccComputeInstance_hostErrorTimeoutSeconds(context_1), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.host_error_timeout_seconds", "90"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_hostErrorTimeoutSeconds(context_1)), + ), + }, + computeInstanceImportStep(context_1["zone"].(string), context_1["instance_name"].(string), []string{}), }, }) } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go index 8de7667f3ba6..1c9b854f2cf8 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go @@ -653,8 +653,8 @@ resource "google_project" "acceptance" { } func testAccProject_tagsAllowDestroy(context map[string]interface{}) string { - return acctest.Nprintf( - `resource "google_project" "acceptance" { + return acctest.Nprintf(` +resource "google_project" "acceptance" { project_id = "%{pid}" name = "%{pid}" org_id = "%{org}" From 37939623ca53e7a9ee67d0197080316a8822c6e6 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 7 May 2025 09:19:54 -0700 Subject: 
[PATCH 058/884] Print original resource address for tgc testing (#13868) --- mmv1/third_party/terraform/acctest/tgc_utils.go | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 90df47f9d02c..033a8f84432e 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -8,7 +8,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) // Hardcode the Terraform resource name -> API service name mapping temporarily. @@ -55,19 +54,10 @@ func GetTestMetadataForTgc(service, address, rawConfig string) resource.TestChec // The acceptance tests names will be also used for the tgc tests. // "service" is logged and will be used to put the tgc tests into specific service packages. log.Printf("[DEBUG]TGC Terraform service: %s", service) + log.Printf("[DEBUG]TGC Terraform resource: %s", address) re := regexp.MustCompile(`\"(tf[-_]?test[-_]?.*?)([a-z0-9]+)\"`) rawConfig = re.ReplaceAllString(rawConfig, `"${1}tgc"`) - - // Replace resource name with the resource's real name, - // which is used to get the main resource object by checking the address after parsing raw config. 
- // For example, replace `"google_compute_instance" "foobar"` with `"google_compute_instance" "tf-test-mi3fqaucf8"` - n := tpgresource.GetResourceNameFromSelfLink(rState.Primary.ID) - log.Printf("[DEBUG]TGC Terraform resource: %s.%s", resourceType, n) - - old := fmt.Sprintf(`"%s" "%s"`, resourceType, resourceName) - new := fmt.Sprintf(`"%s" "%s"`, resourceType, n) - rawConfig = strings.Replace(rawConfig, old, new, 1) log.Printf("[DEBUG]TGC raw_config starts %sEnd of TGC raw_config", rawConfig) return nil } From f0b91e29d5829679d9df7b0a2c4e1a1c33d4c549 Mon Sep 17 00:00:00 2001 From: dorianverna Date: Wed, 7 May 2025 20:41:53 +0200 Subject: [PATCH 059/884] Added immutability support to Internal Ranges in Terraform. (#13860) --- .../networkconnectivity/InternalRange.yaml | 5 ++ ...etwork_connectivity_internal_range_test.go | 58 +++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git a/mmv1/products/networkconnectivity/InternalRange.yaml b/mmv1/products/networkconnectivity/InternalRange.yaml index f909d72624e7..b9bb9d6ffc67 100644 --- a/mmv1/products/networkconnectivity/InternalRange.yaml +++ b/mmv1/products/networkconnectivity/InternalRange.yaml @@ -183,3 +183,8 @@ properties: may not exist yet. For example /projects/{project}/regions/{region}/subnetworks/{subnet} required: true + - name: 'immutable' + type: Boolean + description: | + Immutable ranges cannot have their fields modified, except for labels and description. 
+ immutable: true diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go index 5c549630356c..aa862d81ed18 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go @@ -337,3 +337,61 @@ resource "google_compute_network" "default" { } `, context) } + +func TestAccNetworkConnectivityInternalRange_networkConnectivityInternalRangesImmutableExample_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + resourceName := "google_network_connectivity_internal_range.default" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkConnectivityInternalRangeDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkConnectivityInternalRange_networkConnectivityInternalRangesImmutableExample_full(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + resourceName, "description", "Test internal range Immutable"), + resource.TestCheckResourceAttr( + resourceName, "ip_cidr_range", "11.11.20.0/24"), + resource.TestCheckResourceAttr( + resourceName, "usage", "FOR_VPC"), + resource.TestCheckResourceAttr( + resourceName, "peering", "FOR_SELF"), + resource.TestCheckResourceAttr( + resourceName, "immutable", "true"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "network", "labels", "terraform_labels"}, + }, + }, + }) +} + +func 
testAccNetworkConnectivityInternalRange_networkConnectivityInternalRangesImmutableExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_network_connectivity_internal_range" "default" { + name = "basic%{random_suffix}" + description = "Test internal range Immutable" + network = google_compute_network.default.name + ip_cidr_range = "11.11.20.0/24" + usage = "FOR_VPC" + peering = "FOR_SELF" + immutable = true +} + +resource "google_compute_network" "default" { + name = "tf-test-internal-ranges%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} From 20dce9e866b0e7f4111ffe9141725c4231bd595d Mon Sep 17 00:00:00 2001 From: Andras Kerekes Date: Wed, 7 May 2025 11:46:08 -0700 Subject: [PATCH 060/884] replacing abiu in cloud functions examples with a more meaningful name (#13857) --- mmv1/products/cloudfunctions2/Function.yaml | 4 ++-- ...pl => cloudfunctions2_automatic_base_image_update.tf.tmpl} | 0 ...pl => cloudfunctions2_on_deploy_base_image_update.tf.tmpl} | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename mmv1/templates/terraform/examples/{cloudfunctions2_abiu.tf.tmpl => cloudfunctions2_automatic_base_image_update.tf.tmpl} (100%) rename mmv1/templates/terraform/examples/{cloudfunctions2_abiu_on_deploy.tf.tmpl => cloudfunctions2_on_deploy_base_image_update.tf.tmpl} (100%) diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 08a51465040d..3d2cd85db46e 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -264,7 +264,7 @@ examples: project: 'my-project-name' # this example file will cause IAM conflicts between tests if used to make a test exclude_test: true - - name: 'cloudfunctions2_abiu' + - name: 'cloudfunctions2_automatic_base_image_update' primary_resource_id: 'function' min_version: 'beta' vars: @@ -283,7 +283,7 @@ examples: ignore_read_extra: - 
'build_config.0.source.0.storage_source.0.object' - 'build_config.0.source.0.storage_source.0.bucket' - - name: 'cloudfunctions2_abiu_on_deploy' + - name: 'cloudfunctions2_on_deploy_base_image_update' primary_resource_id: 'function' min_version: 'beta' vars: diff --git a/mmv1/templates/terraform/examples/cloudfunctions2_abiu.tf.tmpl b/mmv1/templates/terraform/examples/cloudfunctions2_automatic_base_image_update.tf.tmpl similarity index 100% rename from mmv1/templates/terraform/examples/cloudfunctions2_abiu.tf.tmpl rename to mmv1/templates/terraform/examples/cloudfunctions2_automatic_base_image_update.tf.tmpl diff --git a/mmv1/templates/terraform/examples/cloudfunctions2_abiu_on_deploy.tf.tmpl b/mmv1/templates/terraform/examples/cloudfunctions2_on_deploy_base_image_update.tf.tmpl similarity index 100% rename from mmv1/templates/terraform/examples/cloudfunctions2_abiu_on_deploy.tf.tmpl rename to mmv1/templates/terraform/examples/cloudfunctions2_on_deploy_base_image_update.tf.tmpl From 2ca18fb99874e411acd23ccc3692392b2a184507 Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Thu, 8 May 2025 00:30:12 +0530 Subject: [PATCH 061/884] Adding new fields to BackupVault & Backup to power a new feature called Cross Region Backups (#13839) --- mmv1/products/netapp/Backup.yaml | 10 +++ mmv1/products/netapp/BackupVault.yaml | 28 +++++++ .../netapp/resource_netapp_backup_test.go | 82 ++++++++++++++++++- 3 files changed, 119 insertions(+), 1 deletion(-) diff --git a/mmv1/products/netapp/Backup.yaml b/mmv1/products/netapp/Backup.yaml index 05828c3088fd..a3afa45c6c9d 100644 --- a/mmv1/products/netapp/Backup.yaml +++ b/mmv1/products/netapp/Backup.yaml @@ -143,3 +143,13 @@ properties: Format: `projects/{{projectId}}/locations/{{location}}/volumes/{{volumename}}/snapshots/{{snapshotname}}`` required: false diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: 'volumeRegion' + type: String + description: | + Region of the volume 
from which the backup was created. + output: true + - name: 'backupRegion' + type: String + description: | + Region in which backup is stored. + output: true diff --git a/mmv1/products/netapp/BackupVault.yaml b/mmv1/products/netapp/BackupVault.yaml index d5b9b2d9dd3d..0bfb511d6aa8 100644 --- a/mmv1/products/netapp/BackupVault.yaml +++ b/mmv1/products/netapp/BackupVault.yaml @@ -88,3 +88,31 @@ properties: description: | Labels as key value pairs. Example: `{ "owner": "Bob", "department": "finance", "purpose": "testing" }`. required: false + - name: 'backupVaultType' + type: Enum + description: | + Type of the backup vault to be created. Default is IN_REGION. + enum_values: + - 'BACKUP_VAULT_TYPE_UNSPECIFIED' + - 'IN_REGION' + - 'CROSS_REGION' + default_from_api: true + - name: 'backupRegion' + type: String + description: | + Region in which backup is stored. + - name: 'sourceRegion' + type: String + description: | + Region in which the backup vault is created. + output: true + - name: 'sourceBackupVault' + type: String + description: | + Name of the Backup vault created in source region. + output: true + - name: 'destinationBackupVault' + type: String + description: | + Name of the Backup vault created in backup region. 
+ output: true diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index f6ea2a1e86d7..4baccdc19539 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -233,7 +233,7 @@ resource "google_netapp_volume_snapshot" "default" { resource "google_netapp_backup" "test_backup" { name = "tf-test-test-backup%{random_suffix}" - description = "This is a test backup" + description = "This is a flex test backup" source_volume = google_netapp_volume.default.id location = google_netapp_backup_vault.default.location vault_name = google_netapp_backup_vault.default.name @@ -245,3 +245,83 @@ resource "google_netapp_backup" "test_backup" { } `, context) } + +func TestAccNetappBackup_NetappIntegratedBackup(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetappBackup_IntegratedBackup(context), + }, + { + ResourceName: "google_netapp_backup.test_backup", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "name", "terraform_labels", "vault_name"}, + }, + }, + }) +} + +func testAccNetappBackup_IntegratedBackup(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_network" "default" { + name = "%{network_name}" +} +resource "google_netapp_storage_pool" "default" { + name = 
"tf-test-backup-pool%{random_suffix}" + location = "us-east4" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id +} +resource "google_netapp_volume" "default" { + name = "tf-test-backup-volume%{random_suffix}" + location = google_netapp_storage_pool.default.location + capacity_gib = "100" + share_name = "tf-test-backup-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + deletion_policy = "FORCE" + backup_config { + backup_vault = google_netapp_backup_vault.default.id + } +} +resource "google_netapp_backup_vault" "default" { + name = "tf-test-backup-vault%{random_suffix}" + location = google_netapp_storage_pool.default.location + backup_vault_type = "CROSS_REGION" + backup_region = "us-west4" +} +resource "google_netapp_volume_snapshot" "default" { + depends_on = [google_netapp_volume.default] + location = google_netapp_volume.default.location + volume_name = google_netapp_volume.default.name + description = "This is a test description" + name = "testvolumesnap%{random_suffix}" + labels = { + key= "test" + value= "snapshot" + } + } +resource "google_netapp_backup" "test_backup" { + name = "tf-test-test-backup%{random_suffix}" + description = "This is a test integrated backup" + source_volume = google_netapp_volume.default.id + location = google_netapp_backup_vault.default.location + vault_name = google_netapp_backup_vault.default.name + source_snapshot = google_netapp_volume_snapshot.default.id + labels = { + key= "test" + value= "backup" + } +} +`, context) +} From c13f9b3778086a03351aea3b78575a7c14e7a25c Mon Sep 17 00:00:00 2001 From: ML Date: Wed, 7 May 2025 21:13:25 +0200 Subject: [PATCH 062/884] Adding support for Dataplex Entry resource. 
(#13820) Co-authored-by: Marek Lipert --- mmv1/products/dataplex/Entry.yaml | 268 ++++++ .../constants/dataplex_entry.go.tmpl | 124 +++ .../dataplex_entry_aspects.go.tmpl | 44 + .../terraform/decoders/dataplex_entry.go.tmpl | 21 + .../terraform/encoders/dataplex_entry.go.tmpl | 12 + .../examples/dataplex_entry_basic.tf.tmpl | 20 + .../examples/dataplex_entry_full.tf.tmpl | 133 +++ .../terraform/pre_read/dataplex_entry.go.tmpl | 4 + .../pre_update/dataplex_entry.go.tmpl | 28 + .../resource_dataplex_entry_meta.yaml | 36 + .../dataplex/resource_dataplex_entry_test.go | 769 ++++++++++++++++++ 11 files changed, 1459 insertions(+) create mode 100644 mmv1/products/dataplex/Entry.yaml create mode 100644 mmv1/templates/terraform/constants/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl create mode 100644 mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_meta.yaml create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_test.go diff --git a/mmv1/products/dataplex/Entry.yaml b/mmv1/products/dataplex/Entry.yaml new file mode 100644 index 000000000000..3c53e0de6387 --- /dev/null +++ b/mmv1/products/dataplex/Entry.yaml @@ -0,0 +1,268 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'Entry' +description: | + An entry represents a data asset that you capture metadata for. Every entry is an instance of an entry type. + Each operation on aspects for an entry needs to comply with the required aspects of its entry type. + For example, when you create an entry, you must provide values for all the aspect types defined by the entry type. + You can't delete those aspects for an entry that are marked as required in the entry type. + +references: + guides: + 'Manage entries and ingest custom sources': 'https://cloud.google.com/dataplex/docs/ingest-custom-sources' + api: 'https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups.entries' + +base_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries?entryId={{entry_id}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' + +custom_code: + constants: templates/terraform/constants/dataplex_entry.go.tmpl + decoder: templates/terraform/decoders/dataplex_entry.go.tmpl + encoder: templates/terraform/encoders/dataplex_entry.go.tmpl + pre_read: templates/terraform/pre_read/dataplex_entry.go.tmpl + pre_update: templates/terraform/pre_update/dataplex_entry.go.tmpl + +timeouts: + insert_minutes: 5 + 
update_minutes: 5 + delete_minutes: 5 + +examples: + - name: 'dataplex_entry_basic' + primary_resource_id: 'test_basic' + primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' + vars: + entry_id: 'entry-basic' + entry_group_name: 'entry-group-basic' + aspect_type_name: "aspect-type-basic" + entry_type_name: "entry-type-basic" + test_env_vars: + project_number: 'PROJECT_NUMBER' + - name: 'dataplex_entry_full' + primary_resource_id: 'test_entry_full' + primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' + ignore_read_extra: + - 'aspects' + vars: + entry_id: 'entry-full' + entry_group_name: 'entry-group-full' + aspect_type_name: "aspect-type-full" + entry_type_name: "entry-type-full" + test_env_vars: + project_number: 'PROJECT_NUMBER' + +parameters: + - name: 'location' + type: String + url_param_only: true + immutable: true + description: | + The location where entry will be created. + + - name: 'entryGroupId' + type: String + url_param_only: true + immutable: true + description: | + The entry group id of the entry group the entry will be created in. + + - name: 'entryId' + type: String + url_param_only: true + immutable: true + description: | + The entry id of the entry. + +properties: + - name: 'name' + type: String + output: true + immutable: true + description: | + The relative resource name of the entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. + + - name: 'entryType' + type: String + required: true + immutable: true + validation: + function: ProjectNumberValidation + description: | + The relative resource name of the entry type that was used to create this entry, in the format projects/{project_number}/locations/{locationId}/entryTypes/{entryTypeId}. + + - name: 'createTime' + type: Time + output: true + description: | + The time when the Entry was created in Dataplex. 
+ + - name: 'updateTime' + type: Time + output: true + description: | + The time when the entry was last updated in Dataplex. + + - name: 'aspects' + type: Array + custom_flatten: 'templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl' + description: | + The aspects that are attached to the entry. + + item_type: + type: NestedObject + properties: + - name: "aspectKey" + type: String + required: true + validation: + function: AspectProjectNumberValidation + description: | + Depending on how the aspect is attached to the entry, the format of the aspect key can be one of the following: + + If the aspect is attached directly to the entry: {project_number}.{locationId}.{aspectTypeId} + If the aspect is attached to an entry's path: {project_number}.{locationId}.{aspectTypeId}@{path} + + - name: "aspectValue" + type: NestedObject + properties: + - name: "aspectType" + type: String + output: true + description: | + The resource name of the type used to create this Aspect. + + - name: "path" + type: String + output: true + description: | + The path in the entry under which the aspect is attached. + + - name: "createTime" + type: Time + output: true + description: | + The time when the Aspect was created. + + - name: "updateTime" + type: Time + output: true + description: | + The time when the Aspect was last modified. + + - name: "data" + type: String + required: true + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + description: | + The content of the aspect in JSON form, according to its aspect type schema. The maximum size of the field is 120KB (encoded as UTF-8). 
+ + - name: 'parentEntry' + type: String + immutable: true + description: | + The resource name of the parent entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. + + - name: "fullyQualifiedName" + type: String + description: | + A name for the entry that can be referenced by an external system. For more information, see https://cloud.google.com/dataplex/docs/fully-qualified-names. + The maximum size of the field is 4000 characters. + + - name: "entrySource" + type: NestedObject + default_from_api: true + properties: + - name: "resource" + type: String + description: | + The name of the resource in the source system. Maximum length is 4,000 characters. + + - name: "system" + type: String + description: | + The name of the source system. Maximum length is 64 characters. + + - name: "platform" + type: String + description: | + The platform containing the source system. Maximum length is 64 characters. + + - name: "displayName" + type: String + description: | + A user-friendly display name. Maximum length is 500 characters. + + - name: "description" + type: String + description: | + A description of the data resource. Maximum length is 2,000 characters. + + - name: "labels" + type: KeyValuePairs + description: | + User-defined labels. The maximum size of keys and values is 128 characters each. + An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. + + - name: "ancestors" + type: Array + immutable: true + item_type: + type: NestedObject + properties: + - name: "name" + type: String + description: | + The name of the ancestor resource. + + - name: "type" + type: String + description: | + The type of the ancestor resource. + + description: | + The entries representing the ancestors of the data resource in the source system. 
+ + - name: 'createTime' + type: Time + validation: + function: 'validation.IsRFC3339Time' + description: | + The time when the resource was created in the source system. + + - name: 'updateTime' + type: Time + validation: + function: 'validation.IsRFC3339Time' + description: | + The time when the resource was last updated in the source system. + If the entry exists in the system and its EntrySource has updateTime populated, + further updates to the EntrySource of the entry must provide incremental updates to its updateTime. + + - name: 'location' + type: String + output: true + description: |- + Location of the resource in the source system. You can search the entry by this location. + By default, this should match the location of the entry group containing this entry. + A different value allows capturing the source location for data external to Google Cloud. diff --git a/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl b/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl new file mode 100644 index 000000000000..9d996fa2c254 --- /dev/null +++ b/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl @@ -0,0 +1,124 @@ +// GetEntry supports up to 100 aspects. Therefore we set a threshold at 99. +const maxAspectNumber = 99 + +// NumberOfAspectsValidation checks if the number of aspects on an entry exceeds certain threshold. +func NumberOfAspectsValidation(i interface{}, k string) (warnings []string, errors []error) { + s, isSlice := i.([]interface{}) + m, isMap := i.(map[string]interface{}) + + if !isSlice && !isMap { + errors = append(errors, fmt.Errorf("expected type of field %q to be array, but got %T", k, i)) + return warnings, errors + } + + if len(s)+len(m) > maxAspectNumber { + errors = append(errors, fmt.Errorf( + "field %q has an invalid content: %q. 
The maximal number of aspects is 99.", + k, i, + )) + } + + return warnings, errors +} + +// ProjectNumberValidation checks if the input string conforms to the pattern: +// "projects//" +func ProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + + if !ok { + errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) + return warnings, errors + } + + var projectNumberRegex = regexp.MustCompile(`^projects\/[1-9]\d*\/.+$`) + if !projectNumberRegex.MatchString(v) { + errors = append(errors, fmt.Errorf( + "field %q has an invalid format: %q. Expected format: 'projects//'. Please note that project IDs are not supported.", + k, v, + )) + } + + return warnings, errors +} + +// ProjectNumberValidation checks if the input string conforms to the pattern: +// "projects//" +func AspectProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) + return warnings, errors + } + + var numberDotAnythingRegex = regexp.MustCompile(`^[1-9]\d*\..+$`) + + if !numberDotAnythingRegex.MatchString(v) { + errors = append(errors, fmt.Errorf( + "field %q has an invalid format: %q. Expected format: '.anything'. 
Please note that project IDs are not supported.", + k, v, + )) + } + + return warnings, errors +} + +func FilterAspects(aspectKeySet map[string]struct{}, res map[string]interface{}) { + if res["aspects"] == nil { + return + } + aspectsMap := res["aspects"].(map[string]interface{}) + for key := range aspectsMap { + if _, keep := aspectKeySet[key]; !keep { + delete(aspectsMap, key) + } + } +} + +func AddAspectsToSet(aspectKeySet map[string]struct{}, aspects interface{}) { + for _, aspectItemRaw := range aspects.([]interface{}) { + aspectMap := aspectItemRaw.(map[string]interface{}) + keyString := aspectMap["aspect_key"].(string) + aspectKeySet[keyString] = struct{}{} + } +} + +// InverseTransformAspects converts the "aspects" map back to a slice of maps, +// re-inserting the "aspectKey". Modifies obj in-place. +func InverseTransformAspects(res map[string]interface{}) { + if res["aspects"] == nil { + return + } + originalMap := res["aspects"].(map[string]interface{}) + newSlice := make([]interface{}, 0, len(originalMap)) + + for key, value := range originalMap { + innerMap := value.(map[string]interface{}) + box := make(map[string]interface{}, 2) + box["aspectKey"] = key + box["aspectValue"] = innerMap + newSlice = append(newSlice, box) + } + res["aspects"] = newSlice +} + +// TransformAspects concisely transforms the "aspects" slice within obj into a map. +// It assumes obj["aspects"] exists and is a []interface{} containing +// map[string]interface{} elements, each with a string "aspectKey". +// Modifies obj in-place. 
+func TransformAspects(obj map[string]interface{}) { + if obj["aspects"] == nil { + return + } + originalSlice := obj["aspects"].([]interface{}) + newMap := make(map[string]interface{}, len(originalSlice)) + for _, item := range originalSlice { + aspectMap := item.(map[string]interface{}) + + key := aspectMap["aspectKey"].(string) + value := aspectMap["aspectValue"].(map[string]interface{}) + + newMap[key] = value + } + obj["aspects"] = newMap +} diff --git a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl new file mode 100644 index 000000000000..de8dd922f1f9 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl @@ -0,0 +1,44 @@ +// This file is a transposition of mmv1/templates/terraform/flatten_property_method.go.tmpl +// Most of the code is copied from there, with the exception of sorting logic. +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]map[string]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + + {{- range $prop := $.ItemType.UserProperties }} + {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} + "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), + {{- end }} + {{- end }} + }) + } + + configData := []map[string]interface{}{} + + for _, item := range d.Get("aspects").([]interface{}) { + configData = append(configData, item.(map[string]interface{})) + } + + sorted, err := tpgresource.SortMapsByConfigOrder(configData, transformed, "aspect_key") + if err != nil 
{
+ log.Printf("[ERROR] Could not sort API response value: %s", err)
+ return v
+ }
+
+ return sorted
+}
+
+{{- if $.NestedProperties }}
+ {{- range $prop := $.NestedProperties }}
+ {{ template "flattenPropertyMethod" $prop -}}
+ {{- end }}
+{{- end }}
diff --git a/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl
new file mode 100644
index 000000000000..c609650d1634
--- /dev/null
+++ b/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl
@@ -0,0 +1,21 @@
+aspects := res["aspects"]
+if aspects != nil {
+ _, errors := NumberOfAspectsValidation(aspects, "aspects")
+ if len(errors) > 0 {
+ return nil, errors[0]
+ }
+}
+
+aspectKeysOfInterest := make(map[string]struct{})
+if d.HasChange("aspects") {
+ currentAspects, futureAspects := d.GetChange("aspects")
+ AddAspectsToSet(aspectKeysOfInterest, currentAspects)
+ AddAspectsToSet(aspectKeysOfInterest, futureAspects)
+} else {
+ AddAspectsToSet(aspectKeysOfInterest, d.Get("aspects"))
+}
+
+FilterAspects(aspectKeysOfInterest, res)
+InverseTransformAspects(res)
+
+return res, nil
diff --git a/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl
new file mode 100644
index 000000000000..2054ba74f752
--- /dev/null
+++ b/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl
@@ -0,0 +1,12 @@
+// The yaml file does not allow validation for Array fields.
+// Therefore we add validation as a part of the encoding process.
+aspects := obj["aspects"] +if aspects != nil { + _, errors := NumberOfAspectsValidation(aspects, "aspects") + if len(errors) > 0 { + return nil, errors[0] + } +} + +TransformAspects(obj) +return obj, nil diff --git a/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl new file mode 100644 index 000000000000..7bed2768935d --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl @@ -0,0 +1,20 @@ +resource "google_dataplex_entry_group" "{{index $.Vars "entry_group_name"}}" { + entry_group_id = "{{index $.Vars "entry_group_name"}}" + project = "{{index $.TestEnvVars "project_number"}}" + location = "us-central1" +} + +resource "google_dataplex_entry_type" "{{index $.Vars "entry_type_name"}}" { + entry_type_id = "{{index $.Vars "entry_type_name"}}" + project = "{{index $.TestEnvVars "project_number"}}" + location = "us-central1" +} + +resource "google_dataplex_entry" "{{$.PrimaryResourceId}}" { + entry_group_id = google_dataplex_entry_group.{{index $.Vars "entry_group_name"}}.entry_group_id + project = "{{index $.TestEnvVars "project_number"}}" + location = "us-central1" + entry_id = "{{index $.Vars "entry_id"}}" + entry_type = google_dataplex_entry_type.{{index $.Vars "entry_type_name"}}.name +} + diff --git a/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl new file mode 100644 index 000000000000..096100ed7073 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl @@ -0,0 +1,133 @@ +resource "google_dataplex_aspect_type" "{{index $.Vars "aspect_type_name"}}-one" { + aspect_type_id = "{{index $.Vars "aspect_type_name"}}-one" + location = "us-central1" + project = "{{index $.TestEnvVars "project_number"}}" + + metadata_template = < 0 + + if hasError != tc.expectError { + t.Fatalf("%s: NumberOfAspectsValidation() error expectation mismatch: got error = %v (%v), want 
error = %v", tc.name, hasError, errors, tc.expectError) + } + + if tc.expectError && tc.errorMsg != "" { + found := false + for _, err := range errors { + if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring + found = true + break + } + } + if !found { + t.Errorf("%s: NumberOfAspectsValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) + } + } + }) + } +} + +func TestProjectNumberValidation(t *testing.T) { + fieldName := "some_field" + testCases := []struct { + name string + input interface{} + expectError bool + errorMsg string + }{ + {"valid input", "projects/1234567890/locations/us-central1", false, ""}, + {"valid input with only number", "projects/987/stuff", false, ""}, + {"valid input with trailing slash content", "projects/1/a/b/c", false, ""}, + {"valid input minimal", "projects/1/a", false, ""}, + {"invalid input trailing slash only", "projects/555/", true, "has an invalid format"}, + {"invalid type - int", 123, true, `to be string, but got int`}, + {"invalid type - nil", nil, true, `to be string, but got `}, + {"invalid format - missing 'projects/' prefix", "12345/locations/us", true, "has an invalid format"}, + {"invalid format - project number starts with 0", "projects/0123/data", true, "has an invalid format"}, + {"invalid format - no project number", "projects//data", true, "has an invalid format"}, + {"invalid format - letters instead of number", "projects/abc/data", true, "has an invalid format"}, + {"invalid format - missing content after number/", "projects/123", true, "has an invalid format"}, + {"invalid format - empty string", "", true, "has an invalid format"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, errors := dataplex.ProjectNumberValidation(tc.input, fieldName) + hasError := len(errors) > 0 + + if hasError != tc.expectError { + t.Fatalf("%s: ProjectNumberValidation() error expectation mismatch: got error = %v (%v), 
want error = %v", tc.name, hasError, errors, tc.expectError) + } + + if tc.expectError && tc.errorMsg != "" { + found := false + for _, err := range errors { + if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring + found = true + break + } + } + if !found { + t.Errorf("%s: ProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) + } + } + }) + } +} + +func TestAspectProjectNumberValidation(t *testing.T) { + fieldName := "some_field" + testCases := []struct { + name string + input interface{} + expectError bool + errorMsg string + }{ + {"valid input", "1234567890.compute.googleapis.com/Disk", false, ""}, + {"valid input minimal", "1.a", false, ""}, + {"invalid input trailing dot only", "987.", true, "has an invalid format"}, + {"invalid type - int", 456, true, `to be string, but got int`}, + {"invalid type - nil", nil, true, `to be string, but got `}, + {"invalid format - missing number", ".compute.googleapis.com/Disk", true, "has an invalid format"}, + {"invalid format - number starts with 0", "0123.compute.googleapis.com/Disk", true, "has an invalid format"}, + {"invalid format - missing dot", "12345compute", true, "has an invalid format"}, + {"invalid format - letters instead of number", "abc.compute.googleapis.com/Disk", true, "has an invalid format"}, + {"invalid format - missing content after dot", "12345", true, "has an invalid format"}, + {"invalid format - empty string", "", true, "has an invalid format"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, errors := dataplex.AspectProjectNumberValidation(tc.input, fieldName) + hasError := len(errors) > 0 + + if hasError != tc.expectError { + t.Fatalf("%s: AspectProjectNumberValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) + } + + if tc.expectError && tc.errorMsg != "" { + found := false + for _, err := range 
errors { + if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring + found = true + break + } + } + if !found { + t.Errorf("%s: AspectProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) + } + } + }) + } +} + +func TestFilterAspects(t *testing.T) { + testCases := []struct { + name string + aspectKeySet map[string]struct{} + resInput map[string]interface{} + expectedAspects map[string]interface{} + }{ + {"aspects is nil", + map[string]struct{}{"keep": {}}, + map[string]interface{}{"otherKey": "value"}, + nil}, + {"empty aspectKeySet", map[string]struct{}{}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{}}, + {"keep all aspects", map[string]struct{}{"one": {}, "two": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, + {"keep some aspects", map[string]struct{}{"two": {}, "three_not_present": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"two": map[string]interface{}{"data": 2}}}, + {"input aspects map is empty", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": map[string]interface{}{}}, map[string]interface{}{}}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + resCopy := deepCopyMap(tc.resInput) + dataplex.FilterAspects(tc.aspectKeySet, resCopy) + + actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] + + if tc.expectedAspects == nil { + if aspectsKeyExists && actualAspectsRaw != nil { + t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %v", tc.name, 
actualAspectsRaw) + } + return + } + + if !aspectsKeyExists { + t.Fatalf("%s: Expected 'aspects' key to exist, but it was absent", tc.name) + } + + actualAspects, ok := actualAspectsRaw.(map[string]interface{}) + if !ok { + t.Fatalf("%s: Expected 'aspects' to be a map[string]interface{}, but got %T", tc.name, actualAspectsRaw) + } + + if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { + t.Errorf("%s: FilterAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspects, tc.expectedAspects) + } + }) + } +} + +func TestAddAspectsToSet(t *testing.T) { + testCases := []struct { + name string + initialSet map[string]struct{} + aspectsInput interface{} + expectedSet map[string]struct{} + expectPanic bool + }{ + {"add to empty set", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false}, + {"add to existing set", map[string]struct{}{"existing": {}}, []interface{}{map[string]interface{}{"aspect_key": "key1"}}, map[string]struct{}{"existing": {}, "key1": {}}, false}, + {"add duplicate keys", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false}, + {"input aspects is empty slice", map[string]struct{}{"existing": {}}, []interface{}{}, map[string]struct{}{"existing": {}}, false}, + {"input aspects is nil", map[string]struct{}{}, nil, map[string]struct{}{}, true}, + {"input aspects is wrong type", map[string]struct{}{}, "not a slice", map[string]struct{}{}, true}, + {"item in slice is not a map", map[string]struct{}{}, []interface{}{"not a map"}, map[string]struct{}{}, true}, + {"item map missing aspect_key", map[string]struct{}{}, []interface{}{map[string]interface{}{"wrong_key": "key1"}}, map[string]struct{}{}, true}, + {"aspect_key is not a string", 
map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": 123}}, map[string]struct{}{}, true}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + currentSet := make(map[string]struct{}) + for k, v := range tc.initialSet { + currentSet[k] = v + } + + defer func() { + r := recover() + if tc.expectPanic && r == nil { + t.Errorf("%s: Expected a panic, but AddAspectsToSet did not panic", tc.name) + } else if !tc.expectPanic && r != nil { + t.Errorf("%s: AddAspectsToSet panicked unexpectedly: %v", tc.name, r) + } + + if !tc.expectPanic { + if !reflect.DeepEqual(currentSet, tc.expectedSet) { + t.Errorf("%s: AddAspectsToSet() result mismatch:\ngot: %v\nwant: %v", tc.name, currentSet, tc.expectedSet) + } + } + }() + + dataplex.AddAspectsToSet(currentSet, tc.aspectsInput) + }) + } +} + +func sortAspectSlice(slice []interface{}) { + sort.SliceStable(slice, func(i, j int) bool { + mapI, okI := slice[i].(map[string]interface{}) + mapJ, okJ := slice[j].(map[string]interface{}) + if !okI || !okJ { + return false + } // Should not happen in valid tests + + keyI, okI := mapI["aspectKey"].(string) + keyJ, okJ := mapJ["aspectKey"].(string) + if !okI || !okJ { + return false + } // Should not happen in valid tests + + return keyI < keyJ + }) +} + +func TestInverseTransformAspects(t *testing.T) { + testCases := []struct { + name string + resInput map[string]interface{} + expectedAspects []interface{} + expectNilAspects bool + expectPanic bool + }{ + {"aspects is nil", map[string]interface{}{"otherKey": "value"}, nil, true, false}, + {"aspects is empty map", map[string]interface{}{"aspects": map[string]interface{}{}}, []interface{}{}, false, false}, + {"aspects with one entry", map[string]interface{}{"aspects": map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}}, false, false}, + {"aspects with multiple 
entries", map[string]interface{}{"aspects": map[string]interface{}{"key2": map[string]interface{}{"data": "value2"}, "key1": map[string]interface{}{"data": "value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspectValue": map[string]interface{}{"data": "value2"}}}, false, false}, + {"aspects is wrong type (not map)", map[string]interface{}{"aspects": "not a map"}, nil, false, true}, + {"aspect value is not a map", map[string]interface{}{"aspects": map[string]interface{}{"key1": "not a map"}}, nil, false, true}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + resCopy := deepCopyMap(tc.resInput) + + defer func() { + r := recover() + if tc.expectPanic && r == nil { + t.Errorf("%s: Expected a panic, but InverseTransformAspects did not panic", tc.name) + } else if !tc.expectPanic && r != nil { + t.Errorf("%s: InverseTransformAspects panicked unexpectedly: %v", tc.name, r) + } + + if !tc.expectPanic { + actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] + + if tc.expectNilAspects { + if aspectsKeyExists && actualAspectsRaw != nil { + t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %v", tc.name, actualAspectsRaw) + } + return + } + + if !aspectsKeyExists && !tc.expectNilAspects { // Should exist if not expecting nil + t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing", tc.name) + } + + actualAspects, ok := actualAspectsRaw.([]interface{}) + if !ok && !tc.expectNilAspects { // Type check only if we didn't expect nil and key exists + t.Fatalf("%s: Expected 'aspects' to be []interface{}, but got %T", tc.name, actualAspectsRaw) + } + + sortAspectSlice(actualAspects) + sortAspectSlice(tc.expectedAspects) // Ensure expected is sorted if non-nil + + if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { + t.Errorf("%s: InverseTransformAspects() result mismatch:\ngot: %#v\nwant: %#v", 
tc.name, actualAspects, tc.expectedAspects) + } + } + }() + + dataplex.InverseTransformAspects(resCopy) + }) + } +} + +func TestTransformAspects(t *testing.T) { + testCases := []struct { + name string + objInput map[string]interface{} + expectedAspects map[string]interface{} + expectNilAspects bool + expectPanic bool + }{ + {"aspects is nil", map[string]interface{}{"otherKey": "value"}, nil, true, false}, + {"aspects is empty slice", map[string]interface{}{"aspects": []interface{}{}}, map[string]interface{}{}, false, false}, + {"aspects with one item", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}, false, false}, + {"aspects with multiple items", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspectValue": map[string]interface{}{"data": "value2"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}, "key2": map[string]interface{}{"data": "value2"}}, false, false}, + {"aspects with duplicate aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value_first"}}, map[string]interface{}{"aspectKey": "key2", "aspectValue": map[string]interface{}{"data": "value2"}}, map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value_last"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value_last"}, "key2": map[string]interface{}{"data": "value2"}}, false, false}, + {"aspects is wrong type (not slice)", map[string]interface{}{"aspects": "not a slice"}, nil, false, true}, + {"item in slice is not a map", map[string]interface{}{"aspects": []interface{}{"not a map"}}, nil, false, true}, + 
{"item map missing aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"wrongKey": "k1", "aspectValue": map[string]interface{}{}}}}, nil, false, true}, + {"aspectKey is not a string", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": 123, "aspectValue": map[string]interface{}{}}}}, nil, false, true}, + {"item map missing aspectValue", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1"}}}, nil, false, true}, + {"aspectValue is not a map", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": "not a map"}}}, nil, false, true}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + objCopy := deepCopyMap(tc.objInput) + + defer func() { + r := recover() + if tc.expectPanic && r == nil { + t.Errorf("%s: Expected a panic, but TransformAspects did not panic", tc.name) + } else if !tc.expectPanic && r != nil { + t.Errorf("%s: TransformAspects panicked unexpectedly: %v", tc.name, r) + } + + if !tc.expectPanic { + actualAspectsRaw, aspectsKeyExists := objCopy["aspects"] + + if tc.expectNilAspects { + if aspectsKeyExists && actualAspectsRaw != nil { + t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %v", tc.name, actualAspectsRaw) + } + return + } + + if !aspectsKeyExists && !tc.expectNilAspects { + t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing", tc.name) + } + + actualAspects, ok := actualAspectsRaw.(map[string]interface{}) + if !ok && !tc.expectNilAspects { + t.Fatalf("%s: Expected 'aspects' to be map[string]interface{}, but got %T", tc.name, actualAspectsRaw) + } + + if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { + t.Errorf("%s: TransformAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspects, tc.expectedAspects) + } + } + }() + + dataplex.TransformAspects(objCopy) + }) + } +} + +func deepCopyMap(original 
map[string]interface{}) map[string]interface{} { + if original == nil { + return nil + } + copyMap := make(map[string]interface{}, len(original)) + for key, value := range original { + copyMap[key] = deepCopyValue(value) + } + return copyMap +} + +func deepCopySlice(original []interface{}) []interface{} { + if original == nil { + return nil + } + copySlice := make([]interface{}, len(original)) + for i, value := range original { + copySlice[i] = deepCopyValue(value) + } + return copySlice +} + +func deepCopyValue(value interface{}) interface{} { + if value == nil { + return nil + } + switch v := value.(type) { + case map[string]interface{}: + return deepCopyMap(v) + case []interface{}: + return deepCopySlice(v) + default: + return v + } +} + +func TestAccDataplexEntry_dataplexEntryUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_number": envvar.GetTestProjectNumberFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataplexEntryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexEntry_dataplexEntryFullUpdatePreapre(context), + }, + { + ResourceName: "google_dataplex_entry.test_entry_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, + }, + { + Config: testAccDataplexEntry_dataplexEntryUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_dataplex_entry.test_entry_full", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_dataplex_entry.test_entry_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, + }, + }, + }) +} + 
+func testAccDataplexEntry_dataplexEntryFullUpdatePreapre(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataplex_aspect_type" "tf-test-aspect-type-full%{random_suffix}-one" { + aspect_type_id = "tf-test-aspect-type-full%{random_suffix}-one" + location = "us-central1" + project = "%{project_number}" + + metadata_template = < Date: Wed, 7 May 2025 12:22:57 -0700 Subject: [PATCH 063/884] tgc-revival: remove TPGB dependency (part1) (#13837) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/provider/terraform_tgc_next.go | 36 +- .../pkg/provider/provider_mmv1_resources.go | 15 + .../services/compute/compute_instance.go | 1413 +++++++++++++++++ .../services/resourcemanager/project.go | 105 ++ 4 files changed, 1552 insertions(+), 17 deletions(-) create mode 100644 mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 59ead87d5698..71394c35a326 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -70,28 +70,19 @@ func (tgc TerraformGoogleConversionNext) GenerateCaiToHclObjects(outputFolder, r } func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { - tgc.CompileTfToCaiCommonFiles(outputFolder, products) - tgc.CompileCaiToHclCommonFiles(outputFolder, products) -} - -func (tgc TerraformGoogleConversionNext) CompileTfToCaiCommonFiles(outputFolder string, products []*api.Product) { - log.Printf("Compiling common files for tgc tfplan2cai.") - resourceConverters := map[string]string{ + // common + "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go.tmpl", + + // tfplan2cai "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", "pkg/tfplan2cai/converters/services/compute/compute_instance_helpers.go": 
"third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", "pkg/tfplan2cai/converters/services/compute/metadata.go": "third_party/terraform/services/compute/metadata.go.tmpl", - } - templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) - tgc.CompileFileList(outputFolder, resourceConverters, *templateData, products) -} - -func (tgc TerraformGoogleConversionNext) CompileCaiToHclCommonFiles(outputFolder string, products []*api.Product) { - log.Printf("Compiling common files for tgc tfplan2cai.") - resourceConverters := map[string]string{ + // cai2hcl "pkg/cai2hcl/converters/resource_converters.go": "templates/tgc_next/cai2hcl/resource_converters.go.tmpl", } + templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) tgc.CompileFileList(outputFolder, resourceConverters, *templateData, products) } @@ -134,8 +125,18 @@ func (tgc TerraformGoogleConversionNext) CopyCommonFiles(outputFolder string, ge log.Println(fmt.Errorf("error copying directory %v: %v", outputFolder, err)) } - tgc.CopyTfToCaiCommonFiles(outputFolder) - tgc.CopyCaiToHclCommonFiles(outputFolder) + resourceConverters := map[string]string{ + // common + "pkg/provider/mtls_util.go": "third_party/terraform/provider/mtls_util.go", + "pkg/verify/validation.go": "third_party/terraform/verify/validation.go", + "pkg/verify/path_or_contents.go": "third_party/terraform/verify/path_or_contents.go", + "pkg/version/version.go": "third_party/terraform/version/version.go", + + // tfplan2cai + "pkg/tfplan2cai/converters/services/compute/image.go": "third_party/terraform/services/compute/image.go", + "pkg/tfplan2cai/converters/services/compute/disk_type.go": "third_party/terraform/services/compute/disk_type.go", + } + tgc.CopyFileList(outputFolder, resourceConverters) } func (tgc TerraformGoogleConversionNext) CopyTfToCaiCommonFiles(outputFolder string) { @@ -193,6 +194,7 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target // replace google to 
google-beta gaImportPath := ImportPathFromVersion("ga") sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(TERRAFORM_PROVIDER_BETA+"/"+RESOURCE_DIRECTORY_BETA), -1) + err = os.WriteFile(targetFile, sourceByte, 0644) if err != nil { log.Fatalf("Cannot write file %s to replace import path: %s", target, err) diff --git a/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go b/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go new file mode 100644 index 000000000000..07dc94951017 --- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go @@ -0,0 +1,15 @@ +package provider + +import ( + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/compute" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/resourcemanager" +) + +var handwrittenTfplan2caiResources = map[string]*schema.Resource{ + // ####### START handwritten resources ########### + "google_compute_instance": compute.ResourceComputeInstance(), + "google_project": resourcemanager.ResourceGoogleProject(), + // ####### END handwritten resources ########### +} diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go index 79da2f1ed4ff..1eaff02a28c0 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go @@ -9,16 +9,1429 @@ import ( compute "google.golang.org/api/compute/v0.beta" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/verify" ) const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" const ComputeDiskAssetType string = "compute.googleapis.com/Disk" +var ( + advancedMachineFeaturesKeys = []string{ + "advanced_machine_features.0.enable_nested_virtualization", + "advanced_machine_features.0.threads_per_core", + "advanced_machine_features.0.turbo_mode", + "advanced_machine_features.0.visible_core_count", + "advanced_machine_features.0.performance_monitoring_unit", + "advanced_machine_features.0.enable_uefi_networking", + } + + bootDiskKeys = []string{ + "boot_disk.0.guest_os_features", + "boot_disk.0.auto_delete", + "boot_disk.0.device_name", + "boot_disk.0.disk_encryption_key_raw", + "boot_disk.0.kms_key_self_link", + "boot_disk.0.disk_encryption_key_rsa", + "boot_disk.0.disk_encryption_service_account", + "boot_disk.0.initialize_params", + "boot_disk.0.mode", + "boot_disk.0.source", + } + + initializeParamsKeys = []string{ + "boot_disk.0.initialize_params.0.size", + "boot_disk.0.initialize_params.0.type", + "boot_disk.0.initialize_params.0.image", + "boot_disk.0.initialize_params.0.labels", + "boot_disk.0.initialize_params.0.resource_manager_tags", + "boot_disk.0.initialize_params.0.provisioned_iops", + "boot_disk.0.initialize_params.0.provisioned_throughput", + "boot_disk.0.initialize_params.0.enable_confidential_compute", + "boot_disk.0.initialize_params.0.source_image_encryption_key", + "boot_disk.0.initialize_params.0.snapshot", + "boot_disk.0.initialize_params.0.source_snapshot_encryption_key", + "boot_disk.0.initialize_params.0.storage_pool", + "boot_disk.0.initialize_params.0.resource_policies", + 
"boot_disk.0.initialize_params.0.architecture", + } + + schedulingKeys = []string{ + "scheduling.0.on_host_maintenance", + "scheduling.0.automatic_restart", + "scheduling.0.preemptible", + "scheduling.0.node_affinities", + "scheduling.0.min_node_cpus", + "scheduling.0.provisioning_model", + "scheduling.0.instance_termination_action", + "scheduling.0.termination_time", + "scheduling.0.availability_domain", + "scheduling.0.max_run_duration", + "scheduling.0.on_instance_stop_action", + "scheduling.0.maintenance_interval", + "scheduling.0.host_error_timeout_seconds", + "scheduling.0.graceful_shutdown", + "scheduling.0.local_ssd_recovery_timeout", + } + + shieldedInstanceConfigKeys = []string{ + "shielded_instance_config.0.enable_secure_boot", + "shielded_instance_config.0.enable_vtpm", + "shielded_instance_config.0.enable_integrity_monitoring", + } +) + +func ResourceComputeInstance() *schema.Resource { + return &schema.Resource{ + // A compute instance is more or less a superset of a compute instance + // template. Please attempt to maintain consistency with the + // resource_compute_instance_template schema when updating this one. 
+ Schema: map[string]*schema.Schema{ + "boot_disk": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Description: `The boot disk for the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "auto_delete": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: bootDiskKeys, + Default: true, + Description: `Whether the disk will be auto-deleted when the instance is deleted.`, + }, + + "device_name": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + Computed: true, + ForceNew: true, + Description: `Name with which attached disk will be accessible under /dev/disk/by-id/`, + }, + + "disk_encryption_key_raw": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + ConflictsWith: []string{"boot_disk.0.kms_key_self_link", "boot_disk.0.disk_encryption_key_rsa"}, + Sensitive: true, + Description: `A 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to encrypt this disk. Only one of kms_key_self_link, disk_encryption_key_raw and disk_encryption_key_rsa may be set.`, + }, + + "disk_encryption_key_rsa": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + ConflictsWith: []string{"boot_disk.0.kms_key_self_link", "boot_disk.0.disk_encryption_key_raw"}, + Sensitive: true, + Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. 
Only one of kms_key_self_link, disk_encryption_key_raw and disk_encryption_key_rsa may be set.`, + }, + + "disk_encryption_key_sha256": { + Type: schema.TypeString, + Computed: true, + Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource.`, + }, + + "disk_encryption_service_account": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used`, + }, + + "interface": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"SCSI", "NVME"}, false), + Description: `The disk interface used for attaching this disk. One of SCSI or NVME. (This field is shared with attached_disk and only used for specific cases, please don't specify this field without advice from Google.)`, + }, + + "kms_key_self_link": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + ConflictsWith: []string{"boot_disk.0.disk_encryption_key_raw", "boot_disk.0.disk_encryption_key_rsa"}, + DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, + Computed: true, + Description: `The self_link of the encryption key that is stored in Google Cloud KMS to encrypt this disk. Only one of kms_key_self_link, disk_encryption_key_raw and disk_encryption_key_rsa may be set.`, + }, + + "guest_os_features": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + Computed: true, + Description: `A list of features to enable on the guest operating system. 
Applicable only for bootable images.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "initialize_params": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: bootDiskKeys, + Computed: true, + ForceNew: true, + MaxItems: 1, + Description: `Parameters with which a disk was created alongside the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "size": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + ValidateFunc: validation.IntAtLeast(1), + Description: `The size of the image in gigabytes.`, + }, + + "type": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + Description: `The Google Compute Engine disk type. Such as pd-standard, pd-ssd or pd-balanced.`, + }, + + "image": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + Description: `The image from which this disk was initialised.`, + }, + + "source_image_encryption_key": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + MaxItems: 1, + Description: `The encryption key used to decrypt the source image.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "raw_key": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Sensitive: true, + Description: `Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, + }, + + "rsa_encrypted_key": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Sensitive: true, + Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, + }, + + "kms_key_self_link": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, + Description: `The self link of the encryption key that is stored in Google Cloud KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, + }, + + "kms_key_service_account": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used.`, + }, + + "sha256": { + Type: schema.TypeString, + Computed: true, + Description: `The SHA256 hash of the encryption key used to encrypt this disk.`, + }, + }, + }, + }, + + "snapshot": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The snapshot from which this disk was initialised.`, + }, + + "source_snapshot_encryption_key": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + MaxItems: 1, + Description: `The encryption key used to decrypt the source snapshot.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "raw_key": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Sensitive: true, + Description: `Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, + }, + + "rsa_encrypted_key": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Sensitive: true, + Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, + }, + + "kms_key_self_link": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, + Description: `The self link of the encryption key that is stored in Google Cloud KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, + }, + + "kms_key_service_account": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used.`, + }, + + "sha256": { + Type: schema.TypeString, + Computed: true, + Description: `The SHA256 hash of the encryption key used to encrypt this disk.`, + }, + }, + }, + }, + + "labels": { + Type: schema.TypeMap, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + Description: `A set of key/value label pairs assigned to the disk.`, + }, + + "resource_manager_tags": { + Type: schema.TypeMap, + Optional: true, + ForceNew: true, + AtLeastOneOf: initializeParamsKeys, + Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, + }, + + "resource_policies": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + ForceNew: true, + Computed: true, + AtLeastOneOf: initializeParamsKeys, + DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, + MaxItems: 1, + Description: `A list of self_links of resource policies to attach to the instance's boot disk. Modifying this list will cause the instance to recreate. 
Currently a max of 1 resource policy is supported.`, + }, + + "provisioned_iops": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + Description: `Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle.`, + }, + + "provisioned_throughput": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + Computed: true, + ForceNew: true, + Description: `Indicates how much throughput to provision for the disk. This sets the number of throughput mb per second that the disk can handle.`, + }, + + "enable_confidential_compute": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + ForceNew: true, + Description: `A flag to enable confidential compute mode on boot disk`, + }, + + "storage_pool": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: initializeParamsKeys, + ForceNew: true, + DiffSuppressFunc: tpgresource.CompareResourceNames, + Description: `The URL of the storage pool in which the new disk is created`, + }, + + "architecture": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + AtLeastOneOf: initializeParamsKeys, + ValidateFunc: validation.StringInSlice([]string{"X86_64", "ARM64"}, false), + Description: `The architecture of the disk. One of "X86_64" or "ARM64".`, + }, + }, + }, + }, + + "mode": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + Default: "READ_WRITE", + ValidateFunc: validation.StringInSlice([]string{"READ_WRITE", "READ_ONLY"}, false), + Description: `Read/write mode for the disk. 
One of "READ_ONLY" or "READ_WRITE".`, + }, + + "source": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: bootDiskKeys, + Computed: true, + ForceNew: true, + ConflictsWith: []string{"boot_disk.initialize_params"}, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The name or self_link of the disk attached to this instance.`, + }, + }, + }, + }, + + "machine_type": { + Type: schema.TypeString, + Required: true, + Description: `The machine type to create.`, + DiffSuppressFunc: tpgresource.CompareResourceNames, + }, + + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: verify.ValidateRFC1035Name(1, 63), + Description: `The name of the instance. One of name or self_link must be provided.`, + }, + + "network_interface": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + Description: `The networks attached to the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "network": { + Type: schema.TypeString, + Optional: true, + Computed: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The name or self_link of the network attached to this interface.`, + }, + + "subnetwork": { + Type: schema.TypeString, + Optional: true, + Computed: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The name or self_link of the subnetwork attached to this interface.`, + }, + + "network_attachment": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The URL of the network attachment that this interface should connect to in the following format: projects/{projectNumber}/regions/{region_name}/networkAttachments/{network_attachment_name}.`, + }, + + "subnetwork_project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The project in which the subnetwork belongs.`, + }, 
+ + "network_ip": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The private IP address assigned to the instance.`, + }, + + "name": { + Type: schema.TypeString, + Computed: true, + Description: `The name of the interface`, + }, + "nic_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"GVNIC", "VIRTIO_NET", "IDPF", "MRDMA", "IRDMA"}, false), + Description: `The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF, MRDMA, and IRDMA`, + }, + "access_config": { + Type: schema.TypeList, + Optional: true, + Description: `Access configurations, i.e. IPs via which this instance can be accessed via the Internet.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "nat_ip": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The IP address that is be 1:1 mapped to the instance's network ip.`, + }, + + "network_tier": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The networking tier used for configuring this instance. One of PREMIUM or STANDARD.`, + }, + + "public_ptr_domain_name": { + Type: schema.TypeString, + Optional: true, + Description: `The DNS domain name for the public PTR record.`, + }, + "security_policy": { + Type: schema.TypeString, + Computed: true, + Description: `A full or partial URL to a security policy to add to this instance. 
If this field is set to an empty string it will remove the associated security policy.`, + }, + }, + }, + }, + + "alias_ip_range": { + Type: schema.TypeList, + Optional: true, + Description: `An array of alias IP ranges for this network interface.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "ip_cidr_range": { + Type: schema.TypeString, + Required: true, + Description: `The IP CIDR range represented by this alias IP range.`, + }, + "subnetwork_range_name": { + Type: schema.TypeString, + Optional: true, + Description: `The subnetwork secondary range name specifying the secondary range from which to allocate the IP CIDR range for this alias IP range.`, + }, + }, + }, + }, + + "stack_type": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{"IPV4_ONLY", "IPV4_IPV6", "IPV6_ONLY", ""}, false), + Description: `The stack type for this network interface to identify whether the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will be used.`, + }, + + "ipv6_access_type": { + Type: schema.TypeString, + Computed: true, + Description: `One of EXTERNAL, INTERNAL to indicate whether the IP can be accessed from the Internet. This field is always inherited from its subnetwork.`, + }, + + "ipv6_access_config": { + Type: schema.TypeList, + Optional: true, + Description: `An array of IPv6 access configurations for this interface. Currently, only one IPv6 access config, DIRECT_IPV6, is supported. If there is no ipv6AccessConfig specified, then this instance will have no external IPv6 Internet access.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "network_tier": { + Type: schema.TypeString, + Required: true, + Description: `The service-level to be provided for IPv6 traffic when the subnet has an external subnet. 
Only PREMIUM tier is valid for IPv6`, + }, + "public_ptr_domain_name": { + Type: schema.TypeString, + Optional: true, + Description: `The domain name to be used when creating DNSv6 records for the external IPv6 ranges.`, + }, + "external_ipv6": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The first IPv6 address of the external IPv6 range associated with this instance, prefix length is stored in externalIpv6PrefixLength in ipv6AccessConfig. To use a static external IP address, it must be unused and in the same region as the instance's zone. If not specified, Google Cloud will automatically assign an external IPv6 address from the instance's subnetwork.`, + }, + "external_ipv6_prefix_length": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The prefix length of the external IPv6 range.`, + }, + "name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The name of this access configuration. In ipv6AccessConfigs, the recommended name is External IPv6.`, + }, + "security_policy": { + Type: schema.TypeString, + Computed: true, + Description: `A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.`, + }, + }, + }, + }, + + "internal_ipv6_prefix_length": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: `The prefix length of the primary internal IPv6 range.`, + }, + + "ipv6_address": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `An IPv6 internal network address for this network interface. 
If not specified, Google Cloud will automatically assign an internal IPv6 address from the instance's subnetwork.`, + }, + + "queue_count": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + Description: `The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It will be empty if not specified.`, + }, + + "security_policy": { + Type: schema.TypeString, + Optional: true, + Description: `A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.`, + }, + }, + }, + }, + "network_performance_config": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ForceNew: true, + Description: `Configures network performance settings for the instance. If not specified, the instance will be created with its default network performance configuration.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "total_egress_bandwidth_tier": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"TIER_1", "DEFAULT"}, false), + Description: `The egress bandwidth tier to enable. Possible values:TIER_1, DEFAULT`, + }, + }, + }, + }, + "allow_stopping_for_update": { + Type: schema.TypeBool, + Optional: true, + Description: `If true, allows Terraform to stop the instance to update its properties. 
If you try to update a property that requires stopping the instance without setting this field, the update will fail.`, + }, + + "attached_disk": { + Type: schema.TypeList, + Optional: true, + Description: `List of disks attached to the instance`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "source": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The name or self_link of the disk attached to this instance.`, + }, + + "device_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Name with which the attached disk is accessible under /dev/disk/by-id/`, + }, + + "mode": { + Type: schema.TypeString, + Optional: true, + Default: "READ_WRITE", + ValidateFunc: validation.StringInSlice([]string{"READ_WRITE", "READ_ONLY"}, false), + Description: `Read/write mode for the disk. One of "READ_ONLY" or "READ_WRITE".`, + }, + + "disk_encryption_key_raw": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: `A 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to encrypt this disk. Only one of kms_key_self_link, disk_encryption_key_rsa and disk_encryption_key_raw may be set.`, + }, + + "disk_encryption_key_rsa": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. Only one of kms_key_self_link, disk_encryption_key_rsa and disk_encryption_key_raw may be set.`, + }, + + "kms_key_self_link": { + Type: schema.TypeString, + Optional: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, + Computed: true, + Description: `The self_link of the encryption key that is stored in Google Cloud KMS to encrypt this disk. 
Only one of kms_key_self_link, disk_encryption_key_rsa and disk_encryption_key_raw may be set.`, + }, + + "disk_encryption_service_account": { + Type: schema.TypeString, + Optional: true, + Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used`, + }, + + "disk_encryption_key_sha256": { + Type: schema.TypeString, + Computed: true, + Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource.`, + }, + }, + }, + }, + + "can_ip_forward": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `Whether sending and receiving of packets with non-matching source or destination IPs is allowed.`, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + Description: `A brief description of the resource.`, + }, + + "deletion_protection": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `Whether deletion protection is enabled on this instance.`, + }, + + "enable_display": { + Type: schema.TypeBool, + Optional: true, + Description: `Whether the instance has virtual displays enabled.`, + }, + + "guest_accelerator": { + Type: schema.TypeList, + Optional: true, + Computed: true, + ForceNew: true, + Description: `List of the type and count of accelerator cards attached to the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + Description: `The number of the guest accelerator cards exposed to this instance.`, + }, + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The accelerator type resource exposed to this instance. E.g. 
nvidia-tesla-k80.`, + }, + }, + }, + }, + + "params": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ForceNew: true, + Description: `Stores additional params passed with the request, but not persisted as part of resource payload.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "resource_manager_tags": { + Type: schema.TypeMap, + Optional: true, + Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, + }, + }, + }, + }, + + "labels": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `A set of key/value label pairs assigned to the instance. + + **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. + Please refer to the field 'effective_labels' for all of the labels present on the resource.`, + }, + + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "metadata": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `Metadata key/value pairs made available within the instance.`, + }, + + "partner_metadata": { + Type: schema.TypeMap, + Optional: true, + DiffSuppressFunc: ComparePartnerMetadataDiff, + DiffSuppressOnRefresh: true, + Elem: &schema.Schema{Type: schema.TypeString}, + 
Description: `Partner Metadata Map made available within the instance.`, + }, + + "metadata_startup_script": { + Type: schema.TypeString, + Optional: true, + Description: `Metadata startup scripts made available within the instance.`, + }, + + "min_cpu_platform": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The minimum CPU platform specified for the VM instance.`, + }, + + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The ID of the project in which the resource belongs. If self_link is provided, this value is ignored. If neither self_link nor project are provided, the provider project is used.`, + }, + + "scheduling": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + Description: `The scheduling strategy being used by the instance.`, + Elem: &schema.Resource{ + // !!! IMPORTANT !!! + // We have a custom diff function for the scheduling block due to issues with Terraform's + // diff on schema.Set. If changes are made to this block, they must be reflected in that + // method. See schedulingHasChangeWithoutReboot in compute_instance_helpers.go + Schema: map[string]*schema.Schema{ + "on_host_maintenance": { + Type: schema.TypeString, + Optional: true, + Computed: true, + AtLeastOneOf: schedulingKeys, + Description: `Describes maintenance behavior for the instance. 
One of MIGRATE or TERMINATE,`, + }, + + "automatic_restart": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: schedulingKeys, + Default: true, + Description: `Specifies if the instance should be restarted if it was terminated by Compute Engine (not a user).`, + }, + + "preemptible": { + Type: schema.TypeBool, + Optional: true, + Default: false, + AtLeastOneOf: schedulingKeys, + ForceNew: true, + Description: `Whether the instance is preemptible.`, + }, + + "node_affinities": { + Type: schema.TypeSet, + Optional: true, + AtLeastOneOf: schedulingKeys, + Elem: instanceSchedulingNodeAffinitiesElemSchema(), + DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress(""), + Description: `Specifies node affinities or anti-affinities to determine which sole-tenant nodes your instances and managed instance groups will use as host systems.`, + }, + + "min_node_cpus": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: schedulingKeys, + }, + + "provisioning_model": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + AtLeastOneOf: schedulingKeys, + Description: `Whether the instance is spot. If this is set as SPOT.`, + }, + + "instance_termination_action": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: schedulingKeys, + Description: `Specifies the action GCE should take when SPOT VM is preempted.`, + }, + "termination_time": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + AtLeastOneOf: schedulingKeys, + Description: `Specifies the timestamp, when the instance will be terminated, +in RFC3339 text format. 
If specified, the instance termination action +will be performed at the termination time.`, + }, + "availability_domain": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: schedulingKeys, + Description: `Specifies the availability domain, which this instance should be scheduled on.`, + }, + "max_run_duration": { + Type: schema.TypeList, + Optional: true, + Description: `The timeout for new network connections to hosts.`, + MaxItems: 1, + ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "seconds": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + Description: `Span of time at a resolution of a second. +Must be from 0 to 315,576,000,000 inclusive.`, + }, + "nanos": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + Description: `Span of time that's a fraction of a second at nanosecond +resolution. Durations less than one second are represented +with a 0 seconds field and a positive nanos field. Must +be from 0 to 999,999,999 inclusive.`, + }, + }, + }, + }, + "on_instance_stop_action": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + ForceNew: true, + Description: `Defines the behaviour for instances with the instance_termination_action.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "discard_local_ssd": { + Type: schema.TypeBool, + Optional: true, + Description: `If true, the contents of any attached Local SSD disks will be discarded.`, + Default: false, + ForceNew: true, + }, + }, + }, + }, + "host_error_timeout_seconds": { + Type: schema.TypeInt, + Optional: true, + Description: `Specify the time in seconds for host error detection, the value must be within the range of [90, 330] with the increment of 30, if unset, the default behavior of host error recovery will be used.`, + }, + + "maintenance_interval": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: schedulingKeys, + Description: `Specifies the frequency of planned maintenance events. 
The accepted values are: PERIODIC`, + }, + "local_ssd_recovery_timeout": { + Type: schema.TypeList, + Optional: true, + Description: `Specifies the maximum amount of time a Local Ssd Vm should wait while + recovery of the Local Ssd state is attempted. Its value should be in + between 0 and 168 hours with hour granularity and the default value being 1 + hour.`, + MaxItems: 1, + ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "seconds": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + Description: `Span of time at a resolution of a second. +Must be from 0 to 315,576,000,000 inclusive.`, + }, + "nanos": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + Description: `Span of time that's a fraction of a second at nanosecond +resolution. Durations less than one second are represented +with a 0 seconds field and a positive nanos field. Must +be from 0 to 999,999,999 inclusive.`, + }, + }, + }, + }, + "graceful_shutdown": { + Type: schema.TypeList, + Optional: true, + Description: `Settings for the instance to perform a graceful shutdown.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `Opts-in for graceful shutdown.`, + }, + "max_duration": { + Type: schema.TypeList, + Optional: true, + Description: `The time allotted for the instance to gracefully shut down. + If the graceful shutdown isn't complete after this time, then the instance + transitions to the STOPPING state.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "seconds": { + Type: schema.TypeInt, + Required: true, + Description: `Span of time at a resolution of a second. + The value must be between 1 and 3600, which is 3,600 seconds (one hour).`, + }, + "nanos": { + Type: schema.TypeInt, + Optional: true, + Description: `Span of time that's a fraction of a second at nanosecond + resolution. 
Durations less than one second are represented + with a 0 seconds field and a positive nanos field. Must + be from 0 to 999,999,999 inclusive.`, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + + "scratch_disk": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `The scratch disks attached to the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "device_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Name with which the attached disk is accessible under /dev/disk/by-id/`, + }, + "interface": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"SCSI", "NVME"}, false), + Description: `The disk interface used for attaching this disk. One of SCSI or NVME.`, + }, + "size": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, + ValidateFunc: validation.IntAtLeast(375), + Default: 375, + Description: `The size of the disk in gigabytes. One of 375 or 3000.`, + }, + }, + }, + }, + + "service_account": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: `The service account to attach to the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "email": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The service account e-mail address.`, + }, + + "scopes": { + Type: schema.TypeSet, + Required: true, + Description: `A list of service scopes.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + StateFunc: func(v interface{}) string { + return tpgresource.CanonicalizeServiceScope(v.(string)) + }, + }, + Set: tpgresource.StringScopeHashcode, + }, + }, + }, + }, + + "shielded_instance_config": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + // Since this block is used by the API based on which + // image being used, the field needs to be marked as Computed. 
+ Computed: true, + DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress(""), + Description: `The shielded vm config being used by the instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enable_secure_boot": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: shieldedInstanceConfigKeys, + Default: false, + Description: `Whether secure boot is enabled for the instance.`, + }, + + "enable_vtpm": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: shieldedInstanceConfigKeys, + Default: true, + Description: `Whether the instance uses vTPM.`, + }, + + "enable_integrity_monitoring": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: shieldedInstanceConfigKeys, + Default: true, + Description: `Whether integrity monitoring is enabled for the instance.`, + }, + }, + }, + }, + "advanced_machine_features": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: `Controls for advanced machine-related behavior features.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enable_nested_virtualization": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: advancedMachineFeaturesKeys, + Description: `Whether to enable nested virtualization or not.`, + }, + "threads_per_core": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: advancedMachineFeaturesKeys, + Description: `The number of threads per physical core. To disable simultaneous multithreading (SMT) set this to 1. If unset, the maximum number of threads supported per core by the underlying processor is assumed.`, + }, + "turbo_mode": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: advancedMachineFeaturesKeys, + Description: `Turbo frequency mode to use for the instance. 
Currently supported modes is "ALL_CORE_MAX".`, + ValidateFunc: validation.StringInSlice([]string{"ALL_CORE_MAX"}, false), + }, + "visible_core_count": { + Type: schema.TypeInt, + Optional: true, + AtLeastOneOf: advancedMachineFeaturesKeys, + Description: `The number of physical cores to expose to an instance. Multiply by the number of threads per core to compute the total number of virtual CPUs to expose to the instance. If unset, the number of cores is inferred from the instance\'s nominal CPU count and the underlying platform\'s SMT width.`, + }, + "performance_monitoring_unit": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: advancedMachineFeaturesKeys, + ValidateFunc: validation.StringInSlice([]string{"STANDARD", "ENHANCED", "ARCHITECTURAL"}, false), + Description: `The PMU is a hardware component within the CPU core that monitors how the processor runs code. Valid values for the level of PMU are "STANDARD", "ENHANCED", and "ARCHITECTURAL".`, + }, + "enable_uefi_networking": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + AtLeastOneOf: advancedMachineFeaturesKeys, + Description: `Whether to enable UEFI networking for the instance.`, + }, + }, + }, + }, + "confidential_instance_config": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ForceNew: true, + Computed: true, + Description: `The Confidential VM config being used by the instance. on_host_maintenance has to be set to TERMINATE or this will fail to create.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enable_confidential_compute": { + Type: schema.TypeBool, + Optional: true, + Description: `Defines whether the instance should have confidential compute enabled. 
Field will be deprecated in a future release`, + AtLeastOneOf: []string{"confidential_instance_config.0.enable_confidential_compute", "confidential_instance_config.0.confidential_instance_type"}, + }, + "confidential_instance_type": { + Type: schema.TypeString, + Optional: true, + Description: ` + The confidential computing technology the instance uses. + SEV is an AMD feature. TDX is an Intel feature. One of the following + values is required: SEV, SEV_SNP, TDX. If SEV_SNP, min_cpu_platform = + "AMD Milan" is currently required.`, + AtLeastOneOf: []string{"confidential_instance_config.0.enable_confidential_compute", "confidential_instance_config.0.confidential_instance_type"}, + }, + }, + }, + }, + "desired_status": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"RUNNING", "TERMINATED", "SUSPENDED"}, false), + Description: `Desired status of the instance. Either "RUNNING", "SUSPENDED" or "TERMINATED".`, + }, + "current_status": { + Type: schema.TypeString, + Computed: true, + Description: ` + Current status of the instance. + This could be one of the following values: PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. + For more information about the status of the instance, see [Instance life cycle](https://cloud.google.com/compute/docs/instances/instance-life-cycle).`, + }, + "tags": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Set: schema.HashString, + Description: `The list of tags attached to the instance.`, + }, + + "zone": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The zone of the instance. If self_link is provided, this value is ignored. 
If neither self_link nor zone are provided, the provider zone is used.`, + }, + + "cpu_platform": { + Type: schema.TypeString, + Computed: true, + Description: `The CPU platform used by this instance.`, + }, + + "instance_id": { + Type: schema.TypeString, + Computed: true, + Description: `The server-assigned unique identifier of this instance.`, + }, + + "creation_timestamp": { + Type: schema.TypeString, + Computed: true, + Description: `Creation timestamp in RFC3339 text format.`, + }, + + "label_fingerprint": { + Type: schema.TypeString, + Computed: true, + Description: `The unique fingerprint of the labels.`, + }, + + "metadata_fingerprint": { + Type: schema.TypeString, + Computed: true, + Description: `The unique fingerprint of the metadata.`, + }, + + "self_link": { + Type: schema.TypeString, + Computed: true, + Description: `The URI of the created resource.`, + }, + + "tags_fingerprint": { + Type: schema.TypeString, + Computed: true, + Description: `The unique fingerprint of the tags.`, + }, + + "hostname": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `A custom hostname for the instance. Must be a fully qualified DNS name and RFC-1035-valid. Valid format is a series of labels 1-63 characters long matching the regular expression [a-z]([-a-z0-9]*[a-z0-9]), concatenated with periods. The entire hostname must not exceed 253 characters. Changing this forces a new resource to be created.`, + }, + + "resource_policies": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, + Optional: true, + MaxItems: 1, + Description: `A list of self_links of resource policies to attach to the instance. 
Currently a max of 1 resource policy is supported.`, + }, + + "reservation_affinity": { + Type: schema.TypeList, + MaxItems: 1, + Computed: true, + Optional: true, + ForceNew: true, + Description: `Specifies the reservations that this instance can consume from.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"ANY_RESERVATION", "SPECIFIC_RESERVATION", "NO_RESERVATION"}, false), + Description: `The type of reservation from which this instance can consume resources.`, + }, + + "specific_reservation": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ForceNew: true, + Description: `Specifies the label selector for the reservation to use.`, + + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Corresponds to the label key of a reservation resource. To target a SPECIFIC_RESERVATION by name, specify compute.googleapis.com/reservation-name as the key and specify the name of your reservation as the only value.`, + }, + "values": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Required: true, + ForceNew: true, + Description: `Corresponds to the label values of a reservation resource.`, + }, + }, + }, + }, + }, + }, + }, + + "key_revocation_action_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"STOP", "NONE", ""}, false), + Description: `Action to be taken when a customer's encryption key is revoked. 
Supports "STOP" and "NONE", with "NONE" being the default.`, + }, + + "instance_encryption_key": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ForceNew: true, + Description: `Encryption key used to provide data encryption on the given instance.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "kms_key_self_link": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, + Computed: true, + Description: `The self link of the encryption key that is stored in Google Cloud KMS.`, + }, + + "kms_key_service_account": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used.`, + }, + + "sha256": { + Type: schema.TypeString, + Computed: true, + Description: `The SHA256 hash of the customer's encryption key.`, + }, + }, + }, + }, + }, + UseJSONNumber: true, + } +} + func ResourceConverterComputeInstance() cai.ResourceConverter { return cai.ResourceConverter{ Convert: GetComputeInstanceAndDisksCaiObjects, diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go index a5eb07c5047a..32302a04a068 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go @@ -5,16 +5,121 @@ import ( "strconv" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" 
"github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/verify" "google.golang.org/api/cloudbilling/v1" "google.golang.org/api/cloudresourcemanager/v1" ) +func ParseFolderId(v interface{}) string { + folderId := v.(string) + if strings.HasPrefix(folderId, "folders/") { + return folderId[8:] + } + return folderId +} + +// ResourceGoogleProject returns a *schema.Resource that allows a customer +// to declare a Google Cloud Project resource. +func ResourceGoogleProject() *schema.Resource { + return &schema.Resource{ + SchemaVersion: 1, + + Schema: map[string]*schema.Schema{ + "project_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: verify.ValidateProjectID(), + Description: `The project ID. Changing this forces a new project to be created.`, + }, + "deletion_policy": { + Type: schema.TypeString, + Optional: true, + Default: "PREVENT", + Description: `The deletion policy for the Project. Setting PREVENT will protect the project against any destroy actions caused by a terraform apply or terraform destroy. Setting ABANDON allows the resource + to be abandoned rather than deleted. Possible values are: "PREVENT", "ABANDON", "DELETE"`, + ValidateFunc: validation.StringInSlice([]string{"PREVENT", "ABANDON", "DELETE"}, false), + }, + "auto_create_network": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: `Create the 'default' network automatically. Default true. If set to false, the default network will be deleted. 
Note that, for quota purposes, you will still need to have 1 network slot available to create the project successfully, even if you set auto_create_network to false, since the network will exist momentarily.`, + }, + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: verify.ValidateProjectName(), + Description: `The display name of the project.`, + }, + "org_id": { + Type: schema.TypeString, + Optional: true, + ConflictsWith: []string{"folder_id"}, + Description: `The numeric ID of the organization this project belongs to. Changing this forces a new project to be created. Only one of org_id or folder_id may be specified. If the org_id is specified then the project is created at the top level. Changing this forces the project to be migrated to the newly specified organization.`, + }, + "folder_id": { + Type: schema.TypeString, + Optional: true, + StateFunc: ParseFolderId, + ConflictsWith: []string{"org_id"}, + Description: `The numeric ID of the folder this project should be created under. Only one of org_id or folder_id may be specified. If the folder_id is specified, then the project is created under the specified folder. Changing this forces the project to be migrated to the newly specified folder.`, + }, + "number": { + Type: schema.TypeString, + Computed: true, + Description: `The numeric identifier of the project.`, + }, + "billing_account": { + Type: schema.TypeString, + Optional: true, + Description: `The alphanumeric ID of the billing account this project belongs to. The user or service account performing this operation with Terraform must have Billing Account Administrator privileges (roles/billing.admin) in the organization. See Google Cloud Billing API Access Control for more details.`, + }, + "labels": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `A set of key/value label pairs to assign to the project. 
+ + **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. + Please refer to the field 'effective_labels' for all of the labels present on the resource.`, + }, + + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `(ReadOnly) The combination of labels configured directly on the resource and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "tags": { + Type: schema.TypeMap, + Optional: true, + ForceNew: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored when empty. This field is only set at create time and modifying this field after creation will trigger recreation. 
To apply tags to an existing resource, see the google_tags_tag_value resource.`, + }, + }, + UseJSONNumber: true, + } +} + func ResourceConverterProject() cai.ResourceConverter { return cai.ResourceConverter{ Convert: GetProjectAndBillingInfoCaiObjects, From 2a3bfdcd13ce01b23e4de68ef75892d30a23e792 Mon Sep 17 00:00:00 2001 From: James Cherry Date: Wed, 7 May 2025 15:28:41 -0400 Subject: [PATCH 064/884] Remove hard-coded GKE version for flexStart test (#13843) --- .../resource_container_node_pool_test.go.tmpl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index bd76636bf5d3..2b81ea252b5f 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -4580,13 +4580,20 @@ func TestAccContainerNodePool_withFlexStart(t *testing.T) { func testAccContainerNodePool_withFlexStart(clusterName, np, networkName, subnetworkName string) string { return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + location = "us-central1-a" +} + resource "google_container_cluster" "cluster" { - min_master_version = "1.32.3-gke.1717000" - name = "%s" location = "us-central1-a" initial_node_count = 1 deletion_protection = false + + min_master_version = data.google_container_engine_versions.central1a.release_channel_latest_version["RAPID"] + release_channel { + channel = "RAPID" + } network = "%s" subnetwork = "%s" } From cf2f2a8d44036f54e43477d8ea520777a7a6141e Mon Sep 17 00:00:00 2001 From: Hengfeng Li Date: Thu, 8 May 2025 05:32:10 +1000 Subject: [PATCH 065/884] Add default_time_zone to Spanner database resource (#13762) --- mmv1/products/spanner/Database.yaml | 5 + .../examples/spanner_database_basic.tf.tmpl | 1 + 
.../post_create/spanner_database.go.tmpl | 15 ++- .../resource_spanner_database_test.go.tmpl | 102 ++++++++++++++++++ 4 files changed, 122 insertions(+), 1 deletion(-) diff --git a/mmv1/products/spanner/Database.yaml b/mmv1/products/spanner/Database.yaml index 6d4a7edf357e..dfa838d702eb 100644 --- a/mmv1/products/spanner/Database.yaml +++ b/mmv1/products/spanner/Database.yaml @@ -81,6 +81,11 @@ virtual_fields: When the field is set to false, deleting the database is allowed. type: Boolean default_value: true + - name: 'default_time_zone' + description: | + The default time zone for the database. The default time zone must be a valid name + from the tz database. Default value is "America/Los_angeles". + type: String parameters: - name: 'instance' type: ResourceRef diff --git a/mmv1/templates/terraform/examples/spanner_database_basic.tf.tmpl b/mmv1/templates/terraform/examples/spanner_database_basic.tf.tmpl index 069aae72dcdc..f4134464c82a 100644 --- a/mmv1/templates/terraform/examples/spanner_database_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/spanner_database_basic.tf.tmpl @@ -8,6 +8,7 @@ resource "google_spanner_database" "database" { instance = google_spanner_instance.main.name name = "{{index $.Vars "database_name"}}" version_retention_period = "3d" + default_time_zone = "UTC" ddl = [ "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", diff --git a/mmv1/templates/terraform/post_create/spanner_database.go.tmpl b/mmv1/templates/terraform/post_create/spanner_database.go.tmpl index 7bb30b6427c5..160e53566c3f 100644 --- a/mmv1/templates/terraform/post_create/spanner_database.go.tmpl +++ b/mmv1/templates/terraform/post_create/spanner_database.go.tmpl @@ -3,16 +3,29 @@ // `terraform apply` twice to get their desired outcome, the provider does not set // `extraStatements` in the call to the `create` endpoint and all DDL (other than // ) is run post-create, by calling the `updateDdl` endpoint 
+defaultTimeZoneObj, defaultTimeZoneOk := d.GetOk("default_time_zone") +defaultTimeZone := defaultTimeZoneObj.(string) retention, retentionPeriodOk := d.GetOk("version_retention_period") retentionPeriod := retention.(string) ddl, ddlOk := d.GetOk("ddl") ddlStatements := ddl.([]interface{}) -if retentionPeriodOk || ddlOk { +if defaultTimeZoneOk || retentionPeriodOk || ddlOk { obj := make(map[string]interface{}) updateDdls := []string{} + // We need to put setting default time zone as first because it requires an empty + // database where tables do not exist. + if defaultTimeZoneOk { + dbName := d.Get("name") + timeZoneDdl := fmt.Sprintf("ALTER DATABASE `%s` SET OPTIONS (default_time_zone=\"%s\")", dbName, defaultTimeZone) + if dialect, ok := d.GetOk("database_dialect"); ok && dialect == "POSTGRESQL" { + timeZoneDdl = fmt.Sprintf("ALTER DATABASE \"%s\" SET spanner.default_time_zone TO \"%s\"", dbName, defaultTimeZone) + } + updateDdls = append(updateDdls, timeZoneDdl) + } + if ddlOk { for i := 0; i < len(ddlStatements); i++ { if ddlStatements[i] != nil { diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl index 5d438850a80a..ffd8cb5bed98 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl @@ -388,6 +388,108 @@ resource "google_spanner_database" "basic" { `, instanceName, instanceName, databaseName, databaseName, databaseName) } +func TestAccSpannerDatabase_defaultTimeZone(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(t, 10) + instanceName := fmt.Sprintf("tf-test-%s", rnd) + databaseName := fmt.Sprintf("tfgen_%s", rnd) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + // Test creating a database with `default_time_zone` set + Config: testAccSpannerDatabase_defaultTimeZone(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "default_time_zone", "UTC"), + ), + }, + { + // Test removing `default_time_zone` and setting default time zone to a new value with a DDL statement in `ddl` + Config: testAccSpannerDatabase_defaultTimeZoneUpdate1(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "default_time_zone", "UTC"), + ), + }, + { + // Test that adding `default_time_zone`, regardless of any previous statements in `ddl` + Config: testAccSpannerDatabase_defaultTimeZoneUpdate2(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "default_time_zone", "Australia/Sydney"), + ), + }, + }, + }) +} + +func testAccSpannerDatabase_defaultTimeZone(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + default_time_zone = "UTC" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_defaultTimeZoneUpdate1(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource 
"google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + default_time_zone = "UTC" + // Change : remove the table. + ddl = [] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_defaultTimeZoneUpdate2(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + default_time_zone = "Australia/Sydney" // Change : updated default_time_zone argument + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + func TestAccSpannerDatabase_enableDropProtection(t *testing.T) { t.Parallel() From 150f4cc7db696ba9abbaec932fbb69a5da86802b Mon Sep 17 00:00:00 2001 From: Jeremie Stordeur Date: Wed, 7 May 2025 16:07:10 -0400 Subject: [PATCH 066/884] Update RegionalEndpoints examples to use the public hostname (#13872) --- mmv1/products/networkconnectivity/RegionalEndpoint.yaml | 2 +- ...twork_connectivity_regional_endpoint_global_access.tf.tmpl | 2 +- ...ork_connectivity_regional_endpoint_regional_access.tf.tmpl | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mmv1/products/networkconnectivity/RegionalEndpoint.yaml b/mmv1/products/networkconnectivity/RegionalEndpoint.yaml index 5e809e34fcf1..fd1765d57014 100644 --- a/mmv1/products/networkconnectivity/RegionalEndpoint.yaml +++ b/mmv1/products/networkconnectivity/RegionalEndpoint.yaml @@ -89,7 +89,7 @@ properties: - name: 'targetGoogleApi' type: String description: | - The service endpoint this private regional 
endpoint connects to. Format: `{apiname}.{region}.p.rep.googleapis.com` Example: \"cloudkms.us-central1.p.rep.googleapis.com\". + The service endpoint this private regional endpoint connects to. Format: `{apiname}.{region}.rep.googleapis.com` Example: \"cloudkms.us-central1.rep.googleapis.com\". required: true - name: 'network' type: String diff --git a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl index 0e9d1a71b7e9..d9e9d78f916f 100644 --- a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl @@ -13,7 +13,7 @@ resource "google_compute_subnetwork" "my_subnetwork" { resource "google_network_connectivity_regional_endpoint" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "rep_name"}}" location = "us-central1" - target_google_api = "storage.us-central1.p.rep.googleapis.com" + target_google_api = "storage.us-central1.rep.googleapis.com" access_type = "GLOBAL" address = "192.168.0.4" network = google_compute_network.my_network.id diff --git a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl index a39621ba2ccb..d9c6b4200a38 100644 --- a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl @@ -13,11 +13,11 @@ resource "google_compute_subnetwork" "my_subnetwork" { resource "google_network_connectivity_regional_endpoint" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "rep_name"}}" location = "us-central1" - target_google_api = "storage.us-central1.p.rep.googleapis.com" + target_google_api = 
"storage.us-central1.rep.googleapis.com" access_type = "REGIONAL" address = "192.168.0.5" network = google_compute_network.my_network.id subnetwork = google_compute_subnetwork.my_subnetwork.id - description = "My RegionalEndpoint targeting Google API storage.us-central1.p.rep.googleapis.com" + description = "My RegionalEndpoint targeting Google API storage.us-central1.rep.googleapis.com" labels = {env = "default"} } From f6f3a14b4cf03200f8c99ffe64d46f4b1ac18917 Mon Sep 17 00:00:00 2001 From: abhilashsamgoogle Date: Wed, 7 May 2025 13:55:39 -0700 Subject: [PATCH 067/884] [AlloyDB] Terraform support for PSC service automation (#13832) --- mmv1/products/alloydb/Instance.yaml | 36 +++++++++ .../alloydb/resource_alloydb_instance_test.go | 79 +++++++++++++++++++ 2 files changed, 115 insertions(+) diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index 4c4b3e09b8e4..08c12b14c454 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -361,6 +361,42 @@ properties: The network attachment resource created in the consumer project to which the PSC interface will be linked. This is of the format: "projects/${CONSUMER_PROJECT}/regions/${REGION}/networkAttachments/${NETWORK_ATTACHMENT_NAME}". The network attachment must be in the same region as the instance. + - name: 'pscAutoConnections' + type: Array + description: | + Configurations for setting up PSC service automation. + item_type: + type: NestedObject + properties: + - name: 'consumerProject' + type: String + description: | + The consumer project to which the PSC service automation endpoint will + be created. The API expects the consumer project to be the project ID( + and not the project number). + - name: 'consumerNetwork' + type: String + description: | + The consumer network for the PSC service automation, example: + "projects/vpc-host-project/global/networks/default". 
+ The consumer network might be hosted a different project than the + consumer project. The API expects the consumer project specified to be + the project ID (and not the project number) + - name: 'ipAddress' + type: String + description: | + The IP address of the PSC service automation endpoint. + output: true + - name: 'status' + type: String + description: | + The status of the PSC service automation connection. + output: true + - name: 'consumerNetworkStatus' + type: String + description: | + The status of the service connection policy. + output: true - name: 'networkConfig' type: NestedObject default_from_api: true diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index a6a7d1050fff..e7ae2142d3ba 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -910,3 +910,82 @@ func TestAccAlloydbInstance_updateInstanceWithPscInterfaceConfigs(t *testing.T) }, }) } + +func TestAccAlloydbInstance_updatePscAutoConnections(t *testing.T) { + t.Parallel() + + networkName := acctest.BootstrapSharedTestNetwork(t, "tf-test-alloydb-network-psc") + random_suffix := acctest.RandString(t, 10) + context := map[string]interface{}{ + "network_name": networkName, + "random_suffix": random_suffix, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckAlloydbInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccAlloydbInstance_pscAutoConnections(context), + }, + { + Config: testAccAlloydbInstance_updatePscAutoConnections(context), + }, + }, + }) +} + +func testAccAlloydbInstance_pscAutoConnections(context map[string]interface{}) string { + return acctest.Nprintf(` +resource 
"google_alloydb_instance" "default" { + cluster = google_alloydb_cluster.default.name + instance_id = "tf-test-alloydb-instance%{random_suffix}" + instance_type = "PRIMARY" + machine_config { + cpu_count = 2 + } + psc_instance_config { + psc_auto_connections { + consumer_project = "${data.google_project.project.project_id}" + consumer_network = "projects/${data.google_project.project.project_id}/global/networks/%{network_name}" + } + } +} +resource "google_alloydb_cluster" "default" { + cluster_id = "tf-test-alloydb-cluster%{random_suffix}" + location = "us-central1" + psc_config { + psc_enabled = true + } + initial_user { + password = "tf-test-alloydb-cluster%{random_suffix}" + } +} +data "google_project" "project" {} +`, context) +} + +func testAccAlloydbInstance_updatePscAutoConnections(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_alloydb_instance" "default" { + cluster = google_alloydb_cluster.default.name + instance_id = "tf-test-alloydb-instance%{random_suffix}" + instance_type = "PRIMARY" + machine_config { + cpu_count = 2 + } +} +resource "google_alloydb_cluster" "default" { + cluster_id = "tf-test-alloydb-cluster%{random_suffix}" + location = "us-central1" + psc_config { + psc_enabled = true + } + initial_user { + password = "tf-test-alloydb-cluster%{random_suffix}" + } +} +data "google_project" "project" {} +`, context) +} From 5be9b3e0b39b711add9f5744bb6b64deb8baf82d Mon Sep 17 00:00:00 2001 From: ArtoriaRen Date: Wed, 7 May 2025 17:10:44 -0400 Subject: [PATCH 068/884] Add knowledge_connector_settings to dialogflow_cx_flow/page (#13792) --- mmv1/products/dialogflowcx/Flow.yaml | 388 ++++++++++++++++++ mmv1/products/dialogflowcx/Page.yaml | 388 ++++++++++++++++++ .../examples/dialogflowcx_flow_full.tf.tmpl | 132 +++++- .../examples/dialogflowcx_page_full.tf.tmpl | 126 +++++- .../resource_dialogflowcx_flow_test.go | 29 ++ .../resource_dialogflowcx_page_test.go | 29 ++ 6 files changed, 1090 insertions(+), 2 
deletions(-) diff --git a/mmv1/products/dialogflowcx/Flow.yaml b/mmv1/products/dialogflowcx/Flow.yaml index 05d7643c9c54..67933f2483da 100644 --- a/mmv1/products/dialogflowcx/Flow.yaml +++ b/mmv1/products/dialogflowcx/Flow.yaml @@ -634,3 +634,391 @@ properties: type: Boolean description: | Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. + - name: 'knowledgeConnectorSettings' + type: NestedObject + description: | + Knowledge connector configuration. + properties: + - name: 'enabled' + type: Boolean + description: | + Whether Knowledge Connector is enabled or not. + - name: 'triggerFulfillment' + type: NestedObject + description: | + The fulfillment to be triggered. + When the answers from the Knowledge Connector are selected by Dialogflow, you can utilize the request scoped parameter $request.knowledge.answers (contains up to the 5 highest confidence answers) and $request.knowledge.questions (contains the corresponding questions) to construct the fulfillment. + properties: + - name: 'messages' + type: Array + description: | + The list of rich message responses to present to the user. + item_type: + type: NestedObject + properties: + # 'responseType' is ignored when creating/updating resources, so we skip this field. See https://github.com/GoogleCloudPlatform/magic-modules/pull/8757/commits/65ad64bd008c60498d9b27b767dc7bc664d42c0b. + - name: 'channel' + type: String + description: | + The channel which the response is associated with. Clients can specify the channel via QueryParameters.channel, and only associated channel response will be returned. + - name: 'text' + type: NestedObject + description: | + The text response message. 
+ This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'text' + type: Array + description: | + A collection of text response variants. If multiple variants are defined, only one text response variant is returned at runtime. + required: true + item_type: + type: String + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. + output: true + # This can be an arbitrary json blob, so we use a string instead of a NestedObject. + - name: 'payload' + type: String + description: | + Returns a response containing a custom, platform-specific payload. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'conversationSuccess' + type: NestedObject + description: | + Indicates that the conversation succeeded, i.e., the bot handled the issue that the customer talked to it about. + Dialogflow only uses this to determine which conversations should be counted as successful and doesn't process the metadata in this message in any way. Note that Dialogflow also considers conversations that get to the conversation end page as successful even if they don't return ConversationSuccess. 
+ You may set this, for example: + * In the entryFulfillment of a Page if entering the page indicates that the conversation succeeded. + * In a webhook response when you determine that you handled the customer issue. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + # This can be an arbitrary json blob, so we use a string instead of a NestedObject. + - name: 'metadata' + type: String + description: | + Custom metadata. Dialogflow doesn't impose any structure on this. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'outputAudioText' + type: NestedObject + description: | + A text or ssml response that is preferentially used for TTS output audio synthesis, as described in the comment on the ResponseMessage message. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. + output: true + - name: 'text' + type: String + description: | + The raw text to be synthesized. + This field is part of a union field `source`: Only one of `text` or `ssml` may be set. + - name: 'ssml' + type: String + description: | + The SSML text to be synthesized. For more information, see SSML. 
+ This field is part of a union field `source`: Only one of `text` or `ssml` may be set. + - name: 'liveAgentHandoff' + type: NestedObject + description: | + Indicates that the conversation should be handed off to a live agent. + Dialogflow only uses this to determine which conversations were handed off to a human agent for measurement purposes. What else to do with this signal is up to you and your handoff procedures. + You may set this, for example: + * In the entryFulfillment of a Page if entering the page indicates something went extremely wrong in the conversation. + * In a webhook response when you determine that the customer issue can only be handled by a human. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + # This can be an arbitrary json blob, so we use a string instead of a NestedObject. + - name: 'metadata' + type: String + description: | + Custom metadata. Dialogflow doesn't impose any structure on this. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'endInteraction' + type: NestedObject + description: | + This type has no fields. + Indicates that interaction with the Dialogflow agent has ended. This message is generated by Dialogflow only and not supposed to be defined by the user. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. 
+ output: true + allow_empty_object: true + properties: [] # Meant to be an empty object with no properties. + - name: 'playAudio' + type: NestedObject + description: | + Specifies an audio clip to be played by the client as part of the response. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'audioUri' + type: String + description: | + URI of the audio clip. Dialogflow does not impose any validation on this value. It is specific to the client that reads it. + required: true + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. + output: true + - name: 'mixedAudio' + type: NestedObject + description: | + Represents an audio message that is composed of both segments synthesized from the Dialogflow agent prompts and ones hosted externally at the specified URIs. The external URIs are specified via playAudio. This message is generated by Dialogflow only and not supposed to be defined by the user. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + output: true + properties: + - name: 'segments' + type: Array + description: | + Segments this audio response is composed of. + item_type: + type: NestedObject + properties: + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this segment can be interrupted by the end user's speech and the client should then start the next Dialogflow request. 
+ output: true + - name: 'audio' + type: String + description: | + Raw audio synthesized from the Dialogflow agent's response using the output config specified in the request. + A base64-encoded string. + This field is part of a union field `content`: Only one of `audio` or `uri` may be set. + - name: 'uri' + type: String + description: | + Client-specific URI that points to an audio clip accessible to the client. Dialogflow does not impose any validation on it. + This field is part of a union field `content`: Only one of `audio` or `uri` may be set. + - name: 'telephonyTransferCall' + type: NestedObject + description: | + Represents the signal that tells the client to transfer the phone call connected to the agent to a third-party endpoint. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'phoneNumber' + type: String + description: | + Transfer the call to a phone number in E.164 format. + required: true + - name: 'knowledgeInfoCard' + type: NestedObject + description: | + This type has no fields. + Represents info card response. If the response contains generative knowledge prediction, Dialogflow will return a payload with Infobot Messenger compatible info card. + Otherwise, the info card response is skipped. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + allow_empty_object: true + send_empty_value: true + properties: [] # Meant to be an empty object with no properties. + # Although ResponseMessage has a field named "toolCall", we can't include it here because it references the Tool resource, which hasn't been available on Terraform. 
+ # See https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3beta1/ResponseMessage + - name: 'webhook' + type: String + description: | + The webhook to call. Format: projects//locations//agents//webhooks/. + - name: 'returnPartialResponses' + type: Boolean + description: | + Whether Dialogflow should return currently queued fulfillment response messages in streaming APIs. If a webhook is specified, it happens before Dialogflow invokes webhook. Warning: 1) This flag only affects streaming API. Responses are still queued and returned once in non-streaming API. 2) The flag can be enabled in any fulfillment but only the first 3 partial responses will be returned. You may only want to apply it to fulfillments that have slow webhooks. + - name: 'tag' + type: String + description: | + The tag used by the webhook to identify which fulfillment is being called. This field is required if webhook is specified. + - name: 'setParameterActions' + type: Array + description: | + Set parameter values before executing the webhook. + item_type: + type: NestedObject + properties: + - name: 'parameter' + type: String + description: | + Display name of the parameter. + - name: 'value' + type: String + description: | + The new JSON-encoded value of the parameter. A null value clears the parameter. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'conditionalCases' + type: Array + description: | + Conditional cases for this fulfillment. + item_type: + type: NestedObject + properties: + - name: 'cases' + type: String + description: | + A JSON encoded list of cascading if-else conditions. Cases are mutually exclusive. The first one with a matching condition is selected, all the rest ignored. 
+ See [Case](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/Fulfillment#case) for the schema. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'advancedSettings' + type: NestedObject + description: | + Hierarchical advanced settings for agent/flow/page/fulfillment/parameter. Settings exposed at lower level overrides the settings exposed at higher level. Overriding occurs at the sub-setting level. For example, the playbackInterruptionSettings at fulfillment level only overrides the playbackInterruptionSettings at the agent level, leaving other settings at the agent level unchanged. + DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. + Hierarchy: Agent->Flow->Page->Fulfillment/Parameter. + properties: + # This field currently can't be set. The API is not including the value in the API response, causing Acceptance Test to fail. + # - name: 'audioExportGcsDestination' + # type: NestedObject + # description: | + # If present, incoming audio is exported by Dialogflow to the configured Google Cloud Storage destination. Exposed at the following levels: + # * Agent level + # * Flow level + # properties: + # - name: 'uri' + # type: String + # description: | + # The Google Cloud Storage URI for the exported objects. A URI is of the form: gs://bucket/object-name-or-prefix Whether a full object name, or just a prefix, its usage depends on the Dialogflow operation. + # required: true + - name: 'speechSettings' + type: NestedObject + description: | + Settings for speech to text detection. 
Exposed at the following levels: + * Agent level + * Flow level + * Page level + * Parameter level + properties: + - name: 'endpointerSensitivity' + type: Integer + description: | + Sensitivity of the speech model that detects the end of speech. Scale from 0 to 100. + - name: 'noSpeechTimeout' + type: String + description: | + Timeout before detecting no speech. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". + - name: 'useTimeoutBasedEndpointing' + type: Boolean + description: | + Use timeout based endpointing, interpreting endpointer sensitivity as seconds of timeout value. + - name: 'models' + type: KeyValuePairs + description: | + Mapping from language to Speech-to-Text model. The mapped Speech-to-Text model will be selected for requests from its corresponding language. For more information, see [Speech models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models). + An object containing a list of **"key": value** pairs. Example: **{ "name": "wrench", "mass": "1.3kg", "count": "3" }**. + - name: 'dtmfSettings' + type: NestedObject + description: | + Define behaviors for DTMF (dual tone multi frequency). DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. Exposed at the following levels: + * Agent level + * Flow level + * Page level + * Parameter level + properties: + - name: 'enabled' + type: Boolean + description: | + If true, incoming audio is processed for DTMF (dual tone multi frequency) events. For example, if the caller presses a button on their telephone keypad and DTMF processing is enabled, Dialogflow will detect the event (e.g. a "3" was pressed) in the incoming audio and pass the event to the bot to drive business logic (e.g. when 3 is pressed, return the account balance). + - name: 'maxDigits' + type: Integer + description: | + Max length of DTMF digits. 
+ - name: 'finishDigit' + type: String + description: | + The digit that terminates a DTMF digit sequence. + - name: 'interdigitTimeoutDuration' + type: String + description: | + Interdigit timeout setting for matching dtmf input to regex. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". + - name: 'endpointingTimeoutDuration' + type: String + description: | + Endpoint timeout setting for matching dtmf input to regex. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". + - name: 'loggingSettings' + type: NestedObject + # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed + ignore_read: true + description: | + Settings for logging. Settings for Dialogflow History, Contact Center messages, StackDriver logs, and speech logging. Exposed at the following levels: + * Agent level + properties: + - name: 'enableStackdriverLogging' + type: Boolean + description: | + Enables Google Cloud Logging. + - name: 'enableInteractionLogging' + type: Boolean + description: | + Enables DF Interaction logging. + - name: 'enableConsentBasedRedaction' + type: Boolean + description: | + Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. + - name: 'enableGenerativeFallback' + type: Boolean + description: | + If the flag is true, the agent will utilize LLM to generate a text response. If LLM generation fails, the defined responses in the fulfillment will be respected. This flag is only useful for fulfillments associated with no-match event handlers. + - name: 'dataStoreConnections' + type: Array + description: | + Optional. List of related data store connections. + item_type: + type: NestedObject + properties: + - name: 'dataStoreType' + type: Enum + description: | + The type of the connected data store. 
+ * PUBLIC_WEB: A data store that contains public web content. + * UNSTRUCTURED: A data store that contains unstructured private data. + * STRUCTURED: A data store that contains structured data (for example FAQ). + enum_values: + - 'PUBLIC_WEB' + - 'UNSTRUCTURED' + - 'STRUCTURED' + - name: 'dataStore' + type: String + description: | + The full name of the referenced data store. Formats: projects/{project}/locations/{location}/collections/{collection}/dataStores/{dataStore} projects/{project}/locations/{location}/dataStores/{dataStore} + - name: 'documentProcessingMode' + type: Enum + description: | + The document processing mode for the data store connection. Should only be set for PUBLIC_WEB and UNSTRUCTURED data stores. If not set it is considered as DOCUMENTS, as this is the legacy mode. + * DOCUMENTS: Documents are processed as documents. + * CHUNKS: Documents are converted to chunks. + enum_values: + - 'DOCUMENTS' + - 'CHUNKS' + - name: 'targetPage' + type: String + description: | + The target page to transition to. Format: projects//locations//agents//flows//pages/. + The page must be in the same host flow (the flow that owns this `KnowledgeConnectorSettings`). + This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. + - name: 'targetFlow' + type: String + description: | + The target flow to transition to. Format: projects//locations//agents//flows/. + This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. diff --git a/mmv1/products/dialogflowcx/Page.yaml b/mmv1/products/dialogflowcx/Page.yaml index 4166d047f098..d6980fec118c 100644 --- a/mmv1/products/dialogflowcx/Page.yaml +++ b/mmv1/products/dialogflowcx/Page.yaml @@ -1157,3 +1157,391 @@ properties: type: String description: | The digit that terminates a DTMF digit sequence. + - name: 'knowledgeConnectorSettings' + type: NestedObject + description: | + Knowledge connector configuration. 
+ properties: + - name: 'enabled' + type: Boolean + description: | + Whether Knowledge Connector is enabled or not. + - name: 'triggerFulfillment' + type: NestedObject + description: | + The fulfillment to be triggered. + When the answers from the Knowledge Connector are selected by Dialogflow, you can utilize the request scoped parameter $request.knowledge.answers (contains up to the 5 highest confidence answers) and $request.knowledge.questions (contains the corresponding questions) to construct the fulfillment. + properties: + - name: 'messages' + type: Array + description: | + The list of rich message responses to present to the user. + item_type: + type: NestedObject + properties: + # 'responseType' is ignored when creating/updating resources, so we skip this field. See https://github.com/GoogleCloudPlatform/magic-modules/pull/8757/commits/65ad64bd008c60498d9b27b767dc7bc664d42c0b. + - name: 'channel' + type: String + description: | + The channel which the response is associated with. Clients can specify the channel via QueryParameters.channel, and only associated channel response will be returned. + - name: 'text' + type: NestedObject + description: | + The text response message. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'text' + type: Array + description: | + A collection of text response variants. If multiple variants are defined, only one text response variant is returned at runtime. + required: true + item_type: + type: String + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. 
+ output: true + # This can be an arbitrary json blob, so we use a string instead of a NestedObject. + - name: 'payload' + type: String + description: | + Returns a response containing a custom, platform-specific payload. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'conversationSuccess' + type: NestedObject + description: | + Indicates that the conversation succeeded, i.e., the bot handled the issue that the customer talked to it about. + Dialogflow only uses this to determine which conversations should be counted as successful and doesn't process the metadata in this message in any way. Note that Dialogflow also considers conversations that get to the conversation end page as successful even if they don't return ConversationSuccess. + You may set this, for example: + * In the entryFulfillment of a Page if entering the page indicates that the conversation succeeded. + * In a webhook response when you determine that you handled the customer issue. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + # This can be an arbitrary json blob, so we use a string instead of a NestedObject. + - name: 'metadata' + type: String + description: | + Custom metadata. Dialogflow doesn't impose any structure on this. 
+ state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'outputAudioText' + type: NestedObject + description: | + A text or ssml response that is preferentially used for TTS output audio synthesis, as described in the comment on the ResponseMessage message. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. + output: true + - name: 'text' + type: String + description: | + The raw text to be synthesized. + This field is part of a union field `source`: Only one of `text` or `ssml` may be set. + - name: 'ssml' + type: String + description: | + The SSML text to be synthesized. For more information, see SSML. + This field is part of a union field `source`: Only one of `text` or `ssml` may be set. + - name: 'liveAgentHandoff' + type: NestedObject + description: | + Indicates that the conversation should be handed off to a live agent. + Dialogflow only uses this to determine which conversations were handed off to a human agent for measurement purposes. What else to do with this signal is up to you and your handoff procedures. + You may set this, for example: + * In the entryFulfillment of a Page if entering the page indicates something went extremely wrong in the conversation. + * In a webhook response when you determine that the customer issue can only be handled by a human. 
+ This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + # This can be an arbitrary json blob, so we use a string instead of a NestedObject. + - name: 'metadata' + type: String + description: | + Custom metadata. Dialogflow doesn't impose any structure on this. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'endInteraction' + type: NestedObject + description: | + This type has no fields. + Indicates that interaction with the Dialogflow agent has ended. This message is generated by Dialogflow only and not supposed to be defined by the user. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + output: true + allow_empty_object: true + properties: [] # Meant to be an empty object with no properties. + - name: 'playAudio' + type: NestedObject + description: | + Specifies an audio clip to be played by the client as part of the response. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'audioUri' + type: String + description: | + URI of the audio clip. Dialogflow does not impose any validation on this value. It is specific to the client that reads it. 
+ required: true + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. + output: true + - name: 'mixedAudio' + type: NestedObject + description: | + Represents an audio message that is composed of both segments synthesized from the Dialogflow agent prompts and ones hosted externally at the specified URIs. The external URIs are specified via playAudio. This message is generated by Dialogflow only and not supposed to be defined by the user. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + output: true + properties: + - name: 'segments' + type: Array + description: | + Segments this audio response is composed of. + item_type: + type: NestedObject + properties: + - name: 'allowPlaybackInterruption' + type: Boolean + description: | + Whether the playback of this segment can be interrupted by the end user's speech and the client should then start the next Dialogflow request. + output: true + - name: 'audio' + type: String + description: | + Raw audio synthesized from the Dialogflow agent's response using the output config specified in the request. + A base64-encoded string. + This field is part of a union field `content`: Only one of `audio` or `uri` may be set. + - name: 'uri' + type: String + description: | + Client-specific URI that points to an audio clip accessible to the client. Dialogflow does not impose any validation on it. + This field is part of a union field `content`: Only one of `audio` or `uri` may be set. + - name: 'telephonyTransferCall' + type: NestedObject + description: | + Represents the signal that tells the client to transfer the phone call connected to the agent to a third-party endpoint. 
+ This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + properties: + - name: 'phoneNumber' + type: String + description: | + Transfer the call to a phone number in E.164 format. + required: true + - name: 'knowledgeInfoCard' + type: NestedObject + description: | + This type has no fields. + Represents info card response. If the response contains generative knowledge prediction, Dialogflow will return a payload with Infobot Messenger compatible info card. + Otherwise, the info card response is skipped. + This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. + allow_empty_object: true + send_empty_value: true + properties: [] # Meant to be an empty object with no properties. + # Although ResponseMessage has a field named "toolCall", we can't include it here because it references the Tool resource, which hasn't been available on Terraform. + # See https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3beta1/ResponseMessage + - name: 'webhook' + type: String + description: | + The webhook to call. Format: projects//locations//agents//webhooks/. + - name: 'returnPartialResponses' + type: Boolean + description: | + Whether Dialogflow should return currently queued fulfillment response messages in streaming APIs. If a webhook is specified, it happens before Dialogflow invokes webhook. Warning: 1) This flag only affects streaming API. Responses are still queued and returned once in non-streaming API. 2) The flag can be enabled in any fulfillment but only the first 3 partial responses will be returned. You may only want to apply it to fulfillments that have slow webhooks. 
+ - name: 'tag' + type: String + description: | + The tag used by the webhook to identify which fulfillment is being called. This field is required if webhook is specified. + - name: 'setParameterActions' + type: Array + description: | + Set parameter values before executing the webhook. + item_type: + type: NestedObject + properties: + - name: 'parameter' + type: String + description: | + Display name of the parameter. + - name: 'value' + type: String + description: | + The new JSON-encoded value of the parameter. A null value clears the parameter. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'conditionalCases' + type: Array + description: | + Conditional cases for this fulfillment. + item_type: + type: NestedObject + properties: + - name: 'cases' + type: String + description: | + A JSON encoded list of cascading if-else conditions. Cases are mutually exclusive. The first one with a matching condition is selected, all the rest ignored. + See [Case](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/Fulfillment#case) for the schema. + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'advancedSettings' + type: NestedObject + description: | + Hierarchical advanced settings for agent/flow/page/fulfillment/parameter. Settings exposed at lower level overrides the settings exposed at higher level. Overriding occurs at the sub-setting level. 
For example, the playbackInterruptionSettings at fulfillment level only overrides the playbackInterruptionSettings at the agent level, leaving other settings at the agent level unchanged. + DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. + Hierarchy: Agent->Flow->Page->Fulfillment/Parameter. + properties: + # This field currently can't be set. The API is not including the value in the API response, causing Acceptance Test to fail. + # - name: 'audioExportGcsDestination' + # type: NestedObject + # description: | + # If present, incoming audio is exported by Dialogflow to the configured Google Cloud Storage destination. Exposed at the following levels: + # * Agent level + # * Flow level + # properties: + # - name: 'uri' + # type: String + # description: | + # The Google Cloud Storage URI for the exported objects. A URI is of the form: gs://bucket/object-name-or-prefix Whether a full object name, or just a prefix, its usage depends on the Dialogflow operation. + # required: true + - name: 'speechSettings' + type: NestedObject + description: | + Settings for speech to text detection. Exposed at the following levels: + * Agent level + * Flow level + * Page level + * Parameter level + properties: + - name: 'endpointerSensitivity' + type: Integer + description: | + Sensitivity of the speech model that detects the end of speech. Scale from 0 to 100. + - name: 'noSpeechTimeout' + type: String + description: | + Timeout before detecting no speech. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". + - name: 'useTimeoutBasedEndpointing' + type: Boolean + description: | + Use timeout based endpointing, interpreting endpointer sensitivity as seconds of timeout value. + - name: 'models' + type: KeyValuePairs + description: | + Mapping from language to Speech-to-Text model. 
The mapped Speech-to-Text model will be selected for requests from its corresponding language. For more information, see [Speech models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models).
+ An object containing a list of **"key": value** pairs. Example: **{ "name": "wrench", "mass": "1.3kg", "count": "3" }**.
+ - name: 'dtmfSettings'
+ type: NestedObject
+ description: |
+ Define behaviors for DTMF (dual tone multi frequency). DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. Exposed at the following levels:
+ * Agent level
+ * Flow level
+ * Page level
+ * Parameter level
+ properties:
+ - name: 'enabled'
+ type: Boolean
+ description: |
+ If true, incoming audio is processed for DTMF (dual tone multi frequency) events. For example, if the caller presses a button on their telephone keypad and DTMF processing is enabled, Dialogflow will detect the event (e.g. a "3" was pressed) in the incoming audio and pass the event to the bot to drive business logic (e.g. when 3 is pressed, return the account balance).
+ - name: 'maxDigits'
+ type: Integer
+ description: |
+ Max length of DTMF digits.
+ - name: 'finishDigit'
+ type: String
+ description: |
+ The digit that terminates a DTMF digit sequence.
+ - name: 'interdigitTimeoutDuration'
+ type: String
+ description: |
+ Interdigit timeout setting for matching dtmf input to regex.
+ A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s".
+ - name: 'endpointingTimeoutDuration'
+ type: String
+ description: |
+ Endpoint timeout setting for matching dtmf input to regex.
+ A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s".
+ - name: 'loggingSettings'
+ type: NestedObject
+ # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed
+ ignore_read: true
+ description: |
+ Settings for logging.
Settings for Dialogflow History, Contact Center messages, StackDriver logs, and speech logging. Exposed at the following levels: + * Agent level + properties: + - name: 'enableStackdriverLogging' + type: Boolean + description: | + Enables Google Cloud Logging. + - name: 'enableInteractionLogging' + type: Boolean + description: | + Enables DF Interaction logging. + - name: 'enableConsentBasedRedaction' + type: Boolean + description: | + Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. + - name: 'enableGenerativeFallback' + type: Boolean + description: | + If the flag is true, the agent will utilize LLM to generate a text response. If LLM generation fails, the defined responses in the fulfillment will be respected. This flag is only useful for fulfillments associated with no-match event handlers. + - name: 'dataStoreConnections' + type: Array + description: | + Optional. List of related data store connections. + item_type: + type: NestedObject + properties: + - name: 'dataStoreType' + type: Enum + description: | + The type of the connected data store. + * PUBLIC_WEB: A data store that contains public web content. + * UNSTRUCTURED: A data store that contains unstructured private data. + * STRUCTURED: A data store that contains structured data (for example FAQ). + enum_values: + - 'PUBLIC_WEB' + - 'UNSTRUCTURED' + - 'STRUCTURED' + - name: 'dataStore' + type: String + description: | + The full name of the referenced data store. Formats: projects/{project}/locations/{location}/collections/{collection}/dataStores/{dataStore} projects/{project}/locations/{location}/dataStores/{dataStore} + - name: 'documentProcessingMode' + type: Enum + description: | + The document processing mode for the data store connection. Should only be set for PUBLIC_WEB and UNSTRUCTURED data stores. 
If not set it is considered as DOCUMENTS, as this is the legacy mode. + * DOCUMENTS: Documents are processed as documents. + * CHUNKS: Documents are converted to chunks. + enum_values: + - 'DOCUMENTS' + - 'CHUNKS' + - name: 'targetPage' + type: String + description: | + The target page to transition to. Format: projects//locations//agents//flows//pages/. + The page must be in the same host flow (the flow that owns this `KnowledgeConnectorSettings`). + This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. + - name: 'targetFlow' + type: String + description: | + The target flow to transition to. Format: projects//locations//agents//flows/. + This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. diff --git a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl index 774be2521980..9eae630e10ea 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl @@ -294,4 +294,134 @@ resource "google_dialogflow_cx_flow" "{{$.PrimaryResourceId}}" { enable_consent_based_redaction = true } } -} + + knowledge_connector_settings { + enabled = true + trigger_fulfillment { + messages { + channel = "some-channel" + text { + text = ["information completed, navigating to page 2"] + } + } + messages { + payload = <Some example SSML XML + EOF + } + } + messages { + live_agent_handoff { + metadata = <Some example SSML XML + EOF + } + } + messages { + live_agent_handoff { + metadata = < Date: Wed, 7 May 2025 14:33:43 -0700 Subject: [PATCH 069/884] Corrected immutability docs (#13834) --- docs/content/reference/field.md | 21 ++++++++++---- docs/content/reference/resource.md | 5 ++-- mmv1/api/type.go | 44 ++++++++++++++++++++++++++---- 3 files changed, 56 insertions(+), 14 deletions(-) diff --git a/docs/content/reference/field.md 
b/docs/content/reference/field.md index dda2ff000d6f..5dabcf947cc9 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -27,10 +27,17 @@ is present in provider.yaml. Do not use if an ancestor field (or the overall resource) is already marked as beta-only. ### `immutable` -If true, the field (and any subfields) are considered immutable - that is, -only settable on create. If unset or false, the field is still considered -immutable if any ancestor field (or the overall resource) is immutable, -unless `update_url` is set. +If true, the field is considered immutable - that is, only settable on create. If +unset or false, the field is considered to support update-in-place. + +Immutability is not inherited from field to field: subfields are still considered to +be updatable in place by default. However, if the overall resource has +[`immutable`]({{< ref "/reference/resource#immutable" >}}) set to true, all its +fields are considered immutable. Individual fields can override this for themselves +and their subfields with [`update_url`]({{< ref "/reference/field#update_url" >}}) +if they have a custom update method in the API. + +See [Best practices: Immutable fields]({{< ref "/best-practices/immutable-fields/" >}}) for more information. Example: @@ -40,8 +47,10 @@ immutable: true ### `update_url` If set, changes to the field's value trigger a separate call to a specific -API method for updating the field's value. The field is not considered -immutable even if an ancestor field (or the overall resource) is immutable. +API method for updating the field's value. Even if the overall resource is marked +immutable, the field and its subfields are not considered immutable unless explicitly +marked as such. + Terraform field names enclosed in double curly braces are replaced with the field values from the resource at runtime. 
diff --git a/docs/content/reference/resource.md b/docs/content/reference/resource.md index eb86839f3bc2..921af666156a 100644 --- a/docs/content/reference/resource.md +++ b/docs/content/reference/resource.md @@ -97,8 +97,9 @@ self_link: 'projects/{{project}}/locations/{{location}}/resourcenames/{{name}}' ### `immutable` If true, the resource and all its fields are considered immutable - that is, -only creatable, not updatable. Individual fields can override this if they -have a custom update method in the API. +only creatable, not updatable. Individual fields can override this for themselves and +their subfields with [`update_url`]({{< ref "/reference/field#update_url" >}}) +if they have a custom update method in the API. See [Best practices: Immutable fields]({{< ref "/best-practices/immutable-fields/" >}}) for more information. diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 573730146017..6c0d5e15920e 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -1123,13 +1123,45 @@ func (t *Type) IsForceNew() bool { return t.Immutable } + // WriteOnly fields are never immutable + if t.WriteOnly { + return false + } + + // Output fields (except effective labels) can't be immutable + if t.Output && !t.IsA("KeyValueEffectiveLabels") { + return false + } + + // Explicitly-marked fields are always immutable + if t.Immutable { + return true + } + + // At this point the field can only be immutable if the resource is immutable. + if !t.ResourceMetadata.Immutable { + return false + } + + // If this field has an update_url set, it's not immutable. + if t.UpdateUrl != "" { + return false + } + + // If this is a top-level field, it inherits immutability from the resource. 
parent := t.Parent() - return !t.WriteOnly && (!t.Output || t.IsA("KeyValueEffectiveLabels")) && - (t.Immutable || - (t.ResourceMetadata.Immutable && t.UpdateUrl == "" && - (parent == nil || - (parent.IsForceNew() && - !(parent.FlattenObject && t.IsA("KeyValueLabels")))))) + if parent == nil { + return true + } + + // If the parent field _isn't_ immutable, that's inherited by this field. + if !parent.IsForceNew() { + return false + } + + // Otherwise, the field is immutable unless it's a KeyValueLabels field + // and the parent has FlattenObject set. + return !(parent.FlattenObject && t.IsA("KeyValueLabels")) } // Returns true if the type does not correspond to an API type From d4be85ee7f564903baa47b8d5fc490ea9f2e5dc0 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Wed, 7 May 2025 15:23:10 -0700 Subject: [PATCH 070/884] Fix typo around Windows Versions for GKE Clusters (#13876) --- .../terraform/website/docs/r/container_cluster.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 997ec0cfdd98..75272bf3e2db 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1012,7 +1012,7 @@ kubelet_config { * `linux_node_config` - (Optional) Parameters that can be configured on Linux nodes. Structure is [documented below](#nested_linux_node_config). * `windows_node_config` - (Optional) -Windows node configuration, currently supporting OSVersion [attribute](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/NodeConfig#osversion). The value must be one of [OS_VERSION_UNSPECIFIED, OS_VERSION_LTSC2019, OS_VERSION_LTSC2019]. 
For example: +Windows node configuration, currently supporting OSVersion [attribute](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/NodeConfig#osversion). The value must be one of [OS_VERSION_UNSPECIFIED, OS_VERSION_LTSC2019, OS_VERSION_LTSC2022]. For example: ```hcl windows_node_config { From 568706c7f8b96f2552d3909509ff424cc0a05957 Mon Sep 17 00:00:00 2001 From: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Date: Wed, 7 May 2025 16:05:24 -0700 Subject: [PATCH 071/884] Make Organization field in subjectConfig optional (#13827) --- .../privateca/CertificateAuthority.yaml | 16 ++++++++- ...certificate_authority_basic_no_org.tf.tmpl | 35 +++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl diff --git a/mmv1/products/privateca/CertificateAuthority.yaml b/mmv1/products/privateca/CertificateAuthority.yaml index c6a7e80aa27e..513ec47fe234 100644 --- a/mmv1/products/privateca/CertificateAuthority.yaml +++ b/mmv1/products/privateca/CertificateAuthority.yaml @@ -70,6 +70,21 @@ examples: 'deletion_protection': 'false' ignore_read_extra: - 'deletion_protection' + - name: 'privateca_certificate_authority_basic_no_org' + primary_resource_id: 'default' + vars: + certificate_authority_id: 'my-certificate-authority' + pool_name: 'ca-pool' + pool_location: 'us-central1' + deletion_protection: 'true' + test_vars_overrides: + 'pool_name': 'acctest.BootstrapSharedCaPoolInLocation(t, "us-central1")' + 'pool_location': '"us-central1"' + 'deletion_protection': 'false' + ignore_read_extra: + - 'deletion_protection' + # This example is meant to confirm that it's possible to make + # a CA without specifying the organization field - name: 'privateca_certificate_authority_subordinate' primary_resource_id: 'default' vars: @@ -565,7 +580,6 @@ properties: - name: 'organization' type: String description: The organization of the subject. 
- required: true immutable: true - name: 'organizationalUnit' type: String diff --git a/mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl b/mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl new file mode 100644 index 000000000000..593e303c4913 --- /dev/null +++ b/mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl @@ -0,0 +1,35 @@ +resource "google_privateca_certificate_authority" "{{$.PrimaryResourceId}}" { + // This example assumes this pool already exists. + // Pools cannot be deleted in normal test circumstances, so we depend on static pools + pool = "{{index $.Vars "pool_name"}}" + certificate_authority_id = "{{index $.Vars "certificate_authority_id"}}" + location = "{{index $.Vars "pool_location"}}" + deletion_protection = {{index $.Vars "deletion_protection"}} + config { + subject_config { + subject { + common_name = "my-certificate-authority" + } + } + x509_config { + ca_options { + # is_ca *MUST* be true for certificate authorities + is_ca = true + } + key_usage { + base_key_usage { + # cert_sign and crl_sign *MUST* be true for certificate authorities + cert_sign = true + crl_sign = true + } + extended_key_usage { + } + } + } + } + # valid for 10 years + lifetime = "${10 * 365 * 24 * 3600}s" + key_spec { + algorithm = "RSA_PKCS1_4096_SHA256" + } +} From 0f92d2503dfe7458b32044436d518fc5621e3c46 Mon Sep 17 00:00:00 2001 From: ma-g-22 <123424520+ma-g-22@users.noreply.github.com> Date: Wed, 7 May 2025 19:25:38 -0400 Subject: [PATCH 072/884] =?UTF-8?q?bigtable:=20add=20explicit=20disable=20?= =?UTF-8?q?automated=20backup=20on=20create=20for=20bigtabl=E2=80=A6=20(#1?= =?UTF-8?q?3773)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bigtable/resource_bigtable_table.go | 10 +++-- .../bigtable/resource_bigtable_table_test.go | 39 +++++++++++++++++++ .../docs/r/bigtable_table.html.markdown | 2 +- 3 files 
changed, 46 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index 1d60c80b40f0..afbaede5ed3a 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -154,7 +154,7 @@ func ResourceBigtableTable() *schema.Resource { }, }, }, - Description: `Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, omit this argument. To disable automated backup on an _existing_ table that has automated backup enabled, set both Retention Period and Frequency to "0". If this argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified.`, + Description: `Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, either omit the automated_backup_policy argument, or set both Retention Period and Frequency properties to "0". To disable automated backup on an _existing_ table that has automated backup enabled, set _both_ Retention Period and Frequency properties to "0". When updating an existing table, to modify the Retention Period or Frequency properties of the resource's automated backup policy, set the respective property to a non-zero value. 
If the automated_backup_policy argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified.`, }, }, UseJSONNumber: true, @@ -270,9 +270,11 @@ func resourceBigtableTableCreate(d *schema.ResourceData, meta interface{}) error if err != nil { return fmt.Errorf("Error parsing automated backup policy frequency: %s", err) } - tblConf.AutomatedBackupConfig = &bigtable.TableAutomatedBackupPolicy{ - RetentionPeriod: abpRetentionPeriod, - Frequency: abpFrequency, + if abpFrequency != 0 && abpRetentionPeriod != 0 { // if fields are zero this indicates disable-on-create + tblConf.AutomatedBackupConfig = &bigtable.TableAutomatedBackupPolicy{ + RetentionPeriod: abpRetentionPeriod, + Frequency: abpFrequency, + } } } } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go index 477dd587894a..20f8f4a66af8 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go @@ -532,6 +532,45 @@ func TestAccBigtableTable_automated_backups(t *testing.T) { }) } +func TestAccBigtableTable_automated_backups_explicitly_disabled_on_create(t *testing.T) { + // bigtable instance does not use the shared HTTP client, this test creates an instance + acctest.SkipIfVcr(t) + t.Parallel() + + instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + family := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigtableTableDestroyProducer(t), + Steps: []resource.TestStep{ + // Creating a table with automated backup explicitly disabled + { + Config: 
testAccBigtableTable_automated_backups(instanceName, tableName, "0", "0", family), + Check: resource.ComposeTestCheckFunc(verifyBigtableAutomatedBackupsEnablementState(t, false)), + }, + { + ResourceName: "google_bigtable_table.table", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"automated_backup_policy"}, // ImportStateVerify doesn't use CustomizeDiff function + }, + // it is possible to delete the table when automated backup is disabled + { + Config: testAccBigtableTable_destroyTable(instanceName), + }, + { + ResourceName: "google_bigtable_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, + }, + }, + }) +} + func TestAccBigtableTable_familyMany(t *testing.T) { // bigtable instance does not use the shared HTTP client, this test creates an instance acctest.SkipIfVcr(t) diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown index ccd76783e0d1..ef0f71085b91 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown @@ -100,7 +100,7 @@ to delete/recreate the entire `google_bigtable_table` resource. * `change_stream_retention` - (Optional) Duration to retain change stream data for the table. Set to 0 to disable. Must be between 1 and 7 days. -* `automated_backup_policy` - (Optional) Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, omit this argument. To disable automated backup on an _existing_ table that has automated backup enabled, set both Retention Period and Frequency to "0". If this argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified. 
+* `automated_backup_policy` - (Optional) Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, either omit the automated_backup_policy argument, or set both Retention Period and Frequency properties to "0". To disable automated backup on an _existing_ table that has automated backup enabled, set _both_ Retention Period and Frequency properties to "0". When updating an existing table, to modify the Retention Period or Frequency properties of the resource's automated backup policy, set the respective property to a non-zero value. If the automated_backup_policy argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified. ----- From 25c3d78a1068ea641a01275ae40e37094fc730cb Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Thu, 8 May 2025 19:47:59 +0530 Subject: [PATCH 073/884] Revert "Support for Tags in google_dataproc_metastore_service resource" (#13854) --- mmv1/products/metastore/Service.yaml | 8 --- ...ce_dataproc_metastore_service_test.go.tmpl | 60 +------------------ 2 files changed, 2 insertions(+), 66 deletions(-) diff --git a/mmv1/products/metastore/Service.yaml b/mmv1/products/metastore/Service.yaml index d10d902d95e5..bf19008718d4 100644 --- a/mmv1/products/metastore/Service.yaml +++ b/mmv1/products/metastore/Service.yaml @@ -520,11 +520,3 @@ properties: enum_values: - 'LEGACY' - 'JSON' - - name: 'tags' - type: KeyValuePairs - description: | - A map of resource manager tags. - Resource manager tag keys and values have the same definition as resource manager tags. - Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_value_id}. 
- immutable: true - ignore_read: true diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl b/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl index dfac75470d1a..58dc408b7ffd 100644 --- a/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl @@ -2,9 +2,8 @@ package dataprocmetastore_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -167,59 +166,4 @@ resource "google_storage_bucket" "bucket" { location = "us-central1" } `, context) -} - -func TestAccMetastoreService_tags(t *testing.T) { - t.Parallel() - tagKey := acctest.BootstrapSharedTestTagKey(t, "metastore-service-tagkey") - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org": envvar.GetTestOrgFromEnv(t), - "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "metastore-service-tagvalue", tagKey), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocMetastoreServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccMetastoreServiceTags(context), - }, - { - ResourceName: "google_dataproc_metastore_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"service_id", "location", "labels", "terraform_labels", "tags"}, - }, - }, - }) -} - -func testAccMetastoreServiceTags(context map[string]interface{}) string { - return acctest.Nprintf(` 
-resource "google_dataproc_metastore_service" "default" { - service_id = "tf-test-my-service-%{random_suffix}" - location = "us-central1" - port = 9080 - tier = "DEVELOPER" - - maintenance_window { - hour_of_day = 2 - day_of_week = "SUNDAY" - } - - hive_metastore_config { - version = "2.3.6" - } - - labels = { - env = "test" - } - tags = { - "%{org}/%{tagKey}" = "%{tagValue}" - } -} -`, context) -} +} \ No newline at end of file From 74c5d80f662fa6bd45cacb57dd841e90df8e7e36 Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Thu, 8 May 2025 19:48:08 +0530 Subject: [PATCH 074/884] Revert "Adding tags field to Redis Instances for TagsR2401" (#13853) --- mmv1/products/redis/Instance.yaml | 8 ---- .../redis/resource_redis_instance_test.go | 42 ------------------- 2 files changed, 50 deletions(-) diff --git a/mmv1/products/redis/Instance.yaml b/mmv1/products/redis/Instance.yaml index a86b63745bdd..419ae6c60bb7 100644 --- a/mmv1/products/redis/Instance.yaml +++ b/mmv1/products/redis/Instance.yaml @@ -568,11 +568,3 @@ properties: Optional. The KMS key reference that you want to use to encrypt the data at rest for this Redis instance. If this is provided, CMEK is enabled. immutable: true - - name: 'tags' - type: KeyValuePairs - description: | - A map of resource manager tags. - Resource manager tag keys and values have the same definition as resource manager tags. - Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_key_value}. 
- immutable: true - ignore_read: true diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go b/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go index d5a0c6d0ae30..43f53e4ce0e2 100644 --- a/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go +++ b/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go @@ -461,45 +461,3 @@ resource "google_redis_instance" "test" { } `, name) } - -func TestAccRedisInstance_tags(t *testing.T) { - - t.Parallel() - - tagKey := acctest.BootstrapSharedTestTagKey(t, "redis-instances-tagkey") - context := map[string]interface{}{ - "org": envvar.GetTestOrgFromEnv(t), - "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "redis-instances-tagvalue", tagKey), - "random_suffix": acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckRedisInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccRedisInstanceTags(context), - }, - { - ResourceName: "google_redis_instance.test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"tags"}, - }, - }, - }) -} - -func testAccRedisInstanceTags(context map[string]interface{}) string { - - return acctest.Nprintf(` - resource "google_redis_instance" "test" { - name = "tf-test-instance-%{random_suffix}" - memory_size_gb = 5 - tags = { - "%{org}/%{tagKey}" = "%{tagValue}" - } -} -`, context) -} From c190ed8b37db7e5a7fd814f837a0ac88ac0e1ca2 Mon Sep 17 00:00:00 2001 From: Parker DeWilde Date: Thu, 8 May 2025 09:32:23 -0700 Subject: [PATCH 075/884] Fix copy-paste errors in `google_storage_bucket` docs examples. 
(#13775) --- .../terraform/website/docs/r/storage_bucket.html.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index 9ee3665c7e2c..e5035cd8d1af 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -94,7 +94,7 @@ resource "google_storage_bucket" "no-age-enabled" { ## Example Usage - Enabling public access prevention ```hcl -resource "google_storage_bucket" "auto-expire" { +resource "google_storage_bucket" "no-public-access" { name = "no-public-access-bucket" location = "US" force_destroy = true @@ -106,7 +106,7 @@ resource "google_storage_bucket" "auto-expire" { ## Example Usage - Enabling hierarchical namespace ```hcl -resource "google_storage_bucket" "auto-expire" { +resource "google_storage_bucket" "hns-enabled" { name = "hns-enabled-bucket" location = "US" force_destroy = true From fbdf34df4ec96c12f751a7421955c85a8075b8e6 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 8 May 2025 11:07:16 -0700 Subject: [PATCH 076/884] Updated ACM from 1.18.2 to 1.21.0 (#13885) --- ...ce_gke_hub_feature_membership_test.go.tmpl | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl index 3b3c03cafa27..e839a1825884 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl @@ -109,7 +109,7 @@ resource "google_gke_hub_feature_membership" "feature_member_1" { feature = google_gke_hub_feature.feature.name membership = 
google_gke_hub_membership.membership.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "hierarchy" @@ -138,7 +138,7 @@ resource "google_gke_hub_feature_membership" "feature_member_2" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_second.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "hierarchy" @@ -219,7 +219,7 @@ resource "google_gke_hub_feature_membership" "feature_member_2" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_second.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "hierarchy" @@ -252,7 +252,7 @@ resource "google_gke_hub_feature_membership" "feature_member_2" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_second.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "unstructured" @@ -275,7 +275,7 @@ resource "google_gke_hub_feature_membership" "feature_member_3" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_third.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "hierarchy" @@ -298,7 +298,7 @@ resource "google_gke_hub_feature_membership" "feature_member_4" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_fourth.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" } } `, context) @@ -323,7 +323,7 @@ resource "google_gke_hub_feature_membership" "feature_member_3" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_third.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" } 
} `, context) @@ -491,7 +491,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true git { @@ -553,7 +553,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true git { @@ -646,7 +646,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_acmoci.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "unstructured" @@ -688,7 +688,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_acmoci.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" config_sync { enabled = true source_format = "hierarchy" @@ -730,7 +730,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_acmoci.membership_id configmanagement { - version = "1.18.2" + version = "1.21.0" } } `, context) @@ -1087,7 +1087,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { } } } - version = "1.17.0" + version = "1.20.0" } } `, context) @@ -1137,7 +1137,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { } } } - version = "1.17.0" + version = "1.20.0" } } `, context) From 3c1ca2a3e3465a634a658cd25412b25b4b9f573f Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 8 May 2025 11:20:53 -0700 Subject: [PATCH 
077/884] Prevent storagecontrol target project from being swept (#13879) --- ...a_source_storage_control_project_intelligence_config_test.go | 2 +- ...resource_storage_control_project_intelligence_config_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go index a9b65169adc0..8766331ab785 100644 --- a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go +++ b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go @@ -13,7 +13,7 @@ func TestAccDataSourceGoogleStorageControlProjectIntelligenceConfig_basic(t *tes context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "project": acctest.BootstrapProject(t, "tf-test-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, + "project": acctest.BootstrapProject(t, "tf-boot-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go b/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go index b4535d9c40df..848702b5c08b 100644 --- a/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go +++ b/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go @@ -14,7 +14,7 @@ func TestAccStorageControlProjectIntelligenceConfig_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "project": 
acctest.BootstrapProject(t, "tf-test-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, + "project": acctest.BootstrapProject(t, "tf-boot-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, "random_suffix": acctest.RandString(t, 10), } From 33a70ea5ececbf93b5579a1dad3b652f405dabb7 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 8 May 2025 12:15:12 -0700 Subject: [PATCH 078/884] Magician comment sha (#13818) --- .ci/magician/cmd/generate_comment.go | 33 +++++++++++----- .ci/magician/cmd/generate_comment_test.go | 38 ++++++++++++++----- .ci/magician/cmd/generate_downstream.go | 16 +++++--- .ci/magician/cmd/mock_runner_test.go | 7 +++- .../cmd/templates/DIFF_COMMENT.md.tmpl | 2 +- 5 files changed, 70 insertions(+), 26 deletions(-) diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index 9c526cc51e1b..ac689fdc8db7 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -45,9 +45,11 @@ var ( ) type Diff struct { - Title string - Repo string - ShortStat string + Title string + Repo string + ShortStat string + CommitSHA string + OldCommitSHA string } type BreakingChange struct { @@ -77,7 +79,6 @@ type Errors struct { } type diffCommentData struct { - PrNumber int Diffs []Diff BreakingChanges []BreakingChange MissingServiceLabels []string @@ -214,9 +215,7 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, } // Initialize repos - data := diffCommentData{ - PrNumber: prNumber, - } + data := diffCommentData{} for _, repo := range []*source.Repo{&tpgRepo, &tpgbRepo, &tgcRepo, &tfoicsRepo} { errors[repo.Title] = []string{} repo.Branch = newBranch @@ -262,10 +261,24 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, errors[repo.Title] = append(errors[repo.Title], "Failed to compute repo diff shortstats") } if shortStat != "" { + 
variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo.Name) + oldVariablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo.Name) + commitSHA, err := rnr.ReadFile(variablePath) + if err != nil { + errors[repo.Title] = append(errors[repo.Title], "Failed to read commit sha from file") + continue + } + oldCommitSHA, err := rnr.ReadFile(oldVariablePath) + if err != nil { + errors[repo.Title] = append(errors[repo.Title], "Failed to read old commit sha from file") + continue + } diffs = append(diffs, Diff{ - Title: repo.Title, - Repo: repo.Name, - ShortStat: shortStat, + Title: repo.Title, + Repo: repo.Name, + ShortStat: shortStat, + CommitSHA: commitSHA, + OldCommitSHA: oldCommitSHA, }) repo.ChangedFiles, err = ctlr.DiffNameOnly(repo, oldBranch, newBranch) if err != nil { diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index 25d91879db76..8b08fb59a3b9 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -16,6 +16,7 @@ package cmd import ( + "fmt" "os" "reflect" "testing" @@ -38,6 +39,22 @@ func TestExecGenerateComment(t *testing.T) { "GOPATH": os.Getenv("GOPATH"), "HOME": os.Getenv("HOME"), } + for _, repo := range []string{ + "terraform-provider-google", + "terraform-provider-google-beta", + "terraform-google-conversion", + } { + variablePathOld := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo) + variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo) + err := mr.WriteFile(variablePathOld, "1a2a3a4a") + if err != nil { + t.Errorf("Error writing file: %s", err) + } + err = mr.WriteFile(variablePath, "1a2a3a4b") + if err != nil { + t.Errorf("Error writing file: %s", err) + } + } execGenerateComment( 123456, "*******", @@ -115,7 +132,7 @@ func TestExecGenerateComment(t *testing.T) { {"123456", "terraform-provider-breaking-change-test", "success", 
"https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-missing-service-labels", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, }, - "PostComment": {{"123456", "Hi there, I'm the Modular magician. I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/auto-pr-123456-old..auto-pr-123456) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, + "PostComment": {{"123456", "Hi there, I'm the Modular magician. 
I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/1a2a3a4a..1a2a3a4b) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, "AddLabels": {{"123456", []string{"service/alloydb"}}}, } { if actualCalls, ok := gh.calledMethods[method]; !ok { @@ -170,24 +187,27 @@ func TestFormatDiffComment(t *testing.T) { }, "diffs are displayed": { data: diffCommentData{ - PrNumber: 1234567890, Diffs: []Diff{ { - Title: "Repo 1", - Repo: "repo-1", - ShortStat: "+1 added, -1 removed", + Title: "Repo 1", + Repo: "repo-1", + ShortStat: "+1 added, -1 removed", + CommitSHA: "1a2a3a4b", + OldCommitSHA: "1a2a3a4a", }, { - Title: "Repo 2", - Repo: "repo-2", - ShortStat: "+2 added, -2 removed", + Title: "Repo 2", + Repo: "repo-2", + ShortStat: "+2 added, -2 removed", + CommitSHA: "1a2a3a4d", + OldCommitSHA: "1a2a3a4c", }, }, }, expectedStrings: []string{ "## Diff report", "generated some diffs", - "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+1 added, -1 removed)\nRepo 2: 
[Diff](https://github.com/modular-magician/repo-2/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+2 added, -2 removed)", + "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/1a2a3a4a..1a2a3a4b) (+1 added, -1 removed)\nRepo 2: [Diff](https://github.com/modular-magician/repo-2/compare/1a2a3a4c..1a2a3a4d) (+2 added, -2 removed)", }, notExpectedStrings: []string{ "hasn't generated any diffs", diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index 62dc90913570..d93834e68dbf 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -340,8 +340,9 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner return "", err } - if _, err := rnr.Run("git", []string{"commit", "--signoff", "-m", commitMessage}, nil); err != nil { - return "", err + _, commitErr := rnr.Run("git", []string{"commit", "--signoff", "-m", commitMessage}, nil) + if commitErr != nil && !strings.Contains(commitErr.Error(), "nothing to commit") { + return "", commitErr } commitSha, err := rnr.Run("git", []string{"rev-parse", "HEAD"}, nil) @@ -354,8 +355,13 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner // auto-pr's use commitSHA_modular-magician__.txt file to communicate commmit hash // across cloudbuild steps. 
Used in test-tpg to execute unit tests for the HEAD commit - if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") && !strings.HasSuffix(scratchRepo.Branch, "-old") { - variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) + if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") { + var variablePath string + if strings.HasSuffix(scratchRepo.Branch, "-old") { + variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", scratchRepo.Name) + } else { + variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) + } fmt.Println("variablePath: ", variablePath) err = rnr.WriteFile(variablePath, commitSha) if err != nil { @@ -363,7 +369,7 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner } } - return commitSha, err + return commitSha, commitErr } func addChangelogEntry(downstreamRepo *source.Repo, pullRequest *github.PullRequest, rnr ExecRunner) error { diff --git a/.ci/magician/cmd/mock_runner_test.go b/.ci/magician/cmd/mock_runner_test.go index 79bc206f2c43..742ca0617dba 100644 --- a/.ci/magician/cmd/mock_runner_test.go +++ b/.ci/magician/cmd/mock_runner_test.go @@ -41,6 +41,7 @@ type mockRunner struct { cwd string dirStack *list.List notifyError bool + fileContents map[string]string } func sortedEnvString(env map[string]string) string { @@ -107,10 +108,14 @@ func (mr *mockRunner) Walk(root string, fn filepath.WalkFunc) error { } func (mr *mockRunner) ReadFile(name string) (string, error) { - return "", nil + return mr.fileContents[name], nil } func (mr *mockRunner) WriteFile(name, data string) error { + if mr.fileContents == nil { + mr.fileContents = make(map[string]string) + } + mr.fileContents[name] = data return nil } diff --git a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl index f50fdee4626a..7f22c8073e3a 100644 --- a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl +++ 
b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl @@ -7,7 +7,7 @@ Your PR hasn't generated any diffs, but I'll let you know if a future commit doe Your PR generated some diffs in downstreams - here they are. {{range .Diffs -}} -{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/auto-pr-{{$.PrNumber}}-old..auto-pr-{{$.PrNumber}}) ({{.ShortStat}}) +{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/{{.OldCommitSHA}}..{{.CommitSHA}}) ({{.ShortStat}}) {{end -}} {{end -}} From aaf58a50b5827c1567347c6e0cd1b5dca050f88a Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 8 May 2025 12:15:35 -0700 Subject: [PATCH 079/884] Document is_set and set_hash_func (#13811) --- docs/content/reference/field.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index 5dabcf947cc9..2cbf9b832b37 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -327,6 +327,29 @@ Example: Regex regex: '^[a-zA-Z][a-zA-Z0-9_]*$' ``` +### `is_set` +If true, the field is a Set rather than an Array. Set fields represent an +unordered set of unique elements. `set_hash_func` may be used to customize the +hash function used to index elements in the set, otherwise the schema default +function will be used. Adding this property to an existing field is usually a +breaking change. + +```yaml +- name: 'fieldOne' + type: Array + is_set: true +``` + +### `set_hash_func` +Specifies a function for hashing elements in a Set field. If unspecified, +`schema.HashString` will be used if the elements are strings, otherwise +`schema.HashSchema`. The hash function should be defined in +`custom_code.constants`. + +```yaml +set_hash_func: functionName +``` + ### `api_name` Specifies a name to use for communication with the API that is different than the name of the field in Terraform. 
In general, setting an `api_name` is not From fc03b339ac4a65e981d205ccf352b11afb129996 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 8 May 2025 12:17:32 -0700 Subject: [PATCH 080/884] Documented logging bucket quota increase (#13886) --- .ci/infra/terraform/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/.ci/infra/terraform/README.md b/.ci/infra/terraform/README.md index a600ed2534cc..fac7f9e8e323 100644 --- a/.ci/infra/terraform/README.md +++ b/.ci/infra/terraform/README.md @@ -73,3 +73,4 @@ Quotas that will need to be adjusted to support all tests: - compute.googleapis.com/c2_cpus (us-central1) - compute.googleapis.com/n2_cpus (us-central1) to 36+ - VMware Engine standard 72 vCPUs nodes per region - southamerica-east1 to 21 +- logging.googleapis.com/log_buckets_count to 200 From a62706526f226a84c919e126064d164fffe4ae72 Mon Sep 17 00:00:00 2001 From: sachin purohit Date: Thu, 8 May 2025 14:13:23 -0700 Subject: [PATCH 081/884] feat(bigquery): added security_mode option for google_bigquery_routine (#13331) --- mmv1/products/bigquery/Routine.yaml | 6 ++++++ .../terraform/examples/bigquery_routine_basic.tf.tmpl | 1 + .../services/bigquery/resource_bigquery_routine_test.go | 2 ++ 3 files changed, 9 insertions(+) diff --git a/mmv1/products/bigquery/Routine.yaml b/mmv1/products/bigquery/Routine.yaml index 759b27b169e6..bd144e9df1e8 100644 --- a/mmv1/products/bigquery/Routine.yaml +++ b/mmv1/products/bigquery/Routine.yaml @@ -233,6 +233,12 @@ properties: description: If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask enum_values: - 'DATA_MASKING' + - name: 'securityMode' + type: Enum + description: Optional. The security mode of the routine, if defined. If not defined, the security mode is automatically determined from the routine's configuration. 
+ enum_values: + - 'DEFINER' + - 'INVOKER' - name: 'sparkOptions' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/bigquery_routine_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_routine_basic.tf.tmpl index 1a473f5bc7c8..c880dcd99e6f 100644 --- a/mmv1/templates/terraform/examples/bigquery_routine_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/bigquery_routine_basic.tf.tmpl @@ -7,6 +7,7 @@ resource "google_bigquery_routine" "sproc" { routine_id = "{{index $.Vars "routine_id"}}" routine_type = "PROCEDURE" language = "SQL" + security_mode = "INVOKER" definition_body = "CREATE FUNCTION Add(x FLOAT64, y FLOAT64) RETURNS FLOAT64 AS (x + y);" } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go index a71019023bf2..7a9e2e3a635e 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go @@ -50,6 +50,7 @@ resource "google_bigquery_routine" "sproc" { routine_id = "%s" routine_type = "SCALAR_FUNCTION" language = "SQL" + security_mode = "INVOKER" definition_body = "1" } `, dataset, routine) @@ -66,6 +67,7 @@ resource "google_bigquery_routine" "sproc" { routine_id = "%s" routine_type = "SCALAR_FUNCTION" language = "JAVASCRIPT" + security_mode = "DEFINER" definition_body = "CREATE FUNCTION multiplyInputs return x*y;" arguments { name = "x" From d103f7bdd09db71609cd4f680eb265a37f462265 Mon Sep 17 00:00:00 2001 From: karolgorc Date: Thu, 8 May 2025 23:17:46 +0200 Subject: [PATCH 082/884] Add missing disk related fields to instance_template resources (#13245) --- ...resource_compute_instance_template.go.tmpl | 28 +++++++++ ...rce_compute_instance_template_test.go.tmpl | 57 ++++++++++++++++++ ...e_compute_region_instance_template.go.tmpl | 18 ++++++ ...pute_region_instance_template_test.go.tmpl | 
58 +++++++++++++++++++ .../r/compute_instance_template.html.markdown | 4 ++ ...ute_region_instance_template.html.markdown | 4 ++ 6 files changed, 169 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index cca279377579..718a225f584f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -150,6 +150,14 @@ func ResourceComputeInstanceTemplate() *schema.Resource { Description: `Name of the disk. When not provided, this defaults to the name of the instance.`, }, + "architecture": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + Description: `The architecture of the image. Allowed values are ARM64 or X86_64.`, + }, + "disk_size_gb": { Type: schema.TypeInt, Optional: true, @@ -201,6 +209,16 @@ func ResourceComputeInstanceTemplate() *schema.Resource { Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, }, + "guest_os_features": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `A list of features to enable on the guest operating system. 
Applicable only for bootable images.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "source_image": { Type: schema.TypeString, Optional: true, @@ -1475,6 +1493,14 @@ func buildDisks(d *schema.ResourceData, config *transport_tpg.Config) ([]*comput disk.Type = v.(string) } + if v, ok := d.GetOk(prefix + ".guest_os_features"); ok { + disk.GuestOsFeatures = expandComputeInstanceGuestOsFeatures(v.([]interface{})) + } + + if v, ok := d.GetOk(prefix + ".architecture"); ok { + disk.Architecture = v.(string) + } + disks = append(disks, &disk) } @@ -1752,6 +1778,8 @@ func flattenDisk(disk *compute.AttachedDisk, configDisk map[string]any, defaultP diskMap["source"] = tpgresource.ConvertSelfLinkToV1(disk.Source) diskMap["mode"] = disk.Mode diskMap["type"] = disk.Type + diskMap["guest_os_features"] = flattenComputeInstanceGuestOsFeatures(disk.GuestOsFeatures) + diskMap["architecture"] = configDisk["architecture"] return diskMap, nil } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl index f487b7f0d2e7..75d6c1553bfd 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl @@ -1775,6 +1775,36 @@ func TestAccComputeInstanceTemplate_migration(t *testing.T) { }) } +func TestAccComputeInstanceTemplate_GuestOsFeatures(t *testing.T) { + t.Parallel() + + var instanceTemplate compute.InstanceTemplate + context := map[string]interface{}{ + "template_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "guest_os_features": `["UEFI_COMPATIBLE", "VIRTIO_SCSI_MULTIQUEUE", "GVNIC", "IDPF"]`, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckComputeInstanceTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstanceTemplate_GuestOsFeatures(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceTemplateExists( + t, "google_compute_instance_template.foobar", &instanceTemplate), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.#", "4"), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.0", "UEFI_COMPATIBLE"), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.1", "VIRTIO_SCSI_MULTIQUEUE"), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.2", "GVNIC"), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.3", "IDPF"), + ), + }, + }, + }) +} + func TestAccComputeInstanceTemplate_withLabels(t *testing.T) { acctest.SkipIfVcr(t) t.Parallel() @@ -5237,3 +5267,30 @@ resource "google_compute_instance_template" "foobar" { } `, context) } + +func testAccComputeInstanceTemplate_GuestOsFeatures(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "foobar" { + name = "%{template_name}" + machine_type = "e2-medium" + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + disk_size_gb = 10 + architecture = "X86_64" + boot = true + guest_os_features = %{guest_os_features} + } + + network_interface { + network = "default" + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl index bd355498bb1d..36e3eb3351e8 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl @@ -124,6 +124,14 @@ func ResourceComputeRegionInstanceTemplate() *schema.Resource { Description: `Name of the disk. When not provided, this defaults to the name of the instance.`, }, + "architecture": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + Description: `The architecture of the image. Allowed values are ARM64 or X86_64.`, + }, + "disk_size_gb": { Type: schema.TypeInt, Optional: true, @@ -175,6 +183,16 @@ func ResourceComputeRegionInstanceTemplate() *schema.Resource { Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, }, + "guest_os_features": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `A list of features to enable on the guest operating system. 
Applicable only for bootable images.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "source_image": { Type: schema.TypeString, Optional: true, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl index 89a03ce2bcbe..c0534475d920 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl @@ -1659,6 +1659,36 @@ func TestAccComputeRegionInstanceTemplate_gracefulShutdown(t *testing.T) { } {{- end }} +func TestAccComputeRegionInstanceTemplate_GuestOsFeatures(t *testing.T) { + t.Parallel() + + var instanceTemplate compute.InstanceTemplate + context := map[string]interface{}{ + "template_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "guest_os_features": `["UEFI_COMPATIBLE", "VIRTIO_SCSI_MULTIQUEUE", "GVNIC", "IDPF"]`, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionInstanceTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionInstanceTemplate_GuestOsFeatures(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeRegionInstanceTemplateExists( + t, "google_compute_region_instance_template.foobar", &instanceTemplate), + resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.#", "4"), + resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.0", "UEFI_COMPATIBLE"), + resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.1", "VIRTIO_SCSI_MULTIQUEUE"), + 
resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.2", "GVNIC"), + resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.3", "IDPF"), + ), + }, + }, + }) +} + func testAccCheckComputeRegionInstanceTemplateDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -4608,6 +4638,34 @@ data "google_compute_default_service_account" "default" { `, context) } +func testAccComputeRegionInstanceTemplate_GuestOsFeatures(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_region_instance_template" "foobar" { + name = "%{template_name}" + machine_type = "e2-medium" + region = "us-central1" + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + disk_size_gb = 10 + boot = true + architecture = "X86_64" + guest_os_features = %{guest_os_features} + } + + network_interface { + network = "default" + } +} +`, context) +} + {{ if ne $.TargetVersionName `ga` -}} func testAccComputeRegionInstanceTemplate_gracefulShutdown(context map[string]interface{}) string { return acctest.Nprintf(` diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index ddb65e42f955..ff7833af0656 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -451,6 +451,8 @@ The following arguments are supported: * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. 
Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. +* `guest_os_features` - (optional) A list of features to enable on the guest operating system. Applicable only for bootable images. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options. + * `source_image` - (Optional) The image from which to initialize this disk. This can be one of: the image's `self_link`, `projects/{project}/global/images/{image}`, @@ -484,6 +486,8 @@ The following arguments are supported: or READ_ONLY. If you are attaching or creating a boot disk, this must read-write mode. +* `architecture` - (Optional) The architecture of the attached disk. Valid values are `ARM64` or `x86_64`. + * `source` - (Optional) The name (**not self_link**) of the disk (such as those managed by `google_compute_disk`) to attach. ~> **Note:** Either `source`, `source_image`, or `source_snapshot` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index f0f1886ed5bd..c5fa24b0ba64 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -416,6 +416,8 @@ The following arguments are supported: * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. +* `guest_os_features` - (optional) A list of features to enable on the guest operating system. 
Applicable only for bootable images. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options. + * `source_image` - (Optional) The image from which to initialize this disk. This can be one of: the image's `self_link`, `projects/{project}/global/images/{image}`, @@ -449,6 +451,8 @@ The following arguments are supported: or READ_ONLY. If you are attaching or creating a boot disk, this must read-write mode. +* `architecture` - (Optional) The architecture of the attached disk. Valid values are `ARM64` or `x86_64`. + * `source` - (Optional) The name (**not self_link**) of the disk (such as those managed by `google_compute_disk`) to attach. ~> **Note:** Either `source`, `source_image`, or `source_snapshot` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details. From 414ff5ce91f444fd3842fb9e05343146899252eb Mon Sep 17 00:00:00 2001 From: Rachel Thornton Date: Thu, 8 May 2025 17:25:06 -0400 Subject: [PATCH 083/884] Add field to support configuring new Media CDN feature (flexible shielding) via Terraform (#13696) --- .../networkservices/EdgeCacheOrigin.yaml | 23 +++++++++++++++++++ ...network_services_edge_cache_origin_test.go | 6 +++++ 2 files changed, 29 insertions(+) diff --git a/mmv1/products/networkservices/EdgeCacheOrigin.yaml b/mmv1/products/networkservices/EdgeCacheOrigin.yaml index 8fbb43ddcbd0..1680c2a76e98 100644 --- a/mmv1/products/networkservices/EdgeCacheOrigin.yaml +++ b/mmv1/products/networkservices/EdgeCacheOrigin.yaml @@ -332,3 +332,26 @@ properties: item_type: type: String max_size: 5 + - name: 'flexShielding' + type: NestedObject + description: | + The FlexShieldingOptions to be used for all routes to this origin. + + If not set, defaults to a global caching layer in front of the origin. 
+ properties: + - name: flexShieldingRegions + type: Array + description: | + Whenever possible, content will be fetched from origin and cached in or + near the specified origin. Best effort. + + You must specify exactly one FlexShieldingRegion. + item_type: + type: Enum + description: | + Available regions for flexible shielding caching layer. + enum_values: + - 'AFRICA_SOUTH1' + - 'ME_CENTRAL1' + min_size: 1 + max_size: 1 diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go index 524969b82a63..32de5a3a8d93 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go @@ -51,6 +51,9 @@ func testAccNetworkServicesEdgeCacheOrigin_update_0(name string) string { timeout { connect_timeout = "10s" } + flex_shielding { + flex_shielding_regions = ["AFRICA_SOUTH1"] + } } `, name) } @@ -68,6 +71,9 @@ func testAccNetworkServicesEdgeCacheOrigin_update_1(name string) string { response_timeout = "29s" read_timeout = "13s" } + flex_shielding { + flex_shielding_regions = ["ME_CENTRAL1"] + } } `, name) } From ff277f669118c618a92fdae5a37ada6388a722ec Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 8 May 2025 15:16:01 -0700 Subject: [PATCH 084/884] Switched force destroy to use a URL param instead of request body (#13887) --- mmv1/products/gemini/CodeRepositoryIndex.yaml | 6 ++++-- .../pre_delete/code_repository_index_force_delete.go.tmpl | 6 ------ 2 files changed, 4 insertions(+), 8 deletions(-) delete mode 100644 mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl diff --git a/mmv1/products/gemini/CodeRepositoryIndex.yaml b/mmv1/products/gemini/CodeRepositoryIndex.yaml index 6391c6286028..29f78c38f19b 
100644 --- a/mmv1/products/gemini/CodeRepositoryIndex.yaml +++ b/mmv1/products/gemini/CodeRepositoryIndex.yaml @@ -21,6 +21,7 @@ references: base_url: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes self_link: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code_repository_index_id}} create_url: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes?codeRepositoryIndexId={{code_repository_index_id}} +delete_url: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code_repository_index_id}}?force={{force_destroy}} update_verb: 'PATCH' update_mask: true id_format: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code_repository_index_id}} @@ -53,11 +54,12 @@ async: result: resource_inside_response: true include_project: false -custom_code: - pre_delete: templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl error_retry_predicates: - 'transport_tpg.IsCodeRepositoryIndexUnreadyError' - 'transport_tpg.IsRepositoryGroupQueueError' +sweeper: + url_substitutions: + - force_destroy: true virtual_fields: - name: 'force_destroy' description: diff --git a/mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl b/mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl deleted file mode 100644 index 86a1aa3742dd..000000000000 --- a/mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -obj = make(map[string]interface{}) -if v, ok := d.GetOk("force_destroy"); ok { - if v == true { - obj["force"] = true - } -} From d77777dea1f763a46b94fc85f81c0755e56fc0ae Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 8 May 2025 15:47:11 -0700 Subject: [PATCH 085/884] Switched to only one bootstrapped code repository index to reduce quota issues (#13889) --- mmv1/products/gemini/RepositoryGroup.yaml | 6 ------ .../gemini/resource_gemini_repository_group_test.go.tmpl | 2 +- 2 
files changed, 1 insertion(+), 7 deletions(-) diff --git a/mmv1/products/gemini/RepositoryGroup.yaml b/mmv1/products/gemini/RepositoryGroup.yaml index 0fa7043ccfd1..883506b5e2b5 100644 --- a/mmv1/products/gemini/RepositoryGroup.yaml +++ b/mmv1/products/gemini/RepositoryGroup.yaml @@ -28,18 +28,12 @@ mutex: 'projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code examples: - name: "gemini_repository_group_basic" primary_resource_id: "example" - primary_resource_name: 'acctest.BootstrapSharedCodeRepositoryIndex(t, "basic-rg-gen-example", "us-central1", "", map[string]string{"ccfe_debug_note":"terraform_e2e_do_not_delete"}), fmt.Sprintf("tf-test-gen-repository-group-%s", context["random_suffix"])' vars: repository_group_id: "example-repository-group" git_repository_link_id: 'example-git-repository-link-id' cri_id: "cri-example" repository_resource: "projects/example-project/locations/us-central1/connections/example-connection/gitRepositoryLinks/example-repo" connection_id: "example-connection-id" - test_vars_overrides: - git_repository_link_id: 'acctest.BootstrapGitRepository(t, "basic", "us-central1", "https://github.com/CC-R-github-robot/tf-test.git", acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648))' - cri_id: 'acctest.BootstrapSharedCodeRepositoryIndex(t, "basic-rg-gen-example", "us-central1", "", map[string]string{"ccfe_debug_note":"terraform_e2e_do_not_delete"})' - repository_resource: '"projects/"+envvar.GetTestProjectFromEnv()+"/locations/us-central1/connections/"+acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648)+"/gitRepositoryLinks/"+acctest.BootstrapGitRepository(t, "basic", "us-central1", "https://github.com/CC-R-github-robot/tf-test.git", acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", 
"projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648))' - connection_id: 'acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648)' exclude_test: true timeouts: insert_minutes: 30 diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl index bb4547d3f545..eac176d96b7e 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl @@ -13,7 +13,7 @@ import ( // More details: https://cloud.google.com/developer-connect/docs/connect-github-repo#before_you_begin func TestAccGeminiRepositoryGroup_update(t *testing.T) { - codeRepositoryIndexId := acctest.BootstrapSharedCodeRepositoryIndex(t, "basic-rg-test", "us-central1", "", map[string]string{"ccfe_debug_note": "terraform_e2e_do_not_delete"}) + codeRepositoryIndexId := acctest.BootstrapSharedCodeRepositoryIndex(t, "basic", "us-central1", "", map[string]string{"ccfe_debug_note": "terraform_e2e_do_not_delete"}) context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "project_id": os.Getenv("GOOGLE_PROJECT"), From 9c0af1fe2bc3983bc3ae79b25864cf5e080647ae Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 8 May 2025 16:04:17 -0700 Subject: [PATCH 086/884] tgc-revival: Remove the dependency of TPGB (part2) (#13880) --- mmv1/provider/terraform.go | 1 + mmv1/provider/terraform_tgc_next.go | 42 ++++++++++++++++--- .../terraform/provider/provider.go.tmpl | 23 +++++++--- .../terraform/transport/config.go.tmpl | 2 + 4 files changed, 56 insertions(+), 12 deletions(-) diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 75a077105bc4..c1a66ea89acb 100644 --- 
a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -801,5 +801,6 @@ func (t Terraform) SupportedProviderVersions() []string { type ProviderWithProducts struct { Terraform + Compiler string Products []*api.Product } diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 71394c35a326..6093454d3eb2 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -72,7 +72,11 @@ func (tgc TerraformGoogleConversionNext) GenerateCaiToHclObjects(outputFolder, r func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { resourceConverters := map[string]string{ // common - "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go.tmpl", + "pkg/transport/config.go": "third_party/terraform/transport/config.go.tmpl", + "pkg/transport/provider_handwritten_endpoint.go": "third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl", + "pkg/tpgresource/common_diff_suppress.go": "third_party/terraform/tpgresource/common_diff_suppress.go.tmpl", + "pkg/provider/provider.go": "third_party/terraform/provider/provider.go.tmpl", + "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go.tmpl", // tfplan2cai "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", @@ -88,6 +92,12 @@ func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, } func (tgc TerraformGoogleConversionNext) CompileFileList(outputFolder string, files map[string]string, fileTemplate TemplateData, products []*api.Product) { + providerWithProducts := TgcWithProducts{ + TerraformGoogleConversionNext: tgc, + Compiler: "terraformgoogleconversion-codegen", + Products: products, + } + if err := os.MkdirAll(outputFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating output directory %v: %v", 
outputFolder, err)) } @@ -105,7 +115,7 @@ func (tgc TerraformGoogleConversionNext) CompileFileList(outputFolder string, fi formatFile := filepath.Ext(targetFile) == ".go" - fileTemplate.GenerateFile(targetFile, source, tgc, formatFile, templates...) + fileTemplate.GenerateFile(targetFile, source, providerWithProducts, formatFile, templates...) tgc.replaceImportPath(outputFolder, target) } } @@ -127,10 +137,23 @@ func (tgc TerraformGoogleConversionNext) CopyCommonFiles(outputFolder string, ge resourceConverters := map[string]string{ // common - "pkg/provider/mtls_util.go": "third_party/terraform/provider/mtls_util.go", - "pkg/verify/validation.go": "third_party/terraform/verify/validation.go", - "pkg/verify/path_or_contents.go": "third_party/terraform/verify/path_or_contents.go", - "pkg/version/version.go": "third_party/terraform/version/version.go", + "pkg/transport/batcher.go": "third_party/terraform/transport/batcher.go", + "pkg/transport/retry_transport.go": "third_party/terraform/transport/retry_transport.go", + "pkg/transport/retry_utils.go": "third_party/terraform/transport/retry_utils.go", + "pkg/transport/header_transport.go": "third_party/terraform/transport/header_transport.go", + "pkg/transport/error_retry_predicates.go": "third_party/terraform/transport/error_retry_predicates.go", + "pkg/transport/bigtable_client_factory.go": "third_party/terraform/transport/bigtable_client_factory.go", + "pkg/transport/transport.go": "third_party/terraform/transport/transport.go", + "pkg/tpgresource/utils.go": "third_party/terraform/tpgresource/utils.go", + "pkg/tpgresource/self_link_helpers.go": "third_party/terraform/tpgresource/self_link_helpers.go", + "pkg/tpgresource/hashcode.go": "third_party/terraform/tpgresource/hashcode.go", + "pkg/tpgresource/regional_utils.go": "third_party/terraform/tpgresource/regional_utils.go", + "pkg/tpgresource/field_helpers.go": "third_party/terraform/tpgresource/field_helpers.go", + "pkg/tpgresource/service_scope.go": 
"third_party/terraform/tpgresource/service_scope.go", + "pkg/provider/mtls_util.go": "third_party/terraform/provider/mtls_util.go", + "pkg/verify/validation.go": "third_party/terraform/verify/validation.go", + "pkg/verify/path_or_contents.go": "third_party/terraform/verify/path_or_contents.go", + "pkg/version/version.go": "third_party/terraform/version/version.go", // tfplan2cai "pkg/tfplan2cai/converters/services/compute/image.go": "third_party/terraform/services/compute/image.go", @@ -194,9 +217,16 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target // replace google to google-beta gaImportPath := ImportPathFromVersion("ga") sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(TERRAFORM_PROVIDER_BETA+"/"+RESOURCE_DIRECTORY_BETA), -1) + sourceByte = bytes.Replace(sourceByte, []byte(TERRAFORM_PROVIDER_GA+"/version"), []byte(TERRAFORM_PROVIDER_BETA+"/version"), -1) err = os.WriteFile(targetFile, sourceByte, 0644) if err != nil { log.Fatalf("Cannot write file %s to replace import path: %s", target, err) } } + +type TgcWithProducts struct { + TerraformGoogleConversionNext + Compiler string + Products []*api.Product +} diff --git a/mmv1/third_party/terraform/provider/provider.go.tmpl b/mmv1/third_party/terraform/provider/provider.go.tmpl index 1ab0852b03e0..d4f6bf90270b 100644 --- a/mmv1/third_party/terraform/provider/provider.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider.go.tmpl @@ -205,20 +205,21 @@ func Provider() *schema.Provider { Optional: true, }, }, - +{{if ne $.Compiler "terraformgoogleconversion-codegen"}} DataSourcesMap: DatasourceMap(), +{{- end }} ResourcesMap: ResourceMap(), } provider.ConfigureContextFunc = func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { return ProviderConfigure(ctx, d, provider) } - +{{ if ne $.Compiler "terraformgoogleconversion-codegen"}} transport_tpg.ConfigureDCLProvider(provider) - +{{ end }} return provider } - +{{ if ne $.Compiler 
"terraformgoogleconversion-codegen"}} func DatasourceMap() map[string]*schema.Resource { datasourceMap, _ := DatasourceMapWithErrors() return datasourceMap @@ -231,6 +232,7 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { handwrittenIAMDatasources, ) } +{{- end }} func ResourceMap() map[string]*schema.Resource { resourceMap, _ := ResourceMapWithErrors() @@ -238,12 +240,18 @@ func ResourceMap() map[string]*schema.Resource { } func ResourceMapWithErrors() (map[string]*schema.Resource, error) { +{{- if ne $.Compiler "terraformgoogleconversion-codegen"}} return mergeResourceMaps( generatedResources, handwrittenResources, handwrittenIAMResources, dclResources, ) +{{- else }} + return mergeResourceMaps( + handwrittenTfplan2caiResources, + ) +{{- end }} } func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Provider) (interface{}, diag.Diagnostics) { @@ -319,10 +327,11 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if v, ok := d.GetOk("universe_domain"); ok { config.UniverseDomain = v.(string) } - +{{ if ne $.Compiler "terraformgoogleconversion-codegen"}} // Configure DCL basePath transport_tpg.ProviderDCLConfigure(d, &config) - +{{- end }} + // Replace hostname by the universe_domain field. if config.UniverseDomain != "" && config.UniverseDomain != "googleapis.com" { for key, basePath := range transport_tpg.DefaultBasePaths { @@ -334,7 +343,9 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if err != nil { return nil, diag.FromErr(err) } +{{- if ne $.Compiler "terraformgoogleconversion-codegen"}} transport_tpg.HandleDCLCustomEndpointDefaults(d) +{{- end }} // Given that impersonate_service_account is a secondary auth method, it has // no conflicts to worry about. We pull the env var in a DefaultFunc. 
diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index ed7294954e55..aeeeeb21c51d 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -231,7 +231,9 @@ func ExpandExternalCredentialsConfig(v interface{}) (*ExternalCredentials, error // Config is the configuration structure used to instantiate the Google // provider. type Config struct { +{{- if ne $.Compiler "terraformgoogleconversion-codegen"}} DCLConfig +{{- end }} AccessToken string Credentials string ExternalCredentials *ExternalCredentials From d5806860d2b13ff75afd5cdb360f1529b84b1986 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 8 May 2025 16:09:25 -0700 Subject: [PATCH 087/884] Update netapp tests to use bootstrapped networks (#13877) --- mmv1/products/netapp/StoragePool.yaml | 9 +++ .../examples/Storage_pool_create.tf.tmpl | 33 +-------- .../examples/Storage_pool_create_doc.tf.tmpl | 43 ++++++++++++ .../resource_netapp_storage_pool_test.go.tmpl | 67 +++---------------- 4 files changed, 64 insertions(+), 88 deletions(-) create mode 100644 mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index be122b485c1f..974cb1dab83b 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -71,11 +71,20 @@ custom_code: exclude_sweeper: true examples: - name: 'Storage_pool_create' + primary_resource_id: 'test_pool' + vars: + pool_name: 'test-pool' + network_name: 'test-network' + test_vars_overrides: + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + exclude_docs: true + - name: 'Storage_pool_create_doc' primary_resource_id: 'test_pool' vars: pool_name: 'test-pool' 
network_name: 'test-network' global_name: 'test-address' + exclude_test: true parameters: - name: 'location' type: String diff --git a/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl b/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl index 1d219ce86ea6..d358ac895c99 100644 --- a/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl +++ b/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl @@ -1,36 +1,7 @@ -# Create a network or use datasource to reference existing network -resource "google_compute_network" "peering_network" { +data "google_compute_network" "default" { name = "{{index $.Vars "network_name"}}" } -# Reserve a CIDR for NetApp Volumes to use -# When using shared-VPCs, this resource needs to be created in host project -resource "google_compute_global_address" "private_ip_alloc" { - name = "{{index $.Vars "global_name"}}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.peering_network.id -} - -# Create a Private Service Access connection -# When using shared-VPCs, this resource needs to be created in host project -resource "google_service_networking_connection" "default" { - network = google_compute_network.peering_network.id - service = "netapp.servicenetworking.goog" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] -} - -# Modify the PSA Connection to allow import/export of custom routes -# When using shared-VPCs, this resource needs to be created in host project -resource "google_compute_network_peering_routes_config" "route_updates" { - peering = google_service_networking_connection.default.peering - network = google_compute_network.peering_network.name - - import_custom_routes = true - export_custom_routes = true -} - # Create a storage pool # Create this resource in the project which is expected to own the volumes resource "google_netapp_storage_pool" "{{$.PrimaryResourceId}}" { @@ -39,5 +10,5 @@ resource 
"google_netapp_storage_pool" "{{$.PrimaryResourceId}}" { location = "us-central1" service_level = "PREMIUM" capacity_gib = "2048" - network = google_compute_network.peering_network.id + network = data.google_compute_network.default.id } diff --git a/mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl b/mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl new file mode 100644 index 000000000000..1d219ce86ea6 --- /dev/null +++ b/mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl @@ -0,0 +1,43 @@ +# Create a network or use datasource to reference existing network +resource "google_compute_network" "peering_network" { + name = "{{index $.Vars "network_name"}}" +} + +# Reserve a CIDR for NetApp Volumes to use +# When using shared-VPCs, this resource needs to be created in host project +resource "google_compute_global_address" "private_ip_alloc" { + name = "{{index $.Vars "global_name"}}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.peering_network.id +} + +# Create a Private Service Access connection +# When using shared-VPCs, this resource needs to be created in host project +resource "google_service_networking_connection" "default" { + network = google_compute_network.peering_network.id + service = "netapp.servicenetworking.goog" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +} + +# Modify the PSA Connection to allow import/export of custom routes +# When using shared-VPCs, this resource needs to be created in host project +resource "google_compute_network_peering_routes_config" "route_updates" { + peering = google_service_networking_connection.default.peering + network = google_compute_network.peering_network.name + + import_custom_routes = true + export_custom_routes = true +} + +# Create a storage pool +# Create this resource in the project which is expected to own the volumes +resource "google_netapp_storage_pool" 
"{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "pool_name"}}" + # project = + location = "us-central1" + service_level = "PREMIUM" + capacity_gib = "2048" + network = google_compute_network.peering_network.id +} diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index 9adcf1f1dc5e..617f33a67538 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -12,6 +12,7 @@ func TestAccNetappStoragePool_storagePoolCreateExample_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -44,24 +45,8 @@ func TestAccNetappStoragePool_storagePoolCreateExample_update(t *testing.T) { func testAccNetappStoragePool_storagePoolCreateExample_full(context map[string]interface{}) string { return acctest.Nprintf(` -resource "google_compute_network" "peering_network" { - name = "tf-test-network%{random_suffix}" -} - -# Create an IP address -resource "google_compute_global_address" "private_ip_alloc" { - name = "tf-test-address%{random_suffix}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.peering_network.id -} - -# Create a private connection -resource "google_service_networking_connection" "default" { - network = google_compute_network.peering_network.id - service = "netapp.servicenetworking.goog" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +data "google_compute_network" "default" { + name = "%{network_name}" } resource "google_netapp_storage_pool" "test_pool" { @@ -69,7 +54,7 @@ resource 
"google_netapp_storage_pool" "test_pool" { location = "us-central1" service_level = "PREMIUM" capacity_gib = "2048" - network = google_compute_network.peering_network.id + network = data.google_compute_network.default.id active_directory = "" description = "this is a test description" kms_config = "" @@ -86,24 +71,8 @@ resource "google_netapp_storage_pool" "test_pool" { func testAccNetappStoragePool_storagePoolCreateExample_update(context map[string]interface{}) string { return acctest.Nprintf(` -resource "google_compute_network" "peering_network" { - name = "tf-test-network%{random_suffix}" -} - -# Create an IP address -resource "google_compute_global_address" "private_ip_alloc" { - name = "tf-test-address%{random_suffix}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.peering_network.id -} - -# Create a private connection -resource "google_service_networking_connection" "default" { - network = google_compute_network.peering_network.id - service = "netapp.servicenetworking.goog" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +data "google_compute_network" "default" { + name = "%{network_name}" } resource "google_netapp_storage_pool" "test_pool" { @@ -111,7 +80,7 @@ resource "google_netapp_storage_pool" "test_pool" { location = "us-central1" service_level = "PREMIUM" capacity_gib = "4096" - network = google_compute_network.peering_network.id + network = data.google_compute_network.default.id active_directory = "" description = "this is test" kms_config = "" @@ -155,24 +124,8 @@ func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testi func testAccNetappStoragePool_autoTieredStoragePoolCreateExample_full(context map[string]interface{}) string { return acctest.Nprintf(` -resource "google_compute_network" "peering_network" { - name = "tf-test-network%{random_suffix}" -} - -# Create an IP address -resource "google_compute_global_address" 
"private_ip_alloc" { - name = "tf-test-address%{random_suffix}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.peering_network.id -} - -# Create a private connection -resource "google_service_networking_connection" "default" { - network = google_compute_network.peering_network.id - service = "netapp.servicenetworking.goog" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +data "google_compute_network" "default" { + name = "%{network_name}" } resource "google_netapp_storage_pool" "test_pool" { @@ -180,7 +133,7 @@ resource "google_netapp_storage_pool" "test_pool" { location = "us-east4" service_level = "PREMIUM" capacity_gib = "2048" - network = google_compute_network.peering_network.id + network = data.google_compute_network.default.id active_directory = "" description = "this is a test description" kms_config = "" From 29d5f0af8213997b4af5361dcd6936b40ffcd90f Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 9 May 2025 10:30:11 -0700 Subject: [PATCH 088/884] tgc-revival: Remove the dependency of TPGB (#13787) --- mmv1/provider/provider.go | 2 + mmv1/provider/terraform_tgc_next.go | 4 +- .../cai2hcl/resource_converters.go.tmpl | 2 +- .../services/compute/compute_instance.go | 2 +- .../compute/compute_instance_helpers.go | 2 +- .../pkg/cai2hcl/converters/utils/utils.go | 2 +- .../cai2hcl/converters/utils/utils_test.go | 30 ++-- .../ancestrymanager/ancestrymanager.go | 4 +- .../ancestrymanager/ancestryutil.go | 4 +- .../pkg/tfplan2cai/converters/cai/cai.go | 4 +- .../converters/cai/resource_converter.go | 4 +- .../tfplan2cai/converters/convert_resource.go | 2 +- .../services/compute/compute_instance.go | 6 +- .../services/resourcemanager/project.go | 6 +- .../fake_resource_data_with_meta_test.go | 151 ++++++++---------- .../resolvers/default_pre_resolver.go | 2 +- .../pkg/tfplan2cai/transport/getconfig.go | 2 +- 17 files changed, 103 insertions(+), 126 deletions(-) diff 
--git a/mmv1/provider/provider.go b/mmv1/provider/provider.go index fab8e0a52161..89f1fe550fc2 100644 --- a/mmv1/provider/provider.go +++ b/mmv1/provider/provider.go @@ -17,10 +17,12 @@ type Provider interface { const TERRAFORM_PROVIDER_GA = "github.com/hashicorp/terraform-provider-google" const TERRAFORM_PROVIDER_BETA = "github.com/hashicorp/terraform-provider-google-beta" +const TGC_PROVIDER = "github.com/GoogleCloudPlatform/terraform-google-conversion/v6" const TERRAFORM_PROVIDER_PRIVATE = "internal/terraform-next" const RESOURCE_DIRECTORY_GA = "google" const RESOURCE_DIRECTORY_BETA = "google-beta" const RESOURCE_DIRECTORY_PRIVATE = "google-private" +const RESOURCE_DIRECTORY_TGC = "pkg" // # TODO(nelsonjr): Review all object interfaces and move to private methods // # that should not be exposed outside the object hierarchy. diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 6093454d3eb2..6d48de141eb7 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -216,8 +216,8 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target // replace google to google-beta gaImportPath := ImportPathFromVersion("ga") - sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(TERRAFORM_PROVIDER_BETA+"/"+RESOURCE_DIRECTORY_BETA), -1) - sourceByte = bytes.Replace(sourceByte, []byte(TERRAFORM_PROVIDER_GA+"/version"), []byte(TERRAFORM_PROVIDER_BETA+"/version"), -1) + sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(TGC_PROVIDER+"/"+RESOURCE_DIRECTORY_TGC), -1) + sourceByte = bytes.Replace(sourceByte, []byte(TERRAFORM_PROVIDER_GA+"/version"), []byte(TGC_PROVIDER+"/"+RESOURCE_DIRECTORY_TGC+"/version"), -1) err = os.WriteFile(targetFile, sourceByte, 0644) if err != nil { diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index 72acfe3809ae..06160729863d 100644 --- 
a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -33,7 +33,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/resourcemanager" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - tpg_provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" + tpg_provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" ) var provider *schema.Provider = tpg_provider.Provider() diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go index 789f32bafc45..e5b74582c8a9 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go @@ -8,8 +8,8 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" compute "google.golang.org/api/compute/v0.beta" ) diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go index 2631e0c181cf..e04a7ca608f9 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go @@ -5,7 +5,7 @@ import ( "strings" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" compute "google.golang.org/api/compute/v0.beta" ) diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go index 4496cf8179a6..8c84fe583098 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go @@ -5,9 +5,9 @@ import ( "fmt" "strings" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" hashicorpcty "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "github.com/zclconf/go-cty/cty" ctyjson "github.com/zclconf/go-cty/cty/json" ) diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go index f16820860032..d83dfcce47e3 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go @@ -5,14 +5,14 @@ import ( "github.com/stretchr/testify/assert" + tpg_provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - tpg_provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" "github.com/zclconf/go-cty/cty" ) func TestSubsetOfFieldsMapsToCtyValue(t *testing.T) { - schema := 
createSchema("google_compute_forwarding_rule") + schema := createSchema("google_compute_instance") outputMap := map[string]interface{}{ "name": "forwarding-rule-1", @@ -25,7 +25,7 @@ func TestSubsetOfFieldsMapsToCtyValue(t *testing.T) { } func TestWrongFieldTypeBreaksConversion(t *testing.T) { - resourceSchema := createSchema("google_compute_backend_service") + resourceSchema := createSchema("google_compute_instance") outputMap := map[string]interface{}{ "name": "fr-1", "description": []string{"unknownValue"}, // string is required, not array. @@ -38,7 +38,7 @@ func TestWrongFieldTypeBreaksConversion(t *testing.T) { } func TestNilValue(t *testing.T) { - resourceSchema := createSchema("google_compute_forwarding_rule") + resourceSchema := createSchema("google_compute_instance") outputMap := map[string]interface{}{ "name": "fr-1", "description": nil, @@ -52,7 +52,7 @@ func TestNilValue(t *testing.T) { } func TestNilValueInRequiredField(t *testing.T) { - resourceSchema := createSchema("google_compute_forwarding_rule") + resourceSchema := createSchema("google_compute_instance") outputMap := map[string]interface{}{ "name": nil, } @@ -65,21 +65,21 @@ func TestNilValueInRequiredField(t *testing.T) { } func TestFieldsWithTypeSlice(t *testing.T) { - resourceSchema := createSchema("google_compute_forwarding_rule") + resourceSchema := createSchema("google_compute_instance") outputMap := map[string]interface{}{ - "name": "fr-1", - "ports": []string{"80"}, + "name": "fr-1", + "resource_policies": []string{"test"}, } val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) - assert.Equal(t, []cty.Value{cty.StringVal("80")}, val.GetAttr("ports").AsValueSlice()) + assert.Equal(t, []cty.Value{cty.StringVal("test")}, val.GetAttr("resource_policies").AsValueSlice()) } func TestMissingFieldDoesNotBreakConversionConversion(t *testing.T) { - resourceSchema := createSchema("google_compute_forwarding_rule") + resourceSchema := createSchema("google_compute_instance") 
outputMap := map[string]interface{}{ "name": "fr-1", "unknownField": "unknownValue", @@ -94,16 +94,16 @@ func TestMissingFieldDoesNotBreakConversionConversion(t *testing.T) { } func TestFieldWithTypeSchemaSet(t *testing.T) { - resourceSchema := createSchema("google_compute_forwarding_rule") + resourceSchema := createSchema("google_compute_instance") outputMap := map[string]interface{}{ - "name": "fr-1", - "ports": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface([]string{"80"})), + "name": "fr-1", + "resource_policies": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface([]string{"test"})), } val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) - assert.Equal(t, []cty.Value{cty.StringVal("80")}, val.GetAttr("ports").AsValueSlice()) + assert.Equal(t, []cty.Value{cty.StringVal("test")}, val.GetAttr("resource_policies").AsValueSlice()) } func TestFieldWithTypeSchemaListAndNestedObject(t *testing.T) { diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go index 795c0a2059d0..3d21fabf4352 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go @@ -13,8 +13,8 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "go.uber.org/zap" ) diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go index 
0483cb414263..65e251c47e07 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go @@ -4,9 +4,9 @@ import ( "fmt" "strings" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "github.com/hashicorp/errwrap" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "google.golang.org/api/googleapi" ) diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go index d777675eff86..7c17829f56c3 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go @@ -3,8 +3,8 @@ package cai import ( "regexp" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" ) // AssetName templates an asset.name by looking up and replacing all instances diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go index de0ce5d93edd..abe5e18fbe14 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go @@ -3,8 +3,8 @@ package cai import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - 
"github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" ) type ConvertFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go index d79ae0931fe2..5a10cf1710c5 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go @@ -8,7 +8,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/models" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "github.com/pkg/errors" "go.uber.org/zap" diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go index 1eaff02a28c0..3ec7d7d3d4cf 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go @@ -15,9 +15,9 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" - transport_tpg 
"github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/verify" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" ) const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go index 32302a04a068..af57aa05dd0d 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go @@ -11,9 +11,9 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/verify" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" "google.golang.org/api/cloudbilling/v1" "google.golang.org/api/cloudresourcemanager/v1" diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go index fc6bada7d120..d48e3e96abea 100644 --- 
a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go @@ -16,7 +16,7 @@ package models import ( "testing" - provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" + provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" "github.com/stretchr/testify/assert" ) @@ -24,38 +24,34 @@ func TestFakeResourceDataWithMeta_kind(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_compute_disk.test-disk", + "google_project.test-project", ) - assert.Equal(t, "google_compute_disk", d.Kind()) + assert.Equal(t, "google_project", d.Kind()) } func TestFakeResourceDataWithMeta_id(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_compute_disk.test-disk", + "google_project.test-project", ) assert.Equal(t, d.Id(), "") } @@ -64,41 +60,37 @@ func TestFakeResourceDataWithMeta_get(t *testing.T) { p := provider.Provider() 
values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_compute_disk.test-disk", + "google_project.test-project", ) - assert.Equal(t, d.Get("name"), "test-disk") + assert.Equal(t, d.Get("name"), "test-project") } func TestFakeResourceDataWithMeta_getOkOk(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_compute_disk.test-disk", + "google_project.test-project", ) res, ok := d.GetOk("name") - assert.Equal(t, "test-disk", res) + assert.Equal(t, "test-project", res) assert.True(t, ok) } @@ -106,18 +98,16 @@ func TestFakeResourceDataWithMeta_getOkNonexistentField(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + 
"google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_compute_disk.test-disk", + "google_project.test-project", ) res, ok := d.GetOk("incorrect") assert.Nil(t, res) @@ -128,20 +118,19 @@ func TestFakeResourceDataWithMeta_getOkEmptyString(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", + "billing_account": "", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_compute_disk.test-disk", + "google_project.test-project", ) - res, ok := d.GetOk("image") + res, ok := d.GetOk("billing_account") assert.Equal(t, "", res) assert.False(t, ok) } @@ -150,28 +139,18 @@ func TestFakeResourceDataWithMeta_getOkUnsetString(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "my-node-pool", - "location": "us-central1", - "cluster": "projects/my-project-id/global/clusters/my-gke-cluster", - "config": map[string]interface{}{ - "machineType": "n1-standard-1", - "metadata": map[string]string{ - "disable-legacy-endpoints": "true", - }, - "oauthScopes": []string{ - "https://www.googleapis.com/auth/cloud-platform", - }, - "preemptible": true, - }, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_container_cluster", - p.ResourcesMap["google_container_cluster"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, false, - "google_container_cluster.my-node-pool", + "google_project.test-project", ) - res, ok := d.GetOk("subnetwork") + res, ok := d.GetOk("billing_account") assert.Equal(t, "", res) assert.False(t, ok) } @@ 
-353,18 +332,16 @@ func TestFakeResourceDataWithMeta_isDelelted(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, true, - "google_compute_disk.test-disk", + "google_project.test-project", ) assert.Equal(t, true, d.IsDeleted()) } @@ -373,18 +350,16 @@ func TestFakeResourceDataWithMeta_address(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-disk", - "type": "pd-ssd", - "zone": "us-central1-a", - "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", - "physical_block_size_bytes": 4096, + "name": "test-project", + "org_id": "529579013760", + "project_id": "tf-test-872899419570852129", } d := NewFakeResourceDataWithMeta( - "google_compute_disk", - p.ResourcesMap["google_compute_disk"].Schema, + "google_project", + p.ResourcesMap["google_project"].Schema, values, - true, - "google_compute_disk.test-disk", + false, + "google_project.test-project", ) - assert.Equal(t, "google_compute_disk.test-disk", d.Address()) + assert.Equal(t, "google_project.test-project", d.Address()) } diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go index 4c2aab4f0937..047c3cd89154 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go @@ -11,8 +11,8 @@ import ( 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/tfplan" + provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" ) var ErrDuplicateAsset = errors.New("duplicate asset") diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go index c1b938e64280..87f29fa2a2f8 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go @@ -5,7 +5,7 @@ import ( "github.com/pkg/errors" - transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" ) func NewConfig(ctx context.Context, project, zone, region string, offline bool, userAgent string) (*transport_tpg.Config, error) { From 338bd5e8b23d726c6d2f1c1df0478ecc1e71e805 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 9 May 2025 12:20:47 -0700 Subject: [PATCH 089/884] Documented how to troubleshoot inconsistent dependency lock file errors (#13895) --- docs/content/test/run-tests.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/docs/content/test/run-tests.md b/docs/content/test/run-tests.md index a85d87a40cab..19c01fa267bf 100644 --- a/docs/content/test/run-tests.md +++ b/docs/content/test/run-tests.md @@ -147,6 +147,32 @@ This indicates that after an apply to create or update a resource, the resource - The URL for reads was built incorrectly. The exact fix will depend on why this is happening. 
Run the test with the `TF_LOG=DEBUG` environment variable and check whether the read URL matches what you expect. - There is a call to unset the resource's id (`d.SetId("")`) somewhere it shouldn't be. The fix is to remove that extraneous call. This is rare. +### Error: Inconsistent dependency lock file + +Tests require all of the providers they use (except the one actually being tested) to be explicitly stated. This error generally means one of a few things: + +- This is a beta-only test and one of the `google_*` resources in the test doesn't have `provider = google-beta` set + - ```hcl + resource "google_compute_instance" "beta-instance" { + provider = google-beta + # ... + } + ``` + +- This is a GA+beta test and one of the `google_*` resources has `provider = google-beta` set + - `provider = google-beta` can't be set unless the test is beta-only. +- The test relies on an external provider, such as `time`, and that is not explicitly declared + - For MMv1 example-based tests, use [`examples.external_providers`](https://googlecloudplatform.github.io/magic-modules/reference/resource/#examples). + - For Handwritten tests, use TestCase.ExternalProviders: + ```go + acctest.VcrTest(t, resource.TestCase{ + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + // ... + } + ``` + ## Optional: Test with different `terraform` versions Tests will use whatever version of the `terraform` binary is found on your `PATH`. 
If you are testing a change that you know only impacts certain `terraform` versions, follow these steps: From c8c9aa77501783960e9b845950279bc199b8de40 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 9 May 2025 12:21:29 -0700 Subject: [PATCH 090/884] Document more beta-only handwritten create test steps (#13894) --- docs/content/test/test.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/content/test/test.md b/docs/content/test/test.md index d08b12236846..4a51917c3a5d 100644 --- a/docs/content/test/test.md +++ b/docs/content/test/test.md @@ -143,6 +143,8 @@ This section assumes you've used the [Add a resource]({{< ref "/develop/add-reso - If beta-only fields are being tested, do the following: - Change the file suffix to `.go.tmpl` - Wrap each beta-only test in a separate version guard: `{{- if ne $.TargetVersionName "ga" -}}...{{- else }}...{{- end }}` + - In each beta-only test, ensure that the TestCase sets `ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t)` + - In each beta-only test, ensure that all Terraform resources in all configs have `provider = google-beta` set {{< /tab >}} {{< /tabs >}} From 89fddd6dc59be68010bb2f3c0c3b0c01f0c4a637 Mon Sep 17 00:00:00 2001 From: Alex Morozov Date: Fri, 9 May 2025 17:55:16 -0400 Subject: [PATCH 091/884] Replace a hardcoded Kubernetes version with a current stable one. 
(#13892) Co-authored-by: Alex Morozov --- .../resource_container_cluster_test.go.tmpl | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 9d503ddf0f67..6cc3f50082d1 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -7735,7 +7735,7 @@ func TestAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(t *te CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName, "us-central1-a"), + Config: testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName), }, { ResourceName: "google_container_cluster.with_private_flexible_cluster", @@ -7747,8 +7747,12 @@ func TestAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(t *te }) } -func testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName, location string) string { +func testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName string) string { return fmt.Sprintf(` +data "google_container_engine_versions" "uscentral1a" { + location = "us-central1-a" +} + resource "google_compute_network" "container_network" { name = "%s" auto_create_subnetworks = false @@ -7762,8 +7766,8 @@ resource "google_compute_subnetwork" "container_subnetwork" { resource "google_container_cluster" "with_private_flexible_cluster" { name = "%s" - location = "%s" - min_master_version = "1.29" + location = "us-central1-a" + min_master_version = 
data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] initial_node_count = 1 networking_mode = "VPC_NATIVE" @@ -7776,7 +7780,7 @@ resource "google_container_cluster" "with_private_flexible_cluster" { } deletion_protection = false } -`, containerNetName, clusterName, location) +`, containerNetName, clusterName) } func TestAccContainerCluster_withEnablePrivateEndpointToggle(t *testing.T) { From 0d6c3bfec8993b50060a61363c50307f216cd88d Mon Sep 17 00:00:00 2001 From: ArtoriaRen Date: Fri, 9 May 2025 17:56:26 -0400 Subject: [PATCH 092/884] =?UTF-8?q?Add=20`Flow=20>=20eventHandlers=20>=20t?= =?UTF-8?q?riggerFulfillment=20>=20enableGenerativeFall=E2=80=A6=20(#13897?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mmv1/products/dialogflowcx/Flow.yaml | 6 ++++++ .../terraform/examples/dialogflowcx_flow_full.tf.tmpl | 2 ++ .../dialogflowcx/resource_dialogflowcx_flow_test.go | 2 ++ 3 files changed, 10 insertions(+) diff --git a/mmv1/products/dialogflowcx/Flow.yaml b/mmv1/products/dialogflowcx/Flow.yaml index 67933f2483da..caa33947c7b2 100644 --- a/mmv1/products/dialogflowcx/Flow.yaml +++ b/mmv1/products/dialogflowcx/Flow.yaml @@ -501,6 +501,12 @@ properties: custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' validation: function: 'validation.StringIsJSON' + - name: 'enableGenerativeFallback' + type: Boolean + description: | + If the flag is true, the agent will utilize LLM to generate a text response. + If LLM generation fails, the defined responses in the fulfillment will be respected. + This flag is only useful for fulfillments associated with no-match event handlers. 
- name: 'targetPage' type: String description: | diff --git a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl index 9eae630e10ea..455507b5be80 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl @@ -164,6 +164,8 @@ resource "google_dialogflow_cx_flow" "{{$.PrimaryResourceId}}" { }, ]) } + + enable_generative_fallback = true } } diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go index cf3ede5b12fb..11db4db55773 100644 --- a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go +++ b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go @@ -234,6 +234,8 @@ func testAccDialogflowCXFlow_full(context map[string]interface{}) string { }, ]) } + + enable_generative_fallback = true } } From 5771f9e45a227b103ec13198e948ffe3648fd777 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Fri, 9 May 2025 14:58:26 -0700 Subject: [PATCH 093/884] Revert "Adding support for Dataplex Entry resource." 
(#13888) --- mmv1/products/dataplex/Entry.yaml | 268 ------ .../constants/dataplex_entry.go.tmpl | 124 --- .../dataplex_entry_aspects.go.tmpl | 44 - .../terraform/decoders/dataplex_entry.go.tmpl | 21 - .../terraform/encoders/dataplex_entry.go.tmpl | 12 - .../examples/dataplex_entry_basic.tf.tmpl | 20 - .../examples/dataplex_entry_full.tf.tmpl | 133 --- .../terraform/pre_read/dataplex_entry.go.tmpl | 4 - .../pre_update/dataplex_entry.go.tmpl | 28 - .../resource_dataplex_entry_meta.yaml | 36 - .../dataplex/resource_dataplex_entry_test.go | 769 ------------------ 11 files changed, 1459 deletions(-) delete mode 100644 mmv1/products/dataplex/Entry.yaml delete mode 100644 mmv1/templates/terraform/constants/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl delete mode 100644 mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_meta.yaml delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_test.go diff --git a/mmv1/products/dataplex/Entry.yaml b/mmv1/products/dataplex/Entry.yaml deleted file mode 100644 index 3c53e0de6387..000000000000 --- a/mmv1/products/dataplex/Entry.yaml +++ /dev/null @@ -1,268 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Entry' -description: | - An entry represents a data asset that you capture metadata for. Every entry is an instance of an entry type. - Each operation on aspects for an entry needs to comply with the required aspects of its entry type. - For example, when you create an entry, you must provide values for all the aspect types defined by the entry type. - You can't delete those aspects for an entry that are marked as required in the entry type. - -references: - guides: - 'Manage entries and ingest custom sources': 'https://cloud.google.com/dataplex/docs/ingest-custom-sources' - api: 'https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups.entries' - -base_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' -self_link: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries?entryId={{entry_id}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' - -custom_code: - constants: templates/terraform/constants/dataplex_entry.go.tmpl - decoder: templates/terraform/decoders/dataplex_entry.go.tmpl - encoder: templates/terraform/encoders/dataplex_entry.go.tmpl - pre_read: templates/terraform/pre_read/dataplex_entry.go.tmpl - pre_update: templates/terraform/pre_update/dataplex_entry.go.tmpl - -timeouts: - insert_minutes: 5 - 
update_minutes: 5 - delete_minutes: 5 - -examples: - - name: 'dataplex_entry_basic' - primary_resource_id: 'test_basic' - primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' - vars: - entry_id: 'entry-basic' - entry_group_name: 'entry-group-basic' - aspect_type_name: "aspect-type-basic" - entry_type_name: "entry-type-basic" - test_env_vars: - project_number: 'PROJECT_NUMBER' - - name: 'dataplex_entry_full' - primary_resource_id: 'test_entry_full' - primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' - ignore_read_extra: - - 'aspects' - vars: - entry_id: 'entry-full' - entry_group_name: 'entry-group-full' - aspect_type_name: "aspect-type-full" - entry_type_name: "entry-type-full" - test_env_vars: - project_number: 'PROJECT_NUMBER' - -parameters: - - name: 'location' - type: String - url_param_only: true - immutable: true - description: | - The location where entry will be created. - - - name: 'entryGroupId' - type: String - url_param_only: true - immutable: true - description: | - The entry group id of the entry group the entry will be created in. - - - name: 'entryId' - type: String - url_param_only: true - immutable: true - description: | - The entry id of the entry. - -properties: - - name: 'name' - type: String - output: true - immutable: true - description: | - The relative resource name of the entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. - - - name: 'entryType' - type: String - required: true - immutable: true - validation: - function: ProjectNumberValidation - description: | - The relative resource name of the entry type that was used to create this entry, in the format projects/{project_number}/locations/{locationId}/entryTypes/{entryTypeId}. - - - name: 'createTime' - type: Time - output: true - description: | - The time when the Entry was created in Dataplex. 
- - - name: 'updateTime' - type: Time - output: true - description: | - The time when the entry was last updated in Dataplex. - - - name: 'aspects' - type: Array - custom_flatten: 'templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl' - description: | - The aspects that are attached to the entry. - - item_type: - type: NestedObject - properties: - - name: "aspectKey" - type: String - required: true - validation: - function: AspectProjectNumberValidation - description: | - Depending on how the aspect is attached to the entry, the format of the aspect key can be one of the following: - - If the aspect is attached directly to the entry: {project_number}.{locationId}.{aspectTypeId} - If the aspect is attached to an entry's path: {project_number}.{locationId}.{aspectTypeId}@{path} - - - name: "aspectValue" - type: NestedObject - properties: - - name: "aspectType" - type: String - output: true - description: | - The resource name of the type used to create this Aspect. - - - name: "path" - type: String - output: true - description: | - The path in the entry under which the aspect is attached. - - - name: "createTime" - type: Time - output: true - description: | - The time when the Aspect was created. - - - name: "updateTime" - type: Time - output: true - description: | - The time when the Aspect was last modified. - - - name: "data" - type: String - required: true - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - description: | - The content of the aspect in JSON form, according to its aspect type schema. The maximum size of the field is 120KB (encoded as UTF-8). 
- - - name: 'parentEntry' - type: String - immutable: true - description: | - The resource name of the parent entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. - - - name: "fullyQualifiedName" - type: String - description: | - A name for the entry that can be referenced by an external system. For more information, see https://cloud.google.com/dataplex/docs/fully-qualified-names. - The maximum size of the field is 4000 characters. - - - name: "entrySource" - type: NestedObject - default_from_api: true - properties: - - name: "resource" - type: String - description: | - The name of the resource in the source system. Maximum length is 4,000 characters. - - - name: "system" - type: String - description: | - The name of the source system. Maximum length is 64 characters. - - - name: "platform" - type: String - description: | - The platform containing the source system. Maximum length is 64 characters. - - - name: "displayName" - type: String - description: | - A user-friendly display name. Maximum length is 500 characters. - - - name: "description" - type: String - description: | - A description of the data resource. Maximum length is 2,000 characters. - - - name: "labels" - type: KeyValuePairs - description: | - User-defined labels. The maximum size of keys and values is 128 characters each. - An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. - - - name: "ancestors" - type: Array - immutable: true - item_type: - type: NestedObject - properties: - - name: "name" - type: String - description: | - The name of the ancestor resource. - - - name: "type" - type: String - description: | - The type of the ancestor resource. - - description: | - The entries representing the ancestors of the data resource in the source system. 
- - - name: 'createTime' - type: Time - validation: - function: 'validation.IsRFC3339Time' - description: | - The time when the resource was created in the source system. - - - name: 'updateTime' - type: Time - validation: - function: 'validation.IsRFC3339Time' - description: | - The time when the resource was last updated in the source system. - If the entry exists in the system and its EntrySource has updateTime populated, - further updates to the EntrySource of the entry must provide incremental updates to its updateTime. - - - name: 'location' - type: String - output: true - description: |- - Location of the resource in the source system. You can search the entry by this location. - By default, this should match the location of the entry group containing this entry. - A different value allows capturing the source location for data external to Google Cloud. diff --git a/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl b/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl deleted file mode 100644 index 9d996fa2c254..000000000000 --- a/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl +++ /dev/null @@ -1,124 +0,0 @@ -// GetEntry supports up to 100 aspects. Therefore we set a threshold at 99. -const maxAspectNumber = 99 - -// NumberOfAspectsValidation checks if the number of aspects on an entry exceeds certain threshold. -func NumberOfAspectsValidation(i interface{}, k string) (warnings []string, errors []error) { - s, isSlice := i.([]interface{}) - m, isMap := i.(map[string]interface{}) - - if !isSlice && !isMap { - errors = append(errors, fmt.Errorf("expected type of field %q to be array, but got %T", k, i)) - return warnings, errors - } - - if len(s)+len(m) > maxAspectNumber { - errors = append(errors, fmt.Errorf( - "field %q has an invalid content: %q. 
The maximal number of aspects is 99.", - k, i, - )) - } - - return warnings, errors -} - -// ProjectNumberValidation checks if the input string conforms to the pattern: -// "projects//" -func ProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - - if !ok { - errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) - return warnings, errors - } - - var projectNumberRegex = regexp.MustCompile(`^projects\/[1-9]\d*\/.+$`) - if !projectNumberRegex.MatchString(v) { - errors = append(errors, fmt.Errorf( - "field %q has an invalid format: %q. Expected format: 'projects//'. Please note that project IDs are not supported.", - k, v, - )) - } - - return warnings, errors -} - -// ProjectNumberValidation checks if the input string conforms to the pattern: -// "projects//" -func AspectProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) - return warnings, errors - } - - var numberDotAnythingRegex = regexp.MustCompile(`^[1-9]\d*\..+$`) - - if !numberDotAnythingRegex.MatchString(v) { - errors = append(errors, fmt.Errorf( - "field %q has an invalid format: %q. Expected format: '.anything'. 
Please note that project IDs are not supported.", - k, v, - )) - } - - return warnings, errors -} - -func FilterAspects(aspectKeySet map[string]struct{}, res map[string]interface{}) { - if res["aspects"] == nil { - return - } - aspectsMap := res["aspects"].(map[string]interface{}) - for key := range aspectsMap { - if _, keep := aspectKeySet[key]; !keep { - delete(aspectsMap, key) - } - } -} - -func AddAspectsToSet(aspectKeySet map[string]struct{}, aspects interface{}) { - for _, aspectItemRaw := range aspects.([]interface{}) { - aspectMap := aspectItemRaw.(map[string]interface{}) - keyString := aspectMap["aspect_key"].(string) - aspectKeySet[keyString] = struct{}{} - } -} - -// InverseTransformAspects converts the "aspects" map back to a slice of maps, -// re-inserting the "aspectKey". Modifies obj in-place. -func InverseTransformAspects(res map[string]interface{}) { - if res["aspects"] == nil { - return - } - originalMap := res["aspects"].(map[string]interface{}) - newSlice := make([]interface{}, 0, len(originalMap)) - - for key, value := range originalMap { - innerMap := value.(map[string]interface{}) - box := make(map[string]interface{}, 2) - box["aspectKey"] = key - box["aspectValue"] = innerMap - newSlice = append(newSlice, box) - } - res["aspects"] = newSlice -} - -// TransformAspects concisely transforms the "aspects" slice within obj into a map. -// It assumes obj["aspects"] exists and is a []interface{} containing -// map[string]interface{} elements, each with a string "aspectKey". -// Modifies obj in-place. 
-func TransformAspects(obj map[string]interface{}) { - if obj["aspects"] == nil { - return - } - originalSlice := obj["aspects"].([]interface{}) - newMap := make(map[string]interface{}, len(originalSlice)) - for _, item := range originalSlice { - aspectMap := item.(map[string]interface{}) - - key := aspectMap["aspectKey"].(string) - value := aspectMap["aspectValue"].(map[string]interface{}) - - newMap[key] = value - } - obj["aspects"] = newMap -} diff --git a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl deleted file mode 100644 index de8dd922f1f9..000000000000 --- a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl +++ /dev/null @@ -1,44 +0,0 @@ -// This file is a transposition of mmv1/templates/terraform/flatten_property_method.go.tmpl -// Most of the code is copied from there, with the exception of sorting logic. -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - transformed := make([]map[string]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - transformed = append(transformed, map[string]interface{}{ - - {{- range $prop := $.ItemType.UserProperties }} - {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} - "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), - {{- end }} - {{- end }} - }) - } - - configData := []map[string]interface{}{} - - for _, item := range d.Get("aspects").([]interface{}) { - configData = append(configData, item.(map[string]interface{})) - } - - sorted, err := tpgresource.SortMapsByConfigOrder(configData, transformed, "aspect_key") - if err != 
nil { - log.Printf("[ERROR] Could not sort API response value: %s", err) - return v - } - - return sorted -} - -{{- if $.NestedProperties }} - {{- range $prop := $.NestedProperties }} - {{ template "flattenPropertyMethod" $prop -}} - {{- end }} -{{- end }} diff --git a/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl deleted file mode 100644 index c609650d1634..000000000000 --- a/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl +++ /dev/null @@ -1,21 +0,0 @@ -aspects := res["aspects"] -if aspects != nil { - _, errors := NumberOfAspectsValidation(aspects, "aspects") - if len(errors) > 0 { - return nil, errors[0] - } -} - -aspectKeysOfInterest := make(map[string]struct{}) -if d.HasChange("aspects") { - currentAspects, futureAspects := d.GetChange("aspects") - AddAspectsToSet(aspectKeysOfInterest, currentAspects) - AddAspectsToSet(aspectKeysOfInterest, futureAspects) -} else { - AddAspectsToSet(aspectKeysOfInterest, d.Get("aspects")) -} - -FilterAspects(aspectKeysOfInterest, res) -InverseTransformAspects(res) - -return res, nil diff --git a/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl deleted file mode 100644 index 2054ba74f752..000000000000 --- a/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl +++ /dev/null @@ -1,12 +0,0 @@ -// The yaml file does not allow validation for Array fields. -// Therefore we add validation as a part of the encoding proecess. 
-aspects := obj["aspects"] -if aspects != nil { - _, errors := NumberOfAspectsValidation(aspects, "aspects") - if len(errors) > 0 { - return nil, errors[0] - } -} - -TransformAspects(obj) -return obj, nil diff --git a/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl deleted file mode 100644 index 7bed2768935d..000000000000 --- a/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -resource "google_dataplex_entry_group" "{{index $.Vars "entry_group_name"}}" { - entry_group_id = "{{index $.Vars "entry_group_name"}}" - project = "{{index $.TestEnvVars "project_number"}}" - location = "us-central1" -} - -resource "google_dataplex_entry_type" "{{index $.Vars "entry_type_name"}}" { - entry_type_id = "{{index $.Vars "entry_type_name"}}" - project = "{{index $.TestEnvVars "project_number"}}" - location = "us-central1" -} - -resource "google_dataplex_entry" "{{$.PrimaryResourceId}}" { - entry_group_id = google_dataplex_entry_group.{{index $.Vars "entry_group_name"}}.entry_group_id - project = "{{index $.TestEnvVars "project_number"}}" - location = "us-central1" - entry_id = "{{index $.Vars "entry_id"}}" - entry_type = google_dataplex_entry_type.{{index $.Vars "entry_type_name"}}.name -} - diff --git a/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl deleted file mode 100644 index 096100ed7073..000000000000 --- a/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl +++ /dev/null @@ -1,133 +0,0 @@ -resource "google_dataplex_aspect_type" "{{index $.Vars "aspect_type_name"}}-one" { - aspect_type_id = "{{index $.Vars "aspect_type_name"}}-one" - location = "us-central1" - project = "{{index $.TestEnvVars "project_number"}}" - - metadata_template = < 0 - - if hasError != tc.expectError { - t.Fatalf("%s: NumberOfAspectsValidation() error expectation mismatch: got error = %v 
(%v), want error = %v", tc.name, hasError, errors, tc.expectError) - } - - if tc.expectError && tc.errorMsg != "" { - found := false - for _, err := range errors { - if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring - found = true - break - } - } - if !found { - t.Errorf("%s: NumberOfAspectsValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) - } - } - }) - } -} - -func TestProjectNumberValidation(t *testing.T) { - fieldName := "some_field" - testCases := []struct { - name string - input interface{} - expectError bool - errorMsg string - }{ - {"valid input", "projects/1234567890/locations/us-central1", false, ""}, - {"valid input with only number", "projects/987/stuff", false, ""}, - {"valid input with trailing slash content", "projects/1/a/b/c", false, ""}, - {"valid input minimal", "projects/1/a", false, ""}, - {"invalid input trailing slash only", "projects/555/", true, "has an invalid format"}, - {"invalid type - int", 123, true, `to be string, but got int`}, - {"invalid type - nil", nil, true, `to be string, but got `}, - {"invalid format - missing 'projects/' prefix", "12345/locations/us", true, "has an invalid format"}, - {"invalid format - project number starts with 0", "projects/0123/data", true, "has an invalid format"}, - {"invalid format - no project number", "projects//data", true, "has an invalid format"}, - {"invalid format - letters instead of number", "projects/abc/data", true, "has an invalid format"}, - {"invalid format - missing content after number/", "projects/123", true, "has an invalid format"}, - {"invalid format - empty string", "", true, "has an invalid format"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - _, errors := dataplex.ProjectNumberValidation(tc.input, fieldName) - hasError := len(errors) > 0 - - if hasError != tc.expectError { - t.Fatalf("%s: ProjectNumberValidation() error expectation mismatch: got error 
= %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) - } - - if tc.expectError && tc.errorMsg != "" { - found := false - for _, err := range errors { - if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring - found = true - break - } - } - if !found { - t.Errorf("%s: ProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) - } - } - }) - } -} - -func TestAspectProjectNumberValidation(t *testing.T) { - fieldName := "some_field" - testCases := []struct { - name string - input interface{} - expectError bool - errorMsg string - }{ - {"valid input", "1234567890.compute.googleapis.com/Disk", false, ""}, - {"valid input minimal", "1.a", false, ""}, - {"invalid input trailing dot only", "987.", true, "has an invalid format"}, - {"invalid type - int", 456, true, `to be string, but got int`}, - {"invalid type - nil", nil, true, `to be string, but got `}, - {"invalid format - missing number", ".compute.googleapis.com/Disk", true, "has an invalid format"}, - {"invalid format - number starts with 0", "0123.compute.googleapis.com/Disk", true, "has an invalid format"}, - {"invalid format - missing dot", "12345compute", true, "has an invalid format"}, - {"invalid format - letters instead of number", "abc.compute.googleapis.com/Disk", true, "has an invalid format"}, - {"invalid format - missing content after dot", "12345", true, "has an invalid format"}, - {"invalid format - empty string", "", true, "has an invalid format"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - _, errors := dataplex.AspectProjectNumberValidation(tc.input, fieldName) - hasError := len(errors) > 0 - - if hasError != tc.expectError { - t.Fatalf("%s: AspectProjectNumberValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) - } - - if tc.expectError && tc.errorMsg != "" { - found := false - for _, err 
:= range errors { - if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring - found = true - break - } - } - if !found { - t.Errorf("%s: AspectProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) - } - } - }) - } -} - -func TestFilterAspects(t *testing.T) { - testCases := []struct { - name string - aspectKeySet map[string]struct{} - resInput map[string]interface{} - expectedAspects map[string]interface{} - }{ - {"aspects is nil", - map[string]struct{}{"keep": {}}, - map[string]interface{}{"otherKey": "value"}, - nil}, - {"empty aspectKeySet", map[string]struct{}{}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{}}, - {"keep all aspects", map[string]struct{}{"one": {}, "two": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, - {"keep some aspects", map[string]struct{}{"two": {}, "three_not_present": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"two": map[string]interface{}{"data": 2}}}, - {"input aspects map is empty", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": map[string]interface{}{}}, map[string]interface{}{}}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - resCopy := deepCopyMap(tc.resInput) - dataplex.FilterAspects(tc.aspectKeySet, resCopy) - - actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] - - if tc.expectedAspects == nil { - if aspectsKeyExists && actualAspectsRaw != nil { - t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %v", tc.name, 
actualAspectsRaw) - } - return - } - - if !aspectsKeyExists { - t.Fatalf("%s: Expected 'aspects' key to exist, but it was absent", tc.name) - } - - actualAspects, ok := actualAspectsRaw.(map[string]interface{}) - if !ok { - t.Fatalf("%s: Expected 'aspects' to be a map[string]interface{}, but got %T", tc.name, actualAspectsRaw) - } - - if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { - t.Errorf("%s: FilterAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspects, tc.expectedAspects) - } - }) - } -} - -func TestAddAspectsToSet(t *testing.T) { - testCases := []struct { - name string - initialSet map[string]struct{} - aspectsInput interface{} - expectedSet map[string]struct{} - expectPanic bool - }{ - {"add to empty set", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false}, - {"add to existing set", map[string]struct{}{"existing": {}}, []interface{}{map[string]interface{}{"aspect_key": "key1"}}, map[string]struct{}{"existing": {}, "key1": {}}, false}, - {"add duplicate keys", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false}, - {"input aspects is empty slice", map[string]struct{}{"existing": {}}, []interface{}{}, map[string]struct{}{"existing": {}}, false}, - {"input aspects is nil", map[string]struct{}{}, nil, map[string]struct{}{}, true}, - {"input aspects is wrong type", map[string]struct{}{}, "not a slice", map[string]struct{}{}, true}, - {"item in slice is not a map", map[string]struct{}{}, []interface{}{"not a map"}, map[string]struct{}{}, true}, - {"item map missing aspect_key", map[string]struct{}{}, []interface{}{map[string]interface{}{"wrong_key": "key1"}}, map[string]struct{}{}, true}, - {"aspect_key is not a string", 
map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": 123}}, map[string]struct{}{}, true}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - currentSet := make(map[string]struct{}) - for k, v := range tc.initialSet { - currentSet[k] = v - } - - defer func() { - r := recover() - if tc.expectPanic && r == nil { - t.Errorf("%s: Expected a panic, but AddAspectsToSet did not panic", tc.name) - } else if !tc.expectPanic && r != nil { - t.Errorf("%s: AddAspectsToSet panicked unexpectedly: %v", tc.name, r) - } - - if !tc.expectPanic { - if !reflect.DeepEqual(currentSet, tc.expectedSet) { - t.Errorf("%s: AddAspectsToSet() result mismatch:\ngot: %v\nwant: %v", tc.name, currentSet, tc.expectedSet) - } - } - }() - - dataplex.AddAspectsToSet(currentSet, tc.aspectsInput) - }) - } -} - -func sortAspectSlice(slice []interface{}) { - sort.SliceStable(slice, func(i, j int) bool { - mapI, okI := slice[i].(map[string]interface{}) - mapJ, okJ := slice[j].(map[string]interface{}) - if !okI || !okJ { - return false - } // Should not happen in valid tests - - keyI, okI := mapI["aspectKey"].(string) - keyJ, okJ := mapJ["aspectKey"].(string) - if !okI || !okJ { - return false - } // Should not happen in valid tests - - return keyI < keyJ - }) -} - -func TestInverseTransformAspects(t *testing.T) { - testCases := []struct { - name string - resInput map[string]interface{} - expectedAspects []interface{} - expectNilAspects bool - expectPanic bool - }{ - {"aspects is nil", map[string]interface{}{"otherKey": "value"}, nil, true, false}, - {"aspects is empty map", map[string]interface{}{"aspects": map[string]interface{}{}}, []interface{}{}, false, false}, - {"aspects with one entry", map[string]interface{}{"aspects": map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}}, false, false}, - {"aspects with multiple 
entries", map[string]interface{}{"aspects": map[string]interface{}{"key2": map[string]interface{}{"data": "value2"}, "key1": map[string]interface{}{"data": "value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspectValue": map[string]interface{}{"data": "value2"}}}, false, false}, - {"aspects is wrong type (not map)", map[string]interface{}{"aspects": "not a map"}, nil, false, true}, - {"aspect value is not a map", map[string]interface{}{"aspects": map[string]interface{}{"key1": "not a map"}}, nil, false, true}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - resCopy := deepCopyMap(tc.resInput) - - defer func() { - r := recover() - if tc.expectPanic && r == nil { - t.Errorf("%s: Expected a panic, but InverseTransformAspects did not panic", tc.name) - } else if !tc.expectPanic && r != nil { - t.Errorf("%s: InverseTransformAspects panicked unexpectedly: %v", tc.name, r) - } - - if !tc.expectPanic { - actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] - - if tc.expectNilAspects { - if aspectsKeyExists && actualAspectsRaw != nil { - t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %v", tc.name, actualAspectsRaw) - } - return - } - - if !aspectsKeyExists && !tc.expectNilAspects { // Should exist if not expecting nil - t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing", tc.name) - } - - actualAspects, ok := actualAspectsRaw.([]interface{}) - if !ok && !tc.expectNilAspects { // Type check only if we didn't expect nil and key exists - t.Fatalf("%s: Expected 'aspects' to be []interface{}, but got %T", tc.name, actualAspectsRaw) - } - - sortAspectSlice(actualAspects) - sortAspectSlice(tc.expectedAspects) // Ensure expected is sorted if non-nil - - if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { - t.Errorf("%s: InverseTransformAspects() result mismatch:\ngot: %#v\nwant: %#v", 
tc.name, actualAspects, tc.expectedAspects) - } - } - }() - - dataplex.InverseTransformAspects(resCopy) - }) - } -} - -func TestTransformAspects(t *testing.T) { - testCases := []struct { - name string - objInput map[string]interface{} - expectedAspects map[string]interface{} - expectNilAspects bool - expectPanic bool - }{ - {"aspects is nil", map[string]interface{}{"otherKey": "value"}, nil, true, false}, - {"aspects is empty slice", map[string]interface{}{"aspects": []interface{}{}}, map[string]interface{}{}, false, false}, - {"aspects with one item", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}, false, false}, - {"aspects with multiple items", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspectValue": map[string]interface{}{"data": "value2"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}, "key2": map[string]interface{}{"data": "value2"}}, false, false}, - {"aspects with duplicate aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value_first"}}, map[string]interface{}{"aspectKey": "key2", "aspectValue": map[string]interface{}{"data": "value2"}}, map[string]interface{}{"aspectKey": "key1", "aspectValue": map[string]interface{}{"data": "value_last"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value_last"}, "key2": map[string]interface{}{"data": "value2"}}, false, false}, - {"aspects is wrong type (not slice)", map[string]interface{}{"aspects": "not a slice"}, nil, false, true}, - {"item in slice is not a map", map[string]interface{}{"aspects": []interface{}{"not a map"}}, nil, false, true}, - 
{"item map missing aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"wrongKey": "k1", "aspectValue": map[string]interface{}{}}}}, nil, false, true}, - {"aspectKey is not a string", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": 123, "aspectValue": map[string]interface{}{}}}}, nil, false, true}, - {"item map missing aspectValue", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1"}}}, nil, false, true}, - {"aspectValue is not a map", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspectValue": "not a map"}}}, nil, false, true}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - objCopy := deepCopyMap(tc.objInput) - - defer func() { - r := recover() - if tc.expectPanic && r == nil { - t.Errorf("%s: Expected a panic, but TransformAspects did not panic", tc.name) - } else if !tc.expectPanic && r != nil { - t.Errorf("%s: TransformAspects panicked unexpectedly: %v", tc.name, r) - } - - if !tc.expectPanic { - actualAspectsRaw, aspectsKeyExists := objCopy["aspects"] - - if tc.expectNilAspects { - if aspectsKeyExists && actualAspectsRaw != nil { - t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %v", tc.name, actualAspectsRaw) - } - return - } - - if !aspectsKeyExists && !tc.expectNilAspects { - t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing", tc.name) - } - - actualAspects, ok := actualAspectsRaw.(map[string]interface{}) - if !ok && !tc.expectNilAspects { - t.Fatalf("%s: Expected 'aspects' to be map[string]interface{}, but got %T", tc.name, actualAspectsRaw) - } - - if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { - t.Errorf("%s: TransformAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspects, tc.expectedAspects) - } - } - }() - - dataplex.TransformAspects(objCopy) - }) - } -} - -func deepCopyMap(original 
map[string]interface{}) map[string]interface{} { - if original == nil { - return nil - } - copyMap := make(map[string]interface{}, len(original)) - for key, value := range original { - copyMap[key] = deepCopyValue(value) - } - return copyMap -} - -func deepCopySlice(original []interface{}) []interface{} { - if original == nil { - return nil - } - copySlice := make([]interface{}, len(original)) - for i, value := range original { - copySlice[i] = deepCopyValue(value) - } - return copySlice -} - -func deepCopyValue(value interface{}) interface{} { - if value == nil { - return nil - } - switch v := value.(type) { - case map[string]interface{}: - return deepCopyMap(v) - case []interface{}: - return deepCopySlice(v) - default: - return v - } -} - -func TestAccDataplexEntry_dataplexEntryUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_number": envvar.GetTestProjectNumberFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataplexEntryDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataplexEntry_dataplexEntryFullUpdatePreapre(context), - }, - { - ResourceName: "google_dataplex_entry.test_entry_full", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, - }, - { - Config: testAccDataplexEntry_dataplexEntryUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_dataplex_entry.test_entry_full", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_dataplex_entry.test_entry_full", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, - }, - }, - }) -} - 
-func testAccDataplexEntry_dataplexEntryFullUpdatePreapre(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_dataplex_aspect_type" "tf-test-aspect-type-full%{random_suffix}-one" { - aspect_type_id = "tf-test-aspect-type-full%{random_suffix}-one" - location = "us-central1" - project = "%{project_number}" - - metadata_template = < Date: Fri, 9 May 2025 15:50:54 -0700 Subject: [PATCH 094/884] Fixed TestAccComputeDisk_resourceManagerTags (#13900) --- .../services/compute/resource_compute_disk_test.go.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl index d67dfa0bd1f2..9fb63e9d07ef 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl @@ -2193,12 +2193,12 @@ func testAccComputeDisk_resourceManagerTags(context map[string]interface{}) stri return acctest.Nprintf(` resource "google_tags_tag_key" "tag_key" { parent = "projects/%{project_id}" - short_name = "test" + short_name = "test-%{random_suffix}" } resource "google_tags_tag_value" "tag_value" { parent = "tagKeys/${google_tags_tag_key.tag_key.name}" - short_name = "name" + short_name = "name-%{random_suffix}" } resource "google_compute_disk" "foobar" { From 847b20d78a70c14d7e9fc4e4ac4a805bc56c79a7 Mon Sep 17 00:00:00 2001 From: shantstepanian <17996546+shantstepanian@users.noreply.github.com> Date: Fri, 9 May 2025 19:43:37 -0400 Subject: [PATCH 095/884] fix: bigtable - avoid instance recreation due to node_scaling_factor addition in 6.34.0 for existing clusters (#13902) --- .../terraform/services/bigtable/resource_bigtable_instance.go | 2 ++ .../bigtable/resource_bigtable_instance_internal_test.go | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git 
a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index c890e2f3127b..1487ce288b7e 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -535,6 +535,8 @@ func flattenBigtableCluster(c *bigtable.ClusterInfo) map[string]interface{} { nodeScalingFactor = "NodeScalingFactor1X" case bigtable.NodeScalingFactor2X: nodeScalingFactor = "NodeScalingFactor2X" + default: + nodeScalingFactor = "NodeScalingFactor1X" } cluster := map[string]interface{}{ diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go index b692d4aa11e8..8562a35c72d6 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go @@ -167,8 +167,8 @@ func TestUnitBigtable_flattenBigtableCluster(t *testing.T) { "storage_target": 60, }, }, - // unspecified node scaling factor in input will lead to an empty string here - "node_scaling_factor": "", + // unspecified node scaling factor in input will default to 1X + "node_scaling_factor": "NodeScalingFactor1X", }, }, "HDD manual scaling": { From eb67488913ee7c8a8c61dfa1b25953a926dcfc4a Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Fri, 9 May 2025 16:48:45 -0700 Subject: [PATCH 096/884] update bootstrapped vpc network name for netapp tests (#13896) --- mmv1/products/netapp/Backup.yaml | 2 +- mmv1/products/netapp/StoragePool.yaml | 2 +- mmv1/products/netapp/Volume.yaml | 2 +- mmv1/products/netapp/VolumeQuotaRule.yaml | 2 +- mmv1/products/netapp/VolumeReplication.yaml | 2 +- mmv1/products/netapp/VolumeSnapshot.yaml | 2 +- 
.../services/netapp/resource_netapp_backup_test.go | 6 +++--- .../netapp/resource_netapp_storage_pool_test.go.tmpl | 10 +++++----- .../netapp/resource_netapp_volume_quotaRule_test.go | 2 +- .../netapp/resource_netapp_volume_replication_test.go | 2 +- .../netapp/resource_netapp_volume_snapshot_test.go | 2 +- .../services/netapp/resource_netapp_volume_test.go | 4 ++-- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/mmv1/products/netapp/Backup.yaml b/mmv1/products/netapp/Backup.yaml index a3afa45c6c9d..27bf9982c85a 100644 --- a/mmv1/products/netapp/Backup.yaml +++ b/mmv1/products/netapp/Backup.yaml @@ -69,7 +69,7 @@ examples: backup_vault_name: 'backup-vault' backup_name: 'test-backup' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index 974cb1dab83b..a4065a8ef61d 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -76,7 +76,7 @@ examples: pool_name: 'test-pool' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' exclude_docs: true - name: 'Storage_pool_create_doc' primary_resource_id: 'test_pool' diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 87c09a8b5343..582f12299a0d 100644 --- 
a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -56,7 +56,7 @@ examples: pool_name: 'test-pool' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' ignore_read_extra: - 'deletion_policy' virtual_fields: diff --git a/mmv1/products/netapp/VolumeQuotaRule.yaml b/mmv1/products/netapp/VolumeQuotaRule.yaml index 930ef3c0e7eb..e4f85eb9f9bb 100644 --- a/mmv1/products/netapp/VolumeQuotaRule.yaml +++ b/mmv1/products/netapp/VolumeQuotaRule.yaml @@ -52,7 +52,7 @@ examples: network_name: 'test-network' quota_rule_name: 'test-volume-quota-rule' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/products/netapp/VolumeReplication.yaml b/mmv1/products/netapp/VolumeReplication.yaml index 58ad3df736a7..71d2de89f3a4 100644 --- a/mmv1/products/netapp/VolumeReplication.yaml +++ b/mmv1/products/netapp/VolumeReplication.yaml @@ -71,7 +71,7 @@ examples: destination_volume: 'destination-volume' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", 
acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' ignore_read_extra: - 'delete_destination_volume' - 'replication_enabled' diff --git a/mmv1/products/netapp/VolumeSnapshot.yaml b/mmv1/products/netapp/VolumeSnapshot.yaml index 1180e3898898..455a3d3794f9 100644 --- a/mmv1/products/netapp/VolumeSnapshot.yaml +++ b/mmv1/products/netapp/VolumeSnapshot.yaml @@ -56,7 +56,7 @@ examples: network_name: 'test-network' snap_name: 'testvolumesnap' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index 4baccdc19539..c3ae4eae0e02 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -12,7 +12,7 @@ import ( func TestAccNetappBackup_NetappBackupFull_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -163,7 +163,7 @@ resource "google_netapp_backup" "test_backup" { func TestAccNetappBackup_NetappFlexBackup(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, 
"gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -248,7 +248,7 @@ resource "google_netapp_backup" "test_backup" { func TestAccNetappBackup_NetappIntegratedBackup(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index 617f33a67538..ca02e3288786 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -12,7 +12,7 @@ func TestAccNetappStoragePool_storagePoolCreateExample_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -97,7 +97,7 @@ resource "google_netapp_storage_pool" "test_pool" { func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testing.T) { context := 
map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -149,7 +149,7 @@ resource "google_netapp_storage_pool" "test_pool" { func TestAccNetappStoragePool_FlexRegionalStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -272,7 +272,7 @@ data "google_compute_network" "default" { func TestAccNetappStoragePool_FlexRegionalStoragePoolNoZone(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -321,7 +321,7 @@ data "google_compute_network" "default" { {{ if ne $.TargetVersionName `ga` -}} func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go index 804dcb37b292..0dd9f6ffb1cb 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeQuotaRule_netappVolumeQuotaRuleBasicExample_update(t *te t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go index 0a111b829a9a..04bce53188d6 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeReplication_NetappVolumeReplicationCreateExample_update( t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", 
acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go index f1ae861a60c7..ca277f1a4e39 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeSnapshot_volumeSnapshotCreateExample_update(t *testing.T t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go index 6925ec048983..e324b75b4e6c 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go @@ -20,7 +20,7 @@ import ( func TestAccNetappVolume_NetappVolumeBasicExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -661,7 +661,7 @@ func 
testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, func TestAccNetappVolume_autoTieredNetappVolume_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } From 86b24e7ab222b37b84d595c0cf25c9da603c5155 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 9 May 2025 17:29:12 -0700 Subject: [PATCH 097/884] Fixed TestAccSpannerDatabase_spannerDatabaseBasicExample importstateverify (#13906) --- mmv1/products/spanner/Database.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/spanner/Database.yaml b/mmv1/products/spanner/Database.yaml index dfa838d702eb..a55393817cbb 100644 --- a/mmv1/products/spanner/Database.yaml +++ b/mmv1/products/spanner/Database.yaml @@ -68,6 +68,7 @@ examples: database_name: 'my-database' ignore_read_extra: - 'deletion_protection' + - 'default_time_zone' # Randomness due to spanner instance skip_vcr: true virtual_fields: From ca259bba97defdb488b3275201ed759c2cd7e58f Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Mon, 12 May 2025 12:18:32 -0500 Subject: [PATCH 098/884] make datasource google_projects universe domain aware (#13899) --- .../services/resourcemanager/data_source_google_projects.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go index 8dc84a628a65..5ba84edbf486 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go +++ 
b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go @@ -75,7 +75,8 @@ func datasourceGoogleProjectsRead(d *schema.ResourceData, meta interface{}) erro for { params["filter"] = d.Get("filter").(string) - url := "https://cloudresourcemanager.googleapis.com/v1/projects" + domain := transport_tpg.GetUniverseDomainFromMeta(meta) + url := fmt.Sprintf("https://cloudresourcemanager.%s/v1/projects", domain) url, err := transport_tpg.AddQueryParams(url, params) if err != nil { From b4df11abf6c30dc4b37ab0e740da955cb1aecaf4 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 12 May 2025 10:33:31 -0700 Subject: [PATCH 099/884] Switched TestAccContainerCluster_withFlexStart to use engine versions datasource (#13904) --- .../container/resource_container_cluster_test.go.tmpl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 6cc3f50082d1..7bd62946645c 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -7158,8 +7158,12 @@ resource "google_container_cluster" "max_run_duration" { {{ if ne $.TargetVersionName `ga` -}} func testAccContainerCluster_withFlexStart(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` +data "google_container_engine_versions" "uscentral1a" { + location = "us-central1-a" +} + resource "google_container_cluster" "flex_start" { - min_master_version = "1.32.3-gke.1717000" + min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["RAPID"] name = "%s" location = "us-central1-a" From a71b75e7f01cd86928ade3fa35c9116add6e6cdd Mon Sep 17 00:00:00 2001 From: Lingkai Shen Date: Mon, 12 May 2025 13:40:02 
-0400 Subject: [PATCH 100/884] Firebase App Hosting Domain, DefaultDomain and Traffic resources (#13828) --- .../firebaseapphosting/DefaultDomain.yaml | 122 +++++ mmv1/products/firebaseapphosting/Domain.yaml | 421 ++++++++++++++++++ mmv1/products/firebaseapphosting/Traffic.yaml | 200 +++++++++ ...pp_hosting_default_domain_disabled.tf.tmpl | 30 ++ ...se_app_hosting_default_domain_full.tf.tmpl | 30 ++ ...app_hosting_default_domain_minimal.tf.tmpl | 28 ++ .../firebase_app_hosting_domain_full.tf.tmpl | 35 ++ ...irebase_app_hosting_domain_minimal.tf.tmpl | 28 ++ ...app_hosting_traffic_rollout_policy.tf.tmpl | 50 +++ ...ng_traffic_rollout_policy_disabled.tf.tmpl | 51 +++ ...irebase_app_hosting_traffic_target.tf.tmpl | 66 +++ 11 files changed, 1061 insertions(+) create mode 100644 mmv1/products/firebaseapphosting/DefaultDomain.yaml create mode 100644 mmv1/products/firebaseapphosting/Domain.yaml create mode 100644 mmv1/products/firebaseapphosting/Traffic.yaml create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_disabled.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_minimal.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_domain_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_domain_minimal.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl diff --git a/mmv1/products/firebaseapphosting/DefaultDomain.yaml b/mmv1/products/firebaseapphosting/DefaultDomain.yaml new file mode 100644 index 000000000000..845823dcf53e --- /dev/null +++ 
b/mmv1/products/firebaseapphosting/DefaultDomain.yaml @@ -0,0 +1,122 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: DefaultDomain +description: A domain name that is associated with a backend. +base_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} +create_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}}?update_mask=disabled +create_verb: PATCH +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} +# The default domain can't be deleted, only disabled +exclude_delete: true +exclude_sweeper: true +import_format: + - projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} + - "{{project}}/{{location}}/{{backend}}/{{domain_id}}" + - "{{location}}/{{backend}}/{{domain_id}}" +examples: + - name: firebase_app_hosting_default_domain_minimal + primary_resource_id: example + vars: + backend_id: 'dd-mini' + test_env_vars: + project_id: 'PROJECT_NAME' + test_vars_overrides: + # prevent tests from colliding with each other + service_act_id: '"tf-test-dd-mi"' + - name: firebase_app_hosting_default_domain_full + primary_resource_id: example + vars: + backend_id: 'dd-full' + test_env_vars: + project_id: 'PROJECT_NAME' + test_vars_overrides: + # prevent tests 
from colliding with each other + service_act_id: '"tf-test-dd-full"' + - name: firebase_app_hosting_default_domain_disabled + primary_resource_id: example + vars: + backend_id: 'dd-disabled' + test_env_vars: + project_id: 'PROJECT_NAME' + test_vars_overrides: + # prevent tests from colliding with each other + service_act_id: '"tf-test-dd-disabled"' +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + base_url: "{{op_id}}" + actions: + - create + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: RG9tYWlu +parameters: + - name: location + type: String + description: The location of the Backend that this Domain is associated with + immutable: true + url_param_only: true + required: true + - name: backend + type: String + description: The ID of the Backend that this Domain is associated with + immutable: true + url_param_only: true + required: true + - name: domainId + type: String + description: |- + Id of the domain. For default domain, it should be {{backend}}--{{project_id}}.{{location}}.hosted.app + immutable: true + url_param_only: true + required: true +properties: + - name: disabled + type: Boolean + description: Whether the domain is disabled. Defaults to false. + default_from_api: true + - name: name + type: String + description: |- + Identifier. The resource name of the domain, e.g. + `projects/{project}/locations/{locationId}/backends/{backendId}/domains/{domainId}` + output: true + - name: uid + type: String + description: System-assigned, unique identifier. + output: true + - name: etag + type: String + description: |- + Server-computed checksum based on other values; may be sent + on update or delete to ensure operation is done on expected resource. + output: true + - name: updateTime + type: String + description: Time at which the domain was last updated. 
+ output: true + - name: createTime + type: String + description: Time at which the domain was created. + output: true diff --git a/mmv1/products/firebaseapphosting/Domain.yaml b/mmv1/products/firebaseapphosting/Domain.yaml new file mode 100644 index 000000000000..d2907277f52c --- /dev/null +++ b/mmv1/products/firebaseapphosting/Domain.yaml @@ -0,0 +1,421 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Domain +description: A domain name that is associated with a backend. 
+base_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} +create_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains?domainId={{domain_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} +import_format: + - projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} + - "{{project}}/{{location}}/{{backend}}/{{domain_id}}" + - "{{location}}/{{backend}}/{{domain_id}}" +autogen_async: true +async: + operation: + timeouts: + update_minutes: 20 + delete_minutes: 20 + base_url: "{{op_id}}" + actions: + - update + - delete + type: PollAsync + include_project: false + check_response_func_existence: 'transport_tpg.PollCheckForExistence' + check_response_func_absence: 'transport_tpg.PollCheckForAbsence' + # Errors are on the Domain resource itself + suppress_error: true +examples: + - name: firebase_app_hosting_domain_minimal + primary_resource_id: example + vars: + backend_id: 'domain-mini' + test_env_vars: + project_id: 'PROJECT_NAME' + test_vars_overrides: + domain_id: '"my-domain-m.com"' + # prevent tests from colliding with each other + service_act_id: '"tf-test-domain-m"' + - name: firebase_app_hosting_domain_full + primary_resource_id: example + vars: + backend_id: 'domain-full' + test_env_vars: + project_id: 'PROJECT_NAME' + test_vars_overrides: + domain_id: '"my-domain.com"' + # prevent tests from colliding with each other + service_act_id: '"tf-test-domain"' +autogen_status: RG9tYWlu +parameters: + - name: location + type: String + description: The location of the Backend that this Domain is associated with + immutable: true + url_param_only: true + required: true + - name: backend + type: String + description: The ID of the Backend that this Domain is associated with + immutable: true + 
url_param_only: true + required: true + - name: domainId + type: String + description: |- + Id of the domain to create. + Must be a valid domain name, such as "foo.com" + immutable: true + url_param_only: true + required: true +properties: + - name: customDomainStatus + type: NestedObject + description: The status of a custom domain's linkage to the Backend. + output: true + properties: + - name: certState + type: String + output: true + description: |- + Possible values: + CERT_PREPARING + CERT_VALIDATING + CERT_PROPAGATING + CERT_ACTIVE + CERT_EXPIRING_SOON + CERT_EXPIRED + - name: requiredDnsUpdates + type: Array + description: |- + Lists the records that must be added or removed to a custom domain's DNS + in order to finish setup and start serving content. + Field is present during onboarding. Also present after onboarding if one + or more of the above states is not *_ACTIVE, indicating the domain's DNS + records are in a bad state. + output: true + item_type: + type: NestedObject + properties: + - name: domainName + type: String + description: The domain name the DNS updates pertain to. + output: true + - name: discovered + type: Array + description: The set of DNS records App Hosting discovered when inspecting a domain. + output: true + item_type: + type: NestedObject + properties: + - name: domainName + type: String + description: The domain name the record set pertains to. + output: true + - name: checkError + type: NestedObject + output: true + description: |- + The `Status` type defines a logical error model that is suitable for + different programming environments, including REST APIs and RPC APIs. It is + used by [gRPC](https://github.com/grpc). Each `Status` message contains + three pieces of data: error code, error message, and error details. + + You can find out more about this error model and how to work with it in the + [API Design Guide](https://cloud.google.com/apis/design/errors). 
+ properties: + - name: code + type: Integer + description: The status code, which should be an enum value of google.rpc.Code. + output: true + - name: message + type: String + output: true + description: |- + A developer-facing error message, which should be in English. Any + user-facing error message should be localized and sent in the + google.rpc.Status.details field, or localized by the client. + - name: details + type: String + output: true + description: | + A list of messages that carry the error details. + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: records + type: Array + description: Records on the domain. + output: true + item_type: + type: NestedObject + properties: + - name: domainName + type: String + description: The domain the record pertains to, e.g. `foo.bar.com.`. + output: true + - name: type + type: String + description: |- + The record's type, which determines what data the record contains. + Possible values: + A + CNAME + TXT + AAAA + CAA + output: true + - name: rdata + type: String + description: |- + The data of the record. The meaning of the value depends on record type: + - A and AAAA: IP addresses for the domain. + - CNAME: Another domain to check for records. + - TXT: Arbitrary text strings associated with the domain. App Hosting + uses TXT records to determine which Firebase projects have + permission to act on the domain's behalf. + - CAA: The record's flags, tag, and value, e.g. `0 issue "pki.goog"`. + output: true + - name: requiredAction + type: String + description: |- + An enum that indicates a required action for this record. Populated + when the record is part of a required change in a `DnsUpdates` + `discovered` or `desired` record set. 
+ Possible values: + NONE + ADD + REMOVE + output: true + - name: relevantState + type: Array + description: |- + An enum that indicates which state(s) this DNS record applies to. Populated + for all records with an `ADD` or `REMOVE` required action. + output: true + item_type: + type: String + - name: desired + type: Array + description: |- + The set of DNS records App Hosting needs in order to be able to serve + secure content on the domain. + output: true + item_type: + type: NestedObject + properties: + - name: domainName + type: String + description: The domain name the record set pertains to. + output: true + - name: checkError + type: NestedObject + output: true + description: |- + The `Status` type defines a logical error model that is suitable for + different programming environments, including REST APIs and RPC APIs. It is + used by [gRPC](https://github.com/grpc). Each `Status` message contains + three pieces of data: error code, error message, and error details. + + You can find out more about this error model and how to work with it in the + [API Design Guide](https://cloud.google.com/apis/design/errors). + properties: + - name: code + type: Integer + output: true + description: The status code, which should be an enum value of google.rpc.Code. + - name: message + type: String + output: true + description: |- + A developer-facing error message, which should be in English. Any + user-facing error message should be localized and sent in the + google.rpc.Status.details field, or localized by the client. + - name: details + type: String + output: true + description: | + A list of messages that carry the error details. + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: records + type: Array + description: Records on the domain. 
+ output: true + item_type: + type: NestedObject + properties: + - name: requiredAction + type: String + description: |- + An enum that indicates a required action for this record. Populated + when the record is part of a required change in a `DnsUpdates` + `discovered` or `desired` record set. + Possible values: + NONE + ADD + REMOVE + output: true + - name: relevantState + type: Array + description: |- + An enum that indicates which state(s) this DNS record applies to. Populated + for all records with an `ADD` or `REMOVE` required action. + output: true + item_type: + type: String + - name: domainName + type: String + description: The domain the record pertains to, e.g. `foo.bar.com.`. + output: true + - name: type + type: String + description: |- + The record's type, which determines what data the record contains. + Possible values: + A + CNAME + TXT + AAAA + CAA + output: true + - name: rdata + type: String + description: |- + The data of the record. The meaning of the value depends on record type: + - A and AAAA: IP addresses for the domain. + - CNAME: Another domain to check for records. + - TXT: Arbitrary text strings associated with the domain. App Hosting + uses TXT records to determine which Firebase projects have + permission to act on the domain's behalf. + - CAA: The record's flags, tag, and value, e.g. `0 issue "pki.goog"`. + output: true + - name: checkTime + type: String + description: The last time App Hosting checked your custom domain's DNS records. + output: true + - name: issues + type: Array + description: |- + A list of issues with domain configuration. Allows users to self-correct + problems with DNS records. + output: true + item_type: + type: NestedObject + properties: + - name: code + type: Integer + output: true + description: The status code, which should be an enum value of google.rpc.Code. + - name: message + type: String + output: true + description: |- + A developer-facing error message, which should be in English. 
Any + user-facing error message should be localized and sent in the + google.rpc.Status.details field, or localized by the client. + - name: details + type: String + output: true + description: | + A list of messages that carry the error details. + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: hostState + type: String + output: true + description: |- + Possible values: + HOST_UNHOSTED + HOST_UNREACHABLE + HOST_NON_FAH + HOST_CONFLICT + HOST_WRONG_SHARD + HOST_ACTIVE + - name: ownershipState + type: String + output: true + description: |- + Possible values: + OWNERSHIP_MISSING + OWNERSHIP_UNREACHABLE + OWNERSHIP_MISMATCH + OWNERSHIP_CONFLICT + OWNERSHIP_PENDING + OWNERSHIP_ACTIVE + - name: name + type: String + description: |- + Identifier. The resource name of the domain, e.g. + `projects/{project}/locations/{locationId}/backends/{backendId}/domains/{domainId}` + output: true + - name: uid + type: String + description: System-assigned, unique identifier. + output: true + - name: etag + type: String + description: |- + Server-computed checksum based on other values; may be sent + on update or delete to ensure operation is done on expected resource. + output: true + - name: serve + type: NestedObject + description: |- + The serving behavior of the domain. If specified, the domain will + serve content other than its Backend's live content. + properties: + - name: redirect + type: NestedObject + description: Specifies redirect behavior for a domain. + properties: + - name: uri + type: String + description: |- + The URI of the redirect's intended destination. This URI will be + prepended to the original request path. URI without a scheme are + assumed to be HTTPS. + required: true + - name: status + type: String + description: |- + The status code to use in a redirect response. Must be a valid HTTP 3XX + status code. 
Defaults to 302 if not present. + - name: updateTime + type: String + description: Time at which the domain was last updated. + output: true + - name: purgeTime + type: String + description: |- + Time at which a soft-deleted domain will be purged, rendering it + permanently deleted. + output: true + - name: deleteTime + type: String + description: Time at which the domain was deleted. + output: true + - name: createTime + type: String + description: Time at which the domain was created. + output: true diff --git a/mmv1/products/firebaseapphosting/Traffic.yaml b/mmv1/products/firebaseapphosting/Traffic.yaml new file mode 100644 index 000000000000..958fa0e659bb --- /dev/null +++ b/mmv1/products/firebaseapphosting/Traffic.yaml @@ -0,0 +1,200 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Traffic +description: Controls traffic configuration for a backend. 
+base_url: projects/{{project}}/locations/{{location}}/backends/{{backend}} +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic +create_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic?update_mask=* +create_verb: PATCH +update_verb: PATCH +exclude_delete: true # Traffic config cannot be deleted +exclude_sweeper: true +id_format: projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic +import_format: + - "projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic" + - "{{project}}/{{location}}/{{backend}}" + - "{{location}}/{{backend}}" +examples: + - name: firebase_app_hosting_traffic_target + primary_resource_id: example + vars: + backend_id: "traffic-tg" + build_id: "target-build" + service_act_id: "firebase-app-hosting-compute" + test_env_vars: + project_id: "PROJECT_NAME" + test_vars_overrides: + # prevent tests from colliding with each other + service_act_id: '"tf-test-traffic-tg"' + - name: firebase_app_hosting_traffic_rollout_policy + primary_resource_id: example + vars: + backend_id: "traffic-rp" + service_act_id: "firebase-app-hosting-compute" + branch: "main" + test_env_vars: + project_id: "PROJECT_NAME" + test_vars_overrides: + # prevent tests from colliding with each other + service_act_id: '"tf-test-traffic-rp"' + - name: firebase_app_hosting_traffic_rollout_policy_disabled + primary_resource_id: example + vars: + backend_id: "traffic-rpd" + service_act_id: "firebase-app-hosting-compute" + branch: "main" + test_env_vars: + project_id: "PROJECT_NAME" + test_vars_overrides: + # prevent tests from colliding with each other + service_act_id: '"tf-test-traffic-rpd"' +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + base_url: "{{op_id}}" + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +parameters: + - 
name: location + type: String + description: The location of the Backend that this Traffic config applies to + immutable: true + url_param_only: true + required: true + - name: backend + type: String + description: Id of the backend that this Traffic config applies to + immutable: true + url_param_only: true + required: true +properties: + - name: etag + type: Fingerprint + description: |- + Server-computed checksum based on other values; may be sent + on update or delete to ensure operation is done on expected resource. + output: true + - name: uid + type: String + description: System-assigned, unique identifier. + output: true + - name: createTime + type: Time + description: Time at which the backend was created. + output: true + - name: updateTime + type: Time + description: Time at which the backend was last updated. + output: true + - name: deleteTime + type: Time + description: Time at which the backend was deleted. + output: true + - name: name + type: String + description: |- + Identifier. The resource name of the backend traffic config + + Format: + + `projects/{project}/locations/{locationId}/backends/{backendId}/traffic`. + output: true + - name: current + type: NestedObject + output: true + description: |- + Current state of traffic allocation for the backend. + When setting `target`, this field may differ for some time until the desired state is reached. + properties: + - name: splits + type: Array + description: A list of traffic splits that together represent where traffic is being routed. + output: true + item_type: + type: NestedObject + description: The traffic allocation for the backend. + properties: + - name: build + type: String + output: true + description: |- + The build that traffic is being routed to. + - name: percent + type: Integer + output: true + description: |- + The percentage of traffic to send to the build. Currently must be 100 or 0. 
+ - name: target + type: NestedObject + description: |- + Set to manually control the desired traffic for the backend. This will + cause current to eventually match this value. The percentages must add + up to 100. + exactly_one_of: + - rolloutPolicy + - target + properties: + - name: splits + type: Array + description: A list of traffic splits that together represent where traffic is being routed. + required: true + item_type: + type: NestedObject + description: The traffic allocation for the backend. + properties: + - name: build + type: String + required: true + description: |- + The build that traffic is being routed to. + - name: percent + type: Integer + required: true + description: |- + The percentage of traffic to send to the build. Currently must be 100 or 0. + - name: rolloutPolicy + type: NestedObject + description: |- + The policy for how builds and rollouts are triggered and rolled out. + exactly_one_of: + - rolloutPolicy + - target + properties: + - name: disabled + type: Boolean + description: |- + A flag that, if true, prevents rollouts from being created via this RolloutPolicy. + default_value: false + - name: disabledTime + type: Time + output: true + description: |- + If disabled is set, the time at which the rollouts were disabled. + - name: codebaseBranch + type: String + description: |- + Specifies a branch that triggers a new build to be started with this + policy. If not set, no automatic rollouts will happen. 
diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_disabled.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_disabled.tf.tmpl new file mode 100644 index 000000000000..d2385d599188 --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_disabled.tf.tmpl @@ -0,0 +1,30 @@ +resource "google_firebase_app_hosting_default_domain" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + domain_id = google_firebase_app_hosting_backend.example.uri + + disabled = true +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Choose the region closest to your users + location = "us-central1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email +} + +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_full.tf.tmpl new file mode 100644 index 000000000000..e2cb17bd2fa6 --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_full.tf.tmpl @@ -0,0 +1,30 @@ +resource "google_firebase_app_hosting_default_domain" "example" { + project = google_firebase_app_hosting_backend.example.project + location = 
google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + domain_id = google_firebase_app_hosting_backend.example.uri + + disabled = false +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Choose the region closest to your users + location = "us-central1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email +} + +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_minimal.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_minimal.tf.tmpl new file mode 100644 index 000000000000..7574a112dfee --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_minimal.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_firebase_app_hosting_default_domain" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + domain_id = google_firebase_app_hosting_backend.example.uri +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Choose the region closest to your users + location = "us-central1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = 
google_service_account.service_account.email +} + +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_domain_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_domain_full.tf.tmpl new file mode 100644 index 000000000000..e36a28bf1be1 --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_domain_full.tf.tmpl @@ -0,0 +1,35 @@ +resource "google_firebase_app_hosting_domain" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + domain_id = "{{index $.Vars "domain_id"}}" + + serve { + redirect { + uri = "google.com" + status = "302" + } + } +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Choose the region closest to your users + location = "us-central1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email +} + +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_domain_minimal.tf.tmpl 
b/mmv1/templates/terraform/examples/firebase_app_hosting_domain_minimal.tf.tmpl new file mode 100644 index 000000000000..3b2f7cbef3e6 --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_domain_minimal.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_firebase_app_hosting_domain" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + domain_id = "{{index $.Vars "domain_id"}}" +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Choose the region closest to your users + location = "us-central1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email +} + +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl new file mode 100644 index 000000000000..e8048229281c --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl @@ -0,0 +1,50 @@ +resource "google_firebase_app_hosting_traffic" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + + rollout_policy { + codebase_branch = "{{index $.Vars "branch"}}" + } +} + +resource 
"google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + # Choose the region closest to your users + + location = "asia-east1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email + + depends_on = [google_project_service.fah] +} + +### Include these blocks only once per project if you are starting from scratch ### +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} + +resource "google_project_iam_member" "app_hosting_sa_runner" { + project = "{{index $.TestEnvVars "project_id"}}" + + # For App Hosting + role = "roles/firebaseapphosting.computeRunner" + member = google_service_account.service_account.member +} + +resource "google_project_service" "fah" { + project = "{{index $.TestEnvVars "project_id"}}" + service = "firebaseapphosting.googleapis.com" + + disable_on_destroy = false +} +### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl new file mode 100644 index 000000000000..0c9c43112824 --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl @@ -0,0 +1,51 @@ +resource "google_firebase_app_hosting_traffic" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + + rollout_policy { + disabled = true + codebase_branch = 
"{{index $.Vars "branch"}}" + } +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + # Choose the region closest to your users + + location = "asia-east1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email + + depends_on = [google_project_service.fah] +} + +### Include these blocks only once per project if you are starting from scratch ### +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} + +resource "google_project_iam_member" "app_hosting_sa_runner" { + project = "{{index $.TestEnvVars "project_id"}}" + + # For App Hosting + role = "roles/firebaseapphosting.computeRunner" + member = google_service_account.service_account.member +} + +resource "google_project_service" "fah" { + project = "{{index $.TestEnvVars "project_id"}}" + service = "firebaseapphosting.googleapis.com" + + disable_on_destroy = false +} +### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl new file mode 100644 index 000000000000..98fa778f879d --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl @@ -0,0 +1,66 @@ +resource "google_firebase_app_hosting_traffic" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + + target { + splits { + build = 
google_firebase_app_hosting_build.example.name + percent = 100 + } + } +} + +resource "google_firebase_app_hosting_build" "example" { + project = google_firebase_app_hosting_backend.example.project + location = google_firebase_app_hosting_backend.example.location + backend = google_firebase_app_hosting_backend.example.backend_id + build_id = "{{index $.Vars "build_id"}}" + + source { + container { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} + +resource "google_firebase_app_hosting_backend" "example" { + project = "{{index $.TestEnvVars "project_id"}}" + # Choose the region closest to your users + + location = "asia-east1" + backend_id = "{{index $.Vars "backend_id"}}" + app_id = "1:0000000000:web:674cde32020e16fbce9dbd" + serving_locality = "GLOBAL_ACCESS" + service_account = google_service_account.service_account.email + + depends_on = [google_project_service.fah] +} + +### Include these blocks only once per project if you are starting from scratch ### +resource "google_service_account" "service_account" { + project = "{{index $.TestEnvVars "project_id"}}" + + # Must be firebase-app-hosting-compute + account_id = "{{index $.Vars "service_act_id"}}" + display_name = "Firebase App Hosting compute service account" + + # Do not throw if already exists + create_ignore_already_exists = true +} + +resource "google_project_iam_member" "app_hosting_sa_runner" { + project = "{{index $.TestEnvVars "project_id"}}" + + # For App Hosting + role = "roles/firebaseapphosting.computeRunner" + member = google_service_account.service_account.member +} + +resource "google_project_service" "fah" { + project = "{{index $.TestEnvVars "project_id"}}" + service = "firebaseapphosting.googleapis.com" + + disable_on_destroy = false +} +### From 52fbb35ad78711ba22edec3d6ac3963c77524f69 Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Mon, 12 May 2025 16:53:51 -0400 Subject: [PATCH 101/884] container: allow updating storage_pools (#13657) Signed-off-by: drfaust92 --- 
.../services/container/node_config.go.tmpl | 1 - .../resource_container_cluster_test.go.tmpl | 33 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 858fcea02f2e..d4aa04bfc927 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -470,7 +470,6 @@ func schemaNodeConfig() *schema.Schema { "storage_pools": { Type: schema.TypeList, - ForceNew: true, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, Description: `The list of Storage Pools where boot disks are provisioned.`, diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 7bd62946645c..770b105d5633 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -13358,6 +13358,18 @@ func TestAccContainerCluster_storagePoolsWithNodeConfig(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, }, + { + Config: testAccContainerCluster_storagePoolsWithNodeConfigUpdate(cluster, location, networkName, subnetworkName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.storage_pools_with_node_config", "node_config.0.storage_pools.#", "0"), + ), + }, + { + ResourceName: "google_container_cluster.storage_pools_with_node_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, }, }) } @@ -13384,6 +13396,27 @@ resource "google_container_cluster" "storage_pools_with_node_config" { `, cluster, location, storagePoolResourceName, 
networkName, subnetworkName) } +func testAccContainerCluster_storagePoolsWithNodeConfigUpdate(cluster, location, networkName, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "storage_pools_with_node_config" { + name = "%s" + location = "%s" + + initial_node_count = 1 + node_config { + machine_type = "c3-standard-4" + image_type = "COS_CONTAINERD" + disk_type = "hyperdisk-balanced" + } + + network = "%s" + subnetwork = "%s" + + deletion_protection = false +} +`, cluster, location, networkName, subnetworkName) +} + func TestAccContainerCluster_withAutopilotGcpFilestoreCsiDriver(t *testing.T) { t.Parallel() From 814c65252321042cf0c1ae3b1cba4cb58a5f061b Mon Sep 17 00:00:00 2001 From: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Date: Mon, 12 May 2025 21:50:11 +0000 Subject: [PATCH 102/884] Deprecate `google_beyondcorp_application` (#13869) Co-authored-by: Riley Karson --- mmv1/products/beyondcorp/Application.yaml | 5 +++-- ...on_basic.tf.tmpl => beyondcorp_application_basic.tf.tmpl} | 0 ...cation_vpc.tf.tmpl => beyondcorp_application_vpc.tf.tmpl} | 0 3 files changed, 3 insertions(+), 2 deletions(-) rename mmv1/templates/terraform/examples/{beyondcorp_security_gateway_application_basic.tf.tmpl => beyondcorp_application_basic.tf.tmpl} (100%) rename mmv1/templates/terraform/examples/{beyondcorp_security_gateway_application_vpc.tf.tmpl => beyondcorp_application_vpc.tf.tmpl} (100%) diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml index 891835f21fa6..a4d4e862bbaf 100644 --- a/mmv1/products/beyondcorp/Application.yaml +++ b/mmv1/products/beyondcorp/Application.yaml @@ -13,6 +13,7 @@ --- name: Application +deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' description: Specifies application endpoint(s) to protect behind a Security Gateway. 
base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications update_mask: true @@ -31,13 +32,13 @@ iam_policy: - 'projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}}' - '{{application_id}}' examples: - - name: beyondcorp_security_gateway_application_basic + - name: beyondcorp_application_basic primary_resource_id: example primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' vars: security_gateway_name: default application_name: google - - name: beyondcorp_security_gateway_application_vpc + - name: beyondcorp_application_vpc primary_resource_id: example primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' vars: diff --git a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl similarity index 100% rename from mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl rename to mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl diff --git a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl similarity index 100% rename from mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl rename to mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl From badc75b08771664c965b9cf1971322ca3d851875 Mon Sep 17 00:00:00 2001 From: Ron Gal <125445217+ron-gal@users.noreply.github.com> Date: Mon, 12 May 2025 17:56:09 -0400 Subject: [PATCH 103/884] feat(bigtable) Add support for deletion protection for Logical Views (#13441) --- mmv1/products/bigtable/LogicalView.yaml | 9 +++++++++ .../terraform/examples/bigtable_logical_view.tf.tmpl | 1 
+ .../bigtable/resource_bigtable_logical_view_test.go | 11 ++++++----- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/mmv1/products/bigtable/LogicalView.yaml b/mmv1/products/bigtable/LogicalView.yaml index be4e6c7b1dbd..cef599560909 100644 --- a/mmv1/products/bigtable/LogicalView.yaml +++ b/mmv1/products/bigtable/LogicalView.yaml @@ -42,6 +42,11 @@ examples: instance_name: 'bt-instance' table_name: 'bt-table' logical_view_name: 'bt-logical-view' + deletion_protection: 'true' + test_vars_overrides: + 'deletion_protection': 'false' + oics_vars_overrides: + 'deletion_protection': 'false' # bigtable instance does not use the shared HTTP client, this test creates an instance skip_vcr: true parameters: @@ -71,3 +76,7 @@ properties: description: 'The logical view''s select query.' required: true + - name: 'deletionProtection' + type: Boolean + description: + 'Set to true to make the logical view protected against deletion.' diff --git a/mmv1/templates/terraform/examples/bigtable_logical_view.tf.tmpl b/mmv1/templates/terraform/examples/bigtable_logical_view.tf.tmpl index 917aaccc2dea..28f596124d1e 100644 --- a/mmv1/templates/terraform/examples/bigtable_logical_view.tf.tmpl +++ b/mmv1/templates/terraform/examples/bigtable_logical_view.tf.tmpl @@ -22,6 +22,7 @@ resource "google_bigtable_table" "table" { resource "google_bigtable_logical_view" "{{$.PrimaryResourceId}}" { logical_view_id = "{{index $.Vars "logical_view_name"}}" instance = google_bigtable_instance.instance.name + deletion_protection = false query = < Date: Tue, 13 May 2025 04:52:25 +0530 Subject: [PATCH 104/884] Support retries when the API returns resourceNotReady for Networks (#13881) --- .../transport/error_retry_predicates.go | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/mmv1/third_party/terraform/transport/error_retry_predicates.go b/mmv1/third_party/terraform/transport/error_retry_predicates.go index 9969b71645ce..801d63214fdd 100644 --- 
a/mmv1/third_party/terraform/transport/error_retry_predicates.go +++ b/mmv1/third_party/terraform/transport/error_retry_predicates.go @@ -49,6 +49,12 @@ var defaultErrorRetryPredicates = []RetryErrorPredicateFunc{ // GCE returns the wrong error code, as this should be a 429, which we retry // already. is403QuotaExceededPerMinuteError, + + // GCE Networks are considered unready for a brief period when certain + // operations are performed on them, and the scope is likely too broad to + // apply a mutex. If we attempt an operation w/ an unready network, retry + // it. + isNetworkUnreadyError, } /** END GLOBAL ERROR RETRY PREDICATES HERE **/ @@ -143,6 +149,19 @@ func isSubnetworkUnreadyError(err error) (bool, string) { return false, "" } +func isNetworkUnreadyError(err error) (bool, string) { + gerr, ok := err.(*googleapi.Error) + if !ok { + return false, "" + } + + if gerr.Code == 400 && strings.Contains(gerr.Body, "resourceNotReady") && strings.Contains(gerr.Body, "networks") { + log.Printf("[DEBUG] Dismissed an error as retryable based on error code 400 and error reason 'resourceNotReady' w/ 'networks': %s", err) + return true, "Network not ready" + } + return false, "" +} + // GCE (and possibly other APIs) incorrectly return a 403 rather than a 429 on // rate limits. 
func is403QuotaExceededPerMinuteError(err error) (bool, string) { From 89244ff574a03f7b8eea84483a9fa3a475bb2724 Mon Sep 17 00:00:00 2001 From: ArtoriaRen Date: Mon, 12 May 2025 19:30:09 -0400 Subject: [PATCH 105/884] Add `GenAppBuilderSettings` to Dialogflow CX `Agent` resource (#13898) --- mmv1/products/dialogflowcx/Agent.yaml | 11 +++++++++++ .../examples/dialogflowcx_agent_full.tf.tmpl | 3 +++ 2 files changed, 14 insertions(+) diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml index ef9fe6f7e087..cfdf2c37ba39 100644 --- a/mmv1/products/dialogflowcx/Agent.yaml +++ b/mmv1/products/dialogflowcx/Agent.yaml @@ -267,3 +267,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' validation: function: 'validation.StringIsJSON' + - name: 'genAppBuilderSettings' + type: NestedObject + description: | + Gen App Builder-related agent-level settings. + properties: + - name: 'engine' + type: String + required: true + description: | + The full name of the Gen App Builder engine related to this agent if there is one. 
+ Format: projects/{Project ID}/locations/{Location ID}/collections/{Collection ID}/engines/{Engine ID} diff --git a/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl index 4e76cd8876e5..a70263220f7f 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl @@ -65,4 +65,7 @@ resource "google_dialogflow_cx_agent" "{{$.PrimaryResourceId}}" { } }) } + gen_app_builder_settings { + engine = "projects/-/locations/-/collections/-/engines/-" + } } \ No newline at end of file From 55eabdfeb63edf162982c7b85d7ebbdc3d591dba Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Mon, 12 May 2025 21:42:57 -0400 Subject: [PATCH 106/884] container: added in_transit_encryption_config (#13909) Signed-off-by: drfaust92 --- .../resource_container_cluster.go.tmpl | 44 +++++++++++++++ .../resource_container_cluster_test.go.tmpl | 54 +++++++++++++++++++ .../docs/r/container_cluster.html.markdown | 3 ++ 3 files changed, 101 insertions(+) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index ae5b636411eb..b7eedd22d084 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2431,6 +2431,12 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, + "in_transit_encryption_config": { + Type: schema.TypeString, + Optional: true, + Description: `Defines the config of in-transit encryption`, + ValidateFunc: validation.StringInSlice([]string{"IN_TRANSIT_ENCRYPTION_CONFIG_UNSPECIFIED", "IN_TRANSIT_ENCRYPTION_DISABLED", "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"}, false), + }, }, } } @@ -2589,6 +2595,7 @@ func resourceContainerClusterCreate(d *schema.ResourceData, 
meta interface{}) er DatapathProvider: d.Get("datapath_provider").(string), EnableCiliumClusterwideNetworkPolicy: d.Get("enable_cilium_clusterwide_network_policy").(bool), PrivateIpv6GoogleAccess: d.Get("private_ipv6_google_access").(string), + InTransitEncryptionConfig: d.Get("in_transit_encryption_config").(string), EnableL4ilbSubsetting: d.Get("enable_l4_ilb_subsetting").(bool), DisableL4LbFirewallReconciliation: d.Get("disable_l4_lb_firewall_reconciliation").(bool), DnsConfig: expandDnsConfig(d.Get("dns_config")), @@ -3179,6 +3186,9 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("private_ipv6_google_access", cluster.NetworkConfig.PrivateIpv6GoogleAccess); err != nil { return fmt.Errorf("Error setting private_ipv6_google_access: %s", err) } + if err := d.Set("in_transit_encryption_config", cluster.NetworkConfig.InTransitEncryptionConfig); err != nil { + return fmt.Errorf("Error setting in_transit_encryption_config: %s", err) + } if err := d.Set("authenticator_groups_config", flattenAuthenticatorGroupsConfig(cluster.AuthenticatorGroupsConfig)); err != nil { return err } @@ -3688,6 +3698,40 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s Disable L4 LB Firewall Reconciliation has been updated to %v", d.Id(), enabled) } + if d.HasChange("in_transit_encryption_config") { + inTransitConfig := d.Get("in_transit_encryption_config").(string) + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredInTransitEncryptionConfig: inTransitConfig, + ForceSendFields: []string{"DesiredInTransitEncryptionConfig"}, + }, + } + updateF := func() error { + log.Println("[DEBUG] updating in_transit_encryption_config") + name := containerClusterFullName(project, location, clusterName) + clusterUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.Update(name, req) + if config.UserProjectOverride { + 
clusterUpdateCall.Header().Add("X-Goog-User-Project", project) + } + op, err := clusterUpdateCall.Do() + if err != nil { + return err + } + + // Wait until it's updated + err = ContainerOperationWait(config, op, project, location, "updating In-Transit Encryption Config", userAgent, d.Timeout(schema.TimeoutUpdate)) + log.Println("[DEBUG] done updating in_transit_encryption_config") + return err + } + + // Call update serially. + if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s In-Transit Encryption Config has been updated to %v", d.Id(), inTransitConfig) + } + if d.HasChange("enable_fqdn_network_policy") { enabled := d.Get("enable_fqdn_network_policy").(bool) req := &container.UpdateClusterRequest{ diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 770b105d5633..e841901d6ad7 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -648,6 +648,45 @@ func TestAccContainerCluster_withMultiNetworking(t *testing.T) { }) } +func TestAccContainerCluster_inTransitEncryptionConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_inTransitEncryptionConfig(clusterName, networkName, subnetworkName, "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"), 
+ Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "in_transit_encryption_config", "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_inTransitEncryptionConfig(clusterName, networkName, subnetworkName, "IN_TRANSIT_ENCRYPTION_DISABLED"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "in_transit_encryption_config", "IN_TRANSIT_ENCRYPTION_DISABLED"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccContainerCluster_withFQDNNetworkPolicy(t *testing.T) { t.Parallel() @@ -13775,3 +13814,18 @@ resource "google_container_cluster" "primary" { } `, clusterName, networkName, subnetworkName) } + +func testAccContainerCluster_inTransitEncryptionConfig(name, networkName, subnetworkName, config string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + network = "%s" + subnetwork = "%s" + datapath_provider = "ADVANCED_DATAPATH" + deletion_protection = false + in_transit_encryption_config = "%s" +} +`, name, networkName, subnetworkName, config) +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 75272bf3e2db..3c9a97e5c801 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -387,6 +387,9 @@ subnetwork in which the 
cluster's instances are launched. * `datapath_provider` - (Optional) The desired datapath provider for this cluster. This is set to `LEGACY_DATAPATH` by default, which uses the IPTables-based kube-proxy implementation. Set to `ADVANCED_DATAPATH` to enable Dataplane v2. +* `in_transit_encryption_config` - (Optional) + Defines the config of in-transit encryption. Valid values are `IN_TRANSIT_ENCRYPTION_DISABLED` and `IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT`. + * `enable_cilium_clusterwide_network_policy` - (Optional) Whether CiliumClusterWideNetworkPolicy is enabled on this cluster. Defaults to false. From ed7578affd54bd48883985fde9d98f0fc45a704f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 13 May 2025 08:33:37 -0700 Subject: [PATCH 107/884] Documented ProtoV5ProviderFactories in dependency lock troubleshooting (#13913) --- docs/content/test/run-tests.md | 34 +++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/docs/content/test/run-tests.md b/docs/content/test/run-tests.md index 19c01fa267bf..cf460ce1b34b 100644 --- a/docs/content/test/run-tests.md +++ b/docs/content/test/run-tests.md @@ -151,17 +151,29 @@ This indicates that after an apply to create or update a resource, the resource Tests require all of the providers they use (except the one actually being tested) to be explicitly stated. This error generally means one of a few things: -- This is a beta-only test and one of the `google_*` resources in the test doesn't have `provider = google-beta` set - - ```hcl - resource "google_compute_instance" "beta-instance" { - provider = google-beta - # ... - } - ``` - -- This is a GA+beta test and one of the `google_*` resources has `provider = google-beta` set - - `provider = google-beta` can't be set unless the test is beta-only. 
-- The test relies on an external provider, such as `time`, and that is not explicitly declared +- If the error mentions `provider registry.terraform.io/hashicorp/google`: + - Beta-only test: This indicates that one of the `google_*` resources in the test doesn't have `provider = google-beta` set + - ```hcl + resource "google_compute_instance" "beta-instance" { + provider = google-beta + # ... + } + ``` + - GA+beta test: This indicates that the wrong setting is being used for `ProtoV5ProviderFactories` on a handwritten test case. Should be: + - ```go + acctest.VcrTest(t, resource.TestCase{ + // ... + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ``` +- If the error mentions `provider registry.terraform.io/hashicorp/google-beta`: + - Beta-only test: This indicates that the wrong setting is being used for `ProtoV5ProviderFactories` on a handwritten test case. Should be: + - ```go + acctest.VcrTest(t, resource.TestCase{ + // ... + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ``` + - GA+beta test: This indicates that one of the `google_*` resources in the test has `provider = google-beta` set. `provider = google-beta` can't be set unless the test is beta-only. +- If the error mentions some other provider: The test relies on an external provider, such as `time`, and that is not explicitly declared - For MMv1 example-based tests, use [`examples.external_providers`](https://googlecloudplatform.github.io/magic-modules/reference/resource/#examples). 
- For Handwritten tests, use TestCase.ExternalProviders: ```go From b19a12f62f090d3049a82315ac34925078c05fec Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Tue, 13 May 2025 08:49:52 -0700 Subject: [PATCH 108/884] Make googler auto-test call to action more visible (#13916) Co-authored-by: Stephen Lewis (Burrows) --- .ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md b/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md index 784465ddba9a..6954d07d906f 100644 --- a/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md +++ b/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md @@ -1,4 +1,6 @@ -Hello! I am a robot. Tests will require approval from a repository maintainer to run. Googlers: see go/terraform-auto-test-runs to set up automatic test runs. +Hello! I am a robot. Tests will require approval from a repository maintainer to run. + +**Googlers:** For automatic test runs see go/terraform-auto-test-runs. @{{.reviewer}}, a repository maintainer, has been assigned to [review your changes](https://googlecloudplatform.github.io/magic-modules/contribute/review-pr/). If you have not received review feedback within 2 business days, please leave a comment on this PR asking them to take a look. 
From 0e15bbecacbfb67dc9db326d75d2d4964dc38d57 Mon Sep 17 00:00:00 2001 From: Swamita Gupta <55314843+swamitagupta@users.noreply.github.com> Date: Tue, 13 May 2025 23:38:47 +0530 Subject: [PATCH 109/884] Update PC Create timeout (#13925) --- mmv1/products/vmwareengine/PrivateCloud.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/vmwareengine/PrivateCloud.yaml b/mmv1/products/vmwareengine/PrivateCloud.yaml index e1239d5e1d02..904a527b8efa 100644 --- a/mmv1/products/vmwareengine/PrivateCloud.yaml +++ b/mmv1/products/vmwareengine/PrivateCloud.yaml @@ -26,7 +26,7 @@ delete_url: 'projects/{{project}}/locations/{{location}}/privateClouds/{{name}}' import_format: - 'projects/{{project}}/locations/{{location}}/privateClouds/{{name}}' timeouts: - insert_minutes: 240 + insert_minutes: 360 update_minutes: 190 delete_minutes: 150 autogen_async: true @@ -36,7 +36,7 @@ async: operation: base_url: '{{op_id}}' timeouts: - insert_minutes: 240 + insert_minutes: 360 update_minutes: 190 delete_minutes: 150 result: From 77f563fd982dfa1b4f89c0109162bddd7c4b142a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wiktor=20Niesiob=C4=99dzki?= Date: Tue, 13 May 2025 21:17:05 +0200 Subject: [PATCH 110/884] Add numeric_id to google_compute_instance_template (#13584) --- .../resource_compute_instance_template.go.tmpl | 12 ++++++++++++ .../docs/r/compute_instance_template.html.markdown | 2 ++ 2 files changed, 14 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index 718a225f584f..88d85a142384 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -6,6 +6,7 @@ import ( "fmt" "reflect" "strings" + "strconv" "time" "github.com/hashicorp/errwrap" @@ -684,6 +685,13 @@ Google Cloud 
KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key m }, }, + "numeric_id": { + Type: schema.TypeString, + ForceNew: true, + Computed: true, + Description: `The ID of the template in numeric format.`, + }, + "project": { Type: schema.TypeString, Optional: true, @@ -1966,6 +1974,10 @@ func resourceComputeInstanceTemplateRead(d *schema.ResourceData, meta interface{ } } + if err = d.Set("numeric_id", strconv.FormatUint(instanceTemplate.Id, 10)); err != nil { + return fmt.Errorf("Error setting numeric_id: %s", err) + } + {{ if ne $.TargetVersionName `ga` -}} if instanceTemplate.Properties.PartnerMetadata != nil { partnerMetadata, err := flattenPartnerMetadata(instanceTemplate.Properties.PartnerMetadata) diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index ff7833af0656..f9ba186d4af1 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -799,6 +799,8 @@ exported: * `creation_timestamp` - Creation timestamp in RFC3339 text format. +* `numeric_id` - numeric identifier of the resource. + * `metadata_fingerprint` - The unique fingerprint of the metadata. * `self_link` - The URI of the created resource. From e3d23a677f035c57a9c591f906d5ea4abbf92913 Mon Sep 17 00:00:00 2001 From: "Bob \"Wombat\" Hogg" Date: Tue, 13 May 2025 16:22:51 -0400 Subject: [PATCH 111/884] firestore_database: Deprecate deletion_policy (#13936) --- mmv1/products/firestore/Database.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/firestore/Database.yaml b/mmv1/products/firestore/Database.yaml index 0040822f0829..2a757fe8f2a8 100644 --- a/mmv1/products/firestore/Database.yaml +++ b/mmv1/products/firestore/Database.yaml @@ -145,6 +145,7 @@ virtual_fields: See also `delete_protection`. 
type: String default_value: "ABANDON" + deprecation_message: '`deletion_policy` is deprecated and will be removed in a future major release. Use `delete_protection_state` instead.' parameters: properties: - name: 'name' From 6732b8331e72b697e0b27babf572a2e8ece4c3e1 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 13 May 2025 22:27:00 +0200 Subject: [PATCH 112/884] chore: add `.idea` for subdirectories in .gitignore (handy when opening subdirectory directly in IDE) (#13934) --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index ab0fe65a0cd1..383e6dbd3c09 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,7 @@ # IDEA files .idea/* *.iml +**/.idea/* # OS generated files .DS_Store From a5e84a050b7454202af38e05b8ce03305f738cd7 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Tue, 13 May 2025 13:40:34 -0700 Subject: [PATCH 113/884] Deprecated bool_to_object custom expander (#13917) Co-authored-by: Cameron Thornton --- mmv1/products/billingbudget/Budget.yaml | 3 ++- mmv1/products/dlp/DeidentifyTemplate.yaml | 3 ++- mmv1/products/dns/ManagedZone.yaml | 3 ++- ...ool_to_object.go.tmpl => deprecated_bool_to_object.go.tmpl} | 3 +++ 4 files changed, 9 insertions(+), 3 deletions(-) rename mmv1/templates/terraform/custom_expand/{bool_to_object.go.tmpl => deprecated_bool_to_object.go.tmpl} (92%) diff --git a/mmv1/products/billingbudget/Budget.yaml b/mmv1/products/billingbudget/Budget.yaml index 0cdc678ffbd8..13b0d5d889fd 100644 --- a/mmv1/products/billingbudget/Budget.yaml +++ b/mmv1/products/billingbudget/Budget.yaml @@ -412,7 +412,8 @@ properties: - 'amount.0.specified_amount' - 'amount.0.last_period_amount' custom_flatten: 'templates/terraform/custom_flatten/object_to_bool.go.tmpl' - custom_expand: 'templates/terraform/custom_expand/bool_to_object.go.tmpl' + # THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS + custom_expand: 
'templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl' - name: 'thresholdRules' type: Array description: | diff --git a/mmv1/products/dlp/DeidentifyTemplate.yaml b/mmv1/products/dlp/DeidentifyTemplate.yaml index 198b67d9aec4..8ff5250fb9d6 100644 --- a/mmv1/products/dlp/DeidentifyTemplate.yaml +++ b/mmv1/products/dlp/DeidentifyTemplate.yaml @@ -349,7 +349,8 @@ properties: description: | Replace each matching finding with the name of the info type. custom_flatten: 'templates/terraform/custom_flatten/object_to_bool.go.tmpl' - custom_expand: 'templates/terraform/custom_expand/bool_to_object.go.tmpl' + # THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS + custom_expand: 'templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl' - name: 'characterMaskConfig' type: NestedObject description: | diff --git a/mmv1/products/dns/ManagedZone.yaml b/mmv1/products/dns/ManagedZone.yaml index 68b65cbd99e0..7781d6e75e08 100644 --- a/mmv1/products/dns/ManagedZone.yaml +++ b/mmv1/products/dns/ManagedZone.yaml @@ -403,7 +403,8 @@ properties: min_version: 'beta' immutable: true custom_flatten: 'templates/terraform/custom_flatten/object_to_bool.go.tmpl' - custom_expand: 'templates/terraform/custom_expand/bool_to_object.go.tmpl' + # THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS + custom_expand: 'templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl' - name: 'serviceDirectoryConfig' type: NestedObject description: diff --git a/mmv1/templates/terraform/custom_expand/bool_to_object.go.tmpl b/mmv1/templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl similarity index 92% rename from mmv1/templates/terraform/custom_expand/bool_to_object.go.tmpl rename to mmv1/templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl index ce0c8bfd5895..dc5e6c13022f 100644 --- a/mmv1/templates/terraform/custom_expand/bool_to_object.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl @@ -10,6 +10,9 @@ 
See the License for the specific language governing permissions and limitations under the License. */ -}} + +{{/* THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS */}} + func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { if v == nil || !v.(bool) { return nil, nil From d64e6006b57cddc8fc104fb132b6257279878c14 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 13 May 2025 13:55:28 -0700 Subject: [PATCH 114/884] removed TestAccColabSchedule_colabScheduleFullExample from GA (#13907) --- mmv1/products/colab/Schedule.yaml | 1 + .../terraform/examples/colab_schedule_full.tf.tmpl | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/mmv1/products/colab/Schedule.yaml b/mmv1/products/colab/Schedule.yaml index 7bdd1b89322f..29398afea896 100644 --- a/mmv1/products/colab/Schedule.yaml +++ b/mmv1/products/colab/Schedule.yaml @@ -62,6 +62,7 @@ examples: ignore_read_extra: - desired_state - name: 'colab_schedule_full' + min_version: "beta" primary_resource_id: 'schedule' bootstrap_iam: - member: "serviceAccount:service-{project_number}@gcp-sa-dataform.iam.gserviceaccount.com" diff --git a/mmv1/templates/terraform/examples/colab_schedule_full.tf.tmpl b/mmv1/templates/terraform/examples/colab_schedule_full.tf.tmpl index cef17bdd1ad2..c50a5552f000 100644 --- a/mmv1/templates/terraform/examples/colab_schedule_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/colab_schedule_full.tf.tmpl @@ -1,4 +1,5 @@ resource "google_colab_runtime_template" "my_runtime_template" { + provider = google-beta name = "{{index $.Vars "runtime_template_name"}}" display_name = "Runtime template" location = "us-central1" @@ -13,6 +14,7 @@ resource "google_colab_runtime_template" "my_runtime_template" { } resource "google_storage_bucket" "output_bucket" { + provider = google-beta name = "{{index $.Vars "bucket"}}" location = "US" force_destroy = true @@ -20,6 +22,7 @@ resource 
"google_storage_bucket" "output_bucket" { } resource "google_secret_manager_secret" "secret" { + provider = google-beta secret_id = "{{index $.Vars "secret"}}" replication { auto {} @@ -27,11 +30,13 @@ resource "google_secret_manager_secret" "secret" { } resource "google_secret_manager_secret_version" "secret_version" { + provider = google-beta secret = google_secret_manager_secret.secret.id secret_data = "secret-data" } resource "google_dataform_repository" "dataform_repository" { + provider = google-beta name = "{{index $.Vars "dataform_repository"}}" display_name = "dataform_repository" npmrc_environment_variables_secret_version = google_secret_manager_secret_version.secret_version.id @@ -56,6 +61,7 @@ resource "google_dataform_repository" "dataform_repository" { } resource "google_colab_schedule" "{{$.PrimaryResourceId}}" { + provider = google-beta display_name = "{{index $.Vars "display_name"}}" location = "{{index $.TestEnvVars "location"}}" allow_queueing = true From 0aa28a754f4230826dc86ded174e624944c22f54 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Tue, 13 May 2025 15:50:37 -0700 Subject: [PATCH 115/884] Adding lustre Instance datasource (#13810) --- mmv1/products/lustre/Instance.yaml | 4 + .../provider/provider_mmv1_resources.go.tmpl | 1 + .../lustre/data_source_lustre_instance.go | 76 +++++++++++++++++ .../data_source_lustre_instance_test.go | 81 +++++++++++++++++++ .../docs/d/lustre_instance.html.markdown | 32 ++++++++ 5 files changed, 194 insertions(+) create mode 100644 mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go create mode 100644 mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown diff --git a/mmv1/products/lustre/Instance.yaml b/mmv1/products/lustre/Instance.yaml index 2e190dfc347a..0a63e7bbeba3 100644 --- a/mmv1/products/lustre/Instance.yaml +++ 
b/mmv1/products/lustre/Instance.yaml @@ -14,6 +14,10 @@ --- name: Instance description: A Managed Lustre instance +references: + guides: + 'Official Documentation': 'https://cloud.google.com/managed-lustre/docs/create-instance' + api: 'https://cloud.google.com/managed-lustre/docs/reference/rest/v1/projects.locations.instances' base_url: projects/{{project}}/locations/{{location}}/instances update_mask: true self_link: projects/{{project}}/locations/{{location}}/instances/{{instance_id}} diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index c920e4acdd24..1f458e6b3974 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -170,6 +170,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_logging_project_cmek_settings": logging.DataSourceGoogleLoggingProjectCmekSettings(), "google_logging_project_settings": logging.DataSourceGoogleLoggingProjectSettings(), "google_logging_sink": logging.DataSourceGoogleLoggingSink(), + "google_lustre_instance": lustre.DataSourceLustreInstance(), "google_monitoring_notification_channel": monitoring.DataSourceMonitoringNotificationChannel(), "google_monitoring_cluster_istio_service": monitoring.DataSourceMonitoringServiceClusterIstio(), "google_monitoring_istio_canonical_service": monitoring.DataSourceMonitoringIstioCanonicalService(), diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go new file mode 100644 index 000000000000..dc84b9dcde67 --- /dev/null +++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go @@ -0,0 +1,76 @@ +package lustre + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + 
"github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceLustreInstance() *schema.Resource { + + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceLustreInstance().Schema) + + dsScema_zone := map[string]*schema.Schema{ + "zone": { + Type: schema.TypeString, + Optional: true, + Description: `Zone of Lustre instance`, + }, + } + + // Set 'Required' schema elements from resource + tpgresource.AddRequiredFieldsToSchema(dsSchema, "instance_id") + + // Set 'Optional' schema elements from resource + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + // Merge schema elements + dsSchema_m := tpgresource.MergeSchemas(dsScema_zone, dsSchema) + + return &schema.Resource{ + Read: dataSourceLustreInstanceRead, + Schema: dsSchema_m, + } +} + +func dataSourceLustreInstanceRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + // Get required fields for ID + instance_id := d.Get("instance_id").(string) + + zone, err := tpgresource.GetZone(d, config) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + // Set the ID + id := fmt.Sprintf("projects/%s/locations/%s/instances/%s", project, zone, instance_id) + d.SetId(id) + + // Setting location field for url_param_only field + d.Set("location", zone) + + err = resourceLustreInstanceRead(d, meta) + if err != nil { + return err + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", d.Id()) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go new file mode 100644 index 000000000000..c443c5969ef5 --- /dev/null 
+++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go @@ -0,0 +1,81 @@ +package lustre_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccLustreInstanceDatasource_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccLustreInstanceDatasource_basic(context), + Check: acctest.CheckDataSourceStateMatchesResourceState( + "data.google_lustre_instance.default", + "google_lustre_instance.instance", + ), + }, + { + ResourceName: "google_lustre_instance.instance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccLustreInstanceDatasource_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_lustre_instance" "instance" { + instance_id = "my-instance-%{random_suffix}" + location = "us-central1-a" + filesystem = "testfs" + capacity_gib = 18000 + network = google_compute_network.producer_net.id + gke_support_enabled = false + + depends_on = [ google_service_networking_connection.service_con ] +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet-%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "us-central1" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network-%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_global_address" "private_ip_alloc" { + name = "private-ip-alloc-%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.producer_net.id 
+} + +resource "google_service_networking_connection" "service_con" { + network = google_compute_network.producer_net.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +} + +data "google_lustre_instance" "default" { + instance_id = google_lustre_instance.instance.instance_id + zone = "us-central1-a" +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown b/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown new file mode 100644 index 000000000000..4c91149e60c7 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown @@ -0,0 +1,32 @@ +--- +subcategory: "Lustre" +description: |- + Fetches the details of a Lustre instance. +--- + +# google_lustre_instance + +Use this data source to get information about a Lustre instance. For more information see the [API docs](https://cloud.google.com/filestore/docs/lustre/reference/rest/v1/projects.locations.instances). + +## Example Usage + +```hcl +data "google_lustre_instance" "instance" { + name = "my-instance" + location = "us-central1" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `instance_id` - (Required) The instance id of the Lustre instance. + +* `zone` - (Optional) The ID of the zone in which the resource belongs. If it is not provided, the provider zone is used. + +* `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +See [google_lustre_instance](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/lustre_instance) resource for details of all the available attributes. 
From 9a450334f45ed5d1f86a55e7592ca05cdff425f8 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 13 May 2025 16:28:25 -0700 Subject: [PATCH 116/884] Skipped additional dataproc gdc tests (#13905) --- mmv1/products/dataprocgdc/ServiceInstance.yaml | 1 + .../resource_dataproc_gdc_application_environment_test.go | 1 + 2 files changed, 2 insertions(+) diff --git a/mmv1/products/dataprocgdc/ServiceInstance.yaml b/mmv1/products/dataprocgdc/ServiceInstance.yaml index 8643340b34ea..dd4156091f63 100644 --- a/mmv1/products/dataprocgdc/ServiceInstance.yaml +++ b/mmv1/products/dataprocgdc/ServiceInstance.yaml @@ -57,6 +57,7 @@ examples: project: "my-project" test_vars_overrides: 'project': '"gdce-cluster-monitoring"' + skip_test: https://github.com/hashicorp/terraform-provider-google/issues/21173 properties: - name: gdceCluster type: NestedObject diff --git a/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go b/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go index 7531bdcd9ef6..0b0f43830992 100644 --- a/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go +++ b/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go @@ -9,6 +9,7 @@ import ( ) func TestAccDataprocGdcApplicationEnvironment_update(t *testing.T) { + t.Skip("https://github.com/hashicorp/terraform-provider-google/issues/20419") t.Parallel() context := map[string]interface{}{ From 9e5aaab9e8e45c9cf4a43b9104d21fb5c6ffad7f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 14 May 2025 08:42:54 -0700 Subject: [PATCH 117/884] Added check for multiple new resources in one PR (#13937) --- .ci/magician/cmd/generate_comment.go | 23 ++++++++++++++++++- .ci/magician/cmd/generate_comment_test.go | 20 ++++++++++++++++ .../cmd/templates/DIFF_COMMENT.md.tmpl | 7 ++++++ 3 files changed, 49 
insertions(+), 1 deletion(-) diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index ac689fdc8db7..322c732d667a 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -21,6 +21,7 @@ import ( "os" "path/filepath" "regexp" + "slices" "sort" "strconv" "strings" @@ -84,6 +85,7 @@ type diffCommentData struct { MissingServiceLabels []string MissingTests map[string]*MissingTestInfo MissingDocs *MissingDocsSummary + AddedResources []string Errors []Errors } @@ -93,6 +95,7 @@ type simpleSchemaDiff struct { const allowBreakingChangesLabel = "override-breaking-change" const allowMissingServiceLabelsLabel = "override-missing-service-labels" +const allowMultipleResourcesLabel = "override-multiple-resources" var gcEnvironmentVariables = [...]string{ "BUILD_ID", @@ -363,6 +366,25 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, }) data.BreakingChanges = breakingChangesSlice + // Check if multiple resources were added. 
+ multipleResourcesState := "success" + if len(uniqueAddedResources) > 1 { + multipleResourcesState = "failure" + for _, label := range pullRequest.Labels { + if label.Name == allowMultipleResourcesLabel { + multipleResourcesState = "success" + break + } + } + } + targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", buildId, buildStep, projectId) + if err = gh.PostBuildStatus(strconv.Itoa(prNumber), "terraform-provider-multiple-resources", multipleResourcesState, targetURL, commitSha); err != nil { + fmt.Printf("Error posting terraform-provider-multiple-resources build status for pr %d commit %s: %v\n", prNumber, commitSha, err) + errors["Other"] = append(errors["Other"], "Failed to update missing-service-labels status check with state: "+multipleResourcesState) + } + data.AddedResources = maps.Keys(uniqueAddedResources) + slices.Sort(data.AddedResources) + // Compute affected resources based on changed files changedFilesAffectedResources := map[string]struct{}{} for _, repo := range []source.Repo{tpgRepo, tpgbRepo} { @@ -427,7 +449,6 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, } } } - targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", buildId, buildStep, projectId) if err = gh.PostBuildStatus(strconv.Itoa(prNumber), "terraform-provider-breaking-change-test", breakingState, targetURL, commitSha); err != nil { fmt.Printf("Error posting terraform-provider-breaking-change-test build status for pr %d commit %s: %v\n", prNumber, commitSha, err) errors["Other"] = append(errors["Other"], "Failed to update breaking-change status check with state: "+breakingState) diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index 8b08fb59a3b9..6cc1eb029b5d 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -129,6 
+129,7 @@ func TestExecGenerateComment(t *testing.T) { for method, expectedCalls := range map[string][][]any{ "PostBuildStatus": { + {"123456", "terraform-provider-multiple-resources", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-breaking-change-test", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-missing-service-labels", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, }, @@ -242,6 +243,25 @@ func TestFormatDiffComment(t *testing.T) { "## Missing test report", }, }, + "multiple resources are displayed": { + data: diffCommentData{ + AddedResources: []string{"google_redis_instance", "google_alloydb_cluster"}, + }, + expectedStrings: []string{ + "## Diff report", + "## Multiple resources added", + "`override-multiple-resources`", + "split it into multiple PRs", + "`google_redis_instance`, `google_alloydb_cluster`.", + }, + notExpectedStrings: []string{ + "generated some diffs", + "## Errors", + "## Missing test report", + "## Missing doc report", + "## Breaking Change(s) Detected", + }, + }, "missing tests are displayed": { data: diffCommentData{ MissingTests: map[string]*MissingTestInfo{ diff --git a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl index 7f22c8073e3a..5c2942060dc0 100644 --- a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl +++ b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl @@ -51,6 +51,13 @@ If you believe this detection to be incorrect please raise the concern with your An `override-missing-service-label` label can be added to allow merging. 
{{end}} +{{- if gt (len .AddedResources) 1 }} +## Multiple resources added + +This PR adds multiple new resources: {{range $i, $resource := .AddedResources}}{{ if gt $i 0}}, {{end}}`{{$resource}}`{{end}}. This makes review significantly more difficult. Please split it into multiple PRs, one per resource. +An `override-multiple-resources` label can be added to allow merging. +{{end}} + {{- if and (.MissingDocs) (or .MissingDocs.Resource .MissingDocs.DataSource) }} ## Missing doc report (experimental) From 18c124c0a14568a12afe81d009241b60a8623ac3 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 14 May 2025 09:49:51 -0700 Subject: [PATCH 118/884] Added workflow to disallow large PRs (#13919) --- .github/workflows/basic-pr-checks.yml | 38 +++++++++++++++++++++++ .github/workflows/disallow-submodules.yml | 22 ------------- 2 files changed, 38 insertions(+), 22 deletions(-) create mode 100644 .github/workflows/basic-pr-checks.yml delete mode 100644 .github/workflows/disallow-submodules.yml diff --git a/.github/workflows/basic-pr-checks.yml b/.github/workflows/basic-pr-checks.yml new file mode 100644 index 000000000000..23d676229e7b --- /dev/null +++ b/.github/workflows/basic-pr-checks.yml @@ -0,0 +1,38 @@ +name: "Basic PR checks" +permissions: read-all + +on: + pull_request + +jobs: + disallow-submodules: + runs-on: ubuntu-22.04 + steps: + - name: Checkout repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + - name: Check for submodules + run: | + output=$(git submodule status --recursive 2>&1) + if [ ! 
-z $output ]; then + echo $output + echo "Submodules are not allowed" + exit 1 + else + echo "No submodules found" + fi + disallow-large-prs: + runs-on: ubuntu-22.04 + steps: + - name: Check PR size + run: | + response=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/pulls/${{github.event.pull_request.number}}") + additions=$(echo "$response" | jq -r '.additions') + deletions=$(echo "$response" | jq -r '.deletions') + total=$(( $additions + $deletions )) + echo "$additions lines added; $deletions lines deleted" + if (( $total > 500 )); then + echo "This PR changed $total lines of code, which is above the recommended limit of 500. Your reviewer may ask you to break it into multiple PRs." + exit 1 + else + echo "This PR changed $total lines of code, which meets the recommended limit of 500." + fi diff --git a/.github/workflows/disallow-submodules.yml b/.github/workflows/disallow-submodules.yml deleted file mode 100644 index c61685931565..000000000000 --- a/.github/workflows/disallow-submodules.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: "Disallow submodules" -permissions: read-all - -on: - pull_request - -jobs: - disallow-submodules: - runs-on: ubuntu-22.04 - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 - - name: Check for submodules - run: | - output=$(git submodule status --recursive 2>&1) - if [ ! 
-z $output ]; then - echo $output - echo "Submodules are not allowed" - exit 1 - else - echo "No submodules found" - fi From 1ced6e3cd247cdbe4f6e90c66aa984b56da4bc5f Mon Sep 17 00:00:00 2001 From: sahil-mahajan-google Date: Wed, 14 May 2025 22:51:05 +0530 Subject: [PATCH 119/884] Add independent scaling fields (hyperdisk) to GA (#13949) --- mmv1/products/netapp/StoragePool.yaml | 4 ---- .../netapp/resource_netapp_storage_pool_test.go.tmpl | 10 ++-------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index a4065a8ef61d..a21c5894e4ae 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -35,7 +35,6 @@ description: | the next apply. You can trigger a manual [zone switch](https://cloud.google.com/netapp/volumes/docs/configure-and-use/storage-pools/edit-or-delete-storage-pool#switch_active_and_replica_zones) via Terraform by swapping the value of the `zone` and `replica_zone` parameters in your HCL code. - Note: Custom Performance FLEX storage pools are supported in beta provider currently. references: guides: @@ -187,14 +186,11 @@ properties: description: | Optional. True if using Independent Scaling of capacity and performance (Hyperdisk). Default is false. immutable: true - min_version: 'beta' - name: 'totalThroughputMibps' type: String description: | Optional. Custom Performance Total Throughput of the pool (in MiB/s). - min_version: 'beta' - name: 'totalIops' type: String description: | Optional. 
Custom Performance Total IOPS of the pool If not provided, it will be calculated based on the totalThroughputMibps - min_version: 'beta' diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index ca02e3288786..043edc879c79 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -318,7 +318,6 @@ data "google_compute_network" "default" { `, context) } -{{ if ne $.TargetVersionName `ga` -}} func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), @@ -327,7 +326,7 @@ func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), Steps: []resource.TestStep{ { @@ -355,7 +354,6 @@ func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t func testAccNetappStoragePool_customPerformanceStoragePoolCreateExample_full(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_netapp_storage_pool" "test_pool" { - provider = google-beta name = "tf-test-pool%{random_suffix}" location = "us-east4-a" service_level = "FLEX" @@ -368,7 +366,6 @@ resource "google_netapp_storage_pool" "test_pool" { } data "google_compute_network" "default" { - provider = google-beta name = "%{network_name}" } `, context) @@ -377,7 +374,6 @@ data 
"google_compute_network" "default" { func testAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_netapp_storage_pool" "test_pool" { - provider = google-beta name = "tf-test-pool%{random_suffix}" location = "us-east4-a" service_level = "FLEX" @@ -390,9 +386,7 @@ resource "google_netapp_storage_pool" "test_pool" { } data "google_compute_network" "default" { - provider = google-beta name = "%{network_name}" } `, context) -} -{{ end }} +} \ No newline at end of file From a36951e3800d683a49ec7d74a66331f144670c84 Mon Sep 17 00:00:00 2001 From: Arpit Gupta Date: Wed, 14 May 2025 22:54:16 +0530 Subject: [PATCH 120/884] Change description of destination_project field in backup and restore channel resources. (#13945) --- mmv1/products/gkebackup/BackupChannel.yaml | 2 +- mmv1/products/gkebackup/RestoreChannel.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/gkebackup/BackupChannel.yaml b/mmv1/products/gkebackup/BackupChannel.yaml index 727ae15333c8..22387f21ab0f 100644 --- a/mmv1/products/gkebackup/BackupChannel.yaml +++ b/mmv1/products/gkebackup/BackupChannel.yaml @@ -78,7 +78,7 @@ properties: description: | The project where Backups are allowed to be stored. The format is `projects/{project}`. - {project} can only be a project number. + {project} can be project number or project id. required: true immutable: true - name: 'description' diff --git a/mmv1/products/gkebackup/RestoreChannel.yaml b/mmv1/products/gkebackup/RestoreChannel.yaml index 98809c2a0b31..8cd932beb137 100644 --- a/mmv1/products/gkebackup/RestoreChannel.yaml +++ b/mmv1/products/gkebackup/RestoreChannel.yaml @@ -78,7 +78,7 @@ properties: description: | The project where Backups will be restored. The format is `projects/{project}`. - {project} can only be a project number. + {project} can be project number or project id. 
required: true immutable: true - name: 'description' From 467eaeb333d1f6e7d3ae68d708c2e41d52d41aa2 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 14 May 2025 10:28:24 -0700 Subject: [PATCH 121/884] validate template syntax needed (#13929) --- .github/workflows/mmv1-check-templates.yml | 30 - ...nt-product-yaml.yml => unit-test-mmv1.yml} | 37 +- mmv1/provider/terraform.go | 2 +- mmv1/provider/terraform_tgc_next.go | 4 +- ...idators.go.tmpl => provider_validators.go} | 2 +- ...> resource_backup_dr_backup_vault_test.go} | 6 +- ...ource_binary_authorization_policy_test.go} | 8 +- ...ource_chronicle_data_access_label_test.go} | 0 ...ource_chronicle_data_access_scope_test.go} | 0 ...resource_chronicle_reference_list_test.go} | 0 ...esource_chronicle_rule_deployment_test.go} | 0 ...o.tmpl => resource_chronicle_rule_test.go} | 0 ...l => resource_chronicle_watchlist_test.go} | 0 ...oogle_cloud_asset_search_all_resources.go} | 13 +- ..._cloud_asset_search_all_resources_test.go} | 3 +- ...ata_source_cloud_identity_group_lookup.go} | 0 ...mpl => resource_cloud_tasks_queue_test.go} | 27 +- ...omposer_user_workloads_config_map_test.go} | 0 ...le_composer_user_workloads_secret_test.go} | 2 +- ... => resource_composer_environment_test.go} | 450 +++++++------- ...omposer_user_workloads_config_map_test.go} | 10 +- ...ce_composer_user_workloads_secret_test.go} | 56 +- ....tmpl => compute_instance_helpers_test.go} | 0 ...l => data_source_google_compute_images.go} | 0 ...=> data_source_google_compute_instance.go} | 4 +- ...rce_google_compute_instance_group_test.go} | 4 +- ...ta_source_google_compute_instance_test.go} | 2 +- ... => data_source_google_compute_network.go} | 4 +- ...gle_compute_region_instance_group_test.go} | 6 +- ..._source_google_compute_resource_policy.go} | 2 +- ...urce_compute_firewall_policy_rule_test.go} | 148 ++--- ... 
resource_compute_firewall_policy_test.go} | 6 +- ...tmpl => resource_compute_firewall_test.go} | 4 +- ...> resource_compute_global_address_test.go} | 14 +- ...e_compute_global_network_endpoint_test.go} | 3 +- ...esource_compute_instance_settings_test.go} | 2 +- ...source_compute_network_attachment_test.go} | 0 ...ce_compute_network_endpoint_group_test.go} | 2 +- ...pute_network_firewall_policy_rule_test.go} | 282 +++++---- ...pl => resource_compute_node_group_test.go} | 0 ...ource_compute_per_instance_config_test.go} | 14 +- ...gion_network_firewall_policy_rule_test.go} | 18 +- ...ompute_region_per_instance_config_test.go} | 3 +- ..._compute_region_target_http_proxy_test.go} | 2 +- ...e_compute_region_target_tcp_proxy_test.go} | 4 +- ...> resource_compute_region_url_map_test.go} | 4 +- ...source_compute_router_nat_address_test.go} | 2 +- ...pl => resource_compute_router_nat_test.go} | 17 +- ...ource_compute_router_route_policy_test.go} | 2 +- ...o.tmpl => resource_compute_router_test.go} | 8 +- ...urce_compute_security_policy_rule_test.go} | 45 +- ...source_compute_service_attachment_test.go} | 0 ...w_job.go.tmpl => resource_dataflow_job.go} | 43 +- ...tmpl => resource_dataproc_cluster_test.go} | 77 ++- ...c_job.go.tmpl => resource_dataproc_job.go} | 12 +- ...source_dataproc_metastore_service_test.go} | 2 +- ...ataproc_metastore_service_diff_supress.go} | 0 ...source_dataproc_metastore_service_test.go} | 4 +- ...eveloper_connect_account_connector_test.go | 524 +++++++++++++++++ ...per_connect_account_connector_test.go.tmpl | 547 ------------------ ...urce_developer_connect_connection_test.go} | 227 ++++---- ...tmpl => resource_dialogflow_agent_test.go} | 2 +- ...> resource_dialogflow_entity_type_test.go} | 0 ...> resource_dialogflow_fulfillment_test.go} | 0 ...mpl => resource_dialogflow_intent_test.go} | 0 ...pl => resource_dialogflowcx_agent_test.go} | 0 ...st.go.tmpl => data_source_dns_key_test.go} | 1 - ...mpl => data_source_dns_record_set_test.go} | 0 
...st.go.tmpl => resource_dns_policy_test.go} | 2 +- ...esource_firestore_database_update_test.go} | 38 +- ...pl => iam_gemini_repository_group_test.go} | 0 ...urce_gemini_code_repository_index_test.go} | 0 ...gemini_code_tools_setting_binding_test.go} | 6 +- ...esource_gemini_code_tools_setting_test.go} | 2 +- ...aring_with_google_setting_binding_test.go} | 4 +- ..._data_sharing_with_google_setting_test.go} | 2 +- ...ni_gcp_enablement_setting_binding_test.go} | 4 +- ...ini_gemini_gcp_enablement_setting_test.go} | 2 +- ...ce_gemini_logging_setting_binding_test.go} | 4 +- ...i_release_channel_setting_binding_test.go} | 4 +- ...ce_gemini_release_channel_setting_test.go} | 0 ... resource_gemini_repository_group_test.go} | 0 ...esource_gke_backup_backup_channel_test.go} | 0 ...> resource_gke_backup_backup_plan_test.go} | 42 +- ...source_gke_backup_restore_channel_test.go} | 0 ... resource_gke_backup_restore_plan_test.go} | 9 +- ...st.go.tmpl => iam_gke_hub_feature_test.go} | 0 ...eration.go.tmpl => gkeonprem_operation.go} | 8 +- ...urce_gkeonprem_bare_metal_cluster_test.go} | 178 +++--- ...ce_gkeonprem_bare_metal_node_pool_test.go} | 62 +- ...resource_gkeonprem_vmware_cluster_test.go} | 182 +++--- ...source_gkeonprem_vmware_node_pool_test.go} | 62 +- ....tmpl => resource_iam_deny_policy_test.go} | 2 +- ...source_iam_folders_policy_binding_test.go} | 2 +- ..._iam_organizations_policy_binding_test.go} | 2 +- ..._principal_access_boundary_policy_test.go} | 1 + ...ource_iam_projects_policy_binding_test.go} | 7 +- ...m_workload_identity_pool_provider_test.go} | 2 +- ...source_iam_workload_identity_pool_test.go} | 2 +- ...rce_iam_workload_identity_pool_id_test.go} | 0 ...orkload_identity_pool_provider_id_test.go} | 0 ...m_workload_identity_pool_provider_test.go} | 4 +- ...source_iam_workload_identity_pool_test.go} | 0 ...ource_iam_oauth_client_credential_test.go} | 3 +- ...tmpl => resource_iam_oauth_client_test.go} | 4 +- ...pl => resource_iam_workforce_pool_test.go} | 2 +- 
..._workforce_pool_workforce_pool_id_test.go} | 0 ...e_pool_workforce_pool_provider_id_test.go} | 0 ...=> resource_managed_kafka_cluster_test.go} | 0 ...l => resource_managed_kafka_topic_test.go} | 0 ...k_management_vpc_flow_logs_config_test.go} | 69 ++- ...k_security_security_profile_group_test.go} | 0 ...network_security_security_profile_test.go} | 2 +- ...pl => resource_notebooks_instance_test.go} | 2 +- ...mpl => resource_notebooks_runtime_test.go} | 3 +- ...urce_org_policy_custom_constraint_test.go} | 3 +- ...v2_policy_orchestrator_for_folder_test.go} | 0 ...icy_orchestrator_for_organization_test.go} | 7 +- ..._os_config_v2_policy_orchestrator_test.go} | 1 - ...ource_parameter_manager_parameter_test.go} | 0 ..._manager_parameter_version_render_test.go} | 0 ...rameter_manager_parameter_version_test.go} | 2 +- ...urce_parameter_manager_parameters_test.go} | 0 ...ource_parameter_manager_parameter_test.go} | 6 +- ...rameter_manager_parameter_version_test.go} | 0 ...ameter_manager_regional_parameter_test.go} | 0 ...regional_parameter_version_render_test.go} | 0 ...anager_regional_parameter_version_test.go} | 2 +- ...meter_manager_regional_parameters_test.go} | 0 ...ameter_manager_regional_parameter_test.go} | 6 +- ...anager_regional_parameter_version_test.go} | 0 ...ileged_access_manager_entitlement_test.go} | 2 +- ...go.tmpl => resource_redis_cluster_test.go} | 8 +- ....tmpl => data_source_google_iam_policy.go} | 16 +- ...source_google_project_iam_binding_test.go} | 4 +- ...esource_google_project_iam_member_test.go} | 4 +- ...esource_google_project_iam_policy_test.go} | 12 +- ...tmpl => iam_secret_manager_secret_test.go} | 2 +- ...=> resource_secret_manager_secret_test.go} | 54 +- ...rce_secret_manager_secret_version_test.go} | 20 +- ...am_secret_manager_regional_secret_test.go} | 0 ...ce_secret_manager_regional_secret_test.go} | 60 +- ...t_manager_regional_secret_version_test.go} | 0 ...tmpl => resource_spanner_database_test.go} | 2 +- ...o.tmpl => 
resource_storage_bucket_test.go} | 42 +- ...workbench_instance_shielded_config_test.go | 228 ++++++++ ...ench_instance_shielded_config_test.go.tmpl | 228 -------- ...pl => resource_workbench_instance_test.go} | 183 +++--- ...pl => resource_workflows_workflow_test.go} | 5 +- ....tmpl => terraform-registry-manifest.json} | 0 ...ppress.go.tmpl => common_diff_suppress.go} | 4 +- mmv1/validate_third_party_test.go | 86 +++ 152 files changed, 2204 insertions(+), 2192 deletions(-) delete mode 100644 .github/workflows/mmv1-check-templates.yml rename .github/workflows/{mmv1-lint-product-yaml.yml => unit-test-mmv1.yml} (51%) rename mmv1/third_party/terraform/provider/{provider_validators.go.tmpl => provider_validators.go} (99%) rename mmv1/third_party/terraform/services/backupdr/{resource_backup_dr_backup_vault_test.go.tmpl => resource_backup_dr_backup_vault_test.go} (96%) rename mmv1/third_party/terraform/services/binaryauthorization/{resource_binary_authorization_policy_test.go.tmpl => resource_binary_authorization_policy_test.go} (98%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_data_access_label_test.go.tmpl => resource_chronicle_data_access_label_test.go} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_data_access_scope_test.go.tmpl => resource_chronicle_data_access_scope_test.go} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_reference_list_test.go.tmpl => resource_chronicle_reference_list_test.go} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_rule_deployment_test.go.tmpl => resource_chronicle_rule_deployment_test.go} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_rule_test.go.tmpl => resource_chronicle_rule_test.go} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_watchlist_test.go.tmpl => resource_chronicle_watchlist_test.go} (100%) rename 
mmv1/third_party/terraform/services/cloudasset/{data_source_google_cloud_asset_search_all_resources.go.tmpl => data_source_google_cloud_asset_search_all_resources.go} (94%) rename mmv1/third_party/terraform/services/cloudasset/{data_source_google_cloud_asset_search_all_resources_test.go.tmpl => data_source_google_cloud_asset_search_all_resources_test.go} (99%) rename mmv1/third_party/terraform/services/cloudidentity/{data_source_cloud_identity_group_lookup.go.tmpl => data_source_cloud_identity_group_lookup.go} (100%) rename mmv1/third_party/terraform/services/cloudtasks/{resource_cloud_tasks_queue_test.go.tmpl => resource_cloud_tasks_queue_test.go} (94%) rename mmv1/third_party/terraform/services/composer/{data_source_google_composer_user_workloads_config_map_test.go.tmpl => data_source_google_composer_user_workloads_config_map_test.go} (100%) rename mmv1/third_party/terraform/services/composer/{data_source_google_composer_user_workloads_secret_test.go.tmpl => data_source_google_composer_user_workloads_secret_test.go} (98%) rename mmv1/third_party/terraform/services/composer/{resource_composer_environment_test.go.tmpl => resource_composer_environment_test.go} (88%) rename mmv1/third_party/terraform/services/composer/{resource_composer_user_workloads_config_map_test.go.tmpl => resource_composer_user_workloads_config_map_test.go} (92%) rename mmv1/third_party/terraform/services/composer/{resource_composer_user_workloads_secret_test.go.tmpl => resource_composer_user_workloads_secret_test.go} (65%) rename mmv1/third_party/terraform/services/compute/{compute_instance_helpers_test.go.tmpl => compute_instance_helpers_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_images.go.tmpl => data_source_google_compute_images.go} (100%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_instance.go.tmpl => data_source_google_compute_instance.go} (99%) rename 
mmv1/third_party/terraform/services/compute/{data_source_google_compute_instance_group_test.go.tmpl => data_source_google_compute_instance_group_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_instance_test.go.tmpl => data_source_google_compute_instance_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_network.go.tmpl => data_source_google_compute_network.go} (98%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_region_instance_group_test.go.tmpl => data_source_google_compute_region_instance_group_test.go} (97%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_resource_policy.go.tmpl => data_source_google_compute_resource_policy.go} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_policy_rule_test.go.tmpl => resource_compute_firewall_policy_rule_test.go} (89%) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_policy_test.go.tmpl => resource_compute_firewall_policy_test.go} (98%) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_test.go.tmpl => resource_compute_firewall_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_global_address_test.go.tmpl => resource_compute_global_address_test.go} (93%) rename mmv1/third_party/terraform/services/compute/{resource_compute_global_network_endpoint_test.go.tmpl => resource_compute_global_network_endpoint_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_instance_settings_test.go.tmpl => resource_compute_instance_settings_test.go} (97%) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_attachment_test.go.tmpl => resource_compute_network_attachment_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_endpoint_group_test.go.tmpl => 
resource_compute_network_endpoint_group_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_firewall_policy_rule_test.go.tmpl => resource_compute_network_firewall_policy_rule_test.go} (84%) rename mmv1/third_party/terraform/services/compute/{resource_compute_node_group_test.go.tmpl => resource_compute_node_group_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_per_instance_config_test.go.tmpl => resource_compute_per_instance_config_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_network_firewall_policy_rule_test.go.tmpl => resource_compute_region_network_firewall_policy_rule_test.go} (98%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_per_instance_config_test.go.tmpl => resource_compute_region_per_instance_config_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_target_http_proxy_test.go.tmpl => resource_compute_region_target_http_proxy_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_target_tcp_proxy_test.go.tmpl => resource_compute_region_target_tcp_proxy_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_url_map_test.go.tmpl => resource_compute_region_url_map_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_nat_address_test.go.tmpl => resource_compute_router_nat_address_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_nat_test.go.tmpl => resource_compute_router_nat_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_route_policy_test.go.tmpl => resource_compute_router_route_policy_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_test.go.tmpl => resource_compute_router_test.go} (98%) rename 
mmv1/third_party/terraform/services/compute/{resource_compute_security_policy_rule_test.go.tmpl => resource_compute_security_policy_rule_test.go} (98%) rename mmv1/third_party/terraform/services/compute/{resource_compute_service_attachment_test.go.tmpl => resource_compute_service_attachment_test.go} (100%) rename mmv1/third_party/terraform/services/dataflow/{resource_dataflow_job.go.tmpl => resource_dataflow_job.go} (95%) rename mmv1/third_party/terraform/services/dataproc/{resource_dataproc_cluster_test.go.tmpl => resource_dataproc_cluster_test.go} (97%) rename mmv1/third_party/terraform/services/dataproc/{resource_dataproc_job.go.tmpl => resource_dataproc_job.go} (99%) rename mmv1/third_party/terraform/services/dataprocmetastore/{data_source_dataproc_metastore_service_test.go.tmpl => data_source_dataproc_metastore_service_test.go} (100%) rename mmv1/third_party/terraform/services/dataprocmetastore/{dataproc_metastore_service_diff_supress.go.tmpl => dataproc_metastore_service_diff_supress.go} (100%) rename mmv1/third_party/terraform/services/dataprocmetastore/{resource_dataproc_metastore_service_test.go.tmpl => resource_dataproc_metastore_service_test.go} (99%) create mode 100644 mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go delete mode 100644 mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl rename mmv1/third_party/terraform/services/developerconnect/{resource_developer_connect_connection_test.go.tmpl => resource_developer_connect_connection_test.go} (83%) rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_agent_test.go.tmpl => resource_dialogflow_agent_test.go} (100%) rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_entity_type_test.go.tmpl => resource_dialogflow_entity_type_test.go} (100%) rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_fulfillment_test.go.tmpl => 
resource_dialogflow_fulfillment_test.go} (100%) rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_intent_test.go.tmpl => resource_dialogflow_intent_test.go} (100%) rename mmv1/third_party/terraform/services/dialogflowcx/{resource_dialogflowcx_agent_test.go.tmpl => resource_dialogflowcx_agent_test.go} (100%) rename mmv1/third_party/terraform/services/dns/{data_source_dns_key_test.go.tmpl => data_source_dns_key_test.go} (99%) rename mmv1/third_party/terraform/services/dns/{data_source_dns_record_set_test.go.tmpl => data_source_dns_record_set_test.go} (100%) rename mmv1/third_party/terraform/services/dns/{resource_dns_policy_test.go.tmpl => resource_dns_policy_test.go} (100%) rename mmv1/third_party/terraform/services/firestore/{resource_firestore_database_update_test.go.tmpl => resource_firestore_database_update_test.go} (83%) rename mmv1/third_party/terraform/services/gemini/{iam_gemini_repository_group_test.go.tmpl => iam_gemini_repository_group_test.go} (100%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_code_repository_index_test.go.tmpl => resource_gemini_code_repository_index_test.go} (100%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_code_tools_setting_binding_test.go.tmpl => resource_gemini_code_tools_setting_binding_test.go} (97%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_code_tools_setting_test.go.tmpl => resource_gemini_code_tools_setting_test.go} (99%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl => resource_gemini_data_sharing_with_google_setting_binding_test.go} (96%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_data_sharing_with_google_setting_test.go.tmpl => resource_gemini_data_sharing_with_google_setting_test.go} (99%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl => 
resource_gemini_gemini_gcp_enablement_setting_binding_test.go} (96%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl => resource_gemini_gemini_gcp_enablement_setting_test.go} (99%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_logging_setting_binding_test.go.tmpl => resource_gemini_logging_setting_binding_test.go} (99%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_release_channel_setting_binding_test.go.tmpl => resource_gemini_release_channel_setting_binding_test.go} (96%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_release_channel_setting_test.go.tmpl => resource_gemini_release_channel_setting_test.go} (100%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_repository_group_test.go.tmpl => resource_gemini_repository_group_test.go} (100%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_backup_channel_test.go.tmpl => resource_gke_backup_backup_channel_test.go} (100%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_backup_plan_test.go.tmpl => resource_gke_backup_backup_plan_test.go} (90%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_restore_channel_test.go.tmpl => resource_gke_backup_restore_channel_test.go} (100%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_restore_plan_test.go.tmpl => resource_gke_backup_restore_plan_test.go} (99%) rename mmv1/third_party/terraform/services/gkehub2/{iam_gke_hub_feature_test.go.tmpl => iam_gke_hub_feature_test.go} (100%) rename mmv1/third_party/terraform/services/gkeonprem/{gkeonprem_operation.go.tmpl => gkeonprem_operation.go} (97%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_bare_metal_cluster_test.go.tmpl => resource_gkeonprem_bare_metal_cluster_test.go} (79%) rename 
mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_bare_metal_node_pool_test.go.tmpl => resource_gkeonprem_bare_metal_node_pool_test.go} (81%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_vmware_cluster_test.go.tmpl => resource_gkeonprem_vmware_cluster_test.go} (77%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_vmware_node_pool_test.go.tmpl => resource_gkeonprem_vmware_node_pool_test.go} (81%) rename mmv1/third_party/terraform/services/iam2/{resource_iam_deny_policy_test.go.tmpl => resource_iam_deny_policy_test.go} (99%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_folders_policy_binding_test.go.tmpl => resource_iam_folders_policy_binding_test.go} (99%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_organizations_policy_binding_test.go.tmpl => resource_iam_organizations_policy_binding_test.go} (98%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_principal_access_boundary_policy_test.go.tmpl => resource_iam_principal_access_boundary_policy_test.go} (99%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_projects_policy_binding_test.go.tmpl => resource_iam_projects_policy_binding_test.go} (96%) rename mmv1/third_party/terraform/services/iambeta/{data_source_iam_workload_identity_pool_provider_test.go.tmpl => data_source_iam_workload_identity_pool_provider_test.go} (100%) rename mmv1/third_party/terraform/services/iambeta/{data_source_iam_workload_identity_pool_test.go.tmpl => data_source_iam_workload_identity_pool_test.go} (100%) rename mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_id_test.go.tmpl => resource_iam_workload_identity_pool_id_test.go} (100%) rename mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_provider_id_test.go.tmpl => resource_iam_workload_identity_pool_provider_id_test.go} (100%) rename 
mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_provider_test.go.tmpl => resource_iam_workload_identity_pool_provider_test.go} (99%) rename mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_test.go.tmpl => resource_iam_workload_identity_pool_test.go} (100%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_oauth_client_credential_test.go.tmpl => resource_iam_oauth_client_credential_test.go} (99%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_oauth_client_test.go.tmpl => resource_iam_oauth_client_test.go} (99%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_workforce_pool_test.go.tmpl => resource_iam_workforce_pool_test.go} (100%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_workforce_pool_workforce_pool_id_test.go.tmpl => resource_iam_workforce_pool_workforce_pool_id_test.go} (100%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_workforce_pool_workforce_pool_provider_id_test.go.tmpl => resource_iam_workforce_pool_workforce_pool_provider_id_test.go} (100%) rename mmv1/third_party/terraform/services/managedkafka/{resource_managed_kafka_cluster_test.go.tmpl => resource_managed_kafka_cluster_test.go} (100%) rename mmv1/third_party/terraform/services/managedkafka/{resource_managed_kafka_topic_test.go.tmpl => resource_managed_kafka_topic_test.go} (100%) rename mmv1/third_party/terraform/services/networkmanagement/{resource_network_management_vpc_flow_logs_config_test.go.tmpl => resource_network_management_vpc_flow_logs_config_test.go} (81%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_security_profile_group_test.go.tmpl => resource_network_security_security_profile_group_test.go} (100%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_security_profile_test.go.tmpl => 
resource_network_security_security_profile_test.go} (100%) rename mmv1/third_party/terraform/services/notebooks/{resource_notebooks_instance_test.go.tmpl => resource_notebooks_instance_test.go} (98%) rename mmv1/third_party/terraform/services/notebooks/{resource_notebooks_runtime_test.go.tmpl => resource_notebooks_runtime_test.go} (99%) rename mmv1/third_party/terraform/services/orgpolicy/{resource_org_policy_custom_constraint_test.go.tmpl => resource_org_policy_custom_constraint_test.go} (99%) rename mmv1/third_party/terraform/services/osconfigv2/{resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl => resource_os_config_v2_policy_orchestrator_for_folder_test.go} (100%) rename mmv1/third_party/terraform/services/osconfigv2/{resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl => resource_os_config_v2_policy_orchestrator_for_organization_test.go} (96%) rename mmv1/third_party/terraform/services/osconfigv2/{resource_os_config_v2_policy_orchestrator_test.go.tmpl => resource_os_config_v2_policy_orchestrator_test.go} (99%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameter_test.go.tmpl => data_source_parameter_manager_parameter_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameter_version_render_test.go.tmpl => data_source_parameter_manager_parameter_version_render_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameter_version_test.go.tmpl => data_source_parameter_manager_parameter_version_test.go} (98%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameters_test.go.tmpl => data_source_parameter_manager_parameters_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanager/{resource_parameter_manager_parameter_test.go.tmpl => resource_parameter_manager_parameter_test.go} (95%) rename 
mmv1/third_party/terraform/services/parametermanager/{resource_parameter_manager_parameter_version_test.go.tmpl => resource_parameter_manager_parameter_version_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameter_test.go.tmpl => data_source_parameter_manager_regional_parameter_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameter_version_render_test.go.tmpl => data_source_parameter_manager_regional_parameter_version_render_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameter_version_test.go.tmpl => data_source_parameter_manager_regional_parameter_version_test.go} (98%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameters_test.go.tmpl => data_source_parameter_manager_regional_parameters_test.go} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{resource_parameter_manager_regional_parameter_test.go.tmpl => resource_parameter_manager_regional_parameter_test.go} (96%) rename mmv1/third_party/terraform/services/parametermanagerregional/{resource_parameter_manager_regional_parameter_version_test.go.tmpl => resource_parameter_manager_regional_parameter_version_test.go} (100%) rename mmv1/third_party/terraform/services/privilegedaccessmanager/{resource_privileged_access_manager_entitlement_test.go.tmpl => resource_privileged_access_manager_entitlement_test.go} (98%) rename mmv1/third_party/terraform/services/redis/{resource_redis_cluster_test.go.tmpl => resource_redis_cluster_test.go} (99%) rename mmv1/third_party/terraform/services/resourcemanager/{data_source_google_iam_policy.go.tmpl => data_source_google_iam_policy.go} (97%) rename mmv1/third_party/terraform/services/resourcemanager/{resource_google_project_iam_binding_test.go.tmpl => 
resource_google_project_iam_binding_test.go} (99%) rename mmv1/third_party/terraform/services/resourcemanager/{resource_google_project_iam_member_test.go.tmpl => resource_google_project_iam_member_test.go} (98%) rename mmv1/third_party/terraform/services/resourcemanager/{resource_google_project_iam_policy_test.go.tmpl => resource_google_project_iam_policy_test.go} (97%) rename mmv1/third_party/terraform/services/secretmanager/{iam_secret_manager_secret_test.go.tmpl => iam_secret_manager_secret_test.go} (98%) rename mmv1/third_party/terraform/services/secretmanager/{resource_secret_manager_secret_test.go.tmpl => resource_secret_manager_secret_test.go} (96%) rename mmv1/third_party/terraform/services/secretmanager/{resource_secret_manager_secret_version_test.go.tmpl => resource_secret_manager_secret_version_test.go} (94%) rename mmv1/third_party/terraform/services/secretmanagerregional/{iam_secret_manager_regional_secret_test.go.tmpl => iam_secret_manager_regional_secret_test.go} (100%) rename mmv1/third_party/terraform/services/secretmanagerregional/{resource_secret_manager_regional_secret_test.go.tmpl => resource_secret_manager_regional_secret_test.go} (95%) rename mmv1/third_party/terraform/services/secretmanagerregional/{resource_secret_manager_regional_secret_version_test.go.tmpl => resource_secret_manager_regional_secret_version_test.go} (100%) rename mmv1/third_party/terraform/services/spanner/{resource_spanner_database_test.go.tmpl => resource_spanner_database_test.go} (99%) rename mmv1/third_party/terraform/services/storage/{resource_storage_bucket_test.go.tmpl => resource_storage_bucket_test.go} (96%) create mode 100644 mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go delete mode 100644 mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl rename mmv1/third_party/terraform/services/workbench/{resource_workbench_instance_test.go.tmpl => 
resource_workbench_instance_test.go} (84%) rename mmv1/third_party/terraform/services/workflows/{resource_workflows_workflow_test.go.tmpl => resource_workflows_workflow_test.go} (99%) rename mmv1/third_party/terraform/{terraform-registry-manifest.json.tmpl => terraform-registry-manifest.json} (100%) rename mmv1/third_party/terraform/tpgresource/{common_diff_suppress.go.tmpl => common_diff_suppress.go} (99%) create mode 100644 mmv1/validate_third_party_test.go diff --git a/.github/workflows/mmv1-check-templates.yml b/.github/workflows/mmv1-check-templates.yml deleted file mode 100644 index f8b1f4c052ce..000000000000 --- a/.github/workflows/mmv1-check-templates.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: mmv1-check-templates - -permissions: read-all - -on: - pull_request: - paths: - - 'mmv1/**/*.tmpl' - -jobs: - version-guard-check: - runs-on: ubuntu-22.04 - steps: - - name: Checkout Repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 - with: - path: repo - fetch-depth: 0 - - name: Merge base branch - id: pull_request - run: | - cd repo - git config user.name "modular-magician" - git config user.email "magic-modules@google.com" - git fetch origin ${{ github.base_ref }} # Fetch the base branch - git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch - - name: Check for invalid version guards - run: | - cd repo/tools/template-check - git diff --name-only --diff-filter=d origin/${{ github.base_ref }} ../../*.tmpl | sed 's=^=../../=g' | go run main.go diff --git a/.github/workflows/mmv1-lint-product-yaml.yml b/.github/workflows/unit-test-mmv1.yml similarity index 51% rename from .github/workflows/mmv1-lint-product-yaml.yml rename to .github/workflows/unit-test-mmv1.yml index 41bdcfd65ace..7ab1de8edd4a 100644 --- a/.github/workflows/mmv1-lint-product-yaml.yml +++ b/.github/workflows/unit-test-mmv1.yml @@ -1,13 +1,33 @@ -name: mmv1-lint-product-yaml +name: mmv1 permissions: read-all on: pull_request: paths: - - 
'mmv1/products/**' + - 'mmv1/**' jobs: + version-guard-check: + runs-on: ubuntu-22.04 + steps: + - name: Checkout Repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + with: + path: repo + fetch-depth: 0 + - name: Merge base branch + id: pull_request + run: | + cd repo + git config user.name "modular-magician" + git config user.email "magic-modules@google.com" + git fetch origin ${{ github.base_ref }} # Fetch the base branch + git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch + - name: Check for invalid version guards + run: | + cd repo/tools/template-check + git diff --name-only --diff-filter=d origin/${{ github.base_ref }} ../../*.tmpl | sed 's=^=../../=g' | go run main.go lint-yaml: runs-on: ubuntu-22.04 steps: @@ -34,3 +54,16 @@ jobs: - name: Lint YAML files if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} run: yamllint -c repo/.yamllint ${{steps.pull_request.outputs.yamlfiles}} + unit-tests: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + - name: Set up Go + uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 + with: + go-version: '^1.23' + - name: Run mmv1 unit tests + run: | + cd mmv1 + go test ./... 
-v + diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index c1a66ea89acb..877207535aac 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -344,7 +344,7 @@ func (t Terraform) getCommonCopyFiles(versionName string, generateCode, generate "go.sum": "third_party/terraform/go.sum", "go.mod": "third_party/terraform/go.mod", ".go-version": "third_party/terraform/.go-version", - "terraform-registry-manifest.json": "third_party/terraform/terraform-registry-manifest.json.tmpl", + "terraform-registry-manifest.json": "third_party/terraform/terraform-registry-manifest.json", } maps.Copy(commonCopyFiles, singleFiles) diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 6d48de141eb7..25b5ca70b334 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -74,9 +74,9 @@ func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, // common "pkg/transport/config.go": "third_party/terraform/transport/config.go.tmpl", "pkg/transport/provider_handwritten_endpoint.go": "third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl", - "pkg/tpgresource/common_diff_suppress.go": "third_party/terraform/tpgresource/common_diff_suppress.go.tmpl", + "pkg/tpgresource/common_diff_suppress.go": "third_party/terraform/tpgresource/common_diff_suppress.go", "pkg/provider/provider.go": "third_party/terraform/provider/provider.go.tmpl", - "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go.tmpl", + "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go", // tfplan2cai "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", diff --git a/mmv1/third_party/terraform/provider/provider_validators.go.tmpl b/mmv1/third_party/terraform/provider/provider_validators.go similarity index 99% rename from 
mmv1/third_party/terraform/provider/provider_validators.go.tmpl rename to mmv1/third_party/terraform/provider/provider_validators.go index 67012b616821..5d1081235716 100644 --- a/mmv1/third_party/terraform/provider/provider_validators.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_validators.go @@ -85,4 +85,4 @@ func ValidateServiceAccountEmail(v interface{}, k string) (warnings []string, er } return -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go.tmpl b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go similarity index 96% rename from mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go.tmpl rename to mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go index c851a5387309..1f8e0d4a4a3e 100644 --- a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go.tmpl +++ b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go @@ -18,9 +18,9 @@ func TestAccBackupDRBackupVault_fullUpdate(t *testing.T) { referenceTime := time.Date(timeNow.Year(), timeNow.Month(), timeNow.Day(), 0, 0, 0, 0, time.UTC) context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), + "project": envvar.GetTestProjectFromEnv(), "effective_time": referenceTime.Add(24 * time.Hour).Format(time.RFC3339), - "random_suffix": acctest.RandString(t, 10), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -97,4 +97,4 @@ resource "google_backup_dr_backup_vault" "backup-vault-test" { allow_missing = "true" } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/binaryauthorization/resource_binary_authorization_policy_test.go.tmpl b/mmv1/third_party/terraform/services/binaryauthorization/resource_binary_authorization_policy_test.go similarity index 98% rename from 
mmv1/third_party/terraform/services/binaryauthorization/resource_binary_authorization_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/binaryauthorization/resource_binary_authorization_policy_test.go index ddbde6e496c1..19400d6c7701 100644 --- a/mmv1/third_party/terraform/services/binaryauthorization/resource_binary_authorization_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/binaryauthorization/resource_binary_authorization_policy_test.go @@ -21,7 +21,7 @@ func TestAccBinaryAuthorizationPolicy_basic(t *testing.T) { pid := "tf-test-" + acctest.RandString(t, 10) billingId := envvar.GetTestBillingAccountFromEnv(t) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -165,9 +165,9 @@ func testAccCheckBinaryAuthorizationPolicyDefault(t *testing.T, pid string) reso config := acctest.GoogleProviderConfig(t) url := fmt.Sprintf("https://binaryauthorization.googleapis.com/v1/projects/%s/policy", pid) pol, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: url, + Config: config, + Method: "GET", + RawURL: url, UserAgent: config.UserAgent, }) if err != nil { diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_label_test.go.tmpl b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_label_test.go similarity index 100% rename from mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_label_test.go.tmpl rename to mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_label_test.go diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_scope_test.go.tmpl b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_scope_test.go similarity index 100% 
rename from mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_scope_test.go.tmpl rename to mmv1/third_party/terraform/services/chronicle/resource_chronicle_data_access_scope_test.go diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go.tmpl b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go similarity index 100% rename from mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go.tmpl rename to mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_deployment_test.go.tmpl b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_deployment_test.go similarity index 100% rename from mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_deployment_test.go.tmpl rename to mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_deployment_test.go diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_test.go.tmpl b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_test.go similarity index 100% rename from mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_test.go.tmpl rename to mmv1/third_party/terraform/services/chronicle/resource_chronicle_rule_test.go diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_watchlist_test.go.tmpl b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_watchlist_test.go similarity index 100% rename from mmv1/third_party/terraform/services/chronicle/resource_chronicle_watchlist_test.go.tmpl rename to mmv1/third_party/terraform/services/chronicle/resource_chronicle_watchlist_test.go diff --git a/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources.go.tmpl 
b/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources.go similarity index 94% rename from mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources.go.tmpl rename to mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources.go index 79a2b86d5a0f..4ae01603ef03 100644 --- a/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources.go @@ -115,7 +115,7 @@ func DataSourceGoogleCloudAssetSearchAllResources() *schema.Resource { func datasourceGoogleCloudAssetSearchAllResourcesRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -147,10 +147,10 @@ func datasourceGoogleCloudAssetSearchAllResourcesRead(d *schema.ResourceData, me } res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Project: project, - Method: "GET", - RawURL: url, + Config: config, + Project: project, + Method: "GET", + RawURL: url, UserAgent: userAgent, }) if err != nil { @@ -195,7 +195,7 @@ func flattenDatasourceGoogleCloudAssetSearchAllResources(v interface{}) []map[st for _, raw := range ls { p := raw.(map[string]interface{}) - var mName, mAssetType, mProject, mFolders, mOrganization, mDisplayName, mDescription, mLocation, mLabels, mNetworkTags, mKmsKeys, mCreateTime, mUpdateTime, mState, mParentFullResourceName, mParentAssetType interface{} + var mName, mAssetType, mProject, mFolders, mOrganization, mDisplayName, mDescription, mLocation, mLabels, mNetworkTags, mKmsKeys, mCreateTime, mUpdateTime, mState, mParentFullResourceName, mParentAssetType interface{} if 
pName, ok := p["name"]; ok { mName = pName } @@ -266,4 +266,3 @@ func flattenDatasourceGoogleCloudAssetSearchAllResources(v interface{}) []map[st return results } - diff --git a/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources_test.go.tmpl b/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources_test.go similarity index 99% rename from mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources_test.go.tmpl rename to mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources_test.go index ba59b309effe..9b30ddb5b382 100644 --- a/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_search_all_resources_test.go @@ -5,9 +5,9 @@ import ( "regexp" "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceGoogleCloudAssetSearchAllResources_basic(t *testing.T) { @@ -46,4 +46,3 @@ data google_cloud_asset_search_all_resources resources { } `, project) } - diff --git a/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go.tmpl b/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go similarity index 100% rename from mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go.tmpl rename to mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go diff --git a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go.tmpl 
b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go similarity index 94% rename from mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go.tmpl rename to mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go index 98d3a3df2c6e..707509e9033e 100644 --- a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go @@ -2,8 +2,8 @@ package cloudtasks_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -127,17 +127,17 @@ func TestAccCloudTasksQueue_HttpTargetOIDC_update(t *testing.T) { Config: testAccCloudTasksQueue_HttpTargetOIDC(name, serviceAccountID), }, { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccCloudTasksQueue_basic(name), }, { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -157,17 +157,17 @@ func TestAccCloudTasksQueue_HttpTargetOAuth_update(t *testing.T) { Config: testAccCloudTasksQueue_HttpTargetOAuth(name, serviceAccountID), }, { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccCloudTasksQueue_basic(name), }, { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -332,7 +332,6 
@@ resource "google_service_account" "test" { `, name, serviceAccountID) } - func testAccCloudTasksQueue_HttpTargetOAuth(name, serviceAccountID string) string { return fmt.Sprintf(` resource "google_cloud_tasks_queue" "default" { diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go.tmpl b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go similarity index 100% rename from mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go.tmpl rename to mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go.tmpl b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go similarity index 98% rename from mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go.tmpl rename to mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go index 2098a4aeb89e..713f3b5ac1c5 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go.tmpl +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go @@ -54,7 +54,7 @@ func checkSecretDataSourceMatchesResource() resource.TestCheckFunc { } // ignore diff if it's due to secrets being masked. 
if strings.HasPrefix(k, "data.") { - if _, ok := dsAttr[k]; !ok{ + if _, ok := dsAttr[k]; !ok { errMsg += fmt.Sprintf("%s is defined in resource and not in datasource\n", k) } if dsAttr[k] == "**********" { diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.tmpl b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go similarity index 88% rename from mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.tmpl rename to mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go index 4afe6a9b76b0..0a9940f31fa8 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.tmpl +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go @@ -4,8 +4,8 @@ import ( "fmt" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" "github.com/hashicorp/terraform-provider-google/google/services/composer" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" "testing" "log" @@ -55,7 +55,7 @@ func TestAccComposerEnvironment_basic(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -69,30 +69,29 @@ func TestAccComposerEnvironment_basic(t *testing.T) { resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_config.0.machine_type")), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: 
"google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_basic(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_basic(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } - // Checks private environment creation for composer 1 and 2. 
func TestAccComposerEnvironmentComposer1_private(t *testing.T) { t.Parallel() @@ -102,7 +101,7 @@ func TestAccComposerEnvironmentComposer1_private(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -110,24 +109,24 @@ func TestAccComposerEnvironmentComposer1_private(t *testing.T) { Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -141,7 +140,7 @@ func TestAccComposerEnvironmentComposer2_private(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -149,24 +148,24 @@ func TestAccComposerEnvironmentComposer2_private(t *testing.T) { Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -181,7 +180,7 @@ func TestAccComposerEnvironment_privateWithWebServerControl(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -189,32 +188,32 @@ func TestAccComposerEnvironment_privateWithWebServerControl(t *testing.T) { Config: testAccComposerEnvironment_privateWithWebServerControl(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by 
the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -227,7 +226,7 @@ func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -238,18 +237,18 @@ func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) { Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -266,7 +265,7 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer1(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -274,18 +273,18 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer1(t *testing.T) { Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -303,7 +302,7 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -311,18 +310,18 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -336,7 +335,7 @@ func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -344,18 +343,18 @@ func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -369,7 +368,7 @@ func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -380,18 +379,18 @@ func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -405,7 +404,7 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -413,18 +412,18 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -449,60 +448,58 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, 
subnetwork), - }, - { - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} + t.Parallel() + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { t.Parallel() @@ -512,7 +509,7 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -520,18 +517,18 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -581,7 +578,7 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -592,18 +589,18 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -616,7 +613,7 @@ func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -624,18 +621,18 @@ func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -648,7 +645,7 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworks(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -656,18 +653,18 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworks(t *testing.T) { Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -680,7 +677,7 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -688,18 +685,18 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -712,7 +709,7 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworksUpdate(t *testing.T) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -723,18 +720,18 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworksUpdate(t *testing.T) Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -747,7 +744,7 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -758,18 +755,18 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -784,7 +781,7 @@ func TestAccComposer2Environment_withNodeConfig(t *testing.T) { serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -792,18 +789,18 @@ func TestAccComposer2Environment_withNodeConfig(t *testing.T) { Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -856,7 +853,7 @@ func TestAccComposerEnvironment_withSoftwareConfig(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -864,18 +861,18 @@ func TestAccComposerEnvironment_withSoftwareConfig(t *testing.T) { Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -888,7 +885,7 @@ func TestAccComposerEnvironmentAirflow2_withSoftwareConfig(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -896,8 +893,8 @@ func TestAccComposerEnvironmentAirflow2_withSoftwareConfig(t *testing.T) { Config: testAccComposerEnvironment_airflow2SoftwareCfg(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { @@ -906,13 +903,13 @@ func TestAccComposerEnvironmentAirflow2_withSoftwareConfig(t *testing.T) { { ResourceName: "google_composer_environment.test", ImportState: true, - ImportStateVerify: true, + ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, Config: testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), @@ -931,7 +928,7 @@ func TestAccComposerEnvironment_withUpdateOnCreate(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -939,18 +936,18 @@ func TestAccComposerEnvironment_withUpdateOnCreate(t *testing.T) { Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) @@ -999,8 +996,8 @@ func testAccComposerEnvironmentDestroyProducer(t *testing.T) func(s *terraform.S return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}", rs.Primary.ID) } envName := &composer.ComposerEnvironmentName{ - Project: idTokens[1], - Region: idTokens[3], + Project: idTokens[1], + Region: idTokens[3], Environment: idTokens[5], } @@ -1023,7 +1020,7 @@ func TestAccComposerEnvironment_customBucket(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -1056,7 +1053,7 @@ func TestAccComposerEnvironment_customBucketWithUrl(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -1090,7 +1087,7 @@ func TestAccComposerEnvironmentComposer3_basic(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -1124,7 +1121,7 @@ func TestAccComposerEnvironmentComposer3_update(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -1135,8 +1132,8 @@ func TestAccComposerEnvironmentComposer3_update(t *testing.T) { Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, @@ -1161,13 +1158,13 @@ func TestAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expe networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork), - ExpectError: regexp.MustCompile("Conflicting configuration arguments"), + Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork), + ExpectError: regexp.MustCompile("Conflicting configuration arguments"), }, // This is a terrible clean-up step in order to get destroy 
to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. @@ -1190,12 +1187,12 @@ func TestAccComposerEnvironmentComposer3_databaseRetention(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -1225,17 +1222,17 @@ func TestAccComposerEnvironmentComposer3_withNetworkAttachment(t *testing.T) { fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. 
@@ -1260,20 +1257,20 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing. fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), }, { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. 
@@ -1298,20 +1295,20 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testi fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), }, { - Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. 
@@ -1409,16 +1406,16 @@ func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { errorRegExp, _ := regexp.Compile(".*upgrade to composer 3 is not yet supported.*") acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), }, { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), - ExpectError: errorRegExp, + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + ExpectError: errorRegExp, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. 
@@ -1440,13 +1437,13 @@ func TestAccComposerEnvironmentComposer2_usesUnsupportedField_expectError(t *tes errorRegExp, _ := regexp.Compile(".*error in configuration, .* should only be used in Composer 3.*") acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_usesUnsupportedField(envName), - ExpectError: errorRegExp, + Config: testAccComposerEnvironmentComposer2_usesUnsupportedField(envName), + ExpectError: errorRegExp, }, }, }) @@ -1459,13 +1456,13 @@ func TestAccComposerEnvironmentComposer3_usesUnsupportedField_expectError(t *tes errorRegExp, _ := regexp.Compile(".*error in configuration, .* should not be used in Composer 3.*") acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_usesUnsupportedField(envName), - ExpectError: errorRegExp, + Config: testAccComposerEnvironmentComposer3_usesUnsupportedField(envName), + ExpectError: errorRegExp, }, }, }) @@ -1906,10 +1903,9 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } `, - pid, kmsKey, name, compVersion, airflowVersion, kmsKey, network, subnetwork) + pid, kmsKey, name, compVersion, airflowVersion, kmsKey, network, subnetwork) } - func testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork string) string { return fmt.Sprintf(` resource "google_composer_environment" "test" { @@ -2329,9 +2325,8 @@ resource "google_compute_subnetwork" "test" { `, envName, compVersion, airflowVersion, 
network, subnetwork) } - func testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-east1" @@ -2393,7 +2388,6 @@ resource "google_compute_subnetwork" "test" { `, envName, network, subnetwork) } - func testAccComposerEnvironment_MasterAuthNetworksUpdate(compVersion, airflowVersion, envName, network, subnetwork string) string { return fmt.Sprintf(` resource "google_composer_environment" "test" { @@ -3011,7 +3005,7 @@ resource "google_compute_subnetwork" "test" { } func testAccComposerEnvironmentComposer2_usesUnsupportedField(name string) string { -return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3026,7 +3020,7 @@ resource "google_composer_environment" "test" { } func testAccComposerEnvironmentComposer3_usesUnsupportedField(name string) string { -return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3138,7 +3132,7 @@ resource "google_compute_subnetwork" "test_1" { region = "us-central1" network = google_compute_network.test_1.self_link } -`, name, network, subnetwork, network + "-update", subnetwork + "update") +`, name, network, subnetwork, network+"-update", subnetwork+"update") } func testAccComposerEnvironmentComposer3_withNetworkAttachment(name, networkAttachment, network, subnetwork string) string { diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go.tmpl b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go similarity index 92% rename from mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go.tmpl rename to 
mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go index 37feb346b799..aaa97b101c05 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go.tmpl +++ b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go @@ -33,9 +33,9 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx }, { Config: testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_update(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_composer_user_workloads_config_map.config_map", "data.db_host", "dbhost:5432"), - resource.TestCheckNoResourceAttr("google_composer_user_workloads_config_map.config_map", "data.api_host"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_composer_user_workloads_config_map.config_map", "data.db_host", "dbhost:5432"), + resource.TestCheckNoResourceAttr("google_composer_user_workloads_config_map.config_map", "data.api_host"), ), }, { @@ -69,8 +69,8 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx }, { Config: testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_delete(context), - Check: resource.ComposeTestCheckFunc( - testAccComposerUserWorkloadsConfigMapDestroyed(t), + Check: resource.ComposeTestCheckFunc( + testAccComposerUserWorkloadsConfigMapDestroyed(t), ), }, }, diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go similarity index 65% rename from mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl rename to mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go index cb83e29ce0d0..aa600b16a0f6 100644 --- 
a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl +++ b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go @@ -2,8 +2,8 @@ package composer_test import ( "fmt" - "testing" "strings" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" @@ -22,20 +22,20 @@ func TestAccComposerUserWorkloadsSecret_basic(t *testing.T) { secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.username"), - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.username"), + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), ), }, { - ResourceName: "google_composer_user_workloads_secret.test", - ImportState: true, + ResourceName: "google_composer_user_workloads_secret.test", + ImportState: true, }, }, }) @@ -48,19 +48,19 @@ func 
TestAccComposerUserWorkloadsSecret_update(t *testing.T) { secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), }, { - Config: testAccComposerUserWorkloadsSecret_update(envName, secretName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.email"), - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), - resource.TestCheckNoResourceAttr("google_composer_user_workloads_secret.test", "data.username"), + Config: testAccComposerUserWorkloadsSecret_update(envName, secretName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.email"), + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), + resource.TestCheckNoResourceAttr("google_composer_user_workloads_secret.test", "data.username"), ), }, }, @@ -74,17 +74,17 @@ func TestAccComposerUserWorkloadsSecret_delete(t *testing.T) { secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), }, { - Config: testAccComposerUserWorkloadsSecret_delete(envName), - Check: resource.ComposeTestCheckFunc( - testAccComposerUserWorkloadsSecretDestroyed(t), + Config: testAccComposerUserWorkloadsSecret_delete(envName), + Check: resource.ComposeTestCheckFunc( + testAccComposerUserWorkloadsSecretDestroyed(t), ), }, }, @@ -162,8 +162,8 @@ func testAccComposerUserWorkloadsSecretDestroyed(t *testing.T) func(s *terraform return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}/userWorkloadsSecrets/{name}", rs.Primary.ID) } secretName := &composer.UserWorkloadsSecretName{ - Project: idTokens[1], - Region: idTokens[3], + Project: idTokens[1], + Region: idTokens[3], Environment: idTokens[5], Secret: idTokens[7], } diff --git a/mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go.tmpl b/mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go similarity index 100% rename from 
mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go index 6c7cd5b37862..9f28b771750d 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go @@ -204,11 +204,11 @@ func dataSourceGoogleComputeInstanceRead(d *schema.ResourceData, meta interface{ } if err := d.Set("key_revocation_action_type", instance.KeyRevocationActionType); err != nil { return fmt.Errorf("Error setting key_revocation_action_type: %s", err) - } + } if err := d.Set("creation_timestamp", instance.CreationTimestamp); err != nil { return fmt.Errorf("Error setting creation_timestamp: %s", err) } - + d.SetId(fmt.Sprintf("projects/%s/zones/%s/instances/%s", project, tpgresource.GetResourceNameFromSelfLink(instance.Zone), instance.Name)) return nil } diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go index 1bf600dceffa..2603417e79f5 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go @@ -8,10 +8,10 @@ import ( "strings" "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) func TestAccDataSourceGoogleComputeInstanceGroup_basic(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go index 2a738c5614e0..5b7c9cd9b767 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go @@ -223,4 +223,4 @@ resource "google_compute_network_attachment" "net_attar_default" { connection_preference = "ACCEPT_AUTOMATIC" } `, instanceName, instanceName, instanceName, instanceName) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go similarity index 98% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go index 0800230929f7..5c06a0c68572 100644 --- 
a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go @@ -31,8 +31,8 @@ func DataSourceGoogleComputeNetwork() *schema.Resource { // Deprecated in favor of network_id "numeric_id": { - Type: schema.TypeString, - Computed: true, + Type: schema.TypeString, + Computed: true, Deprecated: "`numeric_id` is deprecated and will be removed in a future major release. Use `network_id` instead.", }, diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go similarity index 97% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go index 0f30c8d653fe..24bbde48d011 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go @@ -2,10 +2,10 @@ package compute_test import ( "fmt" - "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "testing" ) func TestAccDataSourceRegionInstanceGroup(t *testing.T) { @@ -14,7 +14,7 @@ func TestAccDataSourceRegionInstanceGroup(t *testing.T) { t.Parallel() name := "tf-test-" + acctest.RandString(t, 6) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: 
[]resource.TestStep{ { diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.tmpl b/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.tmpl rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go index c86f5215b5a4..bfd165e60c5c 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go @@ -3,9 +3,9 @@ package compute import ( "fmt" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceGoogleComputeResourcePolicy() *schema.Resource { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go similarity index 89% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go index 3ee224c40dec..6699ec98e280 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go @@ -66,85 +66,85 @@ func TestAccComputeFirewallPolicyRule_update(t *testing.T) { } func TestAccComputeFirewallPolicyRule_multipleRules(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 
10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewallPolicyRule_multiple(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule2", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_multipleAdd(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule3", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_multipleRemove(context), - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFirewallPolicyRule_multiple(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule2", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID 
causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_multipleAdd(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule3", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_multipleRemove(context), + }, + }, + }) } func TestAccComputeFirewallPolicyRule_securityProfileGroup_update(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewallPolicyRule_securityProfileGroup_basic(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_securityProfileGroup_update(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy", "target_resources"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: 
testAccComputeFirewallPolicyRule_securityProfileGroup_basic(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_securityProfileGroup_update(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy", "target_resources"}, + }, + }, + }) } func TestAccComputeFirewallPolicyRule_basic(t *testing.T) { @@ -223,7 +223,7 @@ resource "google_compute_firewall_policy_rule" "fw_policy_rule" { } func testAccComputeFirewallPolicyRule_securityProfileGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_folder" "folder" { display_name = "tf-test-folder-%{random_suffix}" parent = "%{org_name}" @@ -273,7 +273,7 @@ resource "google_compute_firewall_policy_rule" "fw_policy_rule1" { } func testAccComputeFirewallPolicyRule_securityProfileGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_folder" "folder" { display_name = "tf-test-folder-%{random_suffix}" parent = "%{org_name}" diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go similarity index 98% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go index 5d45e5016768..76ccb0720cbf 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go @@ -2,9 +2,9 @@ package compute_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -50,7 +50,7 @@ func TestAccComputeFirewallPolicy_update(t *testing.T) { } func testAccComputeFirewallPolicy_basic(org, policyName, folderName string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_folder" "folder" { display_name = "%s" parent = "%s" @@ -66,7 +66,7 @@ resource "google_compute_firewall_policy" "default" { } func testAccComputeFirewallPolicy_update(org, policyName, folderName string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_folder" "folder" { display_name = "%s" parent = "%s" diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go index d0576311d631..ee3837129e55 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go @@ -133,7 +133,7 @@ func TestAccComputeFirewall_noSource(t *testing.T) { CheckDestroy: testAccCheckComputeFirewallDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeFirewall_noSource(networkName, firewallName), + Config: testAccComputeFirewall_noSource(networkName, firewallName), ExpectError: regexp.MustCompile("one of source_tags, source_ranges, or source_service_accounts must be defined"), }, }, @@ -356,7 +356,6 @@ resource "google_compute_firewall" "foobar" { `, 
network, firewall) } - func testAccComputeFirewall_localRangesUpdate(network, firewall string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -424,7 +423,6 @@ resource "google_compute_firewall" "foobar" { `, network, firewall) } - func testAccComputeFirewall_priority(network, firewall string, priority int) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go similarity index 93% rename from mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go index 7c4eb0fde00e..4748818ec51a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" @@ -25,9 +25,9 @@ func TestAccComputeGlobalAddress_update(t *testing.T) { Config: testAccComputeGlobalAddress_update1(context), }, { - ResourceName: "google_compute_global_address.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_global_address.foobar", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { @@ -39,9 +39,9 @@ func TestAccComputeGlobalAddress_update(t *testing.T) { }, }, { - ResourceName: "google_compute_global_address.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_global_address.foobar", + ImportState: true, + ImportStateVerify: 
true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go index b60be89689e2..73a249356d51 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go @@ -1,11 +1,12 @@ package compute_test + import ( "fmt" "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccComputeGlobalNetworkEndpoint_networkEndpointsBasic(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go similarity index 97% rename from mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go index 7567a28d28e8..b172d6e00f44 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go @@ -12,7 +12,7 @@ func TestAccComputeInstanceSettings_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), + 
"random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go index 388cdc74c2a1..fd4d0954f704 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go @@ -3,8 +3,8 @@ package compute_test import ( "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" ) func TestAccComputeNetworkEndpointGroup_networkEndpointGroup(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go similarity index 84% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go index 
3ce9ff750231..ded8779fd055 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go @@ -5,9 +5,9 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/plancheck" ) func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { @@ -34,7 +34,7 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeNetworkFirewallPolicyRule_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -49,7 +49,7 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeNetworkFirewallPolicyRule_removeConfigs(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -64,7 +64,7 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeNetworkFirewallPolicyRule_start(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -82,139 +82,139 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { } 
func TestAccComputeNetworkFirewallPolicyRule_multipleRules(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project_name": envvar.GetTestProjectFromEnv(), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project_name": envvar.GetTestProjectFromEnv(), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_multiple(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule2", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeNetworkFirewallPolicyRule_multipleAdd(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_multiple(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule2", + ImportState: 
true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeNetworkFirewallPolicyRule_multipleAdd(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, }, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule3", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeNetworkFirewallPolicyRule_multipleRemove(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule3", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeNetworkFirewallPolicyRule_multipleRemove(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule2", plancheck.ResourceActionDestroy), plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule3", plancheck.ResourceActionUpdate), }, }, - }, - }, - }) + }, + }, + }) } func TestAccComputeNetworkFirewallPolicyRule_addressGroupOrder(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project": envvar.GetTestProjectFromEnv(), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": envvar.GetTestProjectFromEnv(), + } 
- acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_addressGroupOrder(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.src_test", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - // Client-side reordering doesn't work with no state, so ignore on import - ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.src_address_groups"}, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.dest_test", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - // Client-side reordering doesn't work with no state, so ignore on import - ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.dest_address_groups"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_addressGroupOrder(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.src_test", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + // Client-side reordering doesn't work with no state, so ignore on import + ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.src_address_groups"}, + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.dest_test", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + // Client-side reordering doesn't work with no state, so ignore on import + ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.dest_address_groups"}, + }, + }, + }) } func 
TestAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - "security_profile_group_prefix": "//", - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + "security_profile_group_prefix": "//", + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_basic(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_basic(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", 
plancheck.ResourceActionUpdate), }, }, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - }, - }) + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + }, + }) } func TestAccComputeNetworkFirewallPolicyRule_secureTags(t *testing.T) { @@ -239,12 +239,12 @@ func TestAccComputeNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, { Config: testAccComputeNetworkFirewallPolicyRule_secureTagsUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.primary", plancheck.ResourceActionUpdate), }, @@ -254,43 +254,40 @@ func TestAccComputeNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, }, }) } - func TestAccComputeNetworkFirewallSecurityProfileGroupDiffsuppress(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - 
"security_profile_group_prefix": "/", - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + "security_profile_group_prefix": "/", + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - }, - }) + }, + }, + }) } - - func testAccComputeNetworkFirewallPolicyRule_secureTags(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_network_security_address_group" "basic_global_networksecurity_address_group" { @@ -434,7 +431,7 @@ resource "google_tags_tag_value" "basic_value" { } func testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_compute_network" "network1" { name = "tf-test-%{random_suffix}" auto_create_subnetworks = false @@ -487,7 +484,7 @@ resource "google_compute_network_firewall_policy_rule" "fw_policy_rule1" { } func 
testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_network_security_security_profile" "security_profile" { name = "tf-test-my-sp%{random_suffix}" type = "THREAT_PREVENTION" @@ -963,9 +960,8 @@ resource "google_compute_network_firewall_policy_rule" "fw_policy_rule3" { `, context) } - func testAccComputeNetworkFirewallPolicyRule_addressGroupOrder(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_compute_network_firewall_policy" "policy" { name = "tf-test-policy-%{random_suffix}" description = "Resource created for Terraform acceptance testing" @@ -1031,4 +1027,4 @@ resource "google_compute_network_firewall_policy_rule" "dest_test" { } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go index 2943de437d62..5b57791c967f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go @@ -2,9 +2,9 @@ package compute_test import ( "fmt" - "testing" 
"github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -688,9 +688,9 @@ func testAccComputePerInstanceConfigListInstances(t *testing.T, igmId string) (m url := fmt.Sprintf("%s%s/listManagedInstances", config.ComputeBasePath, igmId) res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - RawURL: url, + Config: config, + Method: "POST", + RawURL: url, UserAgent: config.UserAgent, }) if err != nil { @@ -715,9 +715,9 @@ func testAccComputePerInstanceConfigListNames(t *testing.T, igmId string) (map[s url := fmt.Sprintf("%s%s/listPerInstanceConfigs", config.ComputeBasePath, igmId) res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - RawURL: url, + Config: config, + Method: "POST", + RawURL: url, UserAgent: config.UserAgent, }) if err != nil { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go similarity index 98% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go index 3e157ac6b7e3..f416cff53491 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go @@ -5,9 +5,9 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" 
"github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/plancheck" ) func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { @@ -35,7 +35,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -50,7 +50,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_removeConfigs(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -65,7 +65,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_start(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -114,7 +114,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_multipleRules(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_multipleAdd(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", 
plancheck.ResourceActionUpdate), }, @@ -129,7 +129,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_multipleRules(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_multipleRemove(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule2", plancheck.ResourceActionDestroy), @@ -164,12 +164,12 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_region_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_secureTagsUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.primary", plancheck.ResourceActionUpdate), }, @@ -179,7 +179,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_region_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go similarity index 99% rename from 
mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go index 7de4e9725b1d..2a98a49462f2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go @@ -2,9 +2,9 @@ package compute_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -407,7 +407,6 @@ resource "google_compute_region_instance_group_manager" "rigm" { `, context) } - func testAccComputeRegionPerInstanceConfig_removeInstanceOnDestroyBefore(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go index ff6cd381af7f..e2b982856093 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go index 7964c51e6b68..2d34dbed13a5 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -152,4 +152,4 @@ resource "google_compute_region_health_check" "zero" { region = "us-central1" } `, target, backend, backend, hc) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go index 7a3bef847c29..2602db0de647 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -1204,4 +1204,4 @@ resource 
"google_compute_region_backend_service" "home" { timeout_sec = 10 } `, randomSuffix, randomSuffix, randomSuffix) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go index de2498f991f8..f42099418d6d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go @@ -113,7 +113,7 @@ func TestAccComputeRouterNatAddress_withAddressRemoved(t *testing.T) { ExternalProviders: map[string]resource.ExternalProvider{ "random": {}, }, - CheckDestroy: testAccCheckComputeRouterNatAddressDestroyProducer(t), + CheckDestroy: testAccCheckComputeRouterNatAddressDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeRouterNatAddressWithNatIps(routerName), diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go index dc4ae4f9a2be..160d65f0b505 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go @@ -31,7 +31,7 @@ func TestAccComputeRouterNat_basic(t *testing.T) { }, { // implicitly full ImportStateId - ResourceName: "google_compute_router_nat.foobar", + ResourceName: 
"google_compute_router_nat.foobar", ImportState: true, ImportStateVerify: true, }, @@ -171,7 +171,7 @@ func TestAccComputeRouterNat_withPortAllocationMethods(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), Steps: []resource.TestStep{ - { + { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, true), }, { @@ -271,7 +271,6 @@ func TestAccComputeRouterNat_withNatIpsAndDrainNatIps(t *testing.T) { }) } - func TestAccComputeRouterNat_withNatRules(t *testing.T) { t.Parallel() @@ -291,7 +290,7 @@ func TestAccComputeRouterNat_withNatRules(t *testing.T) { Config: testAccComputeRouterNatRulesBasic_omitRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", + ResourceName: "google_compute_router_nat.foobar", ImportState: true, ImportStateVerify: true, }, @@ -363,7 +362,7 @@ func TestAccComputeRouterNat_withNatRules(t *testing.T) { Config: testAccComputeRouterNatRulesBasic_omitAction(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", + ResourceName: "google_compute_router_nat.foobar", ImportState: true, ImportStateVerify: true, }, @@ -371,7 +370,7 @@ func TestAccComputeRouterNat_withNatRules(t *testing.T) { Config: testAccComputeRouterNatRulesBasic_omitDescription(routerName, 100, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", + ResourceName: "google_compute_router_nat.foobar", ImportState: true, ImportStateVerify: true, }, @@ -387,7 +386,7 @@ func TestAccComputeRouterNat_withNatRules(t *testing.T) { Config: testAccComputeRouterNatRulesBasic_omitRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", + ResourceName: "google_compute_router_nat.foobar", ImportState: true, ImportStateVerify: true, }, @@ -468,10 +467,10 @@ func TestAccComputeRouterNat_AutoNetworkTier(t *testing.T) { CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), Steps: 
[]resource.TestStep{ { - Config: testAccComputeRouterNatWitAutoNetworkTier(routerName, hubName), + Config: testAccComputeRouterNatWitAutoNetworkTier(routerName, hubName), }, { - // implicitly full ImportStateId + // implicitly full ImportStateId ResourceName: "google_compute_router_nat.foobar", ImportState: true, ImportStateVerify: true, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go index f4ac5f418e9d..104bab62a947 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go @@ -68,4 +68,4 @@ resource "google_compute_router_route_policy" "route_policy" { } } `, routerName, routePolicyName) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go similarity index 98% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_router_test.go index d02cab34f229..876e8ed61194 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go @@ -91,9 +91,9 @@ func TestAccComputeRouter_advertisedIpRangesOrder(t *testing.T) { Config: testAccComputeRouterAdvertisedIpRangesOrder(routerName), }, { - ResourceName: "google_compute_router.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: 
"google_compute_router.foobar", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"bgp.0.advertised_ip_ranges.0.range", "bgp.0.advertised_ip_ranges.1.range"}, }, }, @@ -386,4 +386,4 @@ resource "google_compute_router" "foobar" { } } `, routerName, routerName) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go similarity index 98% rename from mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go index 50cd764a47c2..14e340833c97 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go @@ -1,10 +1,10 @@ package compute_test import ( - "fmt" - "regexp" - "testing" + "fmt" "github.com/hashicorp/terraform-provider-google/google/acctest" + "regexp" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -94,7 +94,7 @@ func TestAccComputeSecurityPolicyRule_extendedUpdate(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccComputeSecurityPolicyRule_extPosUpdateSamePriority(context), + Config: testAccComputeSecurityPolicyRule_extPosUpdateSamePriority(context), ExpectError: regexp.MustCompile("Cannot have rules with the same priorities."), }, { @@ -130,9 +130,9 @@ func TestAccComputeSecurityPolicyRule_withPreconfiguredWafConfig(t *testing.T) { Config: testAccComputeSecurityPolicyRule_withPreconfiguredWafConfig_create(context), }, { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + 
ImportStateVerify: true, }, { Config: testAccComputeSecurityPolicyRule_withPreconfiguredWafConfig_update(context), @@ -172,21 +172,21 @@ func TestAccComputeSecurityPolicyRule_withRateLimitOptions(t *testing.T) { { Config: testAccComputeSecurityPolicyRule_withRateLimitOptionsCreate(context), }, - { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeSecurityPolicyRule_withRateLimitOptionsUpdate(context), - }, - { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + { + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeSecurityPolicyRule_withRateLimitOptionsUpdate(context), + }, + { + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func TestAccComputeSecurityPolicyRule_withRateLimit_withEnforceOnKeyConfigs(t *testing.T) { @@ -241,7 +241,6 @@ func TestAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnK }) } - func TestAccComputeSecurityPolicyRule_EnforceOnKeyUpdates(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_job.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_job.go similarity index 95% rename from mmv1/third_party/terraform/services/dataflow/resource_dataflow_job.go.tmpl rename to 
mmv1/third_party/terraform/services/dataflow/resource_dataflow_job.go index fb6d4a649283..ecc4c9eda3c4 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_job.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_job.go @@ -68,17 +68,16 @@ func ResourceDataflowJobTemplateGcsPathDiffSuppress(k, old, new string, d *schem return isRegionSuffixedPathMatch(old, new) } - func isRegionSuffixedPathMatch(old, new string) bool { - re := regexp.MustCompile(`gs://([a-z0-9\-]+)-[a-z0-9]+-[a-z0-9]+(/.*)?`) - matches := re.FindStringSubmatch(old) - - if len(matches) == 3 && matches[2] != "" { - modifiedOld := "gs://" + matches[1] + matches[2] - return modifiedOld == new - } - - return false + re := regexp.MustCompile(`gs://([a-z0-9\-]+)-[a-z0-9]+-[a-z0-9]+(/.*)?`) + matches := re.FindStringSubmatch(old) + + if len(matches) == 3 && matches[2] != "" { + modifiedOld := "gs://" + matches[1] + matches[2] + return modifiedOld == new + } + + return false } func ResourceDataflowJob() *schema.Resource { @@ -115,10 +114,10 @@ func ResourceDataflowJob() *schema.Resource { }, "template_gcs_path": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Required: true, DiffSuppressFunc: ResourceDataflowJobTemplateGcsPathDiffSuppress, - Description: `The Google Cloud Storage path to the Dataflow job template.`, + Description: `The Google Cloud Storage path to the Dataflow job template.`, }, "temp_gcs_location": { @@ -158,9 +157,9 @@ func ResourceDataflowJob() *schema.Resource { }, "labels": { - Type: schema.TypeMap, - Optional: true, - Description: `User labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page. NOTE: This field is non-authoritative, and will only manage the labels present in your configuration. + Type: schema.TypeMap, + Optional: true, + Description: `User labels to be specified for the job. 
Keys and values should follow the restrictions specified in the labeling restrictions page. NOTE: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.`, }, @@ -322,7 +321,7 @@ func shouldStopDataflowJobDeleteQuery(state string, skipWait bool) bool { func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -362,7 +361,7 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error { func resourceDataflowJobRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -457,7 +456,7 @@ func resourceDataflowJobUpdateByReplacement(d *schema.ResourceData, meta interfa if jobHasUpdate(d, ResourceDataflowJob().Schema) { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -494,7 +493,7 @@ func resourceDataflowJobUpdateByReplacement(d *schema.ResourceData, meta interfa response, updateErr = resourceDataflowJobLaunchTemplate(config, project, region, userAgent, d.Get("template_gcs_path").(string), &request) return updateErr }, - Timeout: time.Minute*time.Duration(5), + Timeout: time.Minute * time.Duration(5), ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsDataflowJobUpdateRetryableError}, }) if err != nil { @@ -512,7 +511,7 @@ func 
resourceDataflowJobUpdateByReplacement(d *schema.ResourceData, meta interfa func resourceDataflowJobDelete(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -570,7 +569,7 @@ func resourceDataflowJobDelete(d *schema.ResourceData, meta interface{}) error { } // Wait for state to reach terminal state (canceled/drained/done plus cancelling/draining if skipWait) - skipWait := d.Get("skip_wait_on_job_termination").(bool) + skipWait := d.Get("skip_wait_on_job_termination").(bool) ok := shouldStopDataflowJobDeleteQuery(d.Get("state").(string), skipWait) for !ok { log.Printf("[DEBUG] Waiting for job with job state %q to terminate...", d.Get("state").(string)) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go similarity index 97% rename from mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl rename to mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go index ce91b2dc7217..a97933fcb679 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go @@ -280,48 +280,48 @@ func TestAccDataprocCluster_withShieldedConfig(t *testing.T) { } func TestAccDataprocCluster_withConfidentialCompute(t *testing.T) { - t.Parallel() + t.Parallel() - var cluster dataproc.Cluster - rnd := acctest.RandString(t, 10) + var cluster dataproc.Cluster + rnd := acctest.RandString(t, 10) networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) 
acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) imageUri := "https://www.googleapis.com/compute/v1/projects/cloud-dataproc/global/images/dataproc-2-1-ubu20-20241026-165100-rc01" - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocClusterDestroy(t), - Steps: []resource.TestStep{ - { - Config: testAccDataprocCluster_withConfidentialCompute(rnd, subnetworkName, imageUri), - Check: resource.ComposeTestCheckFunc( - testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.confidential", &cluster), - - // Check confidential compute - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.gce_cluster_config.0.confidential_instance_config.0.enable_confidential_compute", "true"), - - // Check master - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.master_config.0.machine_type", "n2d-standard-2"), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.master_config.0.image_uri", imageUri), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.master_config.0.min_cpu_platform", "AMD Rome"), - - // Check worker - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.worker_config.0.machine_type", "n2d-standard-2"), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.worker_config.0.image_uri", imageUri), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.worker_config.0.min_cpu_platform", "AMD Rome"), - ), - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckDataprocClusterDestroy(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withConfidentialCompute(rnd, subnetworkName, imageUri), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.confidential", &cluster), + + // Check confidential compute + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.gce_cluster_config.0.confidential_instance_config.0.enable_confidential_compute", "true"), + + // Check master + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.master_config.0.machine_type", "n2d-standard-2"), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.master_config.0.image_uri", imageUri), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.master_config.0.min_cpu_platform", "AMD Rome"), + + // Check worker + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.worker_config.0.machine_type", "n2d-standard-2"), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.worker_config.0.image_uri", imageUri), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.worker_config.0.min_cpu_platform", "AMD Rome"), + ), + }, + }, + }) } func TestAccDataprocCluster_withMetadataAndTags(t *testing.T) { @@ -1055,7 +1055,7 @@ func TestAccDataprocCluster_KMS(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@compute-system.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) @@ -1629,7 +1629,7 @@ resource "google_dataproc_cluster" "basic" { } func testAccDataprocCluster_withConfidentialCompute(rnd, subnetworkName string, imageUri string) string { - return fmt.Sprintf(` + 
return fmt.Sprintf(` resource "google_dataproc_cluster" "confidential" { name = "tf-test-dproc-%s" region = "us-central1" @@ -2774,4 +2774,3 @@ resource "google_dataproc_metastore_service" "ms" { } `, clusterName, serviceId) } - diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go similarity index 99% rename from mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl rename to mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go index 4fc7c56c3528..e08344fc40b4 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go @@ -148,13 +148,13 @@ func ResourceDataprocJob() *schema.Resource { }, "labels": { - Type: schema.TypeMap, + Type: schema.TypeMap, Description: `Optional. The labels to associate with this job. **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. 
Please refer to the field 'effective_labels' for all of the labels present on the resource.`, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, }, "terraform_labels": { @@ -219,7 +219,7 @@ func resourceDataprocJobUpdate(d *schema.ResourceData, meta interface{}) error { func resourceDataprocJobCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -311,7 +311,7 @@ func resourceDataprocJobCreate(d *schema.ResourceData, meta interface{}) error { func resourceDataprocJobRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -406,7 +406,7 @@ func resourceDataprocJobRead(d *schema.ResourceData, meta interface{}) error { func resourceDataprocJobDelete(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go.tmpl b/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go.tmpl rename to mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go index ec98020e5c97..04109d7f2c73 100644 --- 
a/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go @@ -2,8 +2,8 @@ package dataprocmetastore_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.tmpl b/mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go similarity index 100% rename from mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.tmpl rename to mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl b/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go similarity index 99% rename from mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl rename to mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go index 58dc408b7ffd..4979c2114595 100644 --- a/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go @@ -2,8 +2,8 @@ package dataprocmetastore_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -166,4 +166,4 @@ resource "google_storage_bucket" "bucket" { location = "us-central1" } `, context) -} \ No newline at end of file +} diff --git 
a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go new file mode 100644 index 000000000000..a6f19404144d --- /dev/null +++ b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go @@ -0,0 +1,524 @@ +package developerconnect_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGithubUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Github(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_GithubUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + +func testAccDeveloperConnectAccountConnector_Github(context 
map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "GITHUB" + scopes = ["repo"] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_GithubUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + annotations = { + "foo": "bar" + } + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "GITHUB" + scopes = ["repo", "public_repo"] + } +} +`, context) +} + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGitlabUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Gitlab(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_GitlabUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} 
+ +func testAccDeveloperConnectAccountConnector_Gitlab(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "GITLAB" + scopes = ["api"] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_GitlabUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "GITLAB" + scopes = ["api", "read_api"] + } +} +`, context) +} + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGoogleUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Google(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_GoogleUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + 
ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + +func testAccDeveloperConnectAccountConnector_Google(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "GOOGLE" + scopes = ["https://www.googleapis.com/auth/drive.readonly"] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_GoogleUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "GOOGLE" + scopes = ["https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/documents.readonly"] + } +} +`, context) +} + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorSentryUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Sentry(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_SentryUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + 
plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + +func testAccDeveloperConnectAccountConnector_Sentry(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "SENTRY" + scopes = ["org:read"] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_SentryUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "SENTRY" + scopes = ["org:read", "org:write"] + } +} +`, context) +} + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorRovoUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Rovo(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: 
testAccDeveloperConnectAccountConnector_RovoUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + +func testAccDeveloperConnectAccountConnector_Rovo(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "ROVO" + scopes = ["rovo"] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_RovoUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "ROVO" + scopes = ["rovo"] + } +} +`, context) +} + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorNewRelicUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_NewRelic(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_NewRelicUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + +func testAccDeveloperConnectAccountConnector_NewRelic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "NEW_RELIC" + scopes = [] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_NewRelicUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "NEW_RELIC" + scopes = [] + } +} +`, context) +} + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorDatastaxUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Datastax(context), + }, + { + ResourceName: 
"google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_DatastaxUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + +func testAccDeveloperConnectAccountConnector_Datastax(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "DATASTAX" + scopes = [] + } +} +`, context) +} + +func testAccDeveloperConnectAccountConnector_DatastaxUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "DATASTAX" + scopes = [] + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl deleted file mode 100644 index ea74a7e7dc09..000000000000 --- 
a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl +++ /dev/null @@ -1,547 +0,0 @@ -package developerconnect_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - -) - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGithubUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Github(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_GithubUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_Github(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "GITHUB" - 
scopes = ["repo"] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_GithubUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - annotations = { - "foo": "bar" - } - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "GITHUB" - scopes = ["repo", "public_repo"] - } -} -`, context) -} - - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGitlabUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Gitlab(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_GitlabUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_Gitlab(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = 
"tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "GITLAB" - scopes = ["api"] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_GitlabUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "GITLAB" - scopes = ["api", "read_api"] - } -} -`, context) -} - - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGoogleUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Google(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_GoogleUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_Google(context map[string]interface{}) string { - return acctest.Nprintf(` -resource 
"google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "GOOGLE" - scopes = ["https://www.googleapis.com/auth/drive.readonly"] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_GoogleUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "GOOGLE" - scopes = ["https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/documents.readonly"] - } -} -`, context) -} - - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorSentryUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Sentry(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_SentryUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_Sentry(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "SENTRY" - scopes = ["org:read"] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_SentryUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "SENTRY" - scopes = ["org:read", "org:write"] - } -} -`, context) -} - - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorRovoUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Rovo(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_RovoUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - 
}, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_Rovo(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "ROVO" - scopes = ["rovo"] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_RovoUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "ROVO" - scopes = ["rovo"] - } -} -`, context) -} - - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorNewRelicUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_NewRelic(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_NewRelicUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - 
plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_NewRelic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "NEW_RELIC" - scopes = [] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_NewRelicUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "NEW_RELIC" - scopes = [] - } -} -`, context) -} - - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorDatastaxUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Datastax(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: 
testAccDeveloperConnectAccountConnector_DatastaxUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - - -func testAccDeveloperConnectAccountConnector_Datastax(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "DATASTAX" - scopes = [] - } -} -`, context) -} - - -func testAccDeveloperConnectAccountConnector_DatastaxUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "DATASTAX" - scopes = [] - } -} -`, context) -} - - diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go.tmpl b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go similarity index 83% rename from mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go.tmpl rename to mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go index d9be48f20c8b..0bc3bf5ae643 100644 --- 
a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go.tmpl +++ b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go @@ -1,13 +1,11 @@ - package developerconnect_test import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/plancheck" - + "github.com/hashicorp/terraform-provider-google/google/acctest" ) func TestAccDeveloperConnectConnection_developerConnectConnectionGithubUpdate(t *testing.T) { @@ -43,9 +41,8 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGithubUpdate(t }) } - func testAccDeveloperConnectConnection_Github(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -61,7 +58,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func testAccDeveloperConnectConnection_GithubUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -86,7 +82,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func TestAccDeveloperConnectConnection_developerConnectConnectionGithubEnterpriseUpdate(t *testing.T) { t.Parallel() @@ -120,9 +115,8 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGithubEnterpris }) } - func testAccDeveloperConnectConnection_GithubEnterprise(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -137,7 +131,6 @@ resource 
"google_developer_connect_connection" "my-connection" { `, context) } - func testAccDeveloperConnectConnection_GithubEnterpriseUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -161,7 +154,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func TestAccDeveloperConnectConnection_GhePrivConnection(t *testing.T) { t.Parallel() @@ -186,9 +178,8 @@ func TestAccDeveloperConnectConnection_GhePrivConnection(t *testing.T) { }) } - func testAccDeveloperConnectConnection_GhePrivConnection(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -213,7 +204,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabUpdate(t *testing.T) { t.Parallel() @@ -247,9 +237,8 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabUpdate(t }) } - func testAccDeveloperConnectConnection_Gitlab(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -269,7 +258,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func testAccDeveloperConnectConnection_GitlabUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -321,9 +309,8 @@ func TestAccDeveloperConnectConnection_GlePrivConnection(t *testing.T) { }) } - func testAccDeveloperConnectConnection_GlePrivConnection(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` 
resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -355,7 +342,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabEnterpriseUpdate(t *testing.T) { t.Parallel() @@ -389,9 +375,8 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabEnterpris }) } - func testAccDeveloperConnectConnection_GitlabEnterprise(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -413,7 +398,6 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func testAccDeveloperConnectConnection_GitlabEnterpriseUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -443,48 +427,46 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func TestAccDeveloperConnectConnection_developerConnectConnectionBitbucketCloudUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectConnection_BitbucketCloud(context), - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - { - Config: testAccDeveloperConnectConnection_BitbucketCloudUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: 
[]plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - }, - }) -} + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectConnection_BitbucketCloud(context), + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + { + Config: testAccDeveloperConnectConnection_BitbucketCloudUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + }, + }) +} func testAccDeveloperConnectConnection_BitbucketCloud(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -506,9 +488,8 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func testAccDeveloperConnectConnection_BitbucketCloudUpdate(context map[string]interface{}) string { - return 
acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -536,48 +517,46 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func TestAccDeveloperConnectConnection_developerConnectConnectionBitbucketDataCenterUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectConnection_BitbucketDataCenter(context), - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - { - Config: testAccDeveloperConnectConnection_BitbucketDataCenterUpdate(context), + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectConnection_BitbucketDataCenter(context), + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + { + Config: testAccDeveloperConnectConnection_BitbucketDataCenterUpdate(context), ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: 
"google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - }, - }) + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + }, + }) } - func testAccDeveloperConnectConnection_BitbucketDataCenter(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -599,9 +578,8 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } - func testAccDeveloperConnectConnection_BitbucketDataCenterUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -630,32 +608,31 @@ resource "google_developer_connect_connection" "my-connection" { } func TestAccDeveloperConnectConnection_BbdcPrivConnection(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectConnection_BbdcPrivConnection(context), - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"connection_id", "location", "terraform_labels"}, - }, - }, - }) -} + t.Parallel() + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectConnection_BbdcPrivConnection(context), + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + }, + }) +} func testAccDeveloperConnectConnection_BbdcPrivConnection(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go.tmpl b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go.tmpl rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go index 8e9c3aed7b54..640d82c9e1e5 100644 --- a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go @@ -3,9 +3,9 @@ package dialogflow_test import ( "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDialogflowAgent_update(t *testing.T) { diff --git 
a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go.tmpl b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go.tmpl rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go.tmpl b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go.tmpl rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go.tmpl b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go.tmpl rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go.tmpl b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go.tmpl rename to mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go diff --git a/mmv1/third_party/terraform/services/dns/data_source_dns_key_test.go.tmpl b/mmv1/third_party/terraform/services/dns/data_source_dns_key_test.go similarity index 99% rename from mmv1/third_party/terraform/services/dns/data_source_dns_key_test.go.tmpl rename to mmv1/third_party/terraform/services/dns/data_source_dns_key_test.go index 
8e652968ca40..eb6c37bad539 100644 --- a/mmv1/third_party/terraform/services/dns/data_source_dns_key_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dns/data_source_dns_key_test.go @@ -33,7 +33,6 @@ func TestAccDataSourceDNSKeys_basic(t *testing.T) { }) } - func TestAccDataSourceDNSKeys_noDnsSec(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/services/dns/data_source_dns_record_set_test.go.tmpl b/mmv1/third_party/terraform/services/dns/data_source_dns_record_set_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dns/data_source_dns_record_set_test.go.tmpl rename to mmv1/third_party/terraform/services/dns/data_source_dns_record_set_test.go diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go.tmpl b/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go similarity index 100% rename from mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go index a4210409d1c6..5e4ea9ad6249 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go @@ -2,8 +2,8 @@ package dns_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go.tmpl b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go similarity index 83% rename from mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go.tmpl rename to mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go index d867090678b0..596a4ccb48ed 100644 --- 
a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go.tmpl +++ b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go @@ -2,9 +2,9 @@ package firestore_test import ( "fmt" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -26,18 +26,18 @@ func TestAccFirestoreDatabase_updateConcurrencyMode(t *testing.T) { Config: testAccFirestoreDatabase_concurrencyMode(projectId, randomSuffix, "OPTIMISTIC"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, { Config: testAccFirestoreDatabase_concurrencyMode(projectId, randomSuffix, "PESSIMISTIC"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, }, @@ -61,18 +61,18 @@ func TestAccFirestoreDatabase_updatePitrEnablement(t *testing.T) { Config: testAccFirestoreDatabase_pitrEnablement(projectId, randomSuffix, "POINT_IN_TIME_RECOVERY_ENABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, { Config: testAccFirestoreDatabase_pitrEnablement(projectId, randomSuffix, "POINT_IN_TIME_RECOVERY_DISABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", 
+ ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, }, @@ -96,18 +96,18 @@ func TestAccFirestoreDatabase_updateDeleteProtectionState(t *testing.T) { Config: testAccFirestoreDatabase_deleteProtectionState(projectId, randomSuffix, "DELETE_PROTECTION_ENABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, { Config: testAccFirestoreDatabase_deleteProtectionState(projectId, randomSuffix, "DELETE_PROTECTION_DISABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, }, diff --git a/mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go similarity index 97% 
rename from mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go index d54ea63c659c..f9d053b35866 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiCodeToolsSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "code_tools_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ @@ -109,4 +109,4 @@ resource "google_gemini_code_tools_setting_binding" "basic_binding" { product = "GEMINI_CODE_ASSIST" } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go similarity index 99% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go index 67a0da137674..5743e8d565b6 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" diff --git 
a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go similarity index 96% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go index b52680727430..c5481e98360f 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiDataSharingWithGoogleSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "data_sharing_with_google_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go similarity index 99% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go index 720fc3b8bd4a..ddf22216594a 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go similarity index 96% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go index 38b5fd383fa4..ba034eeccc40 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiGeminiGcpEnablementSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "gemini_gcp_enablement_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go similarity index 99% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl rename to 
mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go index 2f0edee86fe3..0d1294cbc08c 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go similarity index 99% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go index 363c71a9b2a6..dd0368926e09 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -91,4 +91,4 @@ resource "google_gemini_logging_setting_binding" "basic_binding" { product = "GEMINI_CODE_ASSIST" } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go similarity index 96% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go index a68853e1714a..8012ed347746 100644 --- 
a/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiReleaseChannelSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "release_channel_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl b/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl rename to mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go.tmpl b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go.tmpl rename to 
mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go.tmpl b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go similarity index 90% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go.tmpl rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go index 613c30efa76c..658e79ce5b0b 100644 --- a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go @@ -28,54 +28,54 @@ func TestAccGKEBackupBackupPlan_update(t *testing.T) { Config: testAccGKEBackupBackupPlan_basic(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, - { + { Config: testAccGKEBackupBackupPlan_permissive(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testAccGKEBackupBackupPlan_full(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, - { + { Config: testAccGKEBackupBackupPlan_rpo_daily_window(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - 
ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testAccGKEBackupBackupPlan_rpo_weekly_window(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, - { + { Config: testAccGKEBackupBackupPlan_full(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, }, diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go.tmpl b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go.tmpl rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go.tmpl b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go similarity index 99% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go.tmpl rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go index cde094427e20..04c53ad5f927 100644 --- a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go @@ -1,16 +1,13 @@ 
- - package gkebackup_test import ( "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) - func TestAccGKEBackupRestorePlan_update(t *testing.T) { t.Parallel() @@ -35,7 +32,7 @@ func TestAccGKEBackupRestorePlan_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, }, - { + { Config: testAccGKEBackupRestorePlan_update(context), }, { @@ -204,4 +201,4 @@ resource "google_gke_backup_restore_plan" "restore_plan" { } } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go similarity index 100% rename from mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go.tmpl rename to mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go diff --git a/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.tmpl b/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go similarity index 97% rename from mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.tmpl rename to mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go index 255f43439b96..b255c5011634 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.tmpl +++ b/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go @@ -99,10 +99,10 @@ func (w *gkeonpremOperationWaiter) QueryOp() (interface{}, error) { url := fmt.Sprintf("%s%s", w.Config.GkeonpremBasePath, w.Op.Name) return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, + Config: w.Config, + Method: "GET", + Project: 
w.Project, + RawURL: url, UserAgent: w.UserAgent, }) } diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go similarity index 79% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go.tmpl rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go index 7fa9b1323f19..e5d8fdf33ee4 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go @@ -8,105 +8,105 @@ import ( ) func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBasic(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: 
testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-metallb" { name = "cluster-metallb%{random_suffix}" @@ -184,7 +184,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-metallb" { name = "cluster-metallb%{random_suffix}" @@ -260,7 +260,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context map[ } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-manuallb" { name = "cluster-manuallb%{random_suffix}" @@ -339,7 +339,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(contex } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-manuallb" { name = "cluster-manuallb%{random_suffix}" @@ -407,7 +407,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context map } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-bgplb" { name = "cluster-bgplb%{random_suffix}" @@ 
-491,7 +491,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context m } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-bgplb" { name = "cluster-bgplb%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go similarity index 81% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go.tmpl rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go index da81a36fe5cc..e0e430b79f3a 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go @@ -8,41 +8,41 @@ import ( ) func TestAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context), - }, - { - ResourceName: 
"google_gkeonprem_bare_metal_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -134,7 +134,7 @@ func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context map[ } func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go similarity index 77% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go.tmpl rename to 
mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go index 8e380005fb16..4f495496da86 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go @@ -8,107 +8,107 @@ import ( ) func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateBasic(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: 
testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateF5Lb(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(t *testing.T) { - // VCR fails to handle batched project services 
- acctest.SkipIfVcr(t) - t.Parallel() + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -156,7 +156,7 @@ func 
testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context map[s } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -204,7 +204,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context map[string } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -251,7 +251,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context map[stri } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -298,7 +298,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context map[string]in } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -392,7 +392,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context map[ } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go similarity index 81% rename from 
mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go.tmpl rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go index 37b54ef28f53..b89cdaff1768 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go @@ -8,41 +8,41 @@ import ( ) func TestAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context), - }, - { - ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"annotations"}, + }, + { + Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context), + }, + { + ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -127,7 +127,7 @@ func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context map[string } func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go.tmpl b/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go similarity index 99% rename from mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go index c1227b201afa..3725784c4660 100644 --- a/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go @@ -76,7 +76,7 @@ func TestAccIAM2DenyPolicy_iamDenyPolicyFolderParent(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "parent"}, }, - { + { Config: testAccIAM2DenyPolicy_iamDenyPolicyFolderUpdate(context), }, { diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go.tmpl b/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go similarity index 99% rename from 
mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go index 8dedbe2d98f7..6a4856956932 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go @@ -21,7 +21,7 @@ func TestAccIAM3FoldersPolicyBinding_iamFoldersPolicyBindingExample_update(t *te PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, + "time": {}, }, CheckDestroy: testAccCheckIAM3FoldersPolicyBindingDestroyProducer(t), Steps: []resource.TestStep{ diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go.tmpl b/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go similarity index 98% rename from mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go index 0e77227913ab..6eccb406b3f4 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go @@ -21,7 +21,7 @@ func TestAccIAM3OrganizationsPolicyBinding_iam3OrganizationsPolicyBindingExample PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckIAM3OrganizationsPolicyBindingDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ + ExternalProviders: map[string]resource.ExternalProvider{ "time": {}, }, Steps: []resource.TestStep{ diff --git 
a/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go.tmpl b/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go similarity index 99% rename from mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go index 5603b10a2b9f..8aa95953b7a3 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go @@ -1,4 +1,5 @@ package iam3_test + import ( "testing" diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go.tmpl b/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go similarity index 96% rename from mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go index 70776bedcd6c..3e2ade2da282 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go @@ -13,14 +13,14 @@ func TestAccIAM3ProjectsPolicyBinding_iamProjectsPolicyBindingExample_update(t * t.Parallel() context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckIAM3ProjectsPolicyBindingDestroyProducer(t), + CheckDestroy: 
testAccCheckIAM3ProjectsPolicyBindingDestroyProducer(t), ExternalProviders: map[string]resource.ExternalProvider{ "time": {}, }, @@ -52,7 +52,6 @@ func TestAccIAM3ProjectsPolicyBinding_iamProjectsPolicyBindingExample_update(t * ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "location", "policy_binding_id"}, }, - }, }) } diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.tmpl rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go index 2a5d5d148483..83c712a330cc 100644 --- a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go @@ -1,8 +1,8 @@ package iambeta_test import ( - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.tmpl rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go index da0d26fa2c88..0a303d908299 100644 --- a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go @@ -1,8 +1,8 @@ package iambeta_test import ( - 
"testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go.tmpl rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go.tmpl rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go similarity index 99% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go.tmpl rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go index dc07534ed47d..89c530f5caf5 100644 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go @@ -1,8 +1,8 @@ package iambeta_test import ( - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -259,7 +259,7 @@ func 
TestAccIAMBetaWorkloadIdentityPoolProvider_x509(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_provider_id"}, }, - { + { Config: testAccIAMBetaWorkloadIdentityPoolProvider_x509_update(context), }, { diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go.tmpl b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go similarity index 99% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go.tmpl rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go index 3d1dacc3ca48..82167e5a58de 100644 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go @@ -9,7 +9,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/acctest" ) - func TestAccIAMWorkforcePoolOauthClientCredential_full(t *testing.T) { t.Parallel() @@ -131,4 +130,4 @@ resource "google_iam_oauth_client_credential" "example" { oauth_client_credential_id = "tf-test-cred-id%{random_suffix}" } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go.tmpl b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go similarity index 99% 
rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go.tmpl rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go index 8e5ed6a79e0f..506fadd3c7ec 100644 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go @@ -30,7 +30,7 @@ func TestAccIAMWorkforcePoolOauthClient_full(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"location", "oauth_client_id"}, }, - { + { Config: testAccIAMWorkforcePoolOauthClient_full_update(context), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ @@ -106,4 +106,4 @@ resource "google_iam_oauth_client" "example" { client_type = "CONFIDENTIAL_CLIENT" } `, context) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go.tmpl b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go.tmpl rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go index 0a2ba96b08cb..91c6c5211e3d 100644 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go @@ -2,9 +2,9 @@ package iamworkforcepool_test import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" ) func TestAccIAMWorkforcePoolWorkforcePool_full(t *testing.T) { diff --git 
a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go.tmpl b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go.tmpl rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.tmpl b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go similarity index 100% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.tmpl rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go similarity index 100% rename from mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go.tmpl rename to mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go.tmpl b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go similarity index 100% rename from mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go.tmpl rename to mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go diff --git a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl 
b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go similarity index 81% rename from mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl rename to mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go index 04c17b73a924..2c00b5c634a9 100644 --- a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go @@ -9,41 +9,41 @@ import ( ) func TestAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetworkManagementVpcFlowLogsConfig_fullInterconnect(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, - }, - { - Config: testAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkManagementVpcFlowLogsConfig_fullInterconnect(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, + }, + { + Config: testAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, + }, + }, + }) } func testAccNetworkManagementVpcFlowLogsConfig_fullInterconnect(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` data "google_project" "project" { } @@ -77,7 +77,7 @@ resource "google_compute_interconnect_attachment" "attachment" { } func testAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` data "google_project" "project" { } @@ -136,7 +136,7 @@ func TestAccNetworkManagementVpcFlowLogsConfig_updateVpn(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, }, - { + { Config: testAccNetworkManagementVpcFlowLogsConfig_updateVpn(context), }, { @@ -160,7 +160,7 @@ resource "google_network_management_vpc_flow_logs_config" "example" { vpn_tunnel = "projects/${data.google_project.project.number}/regions/us-central1/vpnTunnels/${google_compute_vpn_tunnel.tunnel.name}" } `, context) - return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, 
testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) + return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) } func testAccNetworkManagementVpcFlowLogsConfig_updateVpn(context map[string]interface{}) string { @@ -179,7 +179,7 @@ resource "google_network_management_vpc_flow_logs_config" "example" { metadata = "EXCLUDE_ALL_METADATA" } `, context) - return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) + return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) } func testAccNetworkManagementVpcFlowLogsConfig_baseResources(context map[string]interface{}) string { @@ -242,4 +242,3 @@ resource "google_compute_route" "route" { } `, context) } - diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go similarity index 100% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go.tmpl rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go similarity index 100% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go.tmpl rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go index f0ffccc8d9df..bdf6ae2e66dc 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go @@ -4,8 +4,8 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" ) diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go similarity index 98% rename from mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go.tmpl rename to mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go index 1a8a2ac91443..819738664e66 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go @@ -15,7 +15,7 @@ func TestAccNotebooksInstance_create_vm_image(t *testing.T) { prefix := fmt.Sprintf("%d", acctest.RandInt(t)) name := fmt.Sprintf("tf-%s", prefix) - acctest.VcrTest(t, resource.TestCase{ + acctest.VcrTest(t, resource.TestCase{ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go similarity index 99% rename from mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.tmpl rename to mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go index d9bc74aace88..de8cff548f05 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go @@ -3,8 +3,8 @@ package notebooks_test import ( "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" ) func TestAccNotebooksRuntime_update(t *testing.T) { @@ -48,7 +48,6 @@ func TestAccNotebooksRuntime_update(t *testing.T) { }) } - func testAccNotebooksRuntime_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_notebooks_runtime" "runtime" { diff --git a/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go.tmpl b/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go similarity index 99% rename from mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go.tmpl rename to mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go index fc13e6adcc10..87c14932c638 100644 --- a/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go.tmpl +++ b/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go @@ -1,9 +1,9 @@ package orgpolicy_test import ( - "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -74,4 +74,3 @@ resource "google_org_policy_custom_constraint" "constraint" { } `, context) } - diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go similarity index 100% rename from 
mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl rename to mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go similarity index 96% rename from mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl rename to mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go index d247ec7ea7cd..e34839ff36e1 100644 --- a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl +++ b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go @@ -24,11 +24,10 @@ func TestAccOSConfigV2PolicyOrchestratorForOrganization_basic(t *testing.T) { }, }) - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgTargetFromEnv(t), - "zone": envvar.GetTestZoneFromEnv(), - "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgTargetFromEnv(t), + "zone": envvar.GetTestZoneFromEnv(), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go.tmpl b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go similarity index 99% rename from mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go.tmpl rename to mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go index a51797992db9..4aed5eeac4ef 100644 --- 
a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go.tmpl +++ b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go @@ -32,7 +32,6 @@ func TestAccOSConfigV2PolicyOrchestrator_basic(t *testing.T) { "random_suffix": acctest.RandString(t, 10), } - acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go similarity index 98% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go 
index dcd4666ea561..581fb8758070 100644 --- a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go.tmpl +++ b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go @@ -198,7 +198,7 @@ func TestAccDataSourceParameterManagerParameterVersion_withKmsKey(t *testing.T) }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go diff --git a/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go similarity index 95% rename from mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go index d7104eba2062..627cf6d03178 100644 --- a/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go.tmpl +++ b/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go @@ -3,8 +3,8 @@ package parametermanager_test import ( 
"testing" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -113,8 +113,8 @@ func TestAccParameterManagerParameter_kmsKeyUpdate(t *testing.T) { }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, - "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-2").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, + "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-2").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go.tmpl rename to 
mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go similarity index 98% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go index 9e9083e19dbe..28ba96aa4166 100644 --- a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go.tmpl +++ b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go @@ -204,7 +204,7 @@ func TestAccDataSourceParameterManagerRegionalRegionalParameterVersion_withKmsKe }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", 
"us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go similarity index 96% rename from mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go index 046c0ea269ae..b49304df14ef 100644 --- a/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go.tmpl +++ b/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go @@ -3,8 +3,8 @@ package parametermanagerregional_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -160,8 +160,8 @@ func TestAccParameterManagerRegionalRegionalParameter_kmskeyUpdate(t *testing.T) }) context := map[string]interface{}{ - "kms_key": 
acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, - "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key2").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, + "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key2").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go.tmpl b/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go.tmpl rename to mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go diff --git a/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go.tmpl b/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go similarity index 98% rename from mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go.tmpl rename to mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go index 55fa1c1a6ff3..fc1b2d8d4cd3 100644 --- a/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go @@ -6,7 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementProjectExample_update(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go similarity index 99% rename from mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go.tmpl rename to mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go index f67faddd73f3..2c7ffdf39f29 100644 --- a/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go @@ -3,17 +3,17 @@ package redis_test import ( "fmt" "log" + "strings" "testing" "time" - "strings" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/services/redis" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/redis" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) + func TestAccRedisCluster_createUpdateClusterWithNodeType(t *testing.T) { t.Parallel() @@ -446,7 +446,7 @@ func testAccCheckRedisClusterOnDemandBackup(t *testing.T, resourceName string, b if err != nil { return fmt.Errorf("Error creating on-demand backup for Redis cluster %s: %s", name, err) } - + // Wait for the operation to complete err = redis.RedisOperationWaitTime( config, res, project, 
"Creating Redis Cluster Backup", config.UserAgent, diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go similarity index 97% rename from mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go.tmpl rename to mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go index ba67ce30dbf5..baf8cebe945b 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go @@ -18,14 +18,14 @@ import ( // to express a Google Cloud IAM policy in a data resource. This is an example // of how the schema would be used in a config: // -// data "google_iam_policy" "admin" { -// binding { -// role = "roles/storage.objectViewer" -// members = [ -// "user:evanbrown@google.com", -// ] -// } -// } +// data "google_iam_policy" "admin" { +// binding { +// role = "roles/storage.objectViewer" +// members = [ +// "user:evanbrown@google.com", +// ] +// } +// } func DataSourceGoogleIamPolicy() *schema.Resource { return &schema.Resource{ Read: dataSourceGoogleIamPolicyRead, diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go similarity index 99% rename from mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go.tmpl rename to mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go index 85711b8f8989..31b935ca707a 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go @@ -2,9 +2,9 @@ 
package resourcemanager_test import ( "fmt" - "regexp" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -266,7 +266,7 @@ func TestAccProjectIamBinding_invalidMembers(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccProjectAssociateBindingBasic(pid, org, role, "admin@hashicorptest.com"), + Config: testAccProjectAssociateBindingBasic(pid, org, role, "admin@hashicorptest.com"), ExpectError: regexp.MustCompile("invalid value \"admin@hashicorptest.com\" for members\\.0 \\(IAM members must have one of the values outlined here: https://cloud.google.com/billing/docs/reference/rest/v1/Policy#Binding\\)"), }, { diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go similarity index 98% rename from mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go.tmpl rename to mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go index 248f9ab19bd3..443f1f6febc6 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go @@ -2,9 +2,9 @@ package resourcemanager_test import ( "fmt" - "regexp" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -182,7 +182,7 @@ func TestAccProjectIamMember_invalidMembers(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: 
[]resource.TestStep{ { - Config: testAccProjectAssociateMemberBasic(pid, org, role, "admin@hashicorptest.com"), + Config: testAccProjectAssociateMemberBasic(pid, org, role, "admin@hashicorptest.com"), ExpectError: regexp.MustCompile("invalid value \"admin@hashicorptest.com\" for member \\(IAM members must have one of the values outlined here: https://cloud.google.com/billing/docs/reference/rest/v1/Policy#Binding\\)"), }, { diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go similarity index 97% rename from mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go.tmpl rename to mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go index 89593089fb69..fadec307ccd0 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go @@ -55,7 +55,7 @@ func TestAccProjectIamPolicy_emptyMembers(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -72,7 +72,7 @@ func TestAccProjectIamPolicy_expanded(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -92,7 +92,7 @@ func TestAccProjectIamPolicy_basicAuditConfig(t *testing.T) { org := 
envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ // Create a new project @@ -122,7 +122,7 @@ func TestAccProjectIamPolicy_expandedAuditConfig(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -141,7 +141,7 @@ func TestAccProjectIamPolicy_withCondition(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ // Create a new project @@ -176,7 +176,7 @@ func TestAccProjectIamPolicy_invalidMembers(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccProjectAssociatePolicyBasic(pid, org, "admin@hashicorptest.com"), + Config: testAccProjectAssociatePolicyBasic(pid, org, "admin@hashicorptest.com"), ExpectError: regexp.MustCompile("invalid value \"admin@hashicorptest.com\" for bindings\\.1\\.members\\.0 \\(IAM members must have one of the values outlined here: https://cloud.google.com/billing/docs/reference/rest/v1/Policy#Binding\\)"), }, { diff --git a/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go similarity index 98% rename from 
mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go.tmpl rename to mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go index 6e291254097d..0eb78e092355 100644 --- a/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go.tmpl +++ b/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go @@ -15,7 +15,7 @@ func TestAccSecretManagerSecretIam_iamMemberConditionUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "role": "roles/secretmanager.secretAccessor", + "role": "roles/secretmanager.secretAccessor", } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go similarity index 96% rename from mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl rename to mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go index 8e48e3a9690c..81ce574fe53a 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go @@ -25,9 +25,9 @@ func TestAccSecretManagerSecret_import(t *testing.T) { Config: testAccSecretManagerSecret_basic(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, }, @@ -54,9 +54,9 @@ func TestAccSecretManagerSecret_cmek(t *testing.T) { Config: testAccSecretMangerSecret_cmek(context1), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - 
ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, }, @@ -79,27 +79,27 @@ func TestAccSecretManagerSecret_annotationsUpdate(t *testing.T) { Config: testAccSecretManagerSecret_annotationsBasic(context), }, { - ResourceName: "google_secret_manager_secret.secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, }, { Config: testAccSecretManagerSecret_annotationsUpdate(context), }, { - ResourceName: "google_secret_manager_secret.secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, }, { Config: testAccSecretManagerSecret_annotationsBasic(context), }, { - ResourceName: "google_secret_manager_secret.secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, }, }, @@ -122,36 +122,36 @@ func TestAccSecretManagerSecret_versionAliasesUpdate(t *testing.T) { Config: testAccSecretManagerSecret_basicWithSecretVersions(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, { Config: 
testAccSecretManagerSecret_versionAliasesBasic(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, { Config: testAccSecretManagerSecret_versionAliasesUpdate(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, { Config: testAccSecretManagerSecret_basicWithSecretVersions(context), }, { - ResourceName: "google_secret_manager_secret.secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, }, }, diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go similarity index 94% rename from mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go.tmpl rename to mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go index a955b4f6d748..761ccde659bb 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go.tmpl +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go @@ -3,8 +3,8 @@ package secretmanager_test import ( "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + 
"github.com/hashicorp/terraform-provider-google/google/acctest" ) func TestAccSecretManagerSecretVersion_update(t *testing.T) { @@ -23,18 +23,18 @@ func TestAccSecretManagerSecretVersion_update(t *testing.T) { Config: testAccSecretManagerSecretVersion_basic(context), }, { - ResourceName: "google_secret_manager_secret_version.secret-version-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"secret_data", "secret_data_wo_version"}, }, { Config: testAccSecretManagerSecretVersion_disable(context), }, { - ResourceName: "google_secret_manager_secret_version.secret-version-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, // at this point the secret data is disabled and so reading the data on import will // give an empty string ImportStateVerifyIgnore: []string{"secret_data", "secret_data_wo_version"}, @@ -43,9 +43,9 @@ func TestAccSecretManagerSecretVersion_update(t *testing.T) { Config: testAccSecretManagerSecretVersion_basic(context), }, { - ResourceName: "google_secret_manager_secret_version.secret-version-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"secret_data", "secret_data_wo_version"}, }, }, diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go similarity index 100% rename from mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go.tmpl rename to 
mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go similarity index 95% rename from mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go.tmpl rename to mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go index 0324798304b8..062f005b581d 100644 --- a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go.tmpl +++ b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go @@ -24,9 +24,9 @@ func TestAccSecretManagerRegionalRegionalSecret_import(t *testing.T) { Config: testAccSecretManagerRegionalSecret_basic(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, }, @@ -49,36 +49,36 @@ func TestAccSecretManagerRegionalRegionalSecret_labelsUpdate(t *testing.T) { Config: testAccSecretManagerRegionalSecret_withoutLabels(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_labelsUpdate(context), }, { - 
ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_labelsUpdateOther(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_withoutLabels(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, }, @@ -101,36 +101,36 @@ func TestAccSecretManagerRegionalRegionalSecret_annotationsUpdate(t *testing.T) Config: testAccSecretManagerRegionalSecret_withoutAnnotations(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_annotationsUpdate(context), }, { - ResourceName: 
"google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_annotationsUpdateOther(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_withoutAnnotations(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, }, @@ -141,9 +141,9 @@ func TestAccSecretManagerRegionalRegionalSecret_cmekUpdate(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key3").CryptoKey.Name, - "kms_key_name_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key4").CryptoKey.Name, - "random_suffix": acctest.RandString(t, 10), + "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key3").CryptoKey.Name, + 
"kms_key_name_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key4").CryptoKey.Name, + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go.tmpl b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go similarity index 100% rename from mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go.tmpl rename to mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go similarity index 99% rename from mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl rename to mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go index ffd8cb5bed98..e3ee32694a0c 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go @@ -625,7 +625,7 @@ func TestAccSpannerDatabase_cmek(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@gcp-sa-spanner.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go similarity index 96% rename from mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.tmpl rename to 
mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go index 7176ab4ad65b..4760f072487f 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go @@ -140,7 +140,7 @@ func TestAccStorageBucket_AutoclassDiffSuppress(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_basicWithAutoclass(bucketName,false), + Config: testAccStorageBucket_basicWithAutoclass(bucketName, false), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -153,7 +153,7 @@ func TestAccStorageBucket_AutoclassDiffSuppress(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_basicWithAutoclass(bucketName,true), + Config: testAccStorageBucket_basicWithAutoclass(bucketName, true), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -353,7 +353,7 @@ func TestAccStorageBucket_dualLocation_rpo(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_dualLocation_rpo(bucketName,"ASYNC_TURBO"), + Config: testAccStorageBucket_dualLocation_rpo(bucketName, "ASYNC_TURBO"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_storage_bucket.bucket", "rpo", "ASYNC_TURBO"), @@ -366,7 +366,7 @@ func TestAccStorageBucket_dualLocation_rpo(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_dualLocation_rpo(bucketName,"DEFAULT"), + Config: testAccStorageBucket_dualLocation_rpo(bucketName, "DEFAULT"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_storage_bucket.bucket", "rpo", "DEFAULT"), @@ -427,7 +427,7 @@ func TestAccStorageBucket_lifecycleRulesMultiple(t *testing.T) { 
ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero","lifecycle_rule.2.condition.0.send_age_if_zero","lifecycle_rule.3.condition.0.send_age_if_zero","lifecycle_rule.4.condition.0.send_age_if_zero","lifecycle_rule.5.condition.0.send_age_if_zero","lifecycle_rule.6.condition.0.send_age_if_zero","lifecycle_rule.7.condition.0.send_age_if_zero","lifecycle_rule.8.condition.0.send_age_if_zero","lifecycle_rule.9.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero", "lifecycle_rule.2.condition.0.send_age_if_zero", "lifecycle_rule.3.condition.0.send_age_if_zero", "lifecycle_rule.4.condition.0.send_age_if_zero", "lifecycle_rule.5.condition.0.send_age_if_zero", "lifecycle_rule.6.condition.0.send_age_if_zero", "lifecycle_rule.7.condition.0.send_age_if_zero", "lifecycle_rule.8.condition.0.send_age_if_zero", "lifecycle_rule.9.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRulesMultiple_update(bucketName), @@ -436,7 +436,7 @@ func TestAccStorageBucket_lifecycleRulesMultiple(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero","lifecycle_rule.2.condition.0.send_age_if_zero","lifecycle_rule.3.condition.0.send_age_if_zero","lifecycle_rule.4.condition.0.send_age_if_zero","lifecycle_rule.5.condition.0.send_age_if_zero","lifecycle_rule.6.condition.0.send_age_if_zero","lifecycle_rule.7.condition.0.send_age_if_zero","lifecycle_rule.8.condition.0.send_age_if_zero","lifecycle_rule.9.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: 
[]string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero", "lifecycle_rule.2.condition.0.send_age_if_zero", "lifecycle_rule.3.condition.0.send_age_if_zero", "lifecycle_rule.4.condition.0.send_age_if_zero", "lifecycle_rule.5.condition.0.send_age_if_zero", "lifecycle_rule.6.condition.0.send_age_if_zero", "lifecycle_rule.7.condition.0.send_age_if_zero", "lifecycle_rule.8.condition.0.send_age_if_zero", "lifecycle_rule.9.condition.0.send_age_if_zero"}, }, }, }) @@ -465,7 +465,7 @@ func TestAccStorageBucket_lifecycleRuleStateLive(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero"}, }, }, }) @@ -494,7 +494,7 @@ func TestAccStorageBucket_lifecycleRuleStateArchived(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), @@ -508,7 +508,7 @@ func TestAccStorageBucket_lifecycleRuleStateArchived(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, }, }) @@ -537,7 +537,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", 
ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateLive(bucketName), @@ -551,7 +551,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateAny(bucketName), @@ -565,7 +565,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), @@ -579,7 +579,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, }, }) @@ -798,7 +798,7 @@ func TestAccStorageBucket_update(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + 
ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_customAttributes_withLifecycle2(bucketName), @@ -814,7 +814,7 @@ func TestAccStorageBucket_update(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_customAttributes_withLifecycle1Update(bucketName), @@ -830,7 +830,7 @@ func TestAccStorageBucket_update(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_customAttributes(bucketName), @@ -1462,7 +1462,7 @@ func TestAccStorageBucket_SoftDeletePolicy(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_SoftDeletePolicy(bucketName,7776000), + Config: testAccStorageBucket_SoftDeletePolicy(bucketName, 7776000), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -1477,7 +1477,7 @@ func TestAccStorageBucket_SoftDeletePolicy(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_SoftDeletePolicy(bucketName,0), + Config: testAccStorageBucket_SoftDeletePolicy(bucketName, 0), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -1894,7 +1894,7 @@ resource 
"google_storage_bucket" "bucket" { `, bucketName) } -func testAccStorageBucket_dualLocation_rpo(bucketName string,rpo string) string { +func testAccStorageBucket_dualLocation_rpo(bucketName string, rpo string) string { return fmt.Sprintf(` resource "google_storage_bucket" "bucket" { name = "%s" @@ -1905,7 +1905,7 @@ resource "google_storage_bucket" "bucket" { } rpo = "%s" } -`, bucketName,rpo) +`, bucketName, rpo) } func testAccStorageBucket_customAttributes(bucketName string) string { @@ -2626,7 +2626,7 @@ resource "google_storage_bucket" "bucket" { } func testAccStorageBucket_SoftDeletePolicy(bucketName string, duration int) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_storage_bucket" "bucket" { name = "%s" location = "US" diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go new file mode 100644 index 000000000000..e72f63d05d14 --- /dev/null +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go @@ -0,0 +1,228 @@ +package workbench_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", 
"instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_shielded_config_true(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + 
+ gce_setup { + shielded_instance_config { + enable_secure_boot = true + enable_vtpm = true + enable_integrity_monitoring = true + } + } +} +`, context) +} + +func testAccWorkbenchInstance_shielded_config_false(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + shielded_instance_config { + enable_secure_boot = false + enable_vtpm = false + enable_integrity_monitoring = false + } + } + +} +`, context) +} + +func testAccWorkbenchInstance_shielded_config_none(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl deleted file mode 100644 index 6a7e7be5c506..000000000000 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl +++ /dev/null @@ -1,228 +0,0 @@ -package workbench_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccWorkbenchInstance_shielded_config_false(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - 
"google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - }, - }) -} - -func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_none(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", 
"ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - }, - }) -} - -func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccWorkbenchInstance_shielded_config_none(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_none(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_false(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: 
"google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_false(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - }, - }) -} - -func testAccWorkbenchInstance_shielded_config_true(context map[string]interface{}) string { - return acctest.Nprintf(` 
-resource "google_workbench_instance" "instance" { - name = "tf-test-workbench-instance%{random_suffix}" - location = "us-central1-a" - - gce_setup { - shielded_instance_config { - enable_secure_boot = true - enable_vtpm = true - enable_integrity_monitoring = true - } - } -} -`, context) -} - -func testAccWorkbenchInstance_shielded_config_false(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_workbench_instance" "instance" { - name = "tf-test-workbench-instance%{random_suffix}" - location = "us-central1-a" - - gce_setup { - shielded_instance_config { - enable_secure_boot = false - enable_vtpm = false - enable_integrity_monitoring = false - } - } - -} -`, context) -} - -func testAccWorkbenchInstance_shielded_config_none(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_workbench_instance" "instance" { - name = "tf-test-workbench-instance%{random_suffix}" - location = "us-central1-a" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go similarity index 84% rename from mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl rename to mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go index 04fa6a2f27fc..325324b42bc3 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go @@ -22,9 +22,9 @@ func TestAccWorkbenchInstance_update(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: 
"google_workbench_instance.instance", @@ -32,12 +32,12 @@ func TestAccWorkbenchInstance_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_update(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -114,9 +114,9 @@ func TestAccWorkbenchInstance_updateGpu(t *testing.T) { { Config: testAccWorkbenchInstance_basicGpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -124,12 +124,12 @@ func TestAccWorkbenchInstance_updateGpu(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateGpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -202,9 +202,9 @@ func TestAccWorkbenchInstance_removeGpu(t *testing.T) { { Config: testAccWorkbenchInstance_Gpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - 
"google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -212,12 +212,12 @@ func TestAccWorkbenchInstance_removeGpu(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_removeGpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -275,9 +275,9 @@ func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -285,12 +285,12 @@ func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -301,9 +301,9 @@ func 
TestAccWorkbenchInstance_updateMetadata(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -329,9 +329,9 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -339,12 +339,12 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateMetadataKey(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -355,9 +355,9 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -427,9 +427,9 @@ func 
TestAccWorkbenchInstance_updateState(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -437,12 +437,12 @@ func TestAccWorkbenchInstance_updateState(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateState(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "STOPPED"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "STOPPED"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -450,12 +450,12 @@ func TestAccWorkbenchInstance_updateState(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -493,9 +493,9 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + 
resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -503,12 +503,12 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_empty_accelerator(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -519,9 +519,9 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { { Config: testAccWorkbenchInstance_empty_accelerator(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -561,9 +561,9 @@ func TestAccWorkbenchInstance_updateBootDisk(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -571,12 +571,12 @@ func TestAccWorkbenchInstance_updateBootDisk(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - 
{ + { Config: testAccWorkbenchInstance_updateBootDisk(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -602,9 +602,9 @@ func TestAccWorkbenchInstance_updateDataDisk(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -612,12 +612,12 @@ func TestAccWorkbenchInstance_updateDataDisk(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateDataDisk(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", @@ -643,8 +643,8 @@ func TestAccWorkbenchInstance_updateBothDisks(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -653,11 +653,11 @@ func TestAccWorkbenchInstance_updateBothDisks(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", 
"instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateBothDisks(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -730,8 +730,8 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { { Config: testAccWorkbenchInstance_label(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -743,8 +743,8 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -756,8 +756,8 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { { Config: testAccWorkbenchInstance_label(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -782,7 +782,6 @@ resource "google_workbench_instance" "instance" { `, context) } - func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { t.Parallel() @@ -797,8 +796,8 @@ func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { { Config: testAccWorkbenchInstance_customcontainer(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + 
resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -810,8 +809,8 @@ func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { { Config: testAccWorkbenchInstance_updatedcustomcontainer(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { diff --git a/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go.tmpl b/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go similarity index 99% rename from mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go.tmpl rename to mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go index 0e4b807a76ee..4ac91af4ceec 100644 --- a/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go.tmpl +++ b/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go @@ -141,7 +141,7 @@ func TestAccWorkflowsWorkflow_UpdateDeletionProtectionFalseToTrue(t *testing.T) { Config: testAccWorkflowsWorkflow_Basic_DeletionProtectionTrue(workflowName), }, - { + { Config: testAccWorkflowsWorkflow_Basic_DeletionProtectionFalse(workflowName), }, }, @@ -297,7 +297,7 @@ func TestAccWorkflowsWorkflow_CMEK(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@gcp-sa-workflows.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) @@ -351,4 +351,3 @@ EOF } `, workflowName, kmsKeyName) } - diff --git a/mmv1/third_party/terraform/terraform-registry-manifest.json.tmpl b/mmv1/third_party/terraform/terraform-registry-manifest.json similarity index 100% rename from 
mmv1/third_party/terraform/terraform-registry-manifest.json.tmpl rename to mmv1/third_party/terraform/terraform-registry-manifest.json diff --git a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.tmpl b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go similarity index 99% rename from mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.tmpl rename to mmv1/third_party/terraform/tpgresource/common_diff_suppress.go index 20db7453d0bc..cbc7e442a25e 100644 --- a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.tmpl +++ b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go @@ -136,11 +136,11 @@ func CidrOrSizeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { } // Base64DiffSuppress compares two Base64 strings, ignoring differences -// between standard encoding and web safe URL encoding, padding, and +// between standard encoding and web safe URL encoding, padding, and // embedded line endings. func Base64DiffSuppress(_, old, new string, _ *schema.ResourceData) bool { r := strings.NewReplacer("\r", "", "\n", "", "+", "-", "/", "_", "=", "") normalizedOld := r.Replace(old) normalizedNew := r.Replace(new) return normalizedOld == normalizedNew -} \ No newline at end of file +} diff --git a/mmv1/validate_third_party_test.go b/mmv1/validate_third_party_test.go new file mode 100644 index 000000000000..2c94ae01c32c --- /dev/null +++ b/mmv1/validate_third_party_test.go @@ -0,0 +1,86 @@ +package main + +import ( + "os" + "path/filepath" + "regexp" + "runtime" + "testing" +) + +func TestTemplatesStillNeedToBeTemplates(t *testing.T) { + // Get the directory where this test file is located + _, testFilePath, _, ok := runtime.Caller(0) + if !ok { + t.Fatal("Failed to get current test file path") + } + testDir := filepath.Dir(testFilePath) + + // Define the third_party directory relative to the test file + thirdPartyDir := filepath.Join(testDir, "third_party", "terraform") + + // Regular expression to match 
Go template syntax + templateSyntaxRegex := regexp.MustCompile(`\{\{.*?\}\}`) + + // Track files that no longer need to be templates + unnecessaryTemplates := []string{} + + // Walk through the third_party directory + err := filepath.Walk(thirdPartyDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + // Handle case where third_party directory doesn't exist + if os.IsNotExist(err) && path == thirdPartyDir { + t.Logf("Warning: third_party directory not found at %s", thirdPartyDir) + return nil + } + return err + } + + // Skip directories + if info.IsDir() { + return nil + } + + // Only check .tmpl files + if filepath.Ext(path) != ".tmpl" { + return nil + } + + // Read file content + content, err := os.ReadFile(path) + if err != nil { + t.Logf("Error reading file %s: %v", path, err) + return nil + } + + // Check if file contains any Go template syntax + hasTemplateSyntax := templateSyntaxRegex.Match(content) + + // If no template syntax found, add to the list + if !hasTemplateSyntax { + // Get relative path for cleaner output + relPath, _ := filepath.Rel(testDir, path) + unnecessaryTemplates = append(unnecessaryTemplates, relPath) + } + + return nil + }) + + if err != nil { + t.Fatalf("Error walking directory: %v", err) + } + + // Output results at the end + if len(unnecessaryTemplates) > 0 { + t.Errorf("\nThe following %d .tmpl files in third_party directory don't contain any template syntax "+ + "and no longer need to be templates:\n", len(unnecessaryTemplates)) + + for _, file := range unnecessaryTemplates { + t.Errorf(" - %s", file) + } + + t.Errorf("\nConsider removing the .tmpl extension from these files.") + } else { + t.Logf("All .tmpl files in third_party directory properly contain template syntax.") + } +} From 88954bcbc24a5c241c6d192f92e3f54b46e9c053 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 14 May 2025 19:31:46 +0200 Subject: [PATCH 122/884] feat: expose the numeric `id` attribute as `generated_id` for the 
`google_compute_network_endpoint_group` (#13914) Co-authored-by: Scott Suarez --- mmv1/products/compute/NetworkEndpointGroup.yaml | 6 ++++++ .../data_source_compute_network_endpoint_group_test.go | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/mmv1/products/compute/NetworkEndpointGroup.yaml b/mmv1/products/compute/NetworkEndpointGroup.yaml index 9ba059d1748f..b15b69dda2c9 100644 --- a/mmv1/products/compute/NetworkEndpointGroup.yaml +++ b/mmv1/products/compute/NetworkEndpointGroup.yaml @@ -143,3 +143,9 @@ properties: description: | The default port used if the port number is not specified in the network endpoint. + - name: 'generated_id' + type: Integer + api_name: 'id' + output: true + description: | + The uniquely generated identifier for the resource. This identifier is defined by the server. diff --git a/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go b/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go index 56b6b27a1d08..bd4fd55f0b58 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go @@ -61,6 +61,11 @@ func testAccDataSourceComputeNetworkEndpointGroupCheck(data_source_name string, ) } } + + if v, ok := ds_attr["generated_id"]; !ok || v == "" { + return fmt.Errorf("generated_id is not set") + } + return nil } } From 1303e908e564fc3b353bbef75913fe85349d64d1 Mon Sep 17 00:00:00 2001 From: ahmed-laiq Date: Wed, 14 May 2025 13:50:33 -0400 Subject: [PATCH 123/884] Add fields to support for configuring Class ALB migration feature. 
(#13926) Co-authored-by: Riley Karson --- mmv1/products/compute/BackendService.yaml | 33 ++- .../compute/GlobalForwardingRule.yaml | 36 +++ ...ource_compute_backend_service_test.go.tmpl | 207 ++++++++++++++++-- ...ompute_global_forwarding_rule_test.go.tmpl | 179 +++++++++++++++ 4 files changed, 441 insertions(+), 14 deletions(-) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index cf5c22b576cb..28da0b3d9d04 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -860,7 +860,6 @@ properties: external load balancing. A backend service created for one type of load balancing cannot be used with the other. For more information, refer to [Choosing a load balancer](https://cloud.google.com/load-balancing/docs/backend-service). - immutable: true default_value: "EXTERNAL" # If you're modifying this value, it probably means Global ILB is now # an option. If that's the case, all of the documentation is based on @@ -870,6 +869,38 @@ properties: - 'INTERNAL_SELF_MANAGED' - 'INTERNAL_MANAGED' - 'EXTERNAL_MANAGED' + - name: 'externalManagedMigrationState' + type: Enum + description: | + Specifies the canary migration state. Possible values are PREPARE, TEST_BY_PERCENTAGE, and + TEST_ALL_TRAFFIC. + + To begin the migration from EXTERNAL to EXTERNAL_MANAGED, the state must be changed to + PREPARE. The state must be changed to TEST_ALL_TRAFFIC before the loadBalancingScheme can be + changed to EXTERNAL_MANAGED. Optionally, the TEST_BY_PERCENTAGE state can be used to migrate + traffic by percentage using externalManagedMigrationTestingPercentage. + + Rolling back a migration requires the states to be set in reverse order. So changing the + scheme from EXTERNAL_MANAGED to EXTERNAL requires the state to be set to TEST_ALL_TRAFFIC at + the same time. 
Optionally, the TEST_BY_PERCENTAGE state can be used to migrate some traffic + back to EXTERNAL or PREPARE can be used to migrate all traffic back to EXTERNAL. + enum_values: + - 'PREPARE' + - 'TEST_BY_PERCENTAGE' + - 'TEST_ALL_TRAFFIC' + - name: 'externalManagedMigrationTestingPercentage' + type: Double + description: | + Determines the fraction of requests that should be processed by the Global external + Application Load Balancer. + + The value of this field must be in the range [0, 100]. + + Session affinity options will slightly affect this routing behavior, for more details, + see: Session Affinity. + + This value can only be set if the loadBalancingScheme in the backend service is set to + EXTERNAL (when using the Classic ALB) and the migration state is TEST_BY_PERCENTAGE. - name: 'localityLbPolicy' type: Enum description: | diff --git a/mmv1/products/compute/GlobalForwardingRule.yaml b/mmv1/products/compute/GlobalForwardingRule.yaml index c110523f0929..e3f4bd9c967b 100644 --- a/mmv1/products/compute/GlobalForwardingRule.yaml +++ b/mmv1/products/compute/GlobalForwardingRule.yaml @@ -318,6 +318,8 @@ properties: - 'EXTERNAL_MANAGED' - 'INTERNAL_MANAGED' - 'INTERNAL_SELF_MANAGED' + update_url: 'projects/{{project}}/global/forwardingRules/{{name}}' + update_verb: 'PATCH' - name: 'metadataFilters' type: Array description: | @@ -500,6 +502,40 @@ properties: enum_values: - 'PREMIUM' - 'STANDARD' + - name: 'externalManagedBackendBucketMigrationState' + type: Enum + description: | + Specifies the canary migration state for the backend buckets attached to this forwarding rule. + Possible values are PREPARE, TEST_BY_PERCENTAGE, and TEST_ALL_TRAFFIC. + + To begin the migration from EXTERNAL to EXTERNAL_MANAGED, the state must be changed to + PREPARE. The state must be changed to TEST_ALL_TRAFFIC before the loadBalancingScheme can be + changed to EXTERNAL_MANAGED. 
Optionally, the TEST_BY_PERCENTAGE state can be used to migrate + traffic to backend buckets attached to this forwarding rule by percentage using + externalManagedBackendBucketMigrationTestingPercentage. + + Rolling back a migration requires the states to be set in reverse order. So changing the + scheme from EXTERNAL_MANAGED to EXTERNAL requires the state to be set to TEST_ALL_TRAFFIC at + the same time. Optionally, the TEST_BY_PERCENTAGE state can be used to migrate some traffic + back to EXTERNAL or PREPARE can be used to migrate all traffic back to EXTERNAL. + enum_values: + - 'PREPARE' + - 'TEST_BY_PERCENTAGE' + - 'TEST_ALL_TRAFFIC' + update_url: 'projects/{{project}}/global/forwardingRules/{{name}}' + update_verb: 'PATCH' + - name: 'externalManagedBackendBucketMigrationTestingPercentage' + type: Double + description: | + Determines the fraction of requests to backend buckets that should be processed by the Global + external Application Load Balancer. + + The value of this field must be in the range [0, 100]. + + This value can only be set if the loadBalancingScheme in the forwarding rule is set to + EXTERNAL (when using the Classic ALB) and the migration state is TEST_BY_PERCENTAGE. 
+ update_url: 'projects/{{project}}/global/forwardingRules/{{name}}' + update_verb: 'PATCH' - name: 'serviceDirectoryRegistrations' type: Array description: | diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl index c7a428667fc3..e8ab0657286c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl @@ -3,9 +3,10 @@ package compute_test import ( "fmt" "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" ) func TestAccComputeBackendService_basic(t *testing.T) { @@ -146,17 +147,18 @@ func TestAccComputeBackendService_withBackendAndIAP(t *testing.T) { }) } -func TestAccComputeBackendService_withBackendAndPreference(t *testing.T) { +func TestAccComputeBackendService_withBackendAndPreferenceInternalManaged(t *testing.T) { t.Parallel() - randomSuffix := acctest.RandString(t, 10) + im_suffix := fmt.Sprintf("im-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_MANAGED", "DEFAULT", 10), + Config: testAccComputeBackendService_withBackendAndPreference(im_suffix, "INTERNAL_MANAGED", "DEFAULT", 10), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -164,15 +166,29 @@ func TestAccComputeBackendService_withBackendAndPreference(t *testing.T) { ImportStateVerify: 
true, }, { - Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_MANAGED", "PREFERRED", 20), + Config: testAccComputeBackendService_withBackendAndPreference(im_suffix, "INTERNAL_MANAGED", "PREFERRED", 20), }, { ResourceName: "google_compute_backend_service.lipsum", ImportState: true, ImportStateVerify: true, }, + }, + }) +} + +func TestAccComputeBackendService_withBackendAndPreferenceInternalSelfManaged(t *testing.T) { + t.Parallel() + + ism_suffix := fmt.Sprintf("ism-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ { - Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_SELF_MANAGED", "DEFAULT", 10), + Config: testAccComputeBackendService_withBackendAndPreference(ism_suffix, "INTERNAL_SELF_MANAGED", "DEFAULT", 10), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -180,15 +196,28 @@ func TestAccComputeBackendService_withBackendAndPreference(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_SELF_MANAGED", "PREFERRED", 20), + Config: testAccComputeBackendService_withBackendAndPreference(ism_suffix, "INTERNAL_SELF_MANAGED", "PREFERRED", 20), }, { ResourceName: "google_compute_backend_service.lipsum", ImportState: true, ImportStateVerify: true, }, - { - Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "EXTERNAL_MANAGED", "DEFAULT", 10), + }, + }) +} + +func TestAccComputeBackendService_withBackendAndPreferenceExternalManaged(t *testing.T) { + t.Parallel() + em_suffix := fmt.Sprintf("em-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendService_withBackendAndPreference(em_suffix, "EXTERNAL_MANAGED", "DEFAULT", 10), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -196,7 +225,7 @@ func TestAccComputeBackendService_withBackendAndPreference(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "EXTERNAL_MANAGED", "PREFERRED", 20), + Config: testAccComputeBackendService_withBackendAndPreference(em_suffix, "EXTERNAL_MANAGED", "PREFERRED", 20), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -789,8 +818,23 @@ func TestAccComputeBackendService_withLogConfig(t *testing.T) { ImportState: true, ImportStateVerify: true, }, + }, + }) +} + +func TestAccComputeBackendService_withLogConfigMode(t *testing.T) { + t.Parallel() + + serviceName := fmt.Sprintf("tf-test-lc-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-lc-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ { - Config: testAccComputeBackendService_withLogConfig3(serviceName, checkName, "INCLUDE_ALL_OPTIONAL", true), + Config: testAccComputeBackendService_withLogConfigMode(serviceName, checkName, "INCLUDE_ALL_OPTIONAL", true), }, { ResourceName: "google_compute_backend_service.foobar", @@ -798,7 +842,7 @@ func TestAccComputeBackendService_withLogConfig(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withLogConfig3(serviceName, checkName, "EXCLUDE_ALL_OPTIONAL", true), + Config: testAccComputeBackendService_withLogConfigMode(serviceName, checkName, "EXCLUDE_ALL_OPTIONAL", true), }, { ResourceName: 
"google_compute_backend_service.foobar", @@ -2232,7 +2276,7 @@ resource "google_compute_http_health_check" "zero" { `, serviceName, enabled, checkName) } -func testAccComputeBackendService_withLogConfig3(serviceName, checkName, mode string, enabled bool) string { +func testAccComputeBackendService_withLogConfigMode(serviceName, checkName, mode string, enabled bool) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { name = "%s" @@ -2686,3 +2730,140 @@ resource "google_compute_health_check" "health_check" { } `, suffix, timeout, loadBalancingScheme, preference, suffix, suffix, suffix) } + +func TestAccComputeBackendService_updateCanaryMigration(t *testing.T) { + t.Parallel() + + serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendService_basic(serviceName, checkName), + }, + { + ResourceName: "google_compute_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withCanaryMigration( + serviceName, checkName, "updated-to-prepare", "PREPARE"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_backend_service.foobar", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withCanaryMigrationPercentage( + serviceName, checkName, "updated-to-percentage", 50), + ConfigPlanChecks: 
resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_backend_service.foobar", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withCanaryMigration( + serviceName, checkName, "update-to-all", "TEST_ALL_TRAFFIC"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_backend_service.foobar", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeBackendService_withCanaryMigration(serviceName, checkName, description, migrationState string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "%s" + description = "%s" + health_checks = [google_compute_http_health_check.zero.self_link] + external_managed_migration_state = "%s" +} + +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} +`, serviceName, description, migrationState, checkName) +} + +func testAccComputeBackendService_withCanaryMigrationWithWait(serviceName, checkName, description, migrationState string) string { + return fmt.Sprintf(` +resource "time_sleep" "six_minutes_delay" { + create_duration = "370s" # litte more than 6 minutes (360 seconds = 6 minutes) +} + +resource "google_compute_backend_service" "foobar" { + name = "%s" + description = "%s" + health_checks = [google_compute_http_health_check.zero.self_link] + external_managed_migration_state = "%s" + depends_on = [ + time_sleep.six_minutes_delay + ] +} + +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} +`, 
serviceName, description, migrationState, checkName) +} + +func testAccComputeBackendService_withCanaryMigrationPercentage(serviceName, checkName, description string, percentage int64) string { + return fmt.Sprintf(` +resource "time_sleep" "six_minutes_delay" { + create_duration = "370s" # litte more than 6 minutes (360 seconds = 6 minutes) +} + +resource "google_compute_backend_service" "foobar" { + name = "%s" + description = "%s" + health_checks = [google_compute_http_health_check.zero.self_link] + external_managed_migration_state = "TEST_BY_PERCENTAGE" + external_managed_migration_testing_percentage = %d + depends_on = [ + time_sleep.six_minutes_delay + ] +} + +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} +`, serviceName, description, percentage, checkName) +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl index fd8366e75dee..e71c21ef61ae 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl @@ -6,6 +6,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/services/compute" ) @@ -367,6 +368,74 @@ func TestUnitComputeGlobalForwardingRule_InternalIpDiffSuppress(t *testing.T) { } } +func TestAccComputeGlobalForwardingRule_updateCanaryMigration(t *testing.T) { + t.Parallel() + + fr := fmt.Sprintf("fr-canary-mgiration-%s", acctest.RandString(t, 10)) + proxy := fmt.Sprintf("pr-canary-mgiration-%s", acctest.RandString(t, 10)) + urlmap := 
fmt.Sprintf("um-canary-mgiration-%s", acctest.RandString(t, 10)) + backendservice := fmt.Sprintf("bs-canary-mgiration-%s", acctest.RandString(t, 10)) + address := fmt.Sprintf("addr-canary-mgiration-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckComputeGlobalForwardingRuleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeGlobalForwardingRule_basic(fr, proxy, urlmap, backendservice, address), + }, + { + ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeGlobalForwardingRule_withCanaryMigration(fr, "PREPARE", proxy, urlmap, backendservice, address), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_global_forwarding_rule.forwarding_rule", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeGlobalForwardingRule_withCanaryMigrationPercentage(fr, proxy, urlmap, backendservice, address, 50), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_global_forwarding_rule.forwarding_rule", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeGlobalForwardingRule_withCanaryMigration(fr, "TEST_ALL_TRAFFIC", proxy, urlmap, backendservice, address), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + 
plancheck.ExpectResourceAction("google_compute_global_forwarding_rule.forwarding_rule", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccComputeGlobalForwardingRule_httpProxy(fr, targetProxy, proxy, proxy2, backend, hc, urlmap string) string { return fmt.Sprintf(` resource "google_compute_global_forwarding_rule" "forwarding_rule" { @@ -912,3 +981,113 @@ resource "google_compute_instance_template" "instance_template" { `, fr, proxy, backend, hc, urlmap, igm, it) } {{- end }} + + + +func testAccComputeGlobalForwardingRule_basic(fr, proxy, urlmap, backendservice, address string) string { + return fmt.Sprintf(` +resource "google_compute_global_forwarding_rule" "forwarding_rule" { + name = "%s" + ip_protocol = "TCP" + port_range = "80" + load_balancing_scheme = "EXTERNAL" + target = google_compute_target_http_proxy.my_target_http_proxy.id + ip_address = google_compute_global_address.my_global_ip.address +} + +resource "google_compute_target_http_proxy" "my_target_http_proxy" { + name = "%s" + url_map = google_compute_url_map.my_url_map.id +} + +resource "google_compute_url_map" "my_url_map" { + name = "%s" + default_service = google_compute_backend_service.my_backend_service.id +} + +resource "google_compute_backend_service" "my_backend_service" { + name = "%s" + protocol = "HTTP" + load_balancing_scheme = "EXTERNAL" +} + +resource "google_compute_global_address" "my_global_ip" { + name = "%s" +} +`, fr, proxy, urlmap, backendservice, address) +} + +func testAccComputeGlobalForwardingRule_withCanaryMigration(fr, bucket_migration_state, proxy, urlmap, backendservice, address string) string { + return fmt.Sprintf(` +resource "google_compute_global_forwarding_rule" "forwarding_rule" { + name = "%s" + ip_protocol = "TCP" + port_range = "80" + load_balancing_scheme = "EXTERNAL" + target = 
google_compute_target_http_proxy.my_target_http_proxy.id + ip_address = google_compute_global_address.my_global_ip.address + external_managed_backend_bucket_migration_state = "%s" +} + +resource "google_compute_target_http_proxy" "my_target_http_proxy" { + name = "%s" + url_map = google_compute_url_map.my_url_map.id +} + +resource "google_compute_url_map" "my_url_map" { + name = "%s" + default_service = google_compute_backend_service.my_backend_service.id +} + +resource "google_compute_backend_service" "my_backend_service" { + name = "%s" + protocol = "HTTP" + load_balancing_scheme = "EXTERNAL" +} + +resource "google_compute_global_address" "my_global_ip" { + name = "%s" +} +`, fr, bucket_migration_state, proxy, urlmap, backendservice, address) +} + + +func testAccComputeGlobalForwardingRule_withCanaryMigrationPercentage(fr, proxy, urlmap, backendservice, address string, percentage int64) string { + return fmt.Sprintf(` +resource "time_sleep" "six_minutes_delay" { + create_duration = "370s" # litte more than 6 minutes (360 seconds = 6 minutes) +} + +resource "google_compute_global_forwarding_rule" "forwarding_rule" { + name = "%s" + ip_protocol = "TCP" + port_range = "80" + load_balancing_scheme = "EXTERNAL" + target = google_compute_target_http_proxy.my_target_http_proxy.id + ip_address = google_compute_global_address.my_global_ip.address + external_managed_backend_bucket_migration_state = "TEST_BY_PERCENTAGE" + external_managed_backend_bucket_migration_testing_percentage = %d + depends_on = [time_sleep.six_minutes_delay] +} + +resource "google_compute_target_http_proxy" "my_target_http_proxy" { + name = "%s" + url_map = google_compute_url_map.my_url_map.id +} + +resource "google_compute_url_map" "my_url_map" { + name = "%s" + default_service = google_compute_backend_service.my_backend_service.id +} + +resource "google_compute_backend_service" "my_backend_service" { + name = "%s" + protocol = "HTTP" + load_balancing_scheme = "EXTERNAL" +} + +resource 
"google_compute_global_address" "my_global_ip" { + name = "%s" +} +`, fr, percentage, proxy, urlmap, backendservice, address) +} From e98455ea607c9849c2d5db16decc97e1e74ba275 Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Wed, 14 May 2025 14:32:41 -0400 Subject: [PATCH 124/884] container: allow updating windows_node_config in place (#13658) Signed-off-by: drfaust92 --- .../services/container/node_config.go.tmpl | 101 +++++++++--------- .../resource_container_node_pool_test.go.tmpl | 5 + 2 files changed, 55 insertions(+), 51 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index d4aa04bfc927..f1e92234c700 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -629,41 +629,41 @@ func schemaNodeConfig() *schema.Schema { Description: `Controls the maximum number of processes allowed to run in a pod.`, }, "container_log_max_size": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the maximum size of the container log file before it is rotated.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the maximum size of the container log file before it is rotated.`, + }, "container_log_max_files": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the maximum number of container log files that can be present for a container.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the maximum number of container log files that can be present for a container.`, + }, "image_gc_low_threshold_percent": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the percent of disk usage before which image garbage collection is never run. 
Lowest disk usage to garbage collect to.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the percent of disk usage before which image garbage collection is never run. Lowest disk usage to garbage collect to.`, + }, "image_gc_high_threshold_percent": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the percent of disk usage after which image garbage collection is always run.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the percent of disk usage after which image garbage collection is always run.`, + }, "image_minimum_gc_age": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the minimum age for an unused image before it is garbage collected.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the minimum age for an unused image before it is garbage collected.`, + }, "image_maximum_gc_age": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the maximum age an image can be unused before it is garbage collected.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the maximum age an image can be unused before it is garbage collected.`, + }, "allowed_unsafe_sysctls": { - Type: schema.TypeList, - Optional: true, - Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods.`, + Type: schema.TypeList, + Optional: true, + Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods.`, Elem: &schema.Schema{Type: schema.TypeString}, - }, + }, }, }, }, @@ -722,7 +722,6 @@ func schemaNodeConfig() *schema.Schema { "osversion": { Type: schema.TypeString, Optional: true, - ForceNew: true, Default: "OS_VERSION_UNSPECIFIED", Description: `The OS Version of the windows nodepool.Values are OS_VERSION_UNSPECIFIED,OS_VERSION_LTSC2019 and OS_VERSION_LTSC2022`, ValidateFunc: 
validation.StringInSlice([]string{"OS_VERSION_UNSPECIFIED", "OS_VERSION_LTSC2019", "OS_VERSION_LTSC2022"}, false), @@ -747,8 +746,8 @@ func schemaNodeConfig() *schema.Schema { Schema: map[string]*schema.Schema{ "threads_per_core": { Type: schema.TypeInt, - Required: true, - ForceNew: true, + Required: true, + ForceNew: true, Description: `The number of threads per physical core. To disable simultaneous multithreading (SMT) set this to 1. If unset, the maximum number of threads supported per core by the underlying processor is assumed.`, }, "enable_nested_virtualization": { @@ -856,10 +855,10 @@ func schemaNodeConfig() *schema.Schema { Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, }, "enable_confidential_storage": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Description: `If enabled boot disks are configured with confidential mode.`, + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: `If enabled boot disks are configured with confidential mode.`, }, "local_ssd_encryption_mode": { Type: schema.TypeString, @@ -1367,30 +1366,30 @@ func expandKubeletConfig(v interface{}) *container.NodeKubeletConfig { kConfig.PodPidsLimit = int64(podPidsLimit.(int)) } if containerLogMaxSize, ok := cfg["container_log_max_size"]; ok { - kConfig.ContainerLogMaxSize = containerLogMaxSize.(string) - } + kConfig.ContainerLogMaxSize = containerLogMaxSize.(string) + } if containerLogMaxFiles, ok := cfg["container_log_max_files"]; ok { - kConfig.ContainerLogMaxFiles = int64(containerLogMaxFiles.(int)) - } + kConfig.ContainerLogMaxFiles = int64(containerLogMaxFiles.(int)) + } if imageGcLowThresholdPercent, ok := cfg["image_gc_low_threshold_percent"]; ok { - kConfig.ImageGcLowThresholdPercent = 
int64(imageGcLowThresholdPercent.(int)) - } + kConfig.ImageGcLowThresholdPercent = int64(imageGcLowThresholdPercent.(int)) + } if imageGcHighThresholdPercent, ok := cfg["image_gc_high_threshold_percent"]; ok { - kConfig.ImageGcHighThresholdPercent = int64(imageGcHighThresholdPercent.(int)) - } + kConfig.ImageGcHighThresholdPercent = int64(imageGcHighThresholdPercent.(int)) + } if imageMinimumGcAge, ok := cfg["image_minimum_gc_age"]; ok { - kConfig.ImageMinimumGcAge = imageMinimumGcAge.(string) - } + kConfig.ImageMinimumGcAge = imageMinimumGcAge.(string) + } if imageMaximumGcAge, ok := cfg["image_maximum_gc_age"]; ok { - kConfig.ImageMaximumGcAge = imageMaximumGcAge.(string) - } + kConfig.ImageMaximumGcAge = imageMaximumGcAge.(string) + } if allowedUnsafeSysctls, ok := cfg["allowed_unsafe_sysctls"]; ok { - sysctls := allowedUnsafeSysctls.([]interface{}) + sysctls := allowedUnsafeSysctls.([]interface{}) kConfig.AllowedUnsafeSysctls = make([]string, len(sysctls)) for i, s := range sysctls { kConfig.AllowedUnsafeSysctls[i] = s.(string) } - } + } return kConfig } @@ -1685,7 +1684,7 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "local_nvme_ssd_block_config": flattenLocalNvmeSsdBlockConfig(c.LocalNvmeSsdBlockConfig), "ephemeral_storage_local_ssd_config": flattenEphemeralStorageLocalSsdConfig(c.EphemeralStorageLocalSsdConfig), "gcfs_config": flattenGcfsConfig(c.GcfsConfig), - "gvnic": flattenGvnic(c.Gvnic), + "gvnic": flattenGvnic(c.Gvnic), "reservation_affinity": flattenGKEReservationAffinity(c.ReservationAffinity), "service_account": c.ServiceAccount, "metadata": c.Metadata, @@ -1703,7 +1702,7 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "effective_taints": flattenEffectiveTaints(c.Taints), "workload_metadata_config": flattenWorkloadMetadataConfig(c.WorkloadMetadataConfig), {{- if ne $.TargetVersionName "ga" }} - "sandbox_config": flattenSandboxConfig(c.SandboxConfig), + "sandbox_config": 
flattenSandboxConfig(c.SandboxConfig), "host_maintenance_policy": flattenHostMaintenancePolicy(c.HostMaintenancePolicy), {{- end }} "confidential_nodes": flattenConfidentialNodes(c.ConfidentialNodes), diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 2b81ea252b5f..c373757996c1 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -674,6 +674,11 @@ func TestAccContainerNodePool_withWindowsNodeConfig(t *testing.T) { // Perform an update. { Config: testAccContainerNodePool_withWindowsNodeConfig(cluster, np, "OS_VERSION_LTSC2022"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_container_node_pool.with_windows_node_config", plancheck.ResourceActionUpdate), + }, + }, }, { ResourceName: "google_container_node_pool.with_windows_node_config", From 13df2aae50377d4bff3acb0fa4955fe22ee76fbb Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 14 May 2025 12:14:37 -0700 Subject: [PATCH 125/884] remove template that no longer needs to be (#13952) --- ...ol_test.go.tmpl => resource_netapp_storage_pool_test.go} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename mmv1/third_party/terraform/services/netapp/{resource_netapp_storage_pool_test.go.tmpl => resource_netapp_storage_pool_test.go} (99%) diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go similarity index 99% rename from mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl rename to mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go index 
043edc879c79..6b69d1f49a61 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go @@ -3,8 +3,9 @@ package netapp_test import ( "testing" "time" - "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -94,7 +95,6 @@ resource "google_netapp_storage_pool" "test_pool" { `, context) } - func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), @@ -389,4 +389,4 @@ data "google_compute_network" "default" { name = "%{network_name}" } `, context) -} \ No newline at end of file +} From c6dd37a5e89cd2f251b2ab47168cb089db34c76a Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 14 May 2025 13:17:01 -0700 Subject: [PATCH 126/884] Removed cloud identity group set_computed_name post_create (#13627) --- mmv1/products/cloudidentity/Group.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/products/cloudidentity/Group.yaml b/mmv1/products/cloudidentity/Group.yaml index f3b8b25de0b2..ca7b90719f81 100644 --- a/mmv1/products/cloudidentity/Group.yaml +++ b/mmv1/products/cloudidentity/Group.yaml @@ -45,7 +45,6 @@ async: target_occurrences: 10 actions: ['create', 'update', 'delete'] custom_code: - post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_import: 'templates/terraform/custom_import/cloud_identity_group_import.go.tmpl' exclude_sweeper: true examples: From 2f690ce11ef21f113dc802e3f6dfcb417715bf01 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 14 May 2025 13:20:02 -0700 
Subject: [PATCH 127/884] convert all code to use github/v68 (#13950) --- .ci/magician/cmd/scheduled_pr_reminders.go | 2 +- .../cmd/scheduled_pr_reminders_test.go | 2 +- .ci/magician/go.mod | 1 - .ci/magician/go.sum | 2 - tools/issue-labeler/go.mod | 2 +- tools/issue-labeler/go.sum | 4 +- tools/issue-labeler/labeler/backfill.go | 2 +- tools/issue-labeler/labeler/backfill_test.go | 50 +++++++++---------- tools/issue-labeler/labeler/github.go | 2 +- tools/issue-labeler/labeler/labels.go | 2 +- tools/issue-labeler/labeler/labels_test.go | 16 +++--- 11 files changed, 41 insertions(+), 44 deletions(-) diff --git a/.ci/magician/cmd/scheduled_pr_reminders.go b/.ci/magician/cmd/scheduled_pr_reminders.go index f75cd3ddb72c..097310244897 100644 --- a/.ci/magician/cmd/scheduled_pr_reminders.go +++ b/.ci/magician/cmd/scheduled_pr_reminders.go @@ -26,7 +26,7 @@ import ( membership "magician/github" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" "github.com/spf13/cobra" "golang.org/x/exp/slices" diff --git a/.ci/magician/cmd/scheduled_pr_reminders_test.go b/.ci/magician/cmd/scheduled_pr_reminders_test.go index 60719116cb39..8e1d1d146d3e 100644 --- a/.ci/magician/cmd/scheduled_pr_reminders_test.go +++ b/.ci/magician/cmd/scheduled_pr_reminders_test.go @@ -6,7 +6,7 @@ import ( membership "magician/github" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" "github.com/stretchr/testify/assert" ) diff --git a/.ci/magician/go.mod b/.ci/magician/go.mod index 4a19d6ff1ba7..c6e029ad9c04 100644 --- a/.ci/magician/go.mod +++ b/.ci/magician/go.mod @@ -16,7 +16,6 @@ require ( require ( cloud.google.com/go/storage v1.50.0 github.com/google/go-cmp v0.6.0 - github.com/google/go-github/v61 v61.0.0 github.com/google/go-github/v68 v68.0.0 github.com/otiai10/copy v1.12.0 github.com/stretchr/testify v1.10.0 diff --git a/.ci/magician/go.sum b/.ci/magician/go.sum index 733652cf458d..ffc9c5757f12 100644 --- a/.ci/magician/go.sum 
+++ b/.ci/magician/go.sum @@ -85,8 +85,6 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go= -github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY= github.com/google/go-github/v68 v68.0.0 h1:ZW57zeNZiXTdQ16qrDiZ0k6XucrxZ2CGmoTvcCyQG6s= github.com/google/go-github/v68 v68.0.0/go.mod h1:K9HAUBovM2sLwM408A18h+wd9vqdLOEqTUCbnRIcx68= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= diff --git a/tools/issue-labeler/go.mod b/tools/issue-labeler/go.mod index 98c26dfa69fe..c4fc7833d382 100644 --- a/tools/issue-labeler/go.mod +++ b/tools/issue-labeler/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( github.com/golang/glog v1.1.1 - github.com/google/go-github/v61 v61.0.0 + github.com/google/go-github/v68 v68.0.0 github.com/spf13/cobra v1.8.1 golang.org/x/exp v0.0.0-20230810033253-352e893a4cad golang.org/x/oauth2 v0.24.0 diff --git a/tools/issue-labeler/go.sum b/tools/issue-labeler/go.sum index 6bf2d02dc507..8175a6852808 100644 --- a/tools/issue-labeler/go.sum +++ b/tools/issue-labeler/go.sum @@ -4,8 +4,8 @@ github.com/golang/glog v1.1.1/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go= -github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY= 
+github.com/google/go-github/v68 v68.0.0 h1:ZW57zeNZiXTdQ16qrDiZ0k6XucrxZ2CGmoTvcCyQG6s= +github.com/google/go-github/v68 v68.0.0/go.mod h1:K9HAUBovM2sLwM408A18h+wd9vqdLOEqTUCbnRIcx68= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= diff --git a/tools/issue-labeler/labeler/backfill.go b/tools/issue-labeler/labeler/backfill.go index f97499299d1f..f7689f0316b5 100644 --- a/tools/issue-labeler/labeler/backfill.go +++ b/tools/issue-labeler/labeler/backfill.go @@ -9,7 +9,7 @@ import ( "time" "github.com/golang/glog" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" ) type Label struct { diff --git a/tools/issue-labeler/labeler/backfill_test.go b/tools/issue-labeler/labeler/backfill_test.go index 294a405ef915..c06e0f1279a9 100644 --- a/tools/issue-labeler/labeler/backfill_test.go +++ b/tools/issue-labeler/labeler/backfill_test.go @@ -7,11 +7,11 @@ import ( "strings" "testing" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" ) func testIssueBodyWithResources(resources []string) *string { - return github.String(fmt.Sprintf(` + return github.Ptr(fmt.Sprintf(` ### New or Affected Resource(s): %s @@ -54,7 +54,7 @@ func TestComputeIssueUpdates(t *testing.T) { description: "gracefully handle a nil issue body", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), }, }, regexpLabels: defaultRegexpLabels, @@ -75,8 +75,8 @@ func TestComputeIssueUpdates(t *testing.T) { name: "no listed resources", issues: []*github.Issue{ { - Number: github.Int(1), - Body: github.String("Body with unusual structure"), + Number: github.Ptr(1), + Body: github.Ptr("Body with unusual structure"), }, }, regexpLabels: defaultRegexpLabels, @@ -87,14 +87,14 @@ func 
TestComputeIssueUpdates(t *testing.T) { description: "issues with service/terraform shouldn't get new labels", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.String("service/terraform")}}, + Labels: []*github.Label{{Name: github.Ptr("service/terraform")}}, }, { - Number: github.Int(2), + Number: github.Ptr(2), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.String("forward/exempt")}}, + Labels: []*github.Label{{Name: github.Ptr("forward/exempt")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -105,11 +105,11 @@ func TestComputeIssueUpdates(t *testing.T) { description: "issues with affected resources should normally get new labels added", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), }, { - Number: github.Int(2), + Number: github.Ptr(2), Body: testIssueBodyWithResources([]string{"google_service2_resource1"}), }, }, @@ -130,14 +130,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "don't update issues if all expected service labels are already present", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.String("service/service1")}}, + Labels: []*github.Label{{Name: github.Ptr("service/service1")}}, }, { - Number: github.Int(2), + Number: github.Ptr(2), Body: testIssueBodyWithResources([]string{"google_service2_resource1"}), - Labels: []*github.Label{{Name: github.String("service/service2-subteam1")}}, + Labels: []*github.Label{{Name: github.Ptr("service/service2-subteam1")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -148,14 +148,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "add missing 
service labels", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.String("service/service2-subteam1")}}, + Labels: []*github.Label{{Name: github.Ptr("service/service2-subteam1")}}, }, { - Number: github.Int(2), + Number: github.Ptr(2), Body: testIssueBodyWithResources([]string{"google_service2_resource2"}), - Labels: []*github.Label{{Name: github.String("service/service1")}}, + Labels: []*github.Label{{Name: github.Ptr("service/service1")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -177,9 +177,9 @@ func TestComputeIssueUpdates(t *testing.T) { description: "don't add missing service labels if already linked", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.String("service/service2-subteam1")}, {Name: github.String("forward/linked")}}, + Labels: []*github.Label{{Name: github.Ptr("service/service2-subteam1")}, {Name: github.Ptr("forward/linked")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -190,14 +190,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "add service labels if missed but don't add forward/review label for test failure ticket", issues: []*github.Issue{ { - Number: github.Int(1), + Number: github.Ptr(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.String("test-failure")}, {Name: github.String("test-failure-100")}}, + Labels: []*github.Label{{Name: github.Ptr("test-failure")}, {Name: github.Ptr("test-failure-100")}}, }, { - Number: github.Int(2), + Number: github.Ptr(2), Body: testIssueBodyWithResources([]string{"google_service2_resource1"}), - Labels: []*github.Label{{Name: github.String("test-failure")}, {Name: github.String("test-failure-50")}, {Name: 
github.String("service/service2-subteam1")}}, + Labels: []*github.Label{{Name: github.Ptr("test-failure")}, {Name: github.Ptr("test-failure-50")}, {Name: github.Ptr("service/service2-subteam1")}}, }, }, regexpLabels: defaultRegexpLabels, diff --git a/tools/issue-labeler/labeler/github.go b/tools/issue-labeler/labeler/github.go index a71beb90ea8d..62d6b6bf6cf4 100644 --- a/tools/issue-labeler/labeler/github.go +++ b/tools/issue-labeler/labeler/github.go @@ -6,7 +6,7 @@ import ( "os" "strings" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" "golang.org/x/oauth2" ) diff --git a/tools/issue-labeler/labeler/labels.go b/tools/issue-labeler/labeler/labels.go index 9aa6ce98b4d0..3ac523afdb99 100644 --- a/tools/issue-labeler/labeler/labels.go +++ b/tools/issue-labeler/labeler/labels.go @@ -10,7 +10,7 @@ import ( _ "embed" "github.com/golang/glog" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" "gopkg.in/yaml.v2" ) diff --git a/tools/issue-labeler/labeler/labels_test.go b/tools/issue-labeler/labeler/labels_test.go index 07658c9260b0..278f1cacab5b 100644 --- a/tools/issue-labeler/labeler/labels_test.go +++ b/tools/issue-labeler/labeler/labels_test.go @@ -5,7 +5,7 @@ import ( "regexp" "testing" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v68/github" "golang.org/x/exp/slices" ) @@ -225,9 +225,9 @@ func TestComputeLabelChanges(t *testing.T) { { name: "existing labels with correct color", existingLabels: []*github.Label{ - {Name: github.String("xyz"), Color: github.String("FF0000")}, - {Name: github.String("bug"), Color: github.String("FF0000")}, - {Name: github.String("enhancement"), Color: github.String("FF0000")}, + {Name: github.Ptr("xyz"), Color: github.Ptr("FF0000")}, + {Name: github.Ptr("bug"), Color: github.Ptr("FF0000")}, + {Name: github.Ptr("enhancement"), Color: github.Ptr("FF0000")}, }, desiredLabels: []string{"bug", "enhancement"}, desiredColor: "FF0000", @@ 
-239,8 +239,8 @@ func TestComputeLabelChanges(t *testing.T) { { name: "existing labels with wrong color", existingLabels: []*github.Label{ - {Name: github.String("bug"), Color: github.String("00FF00")}, - {Name: github.String("enhancement"), Color: github.String("00FF00")}, + {Name: github.Ptr("bug"), Color: github.Ptr("00FF00")}, + {Name: github.Ptr("enhancement"), Color: github.Ptr("00FF00")}, }, desiredLabels: []string{"bug", "enhancement"}, desiredColor: "FF0000", @@ -252,7 +252,7 @@ func TestComputeLabelChanges(t *testing.T) { { name: "mixed existing and new labels", existingLabels: []*github.Label{ - {Name: github.String("bug"), Color: github.String("FF0000")}, + {Name: github.Ptr("bug"), Color: github.Ptr("FF0000")}, }, desiredLabels: []string{"bug", "enhancement"}, desiredColor: "FF0000", @@ -264,7 +264,7 @@ func TestComputeLabelChanges(t *testing.T) { { name: "case insensitive color comparison", existingLabels: []*github.Label{ - {Name: github.String("bug"), Color: github.String("ff0000")}, + {Name: github.Ptr("bug"), Color: github.Ptr("ff0000")}, }, desiredLabels: []string{"bug"}, desiredColor: "FF0000", From 8d29bc8067c153f077f9536b268d0df670ee85d5 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 14 May 2025 13:23:50 -0700 Subject: [PATCH 128/884] Removed cloud identity group membership set_computed_name post_create (#13628) --- mmv1/products/cloudidentity/GroupMembership.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/products/cloudidentity/GroupMembership.yaml b/mmv1/products/cloudidentity/GroupMembership.yaml index 9b0376a33642..835595d2b12a 100644 --- a/mmv1/products/cloudidentity/GroupMembership.yaml +++ b/mmv1/products/cloudidentity/GroupMembership.yaml @@ -37,7 +37,6 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: - post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_update: 'templates/terraform/custom_update/cloud_identity_group_membership.go.tmpl' post_import: 
'templates/terraform/post_import/cloud_identity_group_membership.go.tmpl' exclude_sweeper: true From 017ad8ee588c0af24c0f83e046debba8d149fb3b Mon Sep 17 00:00:00 2001 From: stevenyang72 Date: Wed, 14 May 2025 13:49:08 -0700 Subject: [PATCH 129/884] Add mode, inline cert iss & trust config to workload identity pool (#13814) --- .../iambeta/WorkloadIdentityPool.yaml | 166 +++++++++++- .../iam_workload_identity_pool_full.tf.tmpl | 6 - ...ity_pool_full_federation_only_mode.tf.tmpl | 9 + ...entity_pool_full_trust_domain_mode.tf.tmpl | 38 +++ ...esource_iam_workload_identity_pool_test.go | 99 -------- ...ce_iam_workload_identity_pool_test.go.tmpl | 239 ++++++++++++++++++ .../iambeta/test-fixtures/trust_anchor_1.pem | 3 + .../iambeta/test-fixtures/trust_anchor_2.pem | 3 + .../iambeta/test-fixtures/trust_anchor_3.pem | 3 + .../iambeta/test-fixtures/trust_anchor_4.pem | 3 + 10 files changed, 459 insertions(+), 110 deletions(-) delete mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl delete mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go create mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl create mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem create mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem create mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem create mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem diff --git a/mmv1/products/iambeta/WorkloadIdentityPool.yaml b/mmv1/products/iambeta/WorkloadIdentityPool.yaml index 4ab9768f124d..829f398fac0b 
100644 --- a/mmv1/products/iambeta/WorkloadIdentityPool.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPool.yaml @@ -19,6 +19,8 @@ description: | references: guides: 'Managing workload identity pools': 'https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#pools' + 'Configure managed workload identity authentication for Compute Engine': 'https://cloud.google.com/iam/docs/create-managed-workload-identities' + 'Configure managed workload identity authentication for GKE': 'https://cloud.google.com/iam/docs/create-managed-workload-identities-gke' api: 'https://cloud.google.com/iam/docs/reference/rest/v1/projects.locations.workloadIdentityPools' docs: base_url: 'projects/{{project}}/locations/global/workloadIdentityPools' @@ -40,6 +42,16 @@ async: base_url: '{{op_id}}' result: resource_inside_response: false +iam_policy: + parent_resource_attribute: 'workload_identity_pool_id' + method_name_separator: ':' + fetch_iam_policy_verb: 'POST' + import_format: + - 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}' + - '{{workload_identity_pool_id}}' + allowed_iam_role: 'roles/iam.workloadIdentityPoolViewer' + iam_conditions_request_type: 'REQUEST_BODY' + min_version: beta custom_code: constants: 'templates/terraform/constants/iam_workload_identity_pool.go.tmpl' decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' @@ -47,12 +59,26 @@ custom_code: examples: - name: 'iam_workload_identity_pool_basic' primary_resource_id: 'example' + primary_resource_name: + 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' vars: workload_identity_pool_id: 'example-pool' - - name: 'iam_workload_identity_pool_full' + - name: 'iam_workload_identity_pool_full_federation_only_mode' primary_resource_id: 'example' + primary_resource_name: + 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' vars: workload_identity_pool_id: 'example-pool' + min_version: beta + external_providers: + - 
'random' + - name: 'iam_workload_identity_pool_full_trust_domain_mode' + primary_resource_id: 'example' + primary_resource_name: + 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' + vars: + workload_identity_pool_id: 'example-pool' + min_version: beta parameters: properties: - name: 'workloadIdentityPoolId' @@ -70,11 +96,11 @@ properties: type: Enum description: | The state of the pool. - * STATE_UNSPECIFIED: State unspecified. - * ACTIVE: The pool is active, and may be used in Google Cloud policies. - * DELETED: The pool is soft-deleted. Soft-deleted pools are permanently deleted after + * `STATE_UNSPECIFIED`: State unspecified. + * `ACTIVE`: The pool is active, and may be used in Google Cloud policies. + * `DELETED`: The pool is soft-deleted. Soft-deleted pools are permanently deleted after approximately 30 days. You can restore a soft-deleted pool using - UndeleteWorkloadIdentityPool. You cannot reuse the ID of a soft-deleted pool until it is + `UndeleteWorkloadIdentityPool`. You cannot reuse the ID of a soft-deleted pool until it is permanently deleted. While a pool is deleted, you cannot use it to exchange tokens, or use existing tokens to access resources. If the pool is undeleted, existing tokens grant access again. @@ -101,3 +127,133 @@ properties: Whether the pool is disabled. You cannot use a disabled pool to exchange tokens, or use existing tokens to access resources. If the pool is re-enabled, existing tokens grant access again. + - name: mode + type: Enum + description: | + The mode the pool is operating in. Pools with an unspecified mode will operate as if they + are in `FEDERATION_ONLY` mode. + + + ~> **Note** This field cannot be changed after the Workload Identity Pool is created. While + `terraform plan` may show an update if you change this field's value, `terraform apply` + **will fail with an API error** (such as `Error 400: Attempted to update an immutable field.`).
+ To specify a different `mode`, please create a new Workload Identity Pool resource. + + * `FEDERATION_ONLY`: Pools can only be used for federating external workload identities into + Google Cloud. Unless otherwise noted, no structure or format constraints are applied to + workload identities in a `FEDERATION_ONLY` mode pool, and you may not create any resources + within the pool besides providers. + * `TRUST_DOMAIN`: Pools can be used to assign identities to Google Cloud workloads. All + identities within a `TRUST_DOMAIN` mode pool must consist of a single namespace and individual + workload identifier. The subject identifier for all identities must conform to the following + format: `ns//sa/`. + `google_iam_workload_identity_pool_provider`s cannot be created within `TRUST_DOMAIN` + mode pools. + min_version: beta + enum_values: + - 'FEDERATION_ONLY' + - 'TRUST_DOMAIN' + - name: 'inlineCertificateIssuanceConfig' + type: NestedObject + description: | + Represents configuration for generating mutual TLS (mTLS) certificates for the identities + within this pool. Defines the Certificate Authority (CA) pool resources and configurations + required for issuance and rotation of mTLS workload certificates. + min_version: beta + properties: + - name: 'caPools' + type: KeyValuePairs + description: | + A required mapping of a cloud region to the CA pool resource located in that region used + for certificate issuance, adhering to these constraints: + + * **Key format:** A supported cloud region name equivalent to the location identifier in + the corresponding map entry's value. + * **Value format:** A valid CA pool resource path format like: + `projects/{project}/locations/{location}/caPools/{ca_pool}` + * **Region Matching:** Workloads are ONLY issued certificates from CA pools within the + same region. Also the CA pool region (in value) must match the workload's region (key). 
+ required: true + - name: 'lifetime' + type: String + description: | + Lifetime of the workload certificates issued by the CA pool in seconds. Must be between + `86400s` (24 hours) to `2592000s` (30 days), ends in the suffix "`s`" (indicating seconds) + and is preceded by the number of seconds. If unspecified, this will be defaulted to + `86400s` (24 hours). + default_from_api: true + - name: 'rotationWindowPercentage' + type: Integer + description: | + Rotation window percentage indicating when certificate rotation should be initiated based + on remaining lifetime. Must be between `50` - `80`. If unspecified, this will be defaulted + to `50`. + default_from_api: true + - name: 'keyAlgorithm' + type: Enum + description: | + Key algorithm to use when generating the key pair. This key pair will be used to create + the certificate. If unspecified, this will default to `ECDSA_P256`. + + * `RSA_2048`: Specifies RSA with a 2048-bit modulus. + * `RSA_3072`: Specifies RSA with a 3072-bit modulus. + * `RSA_4096`: Specifies RSA with a 4096-bit modulus. + * `ECDSA_P256`: Specifies ECDSA with curve P256. + * `ECDSA_P384`: Specifies ECDSA with curve P384. + default_from_api: true + enum_values: + - 'RSA_2048' + - 'RSA_3072' + - 'RSA_4096' + - 'ECDSA_P256' + - 'ECDSA_P384' + - name: 'inlineTrustConfig' + type: NestedObject + description: | + Represents config to add additional trusted trust domains. Defines configuration for extending + trust to additional trust domains. By establishing trust with another domain, the current + domain will recognize and accept certificates issued by entities within the trusted domains. + Note that a trust domain automatically trusts itself, eliminating the need for explicit + configuration. + min_version: beta + properties: + - name: 'additionalTrustBundles' + type: Map + description: | + Maps specific trust domains (e.g., "example.com") to their corresponding `TrustStore` + objects, which contain the trusted root certificates for that domain. 
There can be a + maximum of `10` trust domain entries in this map. + + Note that a trust domain automatically trusts itself and don't need to be specified here. + If however, this `WorkloadIdentityPool`'s trust domain contains any trust anchors in the + `additional_trust_bundles` map, those trust anchors will be *appended to* the Trust Bundle + automatically derived from your `InlineCertificateIssuanceConfig`'s `ca_pools`. + key_name: trust_domain + key_description: | + The trusted trust domains (e.g., "example.com") to be extended trust to additional trust + domains to. + value_type: + name: trustStore + type: NestedObject + description: | + Trust store that contains trust anchors and optional intermediate CAs used in PKI to + build trust chain and verify client's identity. + properties: + - name: 'trustAnchors' + type: Array + description: | + List of Trust Anchors to be used while performing validation against a given + `TrustStore`. The incoming end entity's certificate must be chained up to one of the + trust anchors here. + required: true + item_type: + type: NestedObject + description: | + Represents a root of trust. + properties: + - name: 'pemCertificate' + type: String + description: | + PEM certificate of the PKI used for validation. Must only contain one ca + certificate(either root or intermediate cert). 
+ required: true diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl deleted file mode 100644 index a46cc332b260..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -resource "google_iam_workload_identity_pool" "{{$.PrimaryResourceId}}" { - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - display_name = "Name of pool" - description = "Identity pool for automated test" - disabled = true -} diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl new file mode 100644 index 000000000000..bccc8eec0b14 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl @@ -0,0 +1,9 @@ +resource "google_iam_workload_identity_pool" "{{$.PrimaryResourceId}}" { + provider = google-beta + + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + display_name = "Name of the pool" + description = "Identity pool operates in FEDERATION_ONLY mode" + disabled = true + mode = "FEDERATION_ONLY" +} diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl new file mode 100644 index 000000000000..8480d68712da --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl @@ -0,0 +1,38 @@ +resource "google_iam_workload_identity_pool" "{{$.PrimaryResourceId}}" { + provider = google-beta + + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + display_name = "Name of the pool" + description = "Identity pool operates in TRUST_DOMAIN mode" + disabled = true + 
mode = "TRUST_DOMAIN" + inline_certificate_issuance_config { + ca_pools = { + "us-central1" : "projects/project-bar/locations/us-central1/caPools/ca-pool-bar" + "asia-east2" : "projects/project-foo/locations/asia-east2/caPools/ca-pool-foo" + } + lifetime = "86400s" + rotation_window_percentage = 50 + key_algorithm = "ECDSA_P256" + } + inline_trust_config { + additional_trust_bundles { + trust_domain = "example.com" + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_1.pem") + } + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_2.pem") + } + } + additional_trust_bundles { + trust_domain = "example.net" + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_3.pem") + } + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_4.pem") + } + } + } +} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go deleted file mode 100644 index 52113b646693..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go +++ /dev/null @@ -1,99 +0,0 @@ -package iambeta_test - -import ( - "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" -) - -func TestAccIAMBetaWorkloadIdentityPool_full(t *testing.T) { - t.Parallel() - - randomSuffix := acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPool_full(randomSuffix), - }, - { - ResourceName: "google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: 
testAccIAMBetaWorkloadIdentityPool_update(randomSuffix), - }, - { - ResourceName: "google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccIAMBetaWorkloadIdentityPool_minimal(t *testing.T) { - t.Parallel() - - randomSuffix := acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPool_minimal(randomSuffix), - }, - { - ResourceName: "google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccIAMBetaWorkloadIdentityPool_update(randomSuffix), - }, - { - ResourceName: "google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccIAMBetaWorkloadIdentityPool_full(suffix string) string { - return fmt.Sprintf(` -resource "google_iam_workload_identity_pool" "my_pool" { - workload_identity_pool_id = "my-pool-%s" - display_name = "Name of pool" - description = "Identity pool for automated test" - disabled = true -} -`, suffix) -} - -func testAccIAMBetaWorkloadIdentityPool_minimal(suffix string) string { - return fmt.Sprintf(` -resource "google_iam_workload_identity_pool" "my_pool" { - workload_identity_pool_id = "my-pool-%s" -} -`, suffix) -} - -func testAccIAMBetaWorkloadIdentityPool_update(suffix string) string { - return fmt.Sprintf(` -resource "google_iam_workload_identity_pool" "my_pool" { - workload_identity_pool_id = "my-pool-%s" - display_name = "Updated name of pool" - description = "Updated description" - disabled = false -} -`, suffix) -} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl 
b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl new file mode 100644 index 000000000000..b1faf0591677 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl @@ -0,0 +1,239 @@ +package iambeta_test + +import ( + "fmt" + {{if ne $.TargetVersionName "ga" -}} + "github.com/hashicorp/terraform-plugin-testing/plancheck" + {{- end }} + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestAccIAMBetaWorkloadIdentityPool_full(t *testing.T) { + t.Parallel() + + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPool_full(randomSuffix), + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPool_update(randomSuffix), + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccIAMBetaWorkloadIdentityPool_minimal(t *testing.T) { + t.Parallel() + + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPool_minimal(randomSuffix), + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: 
testAccIAMBetaWorkloadIdentityPool_update(randomSuffix), + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{if ne $.TargetVersionName "ga" -}} +func TestAccIAMBetaWorkloadIdentityPool_beta_update(t *testing.T) { + t.Parallel() + + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPool_beta_full(randomSuffix), + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPool_beta_update(randomSuffix), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_iam_workload_identity_pool.my_pool", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPool_beta_minimum(randomSuffix), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_iam_workload_identity_pool.my_pool", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_iam_workload_identity_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func testAccIAMBetaWorkloadIdentityPool_full(suffix string) string { + return fmt.Sprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + workload_identity_pool_id = "my-pool-%s" + display_name = "Name of pool" + description = "Identity pool for automated test" + disabled = true +} +`, suffix) +} + +func 
testAccIAMBetaWorkloadIdentityPool_minimal(suffix string) string { + return fmt.Sprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + workload_identity_pool_id = "my-pool-%s" +} +`, suffix) +} + +func testAccIAMBetaWorkloadIdentityPool_update(suffix string) string { + return fmt.Sprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + workload_identity_pool_id = "my-pool-%s" + display_name = "Updated name of pool" + description = "Updated description" + disabled = false +} +`, suffix) +} + +{{if ne $.TargetVersionName "ga" -}} +func testAccIAMBetaWorkloadIdentityPool_beta_full(suffix string) string { + return fmt.Sprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + provider = google-beta + + workload_identity_pool_id = "my-pool-%s" + display_name = "Name of the pool" + description = "Identity pool operates in TRUST_DOMAIN mode" + disabled = true + mode = "TRUST_DOMAIN" + inline_certificate_issuance_config { + ca_pools = { + "us-central1" : "projects/project-bar/locations/us-central1/caPools/ca-pool-bar" + "asia-east2" : "projects/project-foo/locations/asia-east2/caPools/ca-pool-foo" + } + lifetime = "86400s" + rotation_window_percentage = 50 + key_algorithm = "ECDSA_P256" + } + inline_trust_config { + additional_trust_bundles { + trust_domain = "ca-pool-foo.global.project-foo.workload.id.goog" + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_1.pem") + } + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_2.pem") + } + } + additional_trust_bundles { + trust_domain = "ca-pool-bar.global.project-bar.workload.id.goog" + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_3.pem") + } + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_4.pem") + } + } + } +} +`, suffix) +} + +func testAccIAMBetaWorkloadIdentityPool_beta_update(suffix string) string { + return fmt.Sprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + provider = 
google-beta + + workload_identity_pool_id = "my-pool-%s" + display_name = "Updated name of the pool" + description = "Updated identity pool operates in TRUST_DOMAIN mode" + disabled = false + mode = "TRUST_DOMAIN" + inline_certificate_issuance_config { + ca_pools = { + "us-central2" : "projects/project-bar/locations/us-central2/caPools/ca-pool-bar" + "asia-east1" : "projects/project-foo/locations/asia-east1/caPools/ca-pool-foo" + } + lifetime = "36000s" + rotation_window_percentage = 75 + key_algorithm = "RSA_4096" + } + inline_trust_config { + additional_trust_bundles { + trust_domain = "ca-pool-baz.global.project-baz.workload.id.goog" + trust_anchors { + pem_certificate = file("test-fixtures/trust_anchor_updated.pem") + } + } + } +} +`, suffix) +} + +func testAccIAMBetaWorkloadIdentityPool_beta_minimum(suffix string) string { + return fmt.Sprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + provider = google-beta + + workload_identity_pool_id = "my-pool-%s" + mode = "TRUST_DOMAIN" +} +`, suffix) +} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem new file mode 100644 index 000000000000..0c7e92db772c --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- 
+MIID9jCCAt6gAwIBAgIJALDL1dNMR+H8MA0GCSqGSIb3DQEBCwUAMIGeMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTESMBAGA1UEBwwJU3Vubnl2YWxlMRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMSAwHgYDVQQLDBdQcm9kdWN0aW9uIFdlYiBTZXJ2aWNlczEpMCcGA1UEAwwgdXMtd2VzdDIucHJvZC53ZWJhcHAuZXhhbXBsZS5jb20wHhcNMjUwNDI5MjMxMTAyWhcNMzUwNDI3MjMxMTAyWjCBnjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcMCVN1bm55dmFsZTEZMBcGA1UECgwQR29vZ2xlIFRlc3QgQ29ycDEgMB4GA1UECwwXUHJvZHVjdGlvbiBXZWIgU2VydmljZXMxKTAnBgNVBAMMIHVzLXdlc3QyLnByb2Qud2ViYXBwLmV4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx1YFuo6mMlv+2e9r0LROY3bMwKUyUpaD1Jlf6fVFcTDXTHFYAU8uqjA6bxZDDaIfXIuzUbvXfnVsX7U5yWjDfYf0oRV9QDv/TbagdzBNvIIjIs1kxskO6wBrTmJNkWP3rlnlQhEnTai5X/uARZShajTbKU9yfQFPQj9aG0pptuqwWZQ7DGCpybfuFBQ296Zznul1Sunu090SE7InTsoJtthhUdPZ4krk6EH7bV/59+vjJjOF2rsAFEf9CmN4pLdK0+c003s6fZc/pkja40jwyKgRtRzh9SrDPgnF3Qy/hDGTG+BBGkvQRyBJ4EqtbuE05IUg1Ek58QiF3ET4nB9lqQIDAQABozUwMzAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwICBDATBgNVHSUEDDAKBggrBgEFBQcDAjANBgkqhkiG9w0BAQsFAAOCAQEAFv+inLwh8s5XOcM8GVSUwvxGei1WTSntt/ia2AUQa5iM1MIGLpu7EUYZId3Zmc4YCpfzXinnf8aCmWfNXhCXbrYVBZNgUTo3dBYyHR4lSQqVygmxFJ4Hwx8esP1+8W0yG+t7nmmwvMQaLElDl9I8B19VZP6IQwddYOmD/0eFdcrbVh00zvUZPNiuGRvfwTxPphaRDEd/VUWRkTLegRzL5WtJlCBJoP62M9EtHjoYjzUUrRarapgPMZJpO7DRHJwsLUNwHCHvyM+B2gDVmawLCvcxzMZUIBaxoGpyuOI9zbOK2wdGl2fLU48mm5qQQiw7toMcnG/I6Offj4Mu4m90bw== +-----END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem new file mode 100644 index 000000000000..0775e9046990 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- 
+MIID8jCCAtqgAwIBAgIJAPMKxdVc8n0fMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYDVQQGEwJVUzEQMA4GA1UECAwHQXJpem9uYTEQMA4GA1UEBwwHUGhvZW5peDEZMBcGA1UECgwQR29vZ2xlIFRlc3QgQ29ycDEgMB4GA1UECwwXUHJvZHVjdGlvbiBXZWIgU2VydmljZXMxLDAqBgNVBAMMI3VzLWNlbnRyYWwxLnByb2Qud2ViYXBwLmV4YW1wbGUuY29tMB4XDTI1MDQyOTIzMTUwOFoXDTM1MDQyNzIzMTUwOFowgZwxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdBcml6b25hMRAwDgYDVQQHDAdQaG9lbml4MRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMSAwHgYDVQQLDBdQcm9kdWN0aW9uIFdlYiBTZXJ2aWNlczEsMCoGA1UEAwwjdXMtY2VudHJhbDEucHJvZC53ZWJhcHAuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCxzfupfYd1r0DPwAPoD7YvqEmgOhZA3TAbtsPEjo7YeOj93gvTEXpAo7x9X2AAAdTKrgPiVlVKMKuLNEXXTQPOBaLkt08w92FnC2MS9AuihfrLNOei5ImEKCKkzsWquRU1spbRzMnVKW1i6AmZSsElZvxB6F6fW89+sclBMkII0FZPpsiC5q44OvYPgCOXBh4FE6lQddh/EnfAmQfJkPgjTWA/jTzCqKMe+fTU/x29psgEaB/Fa1fQ2P4wWEzzhlxEDEKudBgtRc9VjYECnYK9O72DlzN2K/Gv7M37ipCK2AF96/cPv3R13lk4LbHhSma7xj9VXjjoG3h5jpPJ5tuZAgMBAAGjNTAzMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgIEMBMGA1UdJQQMMAoGCCsGAQUFBwMCMA0GCSqGSIb3DQEBCwUAA4IBAQAllMFpvcMb4DqK6Jukjlw473EvP22MzzF8+kfLWr4i/7KJQDou6sVbaKmSl2SNHCZNanIHANT9JvEr3UqvpujfOOg43M4O0o8vvfZgzcvQHBE1qgRkIPu3TVX/so9TCIvyOL1y0f15AEaRAdY5lMC1G5tjjZqkpJ4OsmVch9zK1SljteRsAJuNKpAyfiAV60YCR1e1gOYADd9kv17imHP89WFwmAL/c6pk0jne8w7Y7A/F122TlAyp+P+gne+EOXQwvBDCwMM4lsb3jS2js9XjKSvlNcRAw1B7cl2qmV79Qg48MlQaad7Ac//2fIlFGOwkWQBmHrVd59wxYzBJk+4o +-----END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem new file mode 100644 index 000000000000..165d362c24b1 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- 
+MIIEADCCAuigAwIBAgIJAM98d8EGv17jMA0GCSqGSIb3DQEBCwUAMIGjMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTESMBAGA1UEBwwJU3Vubnl2YWxlMRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMRwwGgYDVQQLDBNTdGFnaW5nIEVudmlyb25tZW50MTIwMAYDVQQDDCl1cy13ZXN0Mi5zdGFnaW5nLWFwaS5pbnRlcm5hbC5leGFtcGxlLm5ldDAeFw0yNTA0MjkyMzI1MTJaFw0zNTA0MjcyMzI1MTJaMIGjMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTESMBAGA1UEBwwJU3Vubnl2YWxlMRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMRwwGgYDVQQLDBNTdGFnaW5nIEVudmlyb25tZW50MTIwMAYDVQQDDCl1cy13ZXN0Mi5zdGFnaW5nLWFwaS5pbnRlcm5hbC5leGFtcGxlLm5ldDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLunz2AHYl4MYAlrZvSpRycgggsS+oOx/rJHAgb8jxuJSqGWb2aCnlKD5oC/P+qdthra7DRHR8zHGnrzCHKmsWwsaWqMpMh6VoQP0IyXvpQuhMBnjg7YiaaZ5+vegTIOqW1wWgUPPejVicROiWN7bbTaesoW+VwecvMvyGnlQWCLnSMUKzqUhvKA1nUWd+bPALDCvNtvFKUUA4gfhGRJBh/7aj+/OAIk3TcO1Io4peusvDpIAnVdTbiF3I9F7wHuyDs/nCt4+T/59khQoxOpqBHsmRqDUJbz1ZH/c9/Qmh5B+vPLvAN30K0LDx9l1B3xEy6aw7Rcf1I6MYSR384n5MCAwEAAaM1MDMwDwYDVR0TAQH/BAUwAwEB/zALBgNVHQ8EBAMCAgQwEwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQELBQADggEBAKVIWlkF/5aEVgY1jdREuIoxS0hexH3C5vqLG6jIiGkR3t89MAE60f0+aaR+cGvCnDJiYU0E+c4jDbvv3gIkhh4kvu3yhyFFTX583Zk4NldocwXDubR079AlE16pDnHdKUPdc2Tsxb1g+CWumPF2cjNi14P7eEQ3KdQOO5nV5tEpybsruGIspUZrH6hnc2q7dWRq+Ix6dfJYHIOnyLhkpfIRJ/6rVq6moizGuUAIuRgPrw9U8mSGEE349ZqC5x/sHzWpIgUdIaWLYbwe3PH+mxT4PlWRBmrmBe4BnJ+tl0P1TBTKNRjC2xUvRQGHss7VWbJkfZQhALw54aq1kKk96ns= +-----END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem new file mode 100644 index 000000000000..34fe5fa2d8e3 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- 
+MIID/DCCAuSgAwIBAgIJAMOZdoK0Z53fMA0GCSqGSIb3DQEBCwUAMIGhMQswCQYDVQQGEwJVUzEQMA4GA1UECAwHQXJpem9uYTEQMA4GA1UEBwwHUGhvZW5peDEZMBcGA1UECgwQR29vZ2xlIFRlc3QgQ29ycDEcMBoGA1UECwwTU3RhZ2luZyBFbnZpcm9ubWVudDE1MDMGA1UEAwwsdXMtY2VudHJhbDEuc3RhZ2luZy1hcGkuaW50ZXJuYWwuZXhhbXBsZS5uZXQwHhcNMjUwNDI5MjMyNDMxWhcNMzUwNDI3MjMyNDMxWjCBoTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0FyaXpvbmExEDAOBgNVBAcMB1Bob2VuaXgxGTAXBgNVBAoMEEdvb2dsZSBUZXN0IENvcnAxHDAaBgNVBAsME1N0YWdpbmcgRW52aXJvbm1lbnQxNTAzBgNVBAMMLHVzLWNlbnRyYWwxLnN0YWdpbmctYXBpLmludGVybmFsLmV4YW1wbGUubmV0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5EJGDTTfLz8yLXMqEUuocPJR5rGVF/JvMnmhU36bPRNxuaMt2uBNJQjoVTgsPjR77s9+nxQlls1ad6RPYGCqCXr43qrTYHM5grHcn0uwNgF7lmpTA2p56yQDO0dTko784o9O0eOFsmbtuXkVrYGYYZkoACzRayP7P/kIvfevtFXCP+acNNfMufLJ1ptm9vQrT91McnBM1qf35956M2MAZvVsVQxhfis8bN+qEOunMXOkvqhtYBLNicQV0dVb1osFwNmnEGrBgkrt7ov/1SXi1hoZY0QrUMSFwhSi+Iq7NUsnoP6SQ0MbNOr2thc7tRuDH72TT987zNrmBF9foj1Z2wIDAQABozUwMzAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwICBDATBgNVHSUEDDAKBggrBgEFBQcDAjANBgkqhkiG9w0BAQsFAAOCAQEA3siZEVRWCDzo2qpBMQpkPSTBVgcwIFbbGi0ZmdCatOn6nUPCvfSCIjcPcRVo8lUol6j7yHYDzdLj1ANdwE3IKwSJ4BRd5KrGULc+nCD1RB6Gj6VMHQ0TNgs3Ac36pcxWk6qf+2FDhmFfNu2PuUkSFlQyFbNy48w7Bzxzcy65PrGk8nqRrG2aqiYj3SUAlSSkFvWzK9CYy+ze5glTsP2IjaGaZmx4thThYhdCMI80RfzDFAyZqgJDNU9iVYw2uh/dSHwDwpOdPDfXUYLlLGE0dGIGHb/sMu13rLeT0FEKVABbG4hJPG5+Ajw7jrNwS5CDtXSjBLyLcsFST17R7ehVhg== +-----END CERTIFICATE----- \ No newline at end of file From 860a8ee29b90b30e0918226bb3668ca56d648ff2 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Wed, 14 May 2025 17:04:25 -0400 Subject: [PATCH 130/884] Check example vars (#13938) --- mmv1/api/resource.go | 41 ++++++++++++++- mmv1/api/resource/examples.go | 52 +++++++++++++------ mmv1/api/type.go | 2 +- .../AuthorizedOrgsDesc.yaml | 2 + mmv1/products/apigee/EnvgroupAttachment.yaml | 3 ++ mmv1/products/apigee/InstanceAttachment.yaml | 3 ++ .../products/bigquerydatatransfer/Config.yaml | 1 + mmv1/products/cloudbuild/Trigger.yaml | 2 + mmv1/products/colab/Schedule.yaml 
| 1 + mmv1/products/datastream/Stream.yaml | 1 + mmv1/products/dialogflow/EntityType.yaml | 2 +- mmv1/products/eventarc/Trigger.yaml | 1 + .../firebaseapphosting/DefaultDomain.yaml | 3 ++ mmv1/products/firebaseapphosting/Domain.yaml | 4 ++ .../products/firebaseextensions/Instance.yaml | 1 + mmv1/products/netapp/Backup.yaml | 1 + .../OrganizationSccBigQueryExport.yaml | 2 +- .../OrganizationSccBigQueryExport.yaml | 2 +- .../OrganizationSccBigQueryExports.yaml | 2 +- .../vmwareengine/ExternalAddress.yaml | 2 + ...ticshub_listing_subscription_basic.tf.tmpl | 6 +-- ...iner_local_control_plane_node_pool.tf.tmpl | 2 +- .../gkebackup_backupchannel_basic.tf.tmpl | 2 +- .../gkebackup_restorechannel_basic.tf.tmpl | 2 +- .../terraform/examples/instance_basic.tf.tmpl | 2 +- 25 files changed, 112 insertions(+), 30 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 42b2315c2d14..2ae709dc0d81 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -13,18 +13,23 @@ package api import ( + "bytes" "fmt" "log" "maps" + "path/filepath" "regexp" + "slices" "sort" "strings" + "text/template" + + "github.com/golang/glog" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/resource" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/utils" "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" - "golang.org/x/exp/slices" ) const RELATIVE_MAGICIAN_LOCATION = "mmv1/" @@ -1590,13 +1595,45 @@ func (r Resource) FormatDocDescription(desc string, indent bool) string { } func (r Resource) CustomTemplate(templatePath string, appendNewline bool) string { - output := resource.ExecuteTemplate(&r, templatePath, appendNewline) + output := ExecuteTemplate(&r, templatePath, appendNewline) if !appendNewline { output = strings.TrimSuffix(output, "\n") } return output } +func ExecuteTemplate(e any, templatePath string, appendNewline bool) string { + templates := []string{ + templatePath, + 
"templates/terraform/expand_resource_ref.tmpl", + "templates/terraform/custom_flatten/bigquery_table_ref.go.tmpl", + "templates/terraform/flatten_property_method.go.tmpl", + "templates/terraform/expand_property_method.go.tmpl", + "templates/terraform/update_mask.go.tmpl", + "templates/terraform/nested_query.go.tmpl", + "templates/terraform/unordered_list_customize_diff.go.tmpl", + } + templateFileName := filepath.Base(templatePath) + + tmpl, err := template.New(templateFileName).Funcs(google.TemplateFunctions).ParseFiles(templates...) + if err != nil { + glog.Exit(err) + } + + contents := bytes.Buffer{} + if err = tmpl.ExecuteTemplate(&contents, templateFileName, e); err != nil { + glog.Exit(err) + } + + rs := contents.String() + + if !strings.HasSuffix(rs, "\n") && appendNewline { + rs = fmt.Sprintf("%s\n", rs) + } + + return rs +} + // Returns the key of the list of resources in the List API response // Used to get the list of resources to sweep func (r Resource) ResourceListKey() string { diff --git a/mmv1/api/resource/examples.go b/mmv1/api/resource/examples.go index b2d267363e8d..89a981d9f329 100644 --- a/mmv1/api/resource/examples.go +++ b/mmv1/api/resource/examples.go @@ -18,6 +18,7 @@ import ( "fmt" "log" "net/url" + "os" "path/filepath" "regexp" "slices" @@ -201,6 +202,22 @@ func (e *Examples) Validate(rName string) { e.ValidateExternalProviders() } +func validateRegexForContents(r *regexp.Regexp, contents string, configPath string, objName string, vars map[string]string) { + matches := r.FindAllStringSubmatch(contents, -1) + for _, v := range matches { + found := false + for k, _ := range vars { + if k == v[1] { + found = true + break + } + } + if !found { + log.Fatalf("Failed to find %s environment variable defined in YAML file when validating the file %s. 
Please define this in %s", v[1], configPath, objName) + } + } +} + func (e *Examples) ValidateExternalProviders() { // Official providers supported by HashiCorp // https://registry.terraform.io/search/providers?namespace=hashicorp&tier=official @@ -249,7 +266,7 @@ func (e *Examples) SetHCLText() { docTestEnvVars[key] = docs_defaults[e.TestEnvVars[key]] } e.TestEnvVars = docTestEnvVars - e.DocumentationHCLText = ExecuteTemplate(e, e.ConfigPath, true) + e.DocumentationHCLText = e.ExecuteTemplate() e.DocumentationHCLText = regexp.MustCompile(`\n\n$`).ReplaceAllString(e.DocumentationHCLText, "\n") // Remove region tags @@ -290,7 +307,7 @@ func (e *Examples) SetHCLText() { e.Vars = testVars e.TestEnvVars = testTestEnvVars - e.TestHCLText = ExecuteTemplate(e, e.ConfigPath, true) + e.TestHCLText = e.ExecuteTemplate() e.TestHCLText = regexp.MustCompile(`\n\n$`).ReplaceAllString(e.TestHCLText, "\n") // Remove region tags e.TestHCLText = re1.ReplaceAllString(e.TestHCLText, "") @@ -302,20 +319,23 @@ func (e *Examples) SetHCLText() { e.TestEnvVars = originalTestEnvVars } -func ExecuteTemplate(e any, templatePath string, appendNewline bool) string { - templates := []string{ - templatePath, - "templates/terraform/expand_resource_ref.tmpl", - "templates/terraform/custom_flatten/bigquery_table_ref.go.tmpl", - "templates/terraform/flatten_property_method.go.tmpl", - "templates/terraform/expand_property_method.go.tmpl", - "templates/terraform/update_mask.go.tmpl", - "templates/terraform/nested_query.go.tmpl", - "templates/terraform/unordered_list_customize_diff.go.tmpl", +func (e *Examples) ExecuteTemplate() string { + templateContent, err := os.ReadFile(e.ConfigPath) + if err != nil { + glog.Exit(err) } - templateFileName := filepath.Base(templatePath) - tmpl, err := template.New(templateFileName).Funcs(google.TemplateFunctions).ParseFiles(templates...) 
+ fileContentString := string(templateContent) + + // Check that any variables in Vars or TestEnvVars used in the example are defined via YAML + envVarRegex := regexp.MustCompile(`{{index \$\.TestEnvVars "([a-zA-Z_]*)"}}`) + validateRegexForContents(envVarRegex, fileContentString, e.ConfigPath, "test_env_vars", e.TestEnvVars) + varRegex := regexp.MustCompile(`{{index \$\.Vars "([a-zA-Z_]*)"}}`) + validateRegexForContents(varRegex, fileContentString, e.ConfigPath, "vars", e.Vars) + + templateFileName := filepath.Base(e.ConfigPath) + + tmpl, err := template.New(templateFileName).Funcs(google.TemplateFunctions).Parse(fileContentString) if err != nil { glog.Exit(err) } @@ -327,7 +347,7 @@ func ExecuteTemplate(e any, templatePath string, appendNewline bool) string { rs := contents.String() - if !strings.HasSuffix(rs, "\n") && appendNewline { + if !strings.HasSuffix(rs, "\n") { rs = fmt.Sprintf("%s\n", rs) } @@ -401,7 +421,7 @@ func (e *Examples) SetOiCSHCLText() { } e.Vars = testVars - e.OicsHCLText = ExecuteTemplate(e, e.ConfigPath, true) + e.OicsHCLText = e.ExecuteTemplate() e.OicsHCLText = regexp.MustCompile(`\n\n$`).ReplaceAllString(e.OicsHCLText, "\n") // Remove region tags diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 6c0d5e15920e..75d41ddcb78f 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -1078,7 +1078,7 @@ func (t Type) NamespaceProperty() string { } func (t Type) CustomTemplate(templatePath string, appendNewline bool) string { - return resource.ExecuteTemplate(&t, templatePath, appendNewline) + return ExecuteTemplate(&t, templatePath, appendNewline) } func (t *Type) GetIdFormat() string { diff --git a/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml b/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml index e8b1cfd103f9..680d5a717fd4 100644 --- a/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml +++ b/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml @@ -61,6 +61,8 @@ examples: - name: 
'access_context_manager_authorized_orgs_desc_basic' primary_resource_id: 'authorized-orgs-desc' exclude_test: true + test_env_vars: + org_id: 'ORG_ID' parameters: - name: 'parent' type: String diff --git a/mmv1/products/apigee/EnvgroupAttachment.yaml b/mmv1/products/apigee/EnvgroupAttachment.yaml index 3d659175fd38..ac75d8abae38 100644 --- a/mmv1/products/apigee/EnvgroupAttachment.yaml +++ b/mmv1/products/apigee/EnvgroupAttachment.yaml @@ -52,6 +52,9 @@ examples: project_id: 'my-project' envgroup_name: 'my-envgroup' environment_name: 'my-environment' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' exclude_test: true - name: 'apigee_environment_group_attachment_basic_test' primary_resource_id: 'apigee_environment_group_attachment' diff --git a/mmv1/products/apigee/InstanceAttachment.yaml b/mmv1/products/apigee/InstanceAttachment.yaml index 1ae86f456e68..942b765976ea 100644 --- a/mmv1/products/apigee/InstanceAttachment.yaml +++ b/mmv1/products/apigee/InstanceAttachment.yaml @@ -51,6 +51,9 @@ examples: project_id: 'my-project' instance_name: 'my-instance-name' environment_name: 'my-environment-name' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' exclude_test: true # This is a more verbose version of the above that creates all # the resources needed for the acceptance test. 
diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index 50628f0c56cb..8206309eee45 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -60,6 +60,7 @@ examples: dataset_id: 'example_dataset' key_name: 'example-key' keyring_name: 'example-keyring' + display_name: 'display-name' exclude_test: true - name: 'bigquerydatatransfer_config_salesforce' primary_resource_id: 'salesforce_config' diff --git a/mmv1/products/cloudbuild/Trigger.yaml b/mmv1/products/cloudbuild/Trigger.yaml index db31f044d3f6..f4367b6eab7d 100644 --- a/mmv1/products/cloudbuild/Trigger.yaml +++ b/mmv1/products/cloudbuild/Trigger.yaml @@ -82,6 +82,8 @@ examples: cloudbuild_trigger_name: 'manual-trigger' - name: 'cloudbuild_trigger_manual_github_enterprise' primary_resource_id: 'manual-ghe-trigger' + vars: + cloudbuild_trigger_name: 'my-trigger' exclude_test: true - name: 'cloudbuild_trigger_manual_bitbucket_server' primary_resource_id: 'manual-bitbucket-trigger' diff --git a/mmv1/products/colab/Schedule.yaml b/mmv1/products/colab/Schedule.yaml index 29398afea896..9aa46e282b57 100644 --- a/mmv1/products/colab/Schedule.yaml +++ b/mmv1/products/colab/Schedule.yaml @@ -76,6 +76,7 @@ examples: dataform_repository: 'dataform-repository' start_time: '2014-10-02T15:01:23Z' end_time: '2014-10-10T15:01:23Z' + key_name: 'my-key' test_env_vars: project_id: 'PROJECT_NAME' location: 'REGION' diff --git a/mmv1/products/datastream/Stream.yaml b/mmv1/products/datastream/Stream.yaml index 10c6ae72349e..fa4df7b8b3e0 100644 --- a/mmv1/products/datastream/Stream.yaml +++ b/mmv1/products/datastream/Stream.yaml @@ -216,6 +216,7 @@ examples: - name: 'datastream_stream_salesforce' primary_resource_id: 'default' vars: + stream_id: 'sf-stream' source_connection_profile_id: 'source-profile' destination_connection_profile_id: 'destination-profile' exclude_test: true diff --git 
a/mmv1/products/dialogflow/EntityType.yaml b/mmv1/products/dialogflow/EntityType.yaml index 179920721f21..c938ed0191b0 100644 --- a/mmv1/products/dialogflow/EntityType.yaml +++ b/mmv1/products/dialogflow/EntityType.yaml @@ -38,7 +38,7 @@ examples: - name: 'dialogflow_entity_type_basic' primary_resource_id: 'basic_entity_type' vars: - intent_name: 'basic-entity-type' + entity_type_name: 'basic-entity-type' exclude_test: true parameters: properties: diff --git a/mmv1/products/eventarc/Trigger.yaml b/mmv1/products/eventarc/Trigger.yaml index fcb4cd0a7a6c..35c1d03dd84e 100644 --- a/mmv1/products/eventarc/Trigger.yaml +++ b/mmv1/products/eventarc/Trigger.yaml @@ -48,6 +48,7 @@ examples: primary_resource_id: primary vars: trigger_name: some-trigger + network_attachment_name: network-attachment test_vars_overrides: 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-trigger-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-trigger-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-trigger-network")))' test_env_vars: diff --git a/mmv1/products/firebaseapphosting/DefaultDomain.yaml b/mmv1/products/firebaseapphosting/DefaultDomain.yaml index 845823dcf53e..84e94a7a75da 100644 --- a/mmv1/products/firebaseapphosting/DefaultDomain.yaml +++ b/mmv1/products/firebaseapphosting/DefaultDomain.yaml @@ -33,6 +33,7 @@ examples: primary_resource_id: example vars: backend_id: 'dd-mini' + service_act_id: 'service-account' test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: @@ -42,6 +43,7 @@ examples: primary_resource_id: example vars: backend_id: 'dd-full' + service_act_id: 'service-account' test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: @@ -51,6 +53,7 @@ examples: primary_resource_id: example vars: backend_id: 'dd-disabled' + service_act_id: 'service-account' test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: diff --git a/mmv1/products/firebaseapphosting/Domain.yaml 
b/mmv1/products/firebaseapphosting/Domain.yaml index d2907277f52c..f27993655a87 100644 --- a/mmv1/products/firebaseapphosting/Domain.yaml +++ b/mmv1/products/firebaseapphosting/Domain.yaml @@ -45,6 +45,8 @@ examples: primary_resource_id: example vars: backend_id: 'domain-mini' + service_act_id: 'sa-id' + domain_id: example.com test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: @@ -55,6 +57,8 @@ examples: primary_resource_id: example vars: backend_id: 'domain-full' + service_act_id: 'sa-id' + domain_id: example.com test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: diff --git a/mmv1/products/firebaseextensions/Instance.yaml b/mmv1/products/firebaseextensions/Instance.yaml index 71791e8e5c60..85d6a897f8b7 100644 --- a/mmv1/products/firebaseextensions/Instance.yaml +++ b/mmv1/products/firebaseextensions/Instance.yaml @@ -52,6 +52,7 @@ examples: instance-id: 'storage-resize-images' bucket_id: 'bucket-id' service-account-id: 's-a' + location: "us-central1" test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: diff --git a/mmv1/products/netapp/Backup.yaml b/mmv1/products/netapp/Backup.yaml index 27bf9982c85a..091e3d8d245b 100644 --- a/mmv1/products/netapp/Backup.yaml +++ b/mmv1/products/netapp/Backup.yaml @@ -68,6 +68,7 @@ examples: volume_name: 'backup-volume' backup_vault_name: 'backup-vault' backup_name: 'test-backup' + network_name: 'network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: diff --git a/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml b/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml index 8a01eb3fa977..5a68d4bc8a59 100644 --- a/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml +++ b/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml @@ -45,7 +45,7 @@ examples: primary_resource_id: 
'custom_big_query_export_config' vars: big_query_export_id: 'my-export' - dataset: 'my-dataset' + dataset_id: 'my-dataset' name: 'my-export' test_env_vars: org_id: 'ORG_ID' diff --git a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml index 79e11daa14c3..8e415261d0ca 100644 --- a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml +++ b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml @@ -46,7 +46,7 @@ examples: primary_resource_id: 'custom_big_query_export_config' vars: big_query_export_id: 'my-export' - dataset: 'my-dataset' + dataset_id: 'my-dataset' name: 'my-export' test_env_vars: org_id: 'ORG_ID' diff --git a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml index ccfcbb0f6752..2d3e382dec7b 100644 --- a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml +++ b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml @@ -47,7 +47,7 @@ examples: primary_resource_id: 'custom_big_query_export_config' vars: big_query_export_id: 'my-export' - dataset: 'my-dataset' + dataset_id: 'my-dataset' name: 'my-export' test_env_vars: org_id: 'ORG_ID' diff --git a/mmv1/products/vmwareengine/ExternalAddress.yaml b/mmv1/products/vmwareengine/ExternalAddress.yaml index ee3aa777699b..5e51a5143d24 100644 --- a/mmv1/products/vmwareengine/ExternalAddress.yaml +++ b/mmv1/products/vmwareengine/ExternalAddress.yaml @@ -62,6 +62,8 @@ examples: private_cloud_id: 'sample-pc' management_cluster_id: 'sample-mgmt-cluster' network_policy_id: 'sample-np' + test_env_vars: + region: 'REGION' # update tests will take care of all CRUD tests. Parent PC creation is expensive and node reservation is required. 
exclude_test: true parameters: diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_subscription_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_subscription_basic.tf.tmpl index 752b377818ea..806abd0bb698 100644 --- a/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_subscription_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_subscription_basic.tf.tmpl @@ -2,7 +2,7 @@ resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" location = "US" data_exchange_id = "{{index $.Vars "data_exchange_id"}}" display_name = "{{index $.Vars "data_exchange_id"}}" - description = "{{index $.Vars "desc"}}" + description = "Test Description" } resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { @@ -10,7 +10,7 @@ resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { data_exchange_id = google_bigquery_analytics_hub_data_exchange.{{$.PrimaryResourceId}}.data_exchange_id listing_id = "{{index $.Vars "listing_id"}}" display_name = "{{index $.Vars "listing_id"}}" - description = "{{index $.Vars "desc"}}" + description = "Test Description" bigquery_dataset { dataset = google_bigquery_dataset.{{$.PrimaryResourceId}}.id @@ -20,7 +20,7 @@ resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { dataset_id = "{{index $.Vars "listing_id"}}" friendly_name = "{{index $.Vars "listing_id"}}" - description = "{{index $.Vars "desc"}}" + description = "Test Description" location = "US" } diff --git a/mmv1/templates/terraform/examples/edgecontainer_local_control_plane_node_pool.tf.tmpl b/mmv1/templates/terraform/examples/edgecontainer_local_control_plane_node_pool.tf.tmpl index d7eaf014bf00..4edcdd696975 100644 --- a/mmv1/templates/terraform/examples/edgecontainer_local_control_plane_node_pool.tf.tmpl +++ 
b/mmv1/templates/terraform/examples/edgecontainer_local_control_plane_node_pool.tf.tmpl @@ -1,5 +1,5 @@ resource "google_edgecontainer_cluster" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "edgecontainer_cluster_name"}}" + name = "default" location = "us-central1" authorization { diff --git a/mmv1/templates/terraform/examples/gkebackup_backupchannel_basic.tf.tmpl b/mmv1/templates/terraform/examples/gkebackup_backupchannel_basic.tf.tmpl index b31780a21afe..727a9d8de164 100644 --- a/mmv1/templates/terraform/examples/gkebackup_backupchannel_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkebackup_backupchannel_basic.tf.tmpl @@ -1,7 +1,7 @@ resource "google_gke_backup_backup_channel" "basic" { name = "{{index $.Vars "name"}}" location = "us-central1" - description = "{{index $.Vars "description"}}" + description = "Description" destination_project = "{{index $.Vars "destination_project"}}" labels = { "key": "some-value" } } diff --git a/mmv1/templates/terraform/examples/gkebackup_restorechannel_basic.tf.tmpl b/mmv1/templates/terraform/examples/gkebackup_restorechannel_basic.tf.tmpl index cfa7fcbff6d7..7baf60005bcc 100644 --- a/mmv1/templates/terraform/examples/gkebackup_restorechannel_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkebackup_restorechannel_basic.tf.tmpl @@ -1,7 +1,7 @@ resource "google_gke_backup_restore_channel" "basic" { name = "{{index $.Vars "name"}}" location = "us-central1" - description = "{{index $.Vars "description"}}" + description = "Description" destination_project = "{{index $.Vars "destination_project"}}" labels = { "key": "some-value" } } diff --git a/mmv1/templates/terraform/examples/instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/instance_basic.tf.tmpl index ecbc1172ad0c..10f12c1b45a4 100644 --- a/mmv1/templates/terraform/examples/instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/instance_basic.tf.tmpl @@ -1,6 +1,6 @@ resource "google_compute_instance" "{{$.PrimaryResourceId}}" { name = 
"{{index $.Vars "instance_name"}}" - zone = "{{index $.Vars "zone_name"}}" + zone = "us-central1-a" machine_type = "e2-medium" boot_disk { From 4ff40f62516a4c8c104ff33dba0a070aede26198 Mon Sep 17 00:00:00 2001 From: "Haoting.C" <34197666+plus-1s@users.noreply.github.com> Date: Wed, 14 May 2025 14:13:27 -0700 Subject: [PATCH 131/884] Mark google_iam_oauth_client_credential client_secret as sensitive (#13946) --- mmv1/products/iamworkforcepool/OauthClientCredential.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/iamworkforcepool/OauthClientCredential.yaml b/mmv1/products/iamworkforcepool/OauthClientCredential.yaml index b6eb2abd111e..bacf221fadcd 100644 --- a/mmv1/products/iamworkforcepool/OauthClientCredential.yaml +++ b/mmv1/products/iamworkforcepool/OauthClientCredential.yaml @@ -85,6 +85,7 @@ properties: more, see [OAuth client and credential security risks and mitigations](https://cloud.google.com/iam/docs/workforce-oauth-app#security) output: true + sensitive: true - name: displayName type: String description: |- From 5062ab98e4ff98028b2bcd008c1c77c9c1b48b3f Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 14 May 2025 14:15:39 -0700 Subject: [PATCH 132/884] remove -v from github go test runs (#13953) --- .github/workflows/unit-test-magician.yml | 2 +- .github/workflows/unit-test-mmv1.yml | 2 +- .github/workflows/unit-test-tools.yml | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/unit-test-magician.yml b/.github/workflows/unit-test-magician.yml index 68680b5a75e0..c90e855ebf5a 100644 --- a/.github/workflows/unit-test-magician.yml +++ b/.github/workflows/unit-test-magician.yml @@ -19,7 +19,7 @@ jobs: - name: Run magician unit tests run: | cd .ci/magician - go test ./... -v + go test ./... 
env: GITHUB_TOKEN_CLASSIC: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/unit-test-mmv1.yml b/.github/workflows/unit-test-mmv1.yml index 7ab1de8edd4a..db75e507fffe 100644 --- a/.github/workflows/unit-test-mmv1.yml +++ b/.github/workflows/unit-test-mmv1.yml @@ -65,5 +65,5 @@ jobs: - name: Run mmv1 unit tests run: | cd mmv1 - go test ./... -v + go test ./... diff --git a/.github/workflows/unit-test-tools.yml b/.github/workflows/unit-test-tools.yml index e63e9becc393..21cfe6eedb48 100644 --- a/.github/workflows/unit-test-tools.yml +++ b/.github/workflows/unit-test-tools.yml @@ -29,7 +29,7 @@ jobs: - name: Test diff-processor with TPG run: | cd tools/diff-processor - go test -v ./... + go test ./... env: SERVICES_DIR: tools/diff-processor/new/google/services @@ -42,7 +42,7 @@ jobs: - name: Test diff-processor with TPGB run: | cd tools/diff-processor - go test -v ./... + go test ./... env: SERVICES_DIR: tools/diff-processor/new/google/services @@ -64,7 +64,7 @@ jobs: - name: Test go-changelog run: | cd tools/go-changelog - go test -v ./... + go test ./... issue-labeler: runs-on: ubuntu-22.04 @@ -84,7 +84,7 @@ jobs: - name: Test issue-labeler run: | cd tools/issue-labeler - go test -v ./... + go test ./... template-check: runs-on: ubuntu-22.04 @@ -104,7 +104,7 @@ jobs: - name: Test template-check run: | cd tools/template-check - go test -v ./... + go test ./... test-reader: runs-on: ubuntu-22.04 @@ -124,4 +124,4 @@ jobs: - name: Test test-reader run: | cd tools/test-reader - go test -v ./... \ No newline at end of file + go test ./... 
\ No newline at end of file From c55e5d994709fb3b25c72a1d5585c53779129f21 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 14 May 2025 14:34:28 -0700 Subject: [PATCH 133/884] Add intialization for github client in interface (#13951) --- .ci/magician/github/init.go | 62 +++++++++++++++++++++++++++++++++++ .ci/magician/utility/utils.go | 33 ++++++++++++------- 2 files changed, 84 insertions(+), 11 deletions(-) diff --git a/.ci/magician/github/init.go b/.ci/magician/github/init.go index d64995aa46b7..c09217803075 100644 --- a/.ci/magician/github/init.go +++ b/.ci/magician/github/init.go @@ -15,13 +15,75 @@ */ package github +import ( + "bytes" + "context" + "io" + "net/http" + + utils "magician/utility" + + gh "github.com/google/go-github/v68/github" +) + // Client for GitHub interactions. type Client struct { token string + gh *gh.Client + ctx context.Context +} + +// retryTransport is a custom RoundTripper that adds retry and logging +type retryTransport struct { + underlyingTransport http.RoundTripper + token string } func NewClient(token string) *Client { + ctx := context.Background() + + // Create a custom transport with retry logic + rt := &retryTransport{ + underlyingTransport: http.DefaultTransport, + token: token, + } + + // Use this custom transport with OAuth2 + tc := &http.Client{Transport: rt} + + // Create the GitHub client with our custom transport + ghClient := gh.NewClient(tc) + return &Client{ + gh: ghClient, token: token, + ctx: ctx, + } +} + +// RoundTrip implements the http.RoundTripper interface +func (rt *retryTransport) RoundTrip(req *http.Request) (*http.Response, error) { + // Extract information from the request + method := req.Method + urlStr := req.URL.String() + + // Read and log the request body if present + var bodyBytes []byte + if req.Body != nil { + bodyBytes, _ = io.ReadAll(req.Body) + req.Body.Close() + req.Body = io.NopCloser(bytes.NewReader(bodyBytes)) } + + resp, respBody, err := utils.RequestCallWithRetryRaw(urlStr, 
method, rt.token, bodyBytes) + if err != nil { + return nil, err + } + + // Replace the response body with our captured body + resp.Body.Close() // Close the original body + resp.Body = io.NopCloser(bytes.NewReader(respBody)) + resp.ContentLength = int64(len(respBody)) + + return resp, nil } diff --git a/.ci/magician/utility/utils.go b/.ci/magician/utility/utils.go index 02534af9b0a4..0325c1090d85 100644 --- a/.ci/magician/utility/utils.go +++ b/.ci/magician/utility/utils.go @@ -143,9 +143,11 @@ func calculateBackoff(attempt int, config retryConfig) time.Duration { return backoff } -// RequestCallWithRetry makes an HTTP request with retry capability -func requestCallWithRetry(url, method, credentials string, result any, body any, config retryConfig) error { +// RequestCallWithRetryRaw raw version of the retry function that returns the response and body bytes +func requestCallWithRetryRaw(url, method, credentials string, body any, config retryConfig) (*http.Response, []byte, error) { var lastErr error + var lastResp *http.Response + var lastBodyBytes []byte for attempt := 0; attempt <= config.MaxRetries; attempt++ { // If this is a retry attempt, wait before trying again @@ -161,27 +163,36 @@ func requestCallWithRetry(url, method, credentials string, result any, body any, continue // Network error, retry } - // Process the response - err = processResponse(resp, respBodyBytes, result) - if err != nil { - lastErr = err + lastResp = resp + lastBodyBytes = respBodyBytes - // Check if we should retry based on status code + // Check if we should retry based on status code + if resp.StatusCode < 200 || resp.StatusCode >= 300 { if shouldRetry(resp.StatusCode, config) { continue } } - // If we got here with no error, return success - return err + return lastResp, lastBodyBytes, nil } - return fmt.Errorf("max retries exceeded: %w", lastErr) + return lastResp, lastBodyBytes, lastErr +} + +// RequestCallWithRetryRaw is a convenience function that uses default retry settings 
+func RequestCallWithRetryRaw(url, method, credentials string, body any) (*http.Response, []byte, error) { + return requestCallWithRetryRaw(url, method, credentials, body, defaultRetryConfig()) } // RequestCallWithRetry is a convenience function that uses default retry settings +// and unmarshals the response into the result func RequestCallWithRetry(url, method, credentials string, result any, body any) error { - return requestCallWithRetry(url, method, credentials, result, body, defaultRetryConfig()) + resp, respBodyBytes, err := requestCallWithRetryRaw(url, method, credentials, body, defaultRetryConfig()) + if err != nil { + return err + } + + return processResponse(resp, respBodyBytes, result) } func Removes(s1 []string, s2 []string) []string { From 7a8fbb3fe2557f939b8aafa26976a6d12e06db9b Mon Sep 17 00:00:00 2001 From: Daniel Dubnikov Date: Thu, 15 May 2025 01:26:54 +0300 Subject: [PATCH 134/884] Promote network security intercept resources to GA. (#13884) --- .../networksecurity/InterceptDeployment.yaml | 12 ---- .../InterceptDeploymentGroup.yaml | 16 ----- .../InterceptEndpointGroup.yaml | 19 ------ .../InterceptEndpointGroupAssociation.yaml | 17 ------ ...ecurity_intercept_deployment_basic.tf.tmpl | 27 ++++----- ...y_intercept_deployment_group_basic.tf.tmpl | 2 - ...t_endpoint_group_association_basic.tf.tmpl | 11 +--- ...ity_intercept_endpoint_group_basic.tf.tmpl | 11 ++-- ...curity_intercept_deployment_group_test.go} | 9 +-- ...ork_security_intercept_deployment_test.go} | 59 +++++++------------ ...ercept_endpoint_group_association_test.go} | 27 +++------ ...security_intercept_endpoint_group_test.go} | 27 +++------ 12 files changed, 55 insertions(+), 182 deletions(-) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_deployment_group_generated_test.go.tmpl => resource_network_security_intercept_deployment_group_test.go} (90%) rename 
mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_deployment_generated_test.go.tmpl => resource_network_security_intercept_deployment_test.go} (74%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl => resource_network_security_intercept_endpoint_group_association_test.go} (80%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_endpoint_group_generated_test.go.tmpl => resource_network_security_intercept_endpoint_group_test.go} (75%) diff --git a/mmv1/products/networksecurity/InterceptDeployment.yaml b/mmv1/products/networksecurity/InterceptDeployment.yaml index 3a0942bf0089..14c349a9183b 100644 --- a/mmv1/products/networksecurity/InterceptDeployment.yaml +++ b/mmv1/products/networksecurity/InterceptDeployment.yaml @@ -18,7 +18,6 @@ description: |- GENEVE-encapsulated traffic, e.g. a zonal instance group fronted by an internal passthrough load balancer. Deployments are always part of a global deployment group which represents a global intercept service. -min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptDeployments/{{intercept_deployment_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptDeployments' @@ -61,7 +60,6 @@ parameters: type: String description: |- The cloud location of the deployment, e.g. `us-central1-a` or `asia-south1-b`. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -70,7 +68,6 @@ parameters: description: |- The ID to use for the new deployment, which will become the final component of the deployment's resource name. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -81,34 +78,29 @@ properties: The resource name of this deployment, for example: `projects/123456789/locations/us-central1-a/interceptDeployments/my-dep`. 
See https://google.aip.dev/122 for more details. - min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. - min_version: 'beta' - name: 'forwardingRule' type: String description: |- The regional forwarding rule that fronts the interceptors, for example: `projects/123456789/regions/us-central1/forwardingRules/my-rule`. See https://google.aip.dev/124. - min_version: 'beta' required: true immutable: true - name: 'interceptDeploymentGroup' @@ -117,7 +109,6 @@ properties: The deployment group that this deployment is a part of, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/124. - min_version: 'beta' required: true immutable: true - name: 'state' @@ -132,7 +123,6 @@ properties: DELETING OUT_OF_SYNC DELETE_FAILED - min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -141,11 +131,9 @@ properties: and the system is working to reconcile them. This part of the normal operation (e.g. linking a new association to the parent group). See https://google.aip.dev/128. - min_version: 'beta' output: true - name: 'description' type: String description: |- User-provided description of the deployment. Used as additional context for the deployment. 
- min_version: 'beta' diff --git a/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml b/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml index 8bfca204c402..5678948f59dc 100644 --- a/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml +++ b/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml @@ -17,7 +17,6 @@ description: |- A deployment group aggregates many zonal intercept backends (deployments) into a single global intercept service. Consumers can connect this service using an endpoint group. -min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptDeploymentGroups/{{intercept_deployment_group_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptDeploymentGroups' @@ -58,7 +57,6 @@ parameters: type: String description: |- The cloud location of the deployment group, currently restricted to `global`. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -67,7 +65,6 @@ parameters: description: |- The ID to use for the new deployment group, which will become the final component of the deployment group's resource name. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -78,39 +75,33 @@ properties: The resource name of this deployment group, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/122 for more details. - min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. 
- min_version: 'beta' - name: 'network' type: String description: |- The network that will be used for all child deployments, for example: `projects/{project}/global/networks/{network}`. See https://google.aip.dev/124. - min_version: 'beta' required: true immutable: true - name: 'connectedEndpointGroups' type: Array - min_version: 'beta' description: |- The list of endpoint groups that are connected to this resource. output: true @@ -123,7 +114,6 @@ properties: The connected endpoint group's resource name, for example: `projects/123456789/locations/global/interceptEndpointGroups/my-eg`. See https://google.aip.dev/124. - min_version: 'beta' output: true - name: 'state' type: String @@ -135,7 +125,6 @@ properties: ACTIVE CREATING DELETING - min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -144,20 +133,17 @@ properties: and the system is working to reconcile them. This is part of the normal operation (e.g. adding a new deployment to the group) See https://google.aip.dev/128. - min_version: 'beta' output: true - name: 'description' type: String description: |- User-provided description of the deployment group. Used as additional context for the deployment group. - min_version: 'beta' - name: 'locations' type: Array is_set: true description: |- The list of locations where the deployment group is present. - min_version: 'beta' output: true item_type: type: NestedObject @@ -170,11 +156,9 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC - min_version: 'beta' output: true - name: 'location' type: String description: |- The cloud location, e.g. `us-central1-a` or `asia-south1-b`. 
- min_version: 'beta' output: true diff --git a/mmv1/products/networksecurity/InterceptEndpointGroup.yaml b/mmv1/products/networksecurity/InterceptEndpointGroup.yaml index 58e89ba17649..e72884477419 100644 --- a/mmv1/products/networksecurity/InterceptEndpointGroup.yaml +++ b/mmv1/products/networksecurity/InterceptEndpointGroup.yaml @@ -19,7 +19,6 @@ description: |- - An association between their network and the endpoint group. - A security profile that points to the endpoint group. - A firewall rule that references the security profile (group). -min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroups/{{intercept_endpoint_group_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroups' @@ -60,7 +59,6 @@ parameters: type: String description: |- The cloud location of the endpoint group, currently restricted to `global`. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -69,7 +67,6 @@ parameters: description: |- The ID to use for the endpoint group, which will become the final component of the endpoint group's resource name. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -80,34 +77,29 @@ properties: The resource name of this endpoint group, for example: `projects/123456789/locations/global/interceptEndpointGroups/my-eg`. See https://google.aip.dev/122 for more details. - min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. 
- min_version: 'beta' - name: 'interceptDeploymentGroup' type: String description: |- The deployment group that this endpoint group is connected to, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/124. - min_version: 'beta' required: true immutable: true - name: 'state' @@ -123,7 +115,6 @@ properties: DELETING OUT_OF_SYNC DELETE_FAILED - min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -132,20 +123,17 @@ properties: and the system is working to reconcile them. This is part of the normal operation (e.g. adding a new association to the group). See https://google.aip.dev/128. - min_version: 'beta' output: true - name: description type: String description: |- User-provided description of the endpoint group. Used as additional context for the endpoint group. - min_version: 'beta' - name: associations type: Array is_set: true description: |- List of associations to this endpoint group. - min_version: 'beta' output: true item_type: type: NestedObject @@ -156,7 +144,6 @@ properties: The connected association's resource name, for example: `projects/123456789/locations/global/interceptEndpointGroupAssociations/my-ega`. See https://google.aip.dev/124. - min_version: 'beta' output: true - name: network type: String @@ -164,7 +151,6 @@ properties: The associated network, for example: projects/123456789/global/networks/my-network. See https://google.aip.dev/124. - min_version: 'beta' output: true - name: state type: String @@ -178,13 +164,11 @@ properties: CLOSED OUT_OF_SYNC DELETE_FAILED - min_version: 'beta' output: true - name: connectedDeploymentGroup type: NestedObject description: |- The endpoint group's view of a connected deployment group. - min_version: 'beta' output: true properties: - name: name @@ -193,14 +177,12 @@ properties: The connected deployment group's resource name, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. 
See https://google.aip.dev/124. - min_version: 'beta' output: true - name: locations type: Array is_set: true description: |- The list of locations where the deployment group is present. - min_version: 'beta' output: true item_type: type: NestedObject @@ -218,5 +200,4 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC - min_version: 'beta' output: true diff --git a/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml b/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml index b54b4e1c4b0e..299d5c5b3311 100644 --- a/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml +++ b/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml @@ -21,7 +21,6 @@ description: |- network to the endpoint group, but does not enable intercept by itself. To enable intercept, the user must also create a network firewall policy containing intercept rules and associate it with the network. -min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroupAssociations/{{intercept_endpoint_group_association_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroupAssociations' @@ -62,7 +61,6 @@ parameters: type: String description: |- The cloud location of the association, currently restricted to `global`. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -72,7 +70,6 @@ parameters: The ID to use for the new association, which will become the final component of the endpoint group's resource name. If not provided, the server will generate a unique ID. - min_version: 'beta' url_param_only: true immutable: true properties: @@ -82,34 +79,29 @@ properties: The resource name of this endpoint group association, for example: `projects/123456789/locations/global/interceptEndpointGroupAssociations/my-eg-association`. See https://google.aip.dev/122 for more details. 
- min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. - min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. - min_version: 'beta' - name: 'interceptEndpointGroup' type: String description: |- The endpoint group that this association is connected to, for example: `projects/123456789/locations/global/interceptEndpointGroups/my-eg`. See https://google.aip.dev/124. - min_version: 'beta' required: true immutable: true - name: 'network' @@ -118,7 +110,6 @@ properties: The VPC network that is associated. for example: `projects/123456789/global/networks/my-network`. See https://google.aip.dev/124. - min_version: 'beta' required: true immutable: true - name: 'locationsDetails' @@ -129,14 +120,12 @@ properties: of the association itself. deprecation_message: |- `locationsDetails` is deprecated and will be removed in a future major release. Use `locations` instead. - min_version: 'beta' output: true item_type: type: NestedObject properties: - name: 'location' type: String - min_version: 'beta' description: |- The cloud location, e.g. `us-central1-a` or `asia-south1`. output: true @@ -148,7 +137,6 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC - min_version: 'beta' output: true - name: 'state' type: String @@ -162,7 +150,6 @@ properties: CLOSED OUT_OF_SYNC DELETE_FAILED - min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -171,7 +158,6 @@ properties: and the system is working to reconcile them. This part of the normal operation (e.g. adding a new location to the target deployment group). See https://google.aip.dev/128. 
- min_version: 'beta' output: true - name: locations type: Array @@ -179,7 +165,6 @@ properties: description: |- The list of locations where the association is configured. This information is retrieved from the linked endpoint group. - min_version: 'beta' output: true item_type: type: NestedObject @@ -188,7 +173,6 @@ properties: type: String description: |- The cloud location, e.g. `us-central1-a` or `asia-south1-b`. - min_version: 'beta' output: true - name: state type: String @@ -198,5 +182,4 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC - min_version: 'beta' output: true diff --git a/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl index 3affebab005c..dfc952ded06a 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl @@ -1,11 +1,9 @@ resource "google_compute_network" "network" { - provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "subnetwork" { - provider = google-beta name = "{{index $.Vars "subnetwork_name"}}" region = "us-central1" ip_cidr_range = "10.1.0.0/16" @@ -13,16 +11,14 @@ resource "google_compute_subnetwork" "subnetwork" { } resource "google_compute_region_health_check" "health_check" { - provider = google-beta - name = "{{index $.Vars "health_check_name"}}" - region = "us-central1" + name = "{{index $.Vars "health_check_name"}}" + region = "us-central1" http_health_check { port = 80 } } resource "google_compute_region_backend_service" "backend_service" { - provider = google-beta name = "{{index $.Vars "backend_service_name"}}" region = "us-central1" health_checks = [google_compute_region_health_check.health_check.id] @@ -31,26 +27,23 @@ resource "google_compute_region_backend_service" "backend_service" 
{ } resource "google_compute_forwarding_rule" "forwarding_rule" { - provider = google-beta - name = "{{index $.Vars "forwarding_rule_name"}}" - region = "us-central1" - network = google_compute_network.network.name - subnetwork = google_compute_subnetwork.subnetwork.name - backend_service = google_compute_region_backend_service.backend_service.id - load_balancing_scheme = "INTERNAL" - ports = [6081] - ip_protocol = "UDP" + name = "{{index $.Vars "forwarding_rule_name"}}" + region = "us-central1" + network = google_compute_network.network.name + subnetwork = google_compute_subnetwork.subnetwork.name + backend_service = google_compute_region_backend_service.backend_service.id + load_balancing_scheme = "INTERNAL" + ports = [6081] + ip_protocol = "UDP" } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_deployment" "{{$.PrimaryResourceId}}" { - provider = google-beta intercept_deployment_id = "{{index $.Vars "deployment_id"}}" location = "us-central1-a" forwarding_rule = google_compute_forwarding_rule.forwarding_rule.id diff --git a/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl index e09f9d5d3ccd..cb8946439004 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl @@ -1,11 +1,9 @@ resource "google_compute_network" "network" { - provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "{{$.PrimaryResourceId}}" { - provider = google-beta 
intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.network.id diff --git a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl index 7f17b1c8ba47..69971ffdebe2 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl @@ -1,31 +1,26 @@ resource "google_compute_network" "producer_network" { - provider = google-beta name = "{{index $.Vars "producer_network_name"}}" auto_create_subnetworks = false } resource "google_compute_network" "consumer_network" { - provider = google-beta name = "{{index $.Vars "consumer_network_name"}}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.producer_network.id } resource "google_network_security_intercept_endpoint_group" "endpoint_group" { - provider = google-beta - intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id } resource "google_network_security_intercept_endpoint_group_association" "{{$.PrimaryResourceId}}" { - provider = google-beta intercept_endpoint_group_association_id = "{{index $.Vars "endpoint_group_association_id"}}" location = "global" network = 
google_compute_network.consumer_network.id diff --git a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl index 4facb880e499..7027ab023e9f 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl @@ -1,22 +1,19 @@ resource "google_compute_network" "network" { - provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_endpoint_group" "{{$.PrimaryResourceId}}" { - provider = google-beta - intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id - description = "some description" + intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + description = "some description" labels = { foo = "bar" } diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_generated_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_test.go similarity index 90% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_generated_test.go.tmpl rename to 
mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_test.go index 5911b17c1bc3..614a06f28b81 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_generated_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_test.go @@ -1,5 +1,4 @@ package networksecurity_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +18,7 @@ func TestAccNetworkSecurityInterceptDeploymentGroup_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptDeploymentGroup_basic(context), @@ -51,13 +50,11 @@ func TestAccNetworkSecurityInterceptDeploymentGroup_update(t *testing.T) { func testAccNetworkSecurityInterceptDeploymentGroup_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { - provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "default" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id @@ -72,13 +69,11 @@ resource "google_network_security_intercept_deployment_group" "default" { func testAccNetworkSecurityInterceptDeploymentGroup_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { - provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "default" { - provider = 
google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id @@ -89,5 +84,3 @@ resource "google_network_security_intercept_deployment_group" "default" { } `, context) } - -{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_generated_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_test.go similarity index 74% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_generated_test.go.tmpl rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_test.go index 8b16becf4d43..932d25f63a97 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_generated_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_test.go @@ -1,5 +1,4 @@ package networksecurity_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +18,7 @@ func TestAccNetworkSecurityInterceptDeployment_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptDeployment_basic(context), @@ -51,13 +50,11 @@ func TestAccNetworkSecurityInterceptDeployment_update(t *testing.T) { func testAccNetworkSecurityInterceptDeployment_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { - provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "subnetwork" { - provider = 
google-beta name = "tf-test-example-subnet%{random_suffix}" region = "us-central1" ip_cidr_range = "10.1.0.0/16" @@ -65,16 +62,14 @@ resource "google_compute_subnetwork" "subnetwork" { } resource "google_compute_region_health_check" "health_check" { - provider = google-beta - name = "tf-test-example-hc%{random_suffix}" - region = "us-central1" + name = "tf-test-example-hc%{random_suffix}" + region = "us-central1" http_health_check { port = 80 } } resource "google_compute_region_backend_service" "backend_service" { - provider = google-beta name = "tf-test-example-bs%{random_suffix}" region = "us-central1" health_checks = [google_compute_region_health_check.health_check.id] @@ -83,26 +78,23 @@ resource "google_compute_region_backend_service" "backend_service" { } resource "google_compute_forwarding_rule" "forwarding_rule" { - provider = google-beta - name = "tf-test-example-fwr%{random_suffix}" - region = "us-central1" - network = google_compute_network.network.name - subnetwork = google_compute_subnetwork.subnetwork.name - backend_service = google_compute_region_backend_service.backend_service.id - load_balancing_scheme = "INTERNAL" - ports = [6081] - ip_protocol = "UDP" + name = "tf-test-example-fwr%{random_suffix}" + region = "us-central1" + network = google_compute_network.network.name + subnetwork = google_compute_subnetwork.subnetwork.name + backend_service = google_compute_region_backend_service.backend_service.id + load_balancing_scheme = "INTERNAL" + ports = [6081] + ip_protocol = "UDP" } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_deployment" "default" { - provider = google-beta intercept_deployment_id = "tf-test-example-deployment%{random_suffix}" location = "us-central1-a" forwarding_rule = 
google_compute_forwarding_rule.forwarding_rule.id @@ -118,13 +110,11 @@ resource "google_network_security_intercept_deployment" "default" { func testAccNetworkSecurityInterceptDeployment_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { - provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "subnetwork" { - provider = google-beta name = "tf-test-example-subnet%{random_suffix}" region = "us-central1" ip_cidr_range = "10.1.0.0/16" @@ -132,16 +122,14 @@ resource "google_compute_subnetwork" "subnetwork" { } resource "google_compute_region_health_check" "health_check" { - provider = google-beta - name = "tf-test-example-hc%{random_suffix}" - region = "us-central1" + name = "tf-test-example-hc%{random_suffix}" + region = "us-central1" http_health_check { port = 80 } } resource "google_compute_region_backend_service" "backend_service" { - provider = google-beta name = "tf-test-example-bs%{random_suffix}" region = "us-central1" health_checks = [google_compute_region_health_check.health_check.id] @@ -150,26 +138,23 @@ resource "google_compute_region_backend_service" "backend_service" { } resource "google_compute_forwarding_rule" "forwarding_rule" { - provider = google-beta - name = "tf-test-example-fwr%{random_suffix}" - region = "us-central1" - network = google_compute_network.network.name - subnetwork = google_compute_subnetwork.subnetwork.name - backend_service = google_compute_region_backend_service.backend_service.id - load_balancing_scheme = "INTERNAL" - ports = [6081] - ip_protocol = "UDP" + name = "tf-test-example-fwr%{random_suffix}" + region = "us-central1" + network = google_compute_network.network.name + subnetwork = google_compute_subnetwork.subnetwork.name + backend_service = google_compute_region_backend_service.backend_service.id + load_balancing_scheme = "INTERNAL" + ports = [6081] + ip_protocol = 
"UDP" } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_deployment" "default" { - provider = google-beta intercept_deployment_id = "tf-test-example-deployment%{random_suffix}" location = "us-central1-a" forwarding_rule = google_compute_forwarding_rule.forwarding_rule.id @@ -181,5 +166,3 @@ resource "google_network_security_intercept_deployment" "default" { } `, context) } - -{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_test.go similarity index 80% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_test.go index cc581eb932dc..921ca4406600 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_test.go @@ -1,5 +1,4 @@ package networksecurity_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +18,7 @@ func TestAccNetworkSecurityInterceptEndpointGroupAssociation_update(t *testing.T acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { Config: 
testAccNetworkSecurityInterceptEndpointGroupAssociation_basic(context), @@ -51,33 +50,28 @@ func TestAccNetworkSecurityInterceptEndpointGroupAssociation_update(t *testing.T func testAccNetworkSecurityInterceptEndpointGroupAssociation_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "producer_network" { - provider = google-beta name = "tf-test-example-prod-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_network" "consumer_network" { - provider = google-beta name = "tf-test-example-cons-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.producer_network.id } resource "google_network_security_intercept_endpoint_group" "endpoint_group" { - provider = google-beta - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id } resource "google_network_security_intercept_endpoint_group_association" "default" { - provider = google-beta intercept_endpoint_group_association_id = "tf-test-example-ega%{random_suffix}" location = "global" network = google_compute_network.consumer_network.id @@ -92,33 +86,28 @@ resource "google_network_security_intercept_endpoint_group_association" "default func testAccNetworkSecurityInterceptEndpointGroupAssociation_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "producer_network" { - provider = google-beta name = 
"tf-test-example-prod-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_network" "consumer_network" { - provider = google-beta name = "tf-test-example-cons-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.producer_network.id } resource "google_network_security_intercept_endpoint_group" "endpoint_group" { - provider = google-beta - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id } resource "google_network_security_intercept_endpoint_group_association" "default" { - provider = google-beta intercept_endpoint_group_association_id = "tf-test-example-ega%{random_suffix}" location = "global" network = google_compute_network.consumer_network.id @@ -129,5 +118,3 @@ resource "google_network_security_intercept_endpoint_group_association" "default } `, context) } - -{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_generated_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_test.go similarity index 75% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_generated_test.go.tmpl rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_test.go index 70c9b750c9d3..de889329df01 100644 --- 
a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_generated_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_test.go @@ -1,5 +1,4 @@ package networksecurity_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +18,7 @@ func TestAccNetworkSecurityInterceptEndpointGroup_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptEndpointGroup_basic(context), @@ -51,24 +50,21 @@ func TestAccNetworkSecurityInterceptEndpointGroup_update(t *testing.T) { func testAccNetworkSecurityInterceptEndpointGroup_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { - provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_endpoint_group" "default" { - provider = google-beta - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id - description = "initial description" + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + description = "initial description" labels = { foo = "bar" } @@ -79,29 +75,24 @@ resource 
"google_network_security_intercept_endpoint_group" "default" { func testAccNetworkSecurityInterceptEndpointGroup_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { - provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { - provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_endpoint_group" "default" { - provider = google-beta - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id - description = "updated description" + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + description = "updated description" labels = { foo = "goo" } } `, context) } - -{{ end }} From 5ac33c14625564a55a83783c4665934c76ed3699 Mon Sep 17 00:00:00 2001 From: Pawel Jasinski Date: Thu, 15 May 2025 00:30:54 +0200 Subject: [PATCH 135/884] H2C protocol added to backend_service and region_backend_service (#13910) --- mmv1/products/compute/BackendService.yaml | 16 +++++++------- .../compute/RegionBackendService.yaml | 21 +++++++++---------- .../backend_service_external_managed.tf.tmpl | 1 + ...ion_backend_service_balancing_mode.tf.tmpl | 2 +- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 28da0b3d9d04..450488f2d8b1 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -954,7 +954,7 @@ properties: locality_lb_policy is 
applicable to either: - * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, + * A regional backend service with the service_protocol set to HTTP, HTTPS, HTTP2 or H2C, and loadBalancingScheme set to INTERNAL_MANAGED. * A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. * A regional backend service with loadBalancingScheme set to EXTERNAL (External Network @@ -1357,11 +1357,10 @@ properties: type: Enum description: | The protocol this BackendService uses to communicate with backends. - The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer - types and may result in errors if used with the GA API. **NOTE**: With protocol “UNSPECIFIED”, - the backend service can be used by Layer 4 Internal Load Balancing or Network Load Balancing - with TCP/UDP/L3_DEFAULT Forwarding Rule protocol. - # TODO: make a ResourceRef to Security Policy + The default is HTTP. Possible values are HTTP, HTTPS, HTTP2, H2C, TCP, SSL, UDP + or GRPC. Refer to the documentation for the load balancers or for Traffic Director + for more information. Must be set to GRPC when the backend service is referenced + by a URL map that is bound to target gRPC proxy. default_from_api: true enum_values: - 'HTTP' @@ -1369,9 +1368,12 @@ properties: - 'HTTP2' - 'TCP' - 'SSL' + - 'UDP' - 'GRPC' - 'UNSPECIFIED' + - 'H2C' - name: 'securityPolicy' + # TODO: make a ResourceRef to Security Policy type: String description: | The security policy associated with this backend service. @@ -1385,7 +1387,7 @@ properties: type: NestedObject description: | The security settings that apply to this backend service. 
This field is applicable to either - a regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and + a regional backend service with the service_protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED; or a global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. properties: diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index 821c5706fe5f..5bc2bcb1df0f 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -336,7 +336,7 @@ properties: description: | Settings controlling the volume of connections to a backend service. This field is applicable only when the `load_balancing_scheme` is set to INTERNAL_MANAGED - and the `protocol` is set to HTTP, HTTPS, or HTTP2. + and the `protocol` is set to HTTP, HTTPS, HTTP2 or H2C. properties: - name: 'connectTimeout' type: NestedObject @@ -441,7 +441,7 @@ properties: hashing. This field only applies when all of the following are true - * `load_balancing_scheme` is set to INTERNAL_MANAGED - * `protocol` is set to HTTP, HTTPS, or HTTP2 + * `protocol` is set to HTTP, HTTPS, HTTP2 or H2C * `locality_lb_policy` is set to MAGLEV or RING_HASH properties: - name: 'httpCookie' @@ -911,7 +911,7 @@ properties: locality_lb_policy is applicable to either: - * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, + * A regional backend service with the service_protocol set to HTTP, HTTPS, HTTP2 or H2C, and loadBalancingScheme set to INTERNAL_MANAGED. * A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. * A regional backend service with loadBalancingScheme set to EXTERNAL (External Network @@ -975,7 +975,7 @@ properties: description: | Settings controlling eviction of unhealthy hosts from the load balancing pool. 
This field is applicable only when the `load_balancing_scheme` is set - to INTERNAL_MANAGED and the `protocol` is set to HTTP, HTTPS, or HTTP2. + to INTERNAL_MANAGED and the `protocol` is set to HTTP, HTTPS, HTTP2 or H2C. properties: - name: 'baseEjectionTime' type: NestedObject @@ -1219,22 +1219,21 @@ properties: - name: 'protocol' type: Enum description: | - The protocol this RegionBackendService uses to communicate with backends. - The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer - types and may result in errors if used with the GA API. + The protocol this BackendService uses to communicate with backends. + The default is HTTP. Possible values are HTTP, HTTPS, HTTP2, H2C, TCP, SSL, UDP + or GRPC. Refer to the documentation for the load balancers or for Traffic Director + for more information. default_from_api: true - # This is removed to avoid breaking terraform, as default values cannot be - # unspecified. Providers should include this as needed via overrides - # default_value: :TCP enum_values: - 'HTTP' - 'HTTPS' - 'HTTP2' - - 'SSL' - 'TCP' + - 'SSL' - 'UDP' - 'GRPC' - 'UNSPECIFIED' + - 'H2C' - name: 'securityPolicy' type: String description: | diff --git a/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl index 800fcf72bb71..9d5de7a167e0 100644 --- a/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl @@ -2,6 +2,7 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "backend_service_name"}}" health_checks = [google_compute_health_check.default.id] load_balancing_scheme = "EXTERNAL_MANAGED" + protocol = "H2C" } resource "google_compute_health_check" "default" { diff --git a/mmv1/templates/terraform/examples/region_backend_service_balancing_mode.tf.tmpl 
b/mmv1/templates/terraform/examples/region_backend_service_balancing_mode.tf.tmpl index be1bc742abba..9e62bf3c70b7 100644 --- a/mmv1/templates/terraform/examples/region_backend_service_balancing_mode.tf.tmpl +++ b/mmv1/templates/terraform/examples/region_backend_service_balancing_mode.tf.tmpl @@ -9,7 +9,7 @@ resource "google_compute_region_backend_service" "default" { region = "us-central1" name = "{{index $.Vars "region_backend_service_name"}}" - protocol = "HTTP" + protocol = "H2C" timeout_sec = 10 health_checks = [google_compute_region_health_check.default.id] From 23d06c99ca3cf793f93d4bbb65aa8084955d108e Mon Sep 17 00:00:00 2001 From: ArtoriaRen Date: Wed, 14 May 2025 18:48:16 -0400 Subject: [PATCH 136/884] We shouldn't replace the url if `dialogflow_cx_custom_endpoint` is set. (#13873) --- mmv1/products/dialogflowcx/Flow.yaml | 5 ++ .../dialogflowcx_flow_custom_endpoint.tf.tmpl | 47 +++++++++++++++++++ .../dialogflow_set_location.go.tmpl | 5 +- ...owcx_set_location_skip_default_obj.go.tmpl | 5 +- 4 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_flow_custom_endpoint.tf.tmpl diff --git a/mmv1/products/dialogflowcx/Flow.yaml b/mmv1/products/dialogflowcx/Flow.yaml index caa33947c7b2..7bd0cba5e376 100644 --- a/mmv1/products/dialogflowcx/Flow.yaml +++ b/mmv1/products/dialogflowcx/Flow.yaml @@ -54,6 +54,11 @@ examples: vars: agent_name: 'dialogflowcx-agent' exclude_docs: true + - name: 'dialogflowcx_flow_custom_endpoint' + primary_resource_id: 'custom_endpoint_flow' + vars: + agent_name: 'dialogflowcx-agent' + exclude_docs: true virtual_fields: - name: 'is_default_start_flow' description: | diff --git a/mmv1/templates/terraform/examples/dialogflowcx_flow_custom_endpoint.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_flow_custom_endpoint.tf.tmpl new file mode 100644 index 000000000000..ad727353da3d --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_flow_custom_endpoint.tf.tmpl 
@@ -0,0 +1,47 @@ +provider "google" { + dialogflow_cx_custom_endpoint = "https://us-central1-dialogflow.googleapis.com/v3/" +} + +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "us-central1" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." +} + + +resource "google_dialogflow_cx_flow" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "MyFlow" + description = "Test Flow" + + nlu_settings { + classification_threshold = 0.3 + model_type = "MODEL_TYPE_STANDARD" + } + + event_handlers { + event = "sys.no-match-default" + trigger_fulfillment { + return_partial_responses = false + messages { + text { + text = ["Sorry, could you say that again?"] + } + } + } + } + + event_handlers { + event = "sys.no-input-default" + trigger_fulfillment { + return_partial_responses = false + messages { + text { + text = ["One more time?"] + } + } + } + } +} diff --git a/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl index 68e83e1317e0..a1ca9cf70185 100644 --- a/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl +++ b/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl @@ -11,4 +11,7 @@ if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(d.Get ) } -url = strings.Replace(url, "-dialogflow", fmt.Sprintf("%s-dialogflow", location), 1) +// only insert location into url if the base_url in products/dialogflowcx/product.yaml is used +if strings.HasPrefix(url, "https://-dialogflow.googleapis.com/v3/") { + url = strings.Replace(url, "-dialogflow", fmt.Sprintf("%s-dialogflow", location), 1) +} diff --git a/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl index 
79be0a450d28..19699a95dd86 100644 --- a/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl +++ b/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl @@ -11,7 +11,10 @@ if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(d.Get ) } -url = strings.Replace(url,"-dialogflow",fmt.Sprintf("%s-dialogflow",location),1) +// only insert location into url if the base_url in products/dialogflowcx/product.yaml is used +if strings.HasPrefix(url, "https://-dialogflow.googleapis.com/v3/") { + url = strings.Replace(url,"-dialogflow",fmt.Sprintf("%s-dialogflow",location),1) +} // if it's a default object Dialogflow creates for you, "Update" instead of "Create" // Note: below we try to access fields that aren't present in the resource, because this custom code is reused across multiple Dialogflow resources that contain different fields. When the field isn't present, we deliberately ignore the error and the boolean is false. 
From 49e2387b8205af06dfff7bdbe371ee8f68e3d64a Mon Sep 17 00:00:00 2001 From: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Date: Wed, 14 May 2025 22:48:53 +0000 Subject: [PATCH 137/884] Add data source for `google_beyondcorp_security_gateway` (#13908) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...urce_google_beyondcorp_security_gateway.go | 53 ++++++++++++ ...google_beyondcorp_security_gateway_test.go | 82 +++++++++++++++++++ .../beyondcorp_security_gateway.html.markdown | 32 ++++++++ 4 files changed, 168 insertions(+) create mode 100644 mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go create mode 100644 mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 1f458e6b3974..d7d322ab93d8 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -42,6 +42,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_beyondcorp_app_connection": beyondcorp.DataSourceGoogleBeyondcorpAppConnection(), "google_beyondcorp_app_connector": beyondcorp.DataSourceGoogleBeyondcorpAppConnector(), "google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(), + "google_beyondcorp_security_gateway": beyondcorp.DataSourceGoogleBeyondcorpSecurityGateway(), "google_billing_account": billing.DataSourceGoogleBillingAccount(), "google_bigquery_tables": bigquery.DataSourceGoogleBigQueryTables(), "google_bigquery_dataset": bigquery.DataSourceGoogleBigqueryDataset(), diff --git a/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go 
b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go new file mode 100644 index 000000000000..f24e25373afa --- /dev/null +++ b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go @@ -0,0 +1,53 @@ +package beyondcorp + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleBeyondcorpSecurityGateway() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceBeyondcorpSecurityGateway().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "security_gateway_id") + + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleBeyondcorpSecurityGatewayRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleBeyondcorpSecurityGatewayRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + name := d.Get("security_gateway_id").(string) + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + id := fmt.Sprintf("projects/%s/locations/global/securityGateways/%s", project, name) + d.SetId(id) + // Remove after deprecated location variable is removed. 
+ d.Set("location", "global") + + err = resourceBeyondcorpSecurityGatewayRead(d, meta) + if err != nil { + return err + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go new file mode 100644 index 000000000000..8ce0c344addb --- /dev/null +++ b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go @@ -0,0 +1,82 @@ +package beyondcorp_test + +import ( + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceGoogleBeyondcorpSecurityGateway_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBeyondcorpSecurityGatewayDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBeyondcorpSecurityGateway_basic(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_beyondcorp_security_gateway.foo", "google_beyondcorp_security_gateway.foo"), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleBeyondcorpSecurityGateway_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckBeyondcorpSecurityGatewayDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBeyondcorpSecurityGateway_full(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_beyondcorp_security_gateway.foo", "google_beyondcorp_security_gateway.foo"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleBeyondcorpSecurityGateway_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_beyondcorp_security_gateway" "foo" { + security_gateway_id = "default-foo-sg-basic-%{random_suffix}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +data "google_beyondcorp_security_gateway" "foo" { + security_gateway_id = google_beyondcorp_security_gateway.foo.security_gateway_id +} +`, context) +} + +func testAccDataSourceGoogleBeyondcorpSecurityGateway_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_beyondcorp_security_gateway" "foo" { + security_gateway_id = "default-foo-sg-full-%{random_suffix}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +data "google_beyondcorp_security_gateway" "foo" { + security_gateway_id = google_beyondcorp_security_gateway.foo.security_gateway_id + project = google_beyondcorp_security_gateway.foo.project +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown b/mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown new file mode 100644 index 000000000000..e9fbbea7b69a --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown @@ -0,0 +1,32 @@ +--- +subcategory: "BeyondCorp" +description: |- + Get information about a Google BeyondCorp Security Gateway. +--- + +# google_beyondcorp_security_gateway + +Get information about a Google BeyondCorp Security Gateway. 
+ +## Example Usage + +```hcl +data "google_beyondcorp_security_gateway" "my-beyondcorp-security-gateway" { + security_gateway_id = "my-beyondcorp-security-gateway" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `security_gateway_id` - (Required) The name of the Security Gateway resource. + +- - - + +* `project` - (Optional) The project in which the resource belongs. If it + is not provided, the provider project is used. + +## Attributes Reference + +See [google_beyondcorp_security_gateway](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/beyondcorp_security_gateway) resource for details of the available attributes. From fe32297fa07c8b026e340855ee1b786e70f4231f Mon Sep 17 00:00:00 2001 From: Sachin_R Date: Thu, 15 May 2025 04:37:28 +0530 Subject: [PATCH 138/884] Terraform Support for Dataplex Glossary (#13095) --- mmv1/products/dataplex/Glossary.yaml | 112 ++++++++++++++++++ .../examples/dataplex_glossary_basic.tf.tmpl | 4 + .../examples/dataplex_glossary_full.tf.tmpl | 8 ++ .../resource_dataplex_glossary_test.go.tmpl | 73 ++++++++++++ 4 files changed, 197 insertions(+) create mode 100644 mmv1/products/dataplex/Glossary.yaml create mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_full.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_test.go.tmpl diff --git a/mmv1/products/dataplex/Glossary.yaml b/mmv1/products/dataplex/Glossary.yaml new file mode 100644 index 000000000000..a47c435ee8ca --- /dev/null +++ b/mmv1/products/dataplex/Glossary.yaml @@ -0,0 +1,112 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: 'Glossary' +description: | + Represents a collection of categories and terms defined by the user. Glossary is a top level resource and is the GCP parent resource of all the categories and terms within it. +base_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/glossaries?glossary_id={{glossary_id}}' +update_verb: 'PATCH' +update_mask: true +timeouts: + insert_minutes: 5 + update_minutes: 5 + delete_minutes: 5 +autogen_async: true +import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}'] +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' +iam_policy: + method_name_separator: ':' + parent_resource_attribute: 'glossary_id' + import_format: + - 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}' + - '{{glossary_id}}' +examples: + - name: 'dataplex_glossary_basic' + primary_resource_id: 'glossary_test_id' + primary_resource_name: 'fmt.Sprintf("tf-test-glossary-basic%s", context["random_suffix"])' + vars: + glossary_name: 'glossary-basic' + - name: 'dataplex_glossary_full' + primary_resource_id: 'glossary_test_id_full' + primary_resource_name: 'fmt.Sprintf("tf-test-glossary-full%s", context["random_suffix"])' + vars: + glossary_name: 'glossary-full' +parameters: + - name: 'location' + type: String + description: | + The location where the glossary should reside. 
+ url_param_only: true + required: true + immutable: true + - name: 'glossaryId' + type: String + description: | + The glossary id for creation. + url_param_only: true + required: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The resource name of the Glossary. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId} + output: true + - name: 'displayName' + type: String + description: | + User friendly display name of the glossary. This is user-mutable. This will be same as the glossaryId, if not specified. + required: false + - name: 'description' + type: String + description: | + The user-mutable description of the glossary. + required: false + - name: 'labels' + type: KeyValueLabels + description: | + User-defined labels for the Glossary. + required: false + - name: 'uid' + type: String + description: | + System generated unique id for the Glossary. This ID will be different if the Glossary is deleted and re-created with the same name. + output: true + - name: 'createTime' + type: Timestamp + description: | + The time at which the glossary was created. + output: true + - name: 'updateTime' + type: Timestamp + description: | + The time at which the glossary was last updated. + output: true + - name: 'termCount' + type: Integer + description: | + The number of terms in the glossary. + required: false + output: true + - name: 'categoryCount' + type: Integer + description: | + The number of categories in the glossary. 
+ required: false + output: true diff --git a/mmv1/templates/terraform/examples/dataplex_glossary_basic.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_glossary_basic.tf.tmpl new file mode 100644 index 000000000000..a4cac769a527 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_glossary_basic.tf.tmpl @@ -0,0 +1,4 @@ +resource "google_dataplex_glossary" "{{$.PrimaryResourceId}}" { + glossary_id = "{{index $.Vars "glossary_name"}}" + location = "us-central1" +} diff --git a/mmv1/templates/terraform/examples/dataplex_glossary_full.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_glossary_full.tf.tmpl new file mode 100644 index 000000000000..f232b710c176 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_glossary_full.tf.tmpl @@ -0,0 +1,8 @@ +resource "google_dataplex_glossary" "{{$.PrimaryResourceId}}" { + glossary_id = "{{index $.Vars "glossary_name"}}" + location = "us-central1" + + labels = { "tag": "test-tf" } + display_name = "terraform glossary" + description = "glossary created by Terraform" +} diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_test.go.tmpl b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_test.go.tmpl new file mode 100644 index 000000000000..a2be0e83b12f --- /dev/null +++ b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_test.go.tmpl @@ -0,0 +1,73 @@ +package dataplex_test + +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/plancheck" +) + +func TestAccDataplexGlossary_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexGlossary_update(context), + }, + { + ResourceName: "google_dataplex_glossary.glossary_test_id_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"glossary_id", "labels", "location", "terraform_labels"}, + }, + { + Config: testAccDataplexGlossary_full(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_dataplex_glossary.glossary_test_id_full", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_dataplex_glossary.glossary_test_id_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"glossary_id", "labels", "location", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataplexGlossary_update(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dataplex_glossary" "glossary_test_id_full" { + glossary_id = "tf-test-glossary-full%{random_suffix}" + location = "us-central1" + } +`, context) +} + +func testAccDataplexGlossary_full(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dataplex_glossary" "glossary_test_id_full" { + glossary_id = "tf-test-glossary-full%{random_suffix}" + location = "us-central1" + labels = { "tag": "test-tf" } + display_name = "terraform glossary" + description = "glossary created by Terraform" + } +`, context) +} + +{{- end }} \ No newline at end of file From 38563e7643cad6ffa4a8e947aed30e456180ab08 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 14 May 2025 16:16:03 -0700 Subject: [PATCH 139/884] Fixed TestAccComputeHealthCheck_tcpAndSsl_shouldFail (#13901) --- .../services/compute/resource_compute_health_check_test.go.tmpl | 2 +- .../compute/resource_compute_region_health_check_test.go.tmpl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl index 0b2b8553e518..34fc29d8d96a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl @@ -206,7 +206,7 @@ func TestAccComputeHealthCheck_tcpAndSsl_shouldFail(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccComputeHealthCheck_tcpAndSsl_shouldFail(hckName), - ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), + ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,grpc_tls_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), }, }, }) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl index 72a72339e772..71b5924b894f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl @@ -223,7 +223,7 @@ func TestAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(hckName), - ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), + ExpectError: 
regexp.MustCompile("only one of\n`grpc_health_check,grpc_tls_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), }, }, From 09c0f3b709b3a109ea781e1ddac18ee434dddae4 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Wed, 14 May 2025 17:49:35 -0700 Subject: [PATCH 140/884] Update nightly test data ingestion (#13956) --- .../cmd/collect_nightly_test_status.go | 34 ++++++++++++------- .ci/magician/teamcity/get.go | 7 ++-- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/.ci/magician/cmd/collect_nightly_test_status.go b/.ci/magician/cmd/collect_nightly_test_status.go index 8882c6299d3e..31de8b6373f1 100644 --- a/.ci/magician/cmd/collect_nightly_test_status.go +++ b/.ci/magician/cmd/collect_nightly_test_status.go @@ -38,11 +38,16 @@ var cntsRequiredEnvironmentVariables = [...]string{ } type TestInfo struct { - Name string `json:"name"` - Status string `json:"status"` - Service string `json:"service"` - ErrorMessage string `json:"error_message"` - LogLink string `json"log_link` + Name string `json:"name"` + Status string `json:"status"` + Service string `json:"service"` + ErrorMessage string `json:"error_message"` + LogLink string `json"log_link` + ProviderVersion string `json:"provider_version"` + QueuedDate string `json:"queuedDate"` + StartDate string `json:"startDate"` + FinishDate string `json:"finishDate"` + Duration int `json:"duration"` } // collectNightlyTestStatusCmd represents the collectNightlyTestStatus command @@ -87,8 +92,8 @@ var collectNightlyTestStatusCmd = &cobra.Command{ // check if a specific date is provided if customDate != "" { parsedDate, err := time.Parse("2006-01-02", customDate) // input format YYYY-MM-DD - // Set the time to 6pm PT - date = time.Date(parsedDate.Year(), parsedDate.Month(), parsedDate.Day(), 18, 0, 0, 0, loc) + // Set the time to 7pm PT + date = 
time.Date(parsedDate.Year(), parsedDate.Month(), parsedDate.Day(), 19, 0, 0, 0, loc) if err != nil { return fmt.Errorf("invalid input time format: %w", err) } @@ -164,11 +169,16 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto errorMessage = convertErrorMessage(testResult.ErrorMessage) } testInfoList = append(testInfoList, TestInfo{ - Name: testResult.Name, - Status: testResult.Status, - Service: serviceName, - ErrorMessage: errorMessage, - LogLink: logLink, + Name: testResult.Name, + Status: testResult.Status, + Service: serviceName, + ErrorMessage: errorMessage, + LogLink: logLink, + ProviderVersion: pVersion.String(), + Duration: testResult.Duration, + QueuedDate: build.QueuedDate, + StartDate: build.StartDate, + FinishDate: build.FinishDate, }) } } diff --git a/.ci/magician/teamcity/get.go b/.ci/magician/teamcity/get.go index 37f0ec8d4c17..0126dc2ed9f4 100644 --- a/.ci/magician/teamcity/get.go +++ b/.ci/magician/teamcity/get.go @@ -28,6 +28,9 @@ type Build struct { BuildConfName string `json:"buildConfName"` WebUrl string `json:"webUrl"` Number string `json:"number"` + QueuedDate string `json:"queuedDate"` + StartDate string `json:"startDate"` + FinishDate string `json:"finishDate"` } type Builds struct { @@ -52,7 +55,7 @@ type FirstFailed struct { } func (tc *Client) GetBuilds(project, finishCut, startCut string) (Builds, error) { - url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/builds?locator=count:500,tag:cron-trigger,project:%s,branch:refs/heads/nightly-test,finishDate:(date:%s,condition:before),startDate:(date:%s,condition:after)", project, finishCut, startCut) + url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/builds?locator=count:500,tag:cron-trigger,project:%s,branch:refs/heads/nightly-test,queuedDate:(date:%s,condition:before),queuedDate:(date:%s,condition:after)&fields=build(id,buildTypeId,buildConfName,webUrl,number,queuedDate,startDate,finishDate)", project, finishCut, startCut) var builds 
Builds @@ -62,7 +65,7 @@ func (tc *Client) GetBuilds(project, finishCut, startCut string) (Builds, error) } func (tc *Client) GetTestResults(build Build) (TestResults, error) { - url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/testOccurrences?locator=count:5000,build:(id:%d)&fields=testOccurrence(id,name,status,duration,firstFailed(href),details,build(webUrl))", build.Id) + url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/testOccurrences?locator=count:5000,build:(id:%d)&fields=testOccurrence(id,name,status,duration,firstFailed(href),details)", build.Id) var testResults TestResults From 770488a62fa3721d07e46218884061594a1c2bc9 Mon Sep 17 00:00:00 2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Thu, 15 May 2025 17:58:57 +0200 Subject: [PATCH 141/884] Add `force_attach` to resource compute instance (#12819) Signed-off-by: Cezary Sobczak --- .../compute/resource_compute_instance.go.tmpl | 44 +++ ...ompute_instance_from_template_test.go.tmpl | 166 +++++++++++ .../resource_compute_instance_test.go.tmpl | 282 ++++++++++++++++++ .../docs/r/compute_instance.html.markdown | 8 + .../tgc/tfdata/fake_resource_data_test.go | 2 + .../fake_resource_data_with_meta_test.go | 2 + 6 files changed, 504 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 9cee3cedf7ea..42364881a9f4 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -53,6 +53,13 @@ func IpCidrRangeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { return false } +func DisksForceAttachDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + if new == old { + return true + } + return false +} + var ( advancedMachineFeaturesKeys = []string{ 
"advanced_machine_features.0.enable_nested_virtualization", @@ -74,6 +81,7 @@ var ( "boot_disk.0.initialize_params", "boot_disk.0.mode", "boot_disk.0.source", + "boot_disk.0.force_attach", } initializeParamsKeys = []string{ @@ -549,6 +557,16 @@ func ResourceComputeInstance() *schema.Resource { DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `The name or self_link of the disk attached to this instance.`, }, + + "force_attach": { + Type: schema.TypeBool, + Optional: true, + Default: false, + AtLeastOneOf: bootDiskKeys, + ForceNew: true, + DiffSuppressFunc: DisksForceAttachDiffSuppress, + Description: `Whether to force attach the regional disk even if it's currently attached to another instance. If you try to force attach a zonal disk to an instance, you will receive an error. Setting this parameter cause VM recreation.`, + }, }, }, }, @@ -863,6 +881,15 @@ func ResourceComputeInstance() *schema.Resource { Computed: true, Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource.`, }, + + "force_attach": { + Type: schema.TypeBool, + Optional: true, + Default: false, + ForceNew: true, + DiffSuppressFunc: DisksForceAttachDiffSuppress, + Description: `Whether to force attach the regional disk even if it's currently attached to another instance. If you try to force attach a zonal disk to an instance, you will receive an error. Setting this parameter cause VM recreation.`, + }, }, }, }, @@ -2091,6 +2118,10 @@ func resourceComputeInstanceRead(d *schema.ResourceData, meta interface{}) error } } + if v, ok := d.GetOk(fmt.Sprintf("attached_disk.%d.force_attach", adIndex)); ok { + di["force_attach"] = v.(bool) + } + // We want the disks to remain in the order we set in the config, so if a disk // is present in the config, make sure it's at the correct index. Otherwise, append it. 
if inConfig { @@ -3231,6 +3262,11 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d *schema.ResourceDat disk.DiskEncryptionKey.KmsKeyServiceAccount = kmsServiceAccount.(string) } } + + if forceAttach, ok := diskConfig["force_attach"]; ok { + disk.ForceAttach = forceAttach.(bool) + } + return disk, nil } @@ -3578,6 +3614,10 @@ func expandBootDisk(d *schema.ResourceData, config *transport_tpg.Config, projec disk.Mode = v.(string) } + if v, ok := d.GetOk("boot_disk.0.force_attach"); ok { + disk.ForceAttach = v.(bool) + } + return disk, nil } @@ -3588,6 +3628,7 @@ func flattenBootDisk(d *schema.ResourceData, disk *compute.AttachedDisk, config "mode": disk.Mode, "source": tpgresource.ConvertSelfLinkToV1(disk.Source), "guest_os_features": flattenComputeInstanceGuestOsFeatures(disk.GuestOsFeatures), + "force_attach": d.Get("boot_disk.0.force_attach"), // disk_encryption_key_raw is not returned from the API, so copy it from what the user // originally specified to avoid diffs. "disk_encryption_key_raw": d.Get("boot_disk.0.disk_encryption_key_raw"), @@ -3596,6 +3637,9 @@ func flattenBootDisk(d *schema.ResourceData, disk *compute.AttachedDisk, config if _,ok := d.GetOk("boot_disk.0.interface"); ok { result["interface"] = disk.Interface } + if v, ok := d.GetOk("boot_disk.0.force_attach"); ok { + result["force_attach"] = v.(bool) + } diskDetails, err := getDisk(disk.Source, d, config) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl index 35b9553f27d3..5ccbfae30768 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl @@ -566,6 +566,37 @@ func TestAccComputeInstanceFromTemplate_confidentialInstanceConfigMain(t *testin }) } +func 
TestAccComputeInstanceFromTemplate_DiskForceAttach(t *testing.T) { + t.Parallel() + + var instance compute.Instance + instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + templateName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + resourceName := "google_compute_instance_from_template.foobar" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceFromTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstanceFromTemplate_DiskForceAttach_zonal(instanceName, templateName), + ExpectError: regexp.MustCompile("Force attaching zonal disks is not supported"), + }, + { + Config: testAccComputeInstanceFromTemplate_DiskForceAttach(instanceName, templateName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists(t, resourceName, &instance), + + // Check that fields were set based on the template + resource.TestCheckResourceAttr(resourceName, "boot_disk.0.force_attach", "true"), + resource.TestCheckResourceAttr(resourceName, "attached_disk.0.force_attach", "true"), + ), + }, + }, + }) +} + func testAccComputeInstanceFromTemplate_basic(instance, template string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -2215,3 +2246,138 @@ resource "google_compute_instance_from_template" "foobar" { } `, suffix, suffix, template, template) } + +func testAccComputeInstanceFromTemplate_DiskForceAttach_zonal(instance, template string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s" + image = data.google_compute_image.my_image.self_link + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_instance_template" "foobar" { + name = "%s" + machine_type = "n1-standard-1" // 
can't be e2 because of local-ssd + + disk { + source = google_compute_disk.foobar.name + auto_delete = false + boot = true + } + + network_interface { + network = "default" + } + + metadata = { + foo = "bar" + } + + scheduling { + automatic_restart = true + } + + can_ip_forward = true +} + +resource "google_compute_instance_from_template" "foobar" { + name = "%s" + zone = "us-central1-a" + + source_instance_template = google_compute_instance_template.foobar.self_link + + // Overrides + boot_disk { + source = google_compute_disk.foobar.name + force_attach = true + } + attached_disk { + source = google_compute_disk.foobar.name + force_attach = true + } +} +`, template, template, instance) +} + +func testAccComputeInstanceFromTemplate_DiskForceAttach(instance, template string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_region_disk" "foobar" { + name = "%s-1" + size = 10 + type = "pd-ssd" + region = "us-central1" + replica_zones = ["us-central1-a", "us-central1-b"] +} + +resource "google_compute_region_disk" "foobaz" { + name = "%s-2" + size = 10 + type = "pd-ssd" + region = "us-central1" + replica_zones = ["us-central1-a", "us-central1-b"] +} + +resource "google_compute_instance_template" "foobar" { + name = "%s" + machine_type = "n1-standard-1" // can't be e2 because of local-ssd + + disk { + source = google_compute_region_disk.foobar.self_link + auto_delete = false + boot = true + } + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + disk_size_gb = 100 + boot = false + disk_type = "pd-ssd" + type = "PERSISTENT" + } + + network_interface { + network = "default" + } + + metadata = { + foo = "bar" + } + + scheduling { + automatic_restart = true + } + + can_ip_forward = true +} + +resource "google_compute_instance_from_template" "foobar" { + name = "%s" + zone = "us-central1-a" + + source_instance_template = 
google_compute_instance_template.foobar.self_link + + // Overrides + boot_disk { + source = google_compute_region_disk.foobar.self_link + force_attach = true + } + attached_disk { + source = google_compute_region_disk.foobaz.self_link + force_attach = true + } +} +`, template, template, template, instance) +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index c9f02ac96dc6..a4ab90e4fa85 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -101,6 +101,40 @@ func TestMinCpuPlatformDiffSuppress(t *testing.T) { } } +func TestDisksForceAttachDiffSuppress(t *testing.T) { + cases := map[string]struct { + Old, New string + ExpectDiffSuppress bool + }{ + "force_attach unchanged": { + Old: "true", + New: "true", + ExpectDiffSuppress: true, + }, + "force_attach changed to true": { + Old: "false", + New: "true", + ExpectDiffSuppress: false, + }, + "force_attach changed to false": { + Old: "true", + New: "false", + ExpectDiffSuppress: false, + }, + "force_attach unchanged false": { + Old: "false", + New: "false", + ExpectDiffSuppress: true, + }, + } + + for tn, tc := range cases { + if tpgcompute.DisksForceAttachDiffSuppress("", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { + t.Errorf("bad: %s, %q => %q expect DiffSuppress to return %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress) + } + } +} + func TestCheckForCommonAliasIp(t *testing.T) { type testCase struct { old, new []*compute.AliasIpRange @@ -1202,6 +1236,52 @@ func TestAccComputeInstance_attachedDisk_modeRo(t *testing.T) { }) } +func TestAccComputeInstance_attachDisk_forceAttach(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + var diskName = 
fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) + var forceAttachSetToTrue = true + var forceAttachSetToFalse = false + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_attachedDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToTrue), + ExpectError: regexp.MustCompile("Force attaching zonal disks is not supported"), + }, + { + Config: testAccComputeInstance_attachedDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToFalse), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "attached_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), + ), + }, + { + Config: testAccComputeInstance_attachedDisk_forceAttach(diskName, instanceName, forceAttachSetToTrue), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "attached_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToTrue)), + ), + }, + { + Config: testAccComputeInstance_attachedDisk_forceAttach(diskName, instanceName, forceAttachSetToFalse), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "attached_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), + ), + }, + }, + }) +} + func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { t.Parallel() @@ -1360,6 +1440,52 @@ func TestAccComputeInstance_bootDisk_mode(t *testing.T) { }) } +func TestAccComputeInstance_bootDisk_forceAttach(t *testing.T) { + 
t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + var diskName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + var forceAttachSetToTrue = true + var forceAttachSetToFalse = false + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_bootDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToTrue), + ExpectError: regexp.MustCompile("Force attaching zonal disks is not supported"), + }, + { + Config: testAccComputeInstance_bootDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToFalse), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), + ), + }, + { + Config: testAccComputeInstance_bootDisk_forceAttach(instanceName, forceAttachSetToTrue), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToTrue)), + ), + }, + { + Config: testAccComputeInstance_bootDisk_forceAttach(instanceName, forceAttachSetToFalse), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), + ), + }, + }, + }) +} + func TestAccComputeInstance_with375GbScratchDisk(t *testing.T) { t.Parallel() @@ -7736,6 +7862,106 @@ resource "google_compute_instance" 
"foobar" { `, disk, instance) } +func testAccComputeInstance_attachedDisk_forceAttach(disk, instance string, force_attach bool) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_region_disk" "regionaldisk" { + name = "%s-1" + size = 10 + type = "pd-ssd" + region = "us-central1" + replica_zones = ["us-central1-a", "us-central1-b"] +} + +resource "google_compute_region_disk" "regionaldisk2" { + name = "%s-2" + size = 10 + type = "pd-ssd" + region = "us-central1" + replica_zones = ["us-central1-a", "us-central1-b"] +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + attached_disk { + source = google_compute_region_disk.regionaldisk.self_link + force_attach = %t + } + + attached_disk { + source = google_compute_region_disk.regionaldisk2.self_link + force_attach = %t + } + + network_interface { + network = "default" + } +} +`, disk, disk, instance, force_attach, force_attach) +} + +func testAccComputeInstance_attachedDisk_forceAttach_zonal(disk, instance string, force_attach bool) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s-1" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_disk" "foobar2" { + name = "%s-2" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + attached_disk { + source = google_compute_disk.foobar.name + force_attach = %t + } + + attached_disk { + source = 
google_compute_disk.foobar2.name + force_attach = %t + } + + network_interface { + network = "default" + } +} +`, disk, disk, instance, force_attach, force_attach) +} + func testAccComputeInstance_bootDisk_source(disk, instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -7848,6 +8074,62 @@ resource "google_compute_instance" "foobar" { `, instance, diskMode) } +func testAccComputeInstance_bootDisk_forceAttach_zonal(disk, instance string, force_attach bool) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s" + zone = "us-central1-a" + image = data.google_compute_image.my_image.self_link +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + source = google_compute_disk.foobar.name + force_attach = %t + } + + network_interface { + network = "default" + } +} +`, disk, instance, force_attach) +} + +func testAccComputeInstance_bootDisk_forceAttach(instance string, force_attach bool) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + force_attach = %t + } + + network_interface { + network = "default" + } +} +`, instance, force_attach) +} + func testAccComputeInstance_with375GbScratchDisk(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown index da78b9884698..f334b798825f 100644 --- 
a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown @@ -293,6 +293,10 @@ is desired, you will need to modify your state file manually using `google_compute_disk`) or disk image. To create an instance from a snapshot, first create a `google_compute_disk` from a snapshot and reference it here. +* `force_attach` - (Optional) boolean field that determines whether to force attach the regional + disk even if it's currently attached to another instance. If you try to force attach a zonal + disk to an instance, you will receive an error. Setting this parameter cause VM recreation. + The `initialize_params` block supports: * `size` - (Optional) The size of the image in gigabytes. If not specified, it @@ -419,6 +423,10 @@ is desired, you will need to modify your state file manually using * `kms_key_service_account` - (Optional) The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used. +* `force_attach` - (Optional) boolean field that determines whether to force attach the regional + disk even if it's currently attached to another instance. If you try to force attach a zonal + disk to an instance, you will receive an error. Setting this parameter cause VM recreation. + The `network_performance_config` block supports: * `total_egress_bandwidth_tier` - (Optional) The egress bandwidth tier to enable. 
diff --git a/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go b/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go index 32d6a670e944..1da86e017058 100644 --- a/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go +++ b/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go @@ -247,6 +247,7 @@ func TestFakeResourceData_getOkTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", + "force_attach": false, "kms_key_self_link": "test-kms_key_self_link", "mode": "READ_ONLY", "source": "test-source", @@ -324,6 +325,7 @@ func TestFakeResourceData_getOknsetTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", + "force_attach": false, "kms_key_self_link": "", "mode": "", "source": "", diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go index d48e3e96abea..4b5d8876d9c4 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go @@ -242,6 +242,7 @@ func TestFakeResourceDataWithMeta_getOkTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", + "force_attach": false, "kms_key_self_link": "test-kms_key_self_link", "mode": "READ_ONLY", "source": "test-source", @@ -321,6 +322,7 @@ func TestFakeResourceDataWithMeta_getOknsetTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", + "force_attach": false, "kms_key_self_link": "", "mode": "", "source": "", From 06756861dea4f781b34a1ede9f2b811ec4993005 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 15 May 2025 09:25:39 -0700 Subject: [PATCH 142/884] Fix the bug 
that required fields are not always in iam docs (#13954) --- mmv1/api/resource.go | 26 ++++++++++++++----- .../datasource_iam.html.markdown.tmpl | 17 ++++++------ .../terraform/resource_iam.html.markdown.tmpl | 17 ++++++------ 3 files changed, 38 insertions(+), 22 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 2ae709dc0d81..725ec1abb931 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1400,13 +1400,27 @@ func (r Resource) IamSelfLinkIdentifiers() []string { return r.ExtractIdentifiers(selfLink) } -// Returns the resource properties that are idenfifires in the selflink url -func (r Resource) IamSelfLinkProperties() []*Type { - params := r.IamSelfLinkIdentifiers() +// Returns the resource properties that are idenfifires in Iam resource when generating the docs. +// The "project" and "organization" properties are excluded, as they are handled seperated in the docs. +func (r Resource) IamResourceProperties() []*Type { + urlProperties := make([]*Type, 0) + for _, param := range r.IamResourceParams() { + if param == "project" || param == "organization" { + continue + } - urlProperties := google.Select(r.AllUserProperties(), func(p *Type) bool { - return slices.Contains(params, p.Name) - }) + found := false + for _, p := range r.AllUserProperties() { + if param == google.Underscore(p.Name) { + urlProperties = append(urlProperties, p) + found = true + break + } + } + if !found { + urlProperties = append(urlProperties, &Type{Name: param}) + } + } return urlProperties } diff --git a/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl b/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl index 88296a56262d..16dc380923bb 100644 --- a/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl @@ -66,15 +66,16 @@ data "{{ $.IamTerraformName }}_policy" "policy" { ## Argument Reference The following arguments are supported: -{{ range $param := 
$.IamSelfLinkProperties }} -{{- if eq $param.Name "name" }} -* `{{if $.IamPolicy.ParentResourceAttribute}}{{$.IamPolicy.ParentResourceAttribute}}{{else}}{{underscore $.Name}}{{end}}` - (Required) Used to find the parent resource to bind the IAM policy to -{{- else if or (or (eq (underscore $param.Name) "region") (eq (underscore $param.Name) "zone")) (eq (underscore $param.Name) "location") }} -* `{{ underscore $param.Name }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, - the value will be parsed from the identifier of the parent resource. If no {{ underscore $param.Name }} is provided in the parent identifier and no - {{ underscore $param.Name }} is specified, it is taken from the provider configuration. +{{ range $param := $.IamResourceProperties }} + {{- $n := underscore $param.Name }} +{{- if eq $n $.IamParentResourceName }} +* `{{ $n }}` - (Required) Used to find the parent resource to bind the IAM policy to +{{- else if or (or (eq $n "region") (eq $n "zone")) (eq $n "location") }} +* `{{ $n }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, + the value will be parsed from the identifier of the parent resource. If no {{ $n }} is provided in the parent identifier and no + {{ $n }} is specified, it is taken from the provider configuration. 
{{- else }} -* `{{ underscore $param.Name }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to +* `{{ $n }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to {{- end }} {{- end }} {{- if $.IamPolicy.BaseUrl }} diff --git a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl index f5336c101c20..affc3c258c9a 100644 --- a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl @@ -189,15 +189,16 @@ resource "{{ $.IamTerraformName }}_member" "member" { ## Argument Reference The following arguments are supported: -{{ range $param := $.IamSelfLinkProperties }} -{{- if eq $param.Name "name" }} -* `{{ $.IamParentResourceName }}` - (Required) Used to find the parent resource to bind the IAM policy to -{{- else if or (or (eq (underscore $param.Name) "region") (eq (underscore $param.Name) "zone")) (eq (underscore $param.Name) "location") }} -* `{{ underscore $param.Name }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, - the value will be parsed from the identifier of the parent resource. If no {{ underscore $param.Name }} is provided in the parent identifier and no - {{ underscore $param.Name }} is specified, it is taken from the provider configuration. +{{ range $param := $.IamResourceProperties }} + {{- $n := underscore $param.Name }} +{{- if eq $n $.IamParentResourceName }} +* `{{ $n }}` - (Required) Used to find the parent resource to bind the IAM policy to +{{- else if or (or (eq $n "region") (eq $n "zone")) (eq $n "location") }} +* `{{ $n }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, + the value will be parsed from the identifier of the parent resource. 
If no {{ $n }} is provided in the parent identifier and no + {{ $n }} is specified, it is taken from the provider configuration. {{- else }} -* `{{ underscore $param.Name }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to +* `{{ $n }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to {{- end }} {{- end }} {{- if $.IamPolicy.BaseUrl }} From 69030e5ba5b87808161779e82c69667d3349aca3 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 15 May 2025 09:27:32 -0700 Subject: [PATCH 143/884] Removed unnecessary post_create template for colab runtime template (#13957) --- mmv1/products/colab/RuntimeTemplate.yaml | 4 ++-- .../post_create/colab_runtime_template.go.tmpl | 10 ---------- 2 files changed, 2 insertions(+), 12 deletions(-) delete mode 100644 mmv1/templates/terraform/post_create/colab_runtime_template.go.tmpl diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index 5fc646f1ad47..31b575a60e59 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -32,6 +32,8 @@ async: type: 'OpAsync' operation: full_url: 'https://{{location}}-aiplatform.googleapis.com/v1/{{op_id}}' + result: + resource_inside_response: true iam_policy: parent_resource_attribute: 'runtime_template' method_name_separator: ':' @@ -40,8 +42,6 @@ iam_policy: import_format: - 'projects/{{project}}/locations/{{location}}/notebookRuntimeTemplates/{{runtime_template}}' - '{{runtime_template}}' -custom_code: - post_create: 'templates/terraform/post_create/colab_runtime_template.go.tmpl' examples: - name: 'colab_runtime_template_basic' primary_resource_id: 'runtime-template' diff --git a/mmv1/templates/terraform/post_create/colab_runtime_template.go.tmpl b/mmv1/templates/terraform/post_create/colab_runtime_template.go.tmpl deleted file mode 100644 index 322c8de2d4eb..000000000000 --- 
a/mmv1/templates/terraform/post_create/colab_runtime_template.go.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -// The operation for this resource contains the generated name that we need -// in order to perform a READ. We need to access the object inside of it as -// a map[string]interface, so let's do that. - -resp := res["response"].(map[string]interface{}) -name := tpgresource.GetResourceNameFromSelfLink(resp["name"].(string)) -log.Printf("[DEBUG] Setting resource name to %s", name) -if err := d.Set("name", name); err != nil { - return fmt.Errorf("Error setting name: %s", err) -} From 76aa678aa27d70aa15463b6fc025cface032e664 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 15 May 2025 10:45:46 -0700 Subject: [PATCH 144/884] Revert "Removed cloud identity group set_computed_name post_create" (#13966) --- mmv1/products/cloudidentity/Group.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/cloudidentity/Group.yaml b/mmv1/products/cloudidentity/Group.yaml index ca7b90719f81..f3b8b25de0b2 100644 --- a/mmv1/products/cloudidentity/Group.yaml +++ b/mmv1/products/cloudidentity/Group.yaml @@ -45,6 +45,7 @@ async: target_occurrences: 10 actions: ['create', 'update', 'delete'] custom_code: + post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_import: 'templates/terraform/custom_import/cloud_identity_group_import.go.tmpl' exclude_sweeper: true examples: From fccaaa9571670e49222f1c1d356730d6eba56c0d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 15 May 2025 11:20:50 -0700 Subject: [PATCH 145/884] Fix unit tests for tgc (#13971) --- .../pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go index 4b5d8876d9c4..d48e3e96abea 100644 --- 
a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go @@ -242,7 +242,6 @@ func TestFakeResourceDataWithMeta_getOkTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", - "force_attach": false, "kms_key_self_link": "test-kms_key_self_link", "mode": "READ_ONLY", "source": "test-source", @@ -322,7 +321,6 @@ func TestFakeResourceDataWithMeta_getOknsetTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", - "force_attach": false, "kms_key_self_link": "", "mode": "", "source": "", From 7984b024f8d1031ba6809ae6ae1f78e0f3b69ad9 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 15 May 2025 11:29:00 -0700 Subject: [PATCH 146/884] Increase BeyondCorp AppGateway timeout from 20m to 40m (#13970) --- mmv1/products/beyondcorp/AppGateway.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mmv1/products/beyondcorp/AppGateway.yaml b/mmv1/products/beyondcorp/AppGateway.yaml index 9a39bc7b9318..1efdaf1504d3 100644 --- a/mmv1/products/beyondcorp/AppGateway.yaml +++ b/mmv1/products/beyondcorp/AppGateway.yaml @@ -29,9 +29,9 @@ create_url: 'projects/{{project}}/locations/{{region}}/appGateways?app_gateway_i # This resources is not updatable immutable: true timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 + insert_minutes: 40 + update_minutes: 40 + delete_minutes: 40 autogen_async: true async: actions: ['create', 'delete', 'update'] @@ -39,9 +39,9 @@ async: operation: base_url: '{{op_id}}' timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 + insert_minutes: 40 + update_minutes: 40 + delete_minutes: 40 result: resource_inside_response: true custom_code: From bed6497f23cb64dd216a3a98b69c22d9214a68c5 
Mon Sep 17 00:00:00 2001 From: wj-chen Date: Thu, 15 May 2025 11:42:51 -0700 Subject: [PATCH 147/884] Add a new resource google_bigquery_row_access_policy (#13932) --- mmv1/products/bigquery/RowAccessPolicy.yaml | 111 ++++++++++++++++++ .../bigquery_row_access_policy_ref.go.tmpl | 11 ++ .../bigquery_row_access_policy_basic.tf.tmpl | 22 ++++ .../bigquery_row_access_policy.go.tmpl | 3 + ...esource_bigquery_row_access_policy_test.go | 96 +++++++++++++++ 5 files changed, 243 insertions(+) create mode 100644 mmv1/products/bigquery/RowAccessPolicy.yaml create mode 100644 mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_row_access_policy_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl create mode 100644 mmv1/third_party/terraform/services/bigquery/resource_bigquery_row_access_policy_test.go diff --git a/mmv1/products/bigquery/RowAccessPolicy.yaml b/mmv1/products/bigquery/RowAccessPolicy.yaml new file mode 100644 index 000000000000..4cbb43179211 --- /dev/null +++ b/mmv1/products/bigquery/RowAccessPolicy.yaml @@ -0,0 +1,111 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: RowAccessPolicy +description: Represents access on a subset of rows on the specified table, defined by its filter predicate. Access to the subset of rows is controlled by its IAM policy. 
+base_url: projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies +self_link: projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies/{{policy_id}} +id_format: projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies/{{policy_id}} +import_format: + - projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies/{{policy_id}} +custom_code: + pre_delete: 'templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl' +examples: + - name: 'bigquery_row_access_policy_basic' + primary_resource_id: 'example' + vars: + dataset_id: 'dataset_id' + table_id: 'table_id' + policy_id: 'policy_id' +parameters: +properties: + - name: rowAccessPolicyReference + type: NestedObject + description: Id path of a row access policy. + required: true + custom_expand: 'templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl' + flatten_object: true + properties: + - name: datasetId + type: String + description: The ID of the dataset containing this row access policy. + required: true + immutable: true + - name: tableId + type: String + description: The ID of the table containing this row access policy. + required: true + immutable: true + - name: policyId + type: String + description: |- + The ID of the row access policy. The ID must contain only + letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum + length is 256 characters. + required: true + immutable: true + - name: filterPredicate + type: String + description: |- + A SQL boolean expression that represents the rows defined by this row + access policy, similar to the boolean expression in a WHERE clause of a + SELECT query on a table. + References to other tables, routines, and temporary functions are not + supported. 
+ + Examples: region="EU" + date_field = CAST('2019-9-27' as DATE) + nullable_field is not NULL + numeric_field BETWEEN 1.0 AND 5.0 + required: true + - name: creationTime + type: String + description: |- + The time when this row access policy was created, in milliseconds since + the epoch. + output: true + - name: lastModifiedTime + type: String + description: |- + The time when this row access policy was last modified, in milliseconds + since the epoch. + output: true + - name: grantees + type: Array + description: |- + Input only. The optional list of iam_member users or groups that specifies the initial + members that the row-level access policy should be created with. + + grantees types: + - "user:alice@example.com": An email address that represents a specific + Google account. + - "serviceAccount:my-other-app@appspot.gserviceaccount.com": An email + address that represents a service account. + - "group:admins@example.com": An email address that represents a Google + group. + - "domain:example.com":The Google Workspace domain (primary) that + represents all the users of that domain. + - "allAuthenticatedUsers": A special identifier that represents all service + accounts and all users on the internet who have authenticated with a Google + Account. This identifier includes accounts that aren't connected to a + Google Workspace or Cloud Identity domain, such as personal Gmail accounts. + Users who aren't authenticated, such as anonymous visitors, aren't + included. + - "allUsers":A special identifier that represents anyone who is on + the internet, including authenticated and unauthenticated users. Because + BigQuery requires authentication before a user can access the service, + allUsers includes only authenticated users. 
+ item_type: + type: String + ignore_read: true diff --git a/mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl b/mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl new file mode 100644 index 000000000000..56c8bbc3cdf8 --- /dev/null +++ b/mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl @@ -0,0 +1,11 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + transformed := make(map[string]interface{}) + + project, _ := tpgresource.GetProject(d, config) + transformed["projectId"] = project + transformed["datasetId"] = d.Get("dataset_id") + transformed["tableId"] = d.Get("table_id") + transformed["policyId"] = d.Get("policy_id") + + return transformed, nil +} diff --git a/mmv1/templates/terraform/examples/bigquery_row_access_policy_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_row_access_policy_basic.tf.tmpl new file mode 100644 index 000000000000..0f0b303a89bd --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_row_access_policy_basic.tf.tmpl @@ -0,0 +1,22 @@ +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { + dataset_id = "{{index $.Vars "dataset_id"}}" + location = "US" +} + +resource "google_bigquery_table" "{{$.PrimaryResourceId}}" { + deletion_protection = false + + dataset_id = google_bigquery_dataset.{{$.PrimaryResourceId}}.dataset_id + table_id = "{{index $.Vars "table_id"}}" +} + +resource "google_bigquery_row_access_policy" "{{$.PrimaryResourceId}}" { + dataset_id = google_bigquery_dataset.{{$.PrimaryResourceId}}.dataset_id + table_id = google_bigquery_table.{{$.PrimaryResourceId}}.table_id + policy_id = "{{index $.Vars "policy_id"}}" + + filter_predicate = "nullable_field is not NULL" + grantees = [ + "domain:google.com" + ] +} diff --git a/mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl 
b/mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl new file mode 100644 index 000000000000..ff0401b721fb --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl @@ -0,0 +1,3 @@ +// Always force-delete. +obj = make(map[string]interface{}) +obj["force"] = true diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_row_access_policy_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_row_access_policy_test.go new file mode 100644 index 000000000000..8076a61f68dc --- /dev/null +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_row_access_policy_test.go @@ -0,0 +1,96 @@ +package bigquery_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccBigQueryRowAccessPolicy_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryRowAccessPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryRowAccessPolicy_full(context), + }, + { + ResourceName: "google_bigquery_row_access_policy.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"grantees"}, + }, + { + Config: testAccBigQueryRowAccessPolicy_update(context), + }, + { + ResourceName: "google_bigquery_row_access_policy.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"grantees"}, + }, + }, + }) +} + +func testAccBigQueryRowAccessPolicy_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_dataset_id%{random_suffix}" + location = "US" 
+} + +resource "google_bigquery_table" "test" { + deletion_protection = false + + dataset_id = google_bigquery_dataset.test.dataset_id + table_id = "tf_test_table_id%{random_suffix}" +} + +resource "google_bigquery_row_access_policy" "test" { + dataset_id = google_bigquery_dataset.test.dataset_id + table_id = google_bigquery_table.test.table_id + policy_id = "tf_test_policy_id%{random_suffix}" + + filter_predicate = "nullable_field is not NULL" + grantees = [ + "domain:google.com" + ] +} +`, context) +} + +func testAccBigQueryRowAccessPolicy_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_dataset_id%{random_suffix}" + location = "US" +} + +resource "google_bigquery_table" "test" { + deletion_protection = false + + dataset_id = google_bigquery_dataset.test.dataset_id + table_id = "tf_test_table_id%{random_suffix}" +} + +resource "google_bigquery_row_access_policy" "test" { + dataset_id = google_bigquery_dataset.test.dataset_id + table_id = google_bigquery_table.test.table_id + policy_id = "tf_test_policy_id%{random_suffix}" + + filter_predicate = "nullable_field is NULL" + grantees = [ + "group:googlers@google.com" + ] +} +`, context) +} From d7913b20a27e9afa445a3a15f2194ab622501e9f Mon Sep 17 00:00:00 2001 From: Lingkai Shen Date: Thu, 15 May 2025 15:33:41 -0400 Subject: [PATCH 148/884] Fix typos (#13968) --- .../examples/firebase_app_hosting_backend_github.tf.tmpl | 1 - .../examples/firebase_app_hosting_build_github.tf.tmpl | 4 ---- 2 files changed, 5 deletions(-) diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_github.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_github.tf.tmpl index d167fca53ef5..f5a0318a1aba 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_github.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_github.tf.tmpl @@ -37,7 +37,6 @@ resource 
"google_developer_connect_git_repository_link" "my-repository" { resource "google_project_service_identity" "devconnect-p4sa" { provider = google-beta - provider = google-beta project = "{{index $.TestEnvVars "project_id"}}" service = "developerconnect.googleapis.com" } diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_build_github.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_build_github.tf.tmpl index eebc21d555cc..50d501770735 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_build_github.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_build_github.tf.tmpl @@ -42,10 +42,6 @@ resource "google_developer_connect_git_repository_link" "my-repository" { project = "{{index $.TestEnvVars "project_id"}}" location = "us-central1" - provider = google-beta - - provider = google-beta - project = "{{index $.TestEnvVars "project_id"}}" service = "developerconnect.googleapis.com" } From 9b6fd792c0837be5388d1f2a3f62e69486554e32 Mon Sep 17 00:00:00 2001 From: Kian Jones <11655409+kianjones9@users.noreply.github.com> Date: Thu, 15 May 2025 13:12:30 -0700 Subject: [PATCH 149/884] Update using_gke_with_terraform.html.markdown (#13961) --- .../website/docs/guides/using_gke_with_terraform.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown b/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown index da5c7b3cd87a..c9736bd43d90 100644 --- a/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown @@ -59,7 +59,7 @@ provider "kubernetes" { ) } ``` -Although the above can result in authentication errors, over time, as the token recorded in the google_client_cofig data resource is short lived (thus it expires) and it's stored in state. 
Fortunately, the [kubernetes provider can accept valid credentials from an exec-based plugin](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#exec-plugins) to fetch a new token before each Terraform operation (so long as you have the [gke-cloud-auth-plugin for kubectl installed](https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke)), like so: +Although the above can result in authentication errors, over time, as the token recorded in the google_client_config data resource is short lived (thus it expires) and it's stored in state. Fortunately, the [kubernetes provider can accept valid credentials from an exec-based plugin](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#exec-plugins) to fetch a new token before each Terraform operation (so long as you have the [gke-cloud-auth-plugin for kubectl installed](https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke)), like so: ```hcl # Retrieve an access token as the Terraform runner From ca7260491f50d9da490fc5176de1b5f0facd7e48 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Thu, 15 May 2025 14:31:12 -0700 Subject: [PATCH 150/884] Image error (#13863) Co-authored-by: Riley Karson --- .../terraform/acctest/provider_test_utils.go | 8 ++++ .../terraform/envvar/envvar_utils.go | 9 ++++ .../terraform/provider/provider.go.tmpl | 1 - .../universe/universe_domain_compute_test.go | 47 +++++++++++++++++++ .../terraform/services/compute/image.go | 3 ++ 5 files changed, 67 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/acctest/provider_test_utils.go b/mmv1/third_party/terraform/acctest/provider_test_utils.go index 81bc0fd80a1d..0a13f0206aa1 100644 --- a/mmv1/third_party/terraform/acctest/provider_test_utils.go +++ b/mmv1/third_party/terraform/acctest/provider_test_utils.go @@ -40,6 +40,14 @@ func GoogleProviderConfig(t *testing.T) 
*transport_tpg.Config { sdkProvider := provider.Provider() rc := terraform.ResourceConfig{} + + // `universe_domain` must be specified through config (i.e. unlike most provider settings there's no environment variable), and we check the value matches the credentials during provider initilization + // In the test environment we seed the value through a test-only environment variable, and we need to pre-seed a value in ResourceConfig as if it was in config to pass the check + universeDomain := envvar.GetTestUniverseDomainFromEnv(t) + if universeDomain != "" && universeDomain != "googleapis.com" { + rc.Config = make(map[string]interface{}) + rc.Config["universe_domain"] = universeDomain + } sdkProvider.Configure(context.Background(), &rc) return sdkProvider.Meta().(*transport_tpg.Config) } diff --git a/mmv1/third_party/terraform/envvar/envvar_utils.go b/mmv1/third_party/terraform/envvar/envvar_utils.go index 537242cef5e9..dbd6b4707167 100644 --- a/mmv1/third_party/terraform/envvar/envvar_utils.go +++ b/mmv1/third_party/terraform/envvar/envvar_utils.go @@ -86,6 +86,10 @@ var UniverseDomainEnvVars = []string{ "GOOGLE_UNIVERSE_DOMAIN", } +var ProjectPrefixEnvVars = []string{ + "GOOGLE_PROJECT_PREFIX", +} + // This is the billing account that will be charged for the infrastructure used during testing. For // that reason, it is also the billing account used for creating new projects. var BillingAccountEnvVars = []string{ @@ -146,6 +150,11 @@ func GetTestUniverseDomainFromEnv(t *testing.T) string { return transport_tpg.MultiEnvSearch(UniverseDomainEnvVars) } +// Project Prefix of different universes +func GetProjectPrefixFromEnv() string { + return transport_tpg.MultiEnvSearch(ProjectPrefixEnvVars) +} + // AccTestPreCheck ensures at least one of the region env variables is set. 
func GetTestRegionFromEnv() string { return transport_tpg.MultiEnvSearch(RegionEnvVars) diff --git a/mmv1/third_party/terraform/provider/provider.go.tmpl b/mmv1/third_party/terraform/provider/provider.go.tmpl index d4f6bf90270b..76f0eb3f7246 100644 --- a/mmv1/third_party/terraform/provider/provider.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider.go.tmpl @@ -426,7 +426,6 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if err := config.LoadAndValidate(stopCtx); err != nil { return nil, diag.FromErr(err) } - // Verify that universe domains match between credentials and configuration if v, ok := d.GetOk("universe_domain"); ok { if config.UniverseDomain == "" && v.(string) != "googleapis.com" { // v can't be "", as it wouldn't pass `ok` above diff --git a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go index ea2703508e3b..89d1d5d39bcd 100644 --- a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go +++ b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go @@ -33,6 +33,31 @@ func TestAccUniverseDomainDisk(t *testing.T) { }) } +func TestAccUniverseDomainDiskImage(t *testing.T) { + + universeDomain := envvar.GetTestUniverseDomainFromEnv(t) + zone := envvar.GetTestZoneFromEnv() + prefix := envvar.GetProjectPrefixFromEnv() + image_project := "" + + if prefix != "" { + image_project = prefix + ":debian-cloud" + } else { + image_project = "debian-cloud" + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeDiskDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccUniverseDomain_basic_disk_image(universeDomain, zone, image_project), + }, + }, + }) +} + func TestAccDefaultUniverseDomainDisk(t *testing.T) { universeDomain 
:= "googleapis.com" @@ -85,6 +110,28 @@ resource "google_compute_instance_template" "instance_template" { `, universeDomain) } +func testAccUniverseDomain_basic_disk_image(universeDomain, zone, image_project string) string { + return fmt.Sprintf(` +provider "google" { + universe_domain = "%s" +} + +data "google_compute_images" "debian" { + project = "%s" + filter = "name=debian-12*" +} + +resource "google_compute_disk" "primary" { + name = "async-test-disk" + type = "pd-ssd" + zone = "%s" + + physical_block_size_bytes = 4096 + image = "projects/%s/global/images/${data.google_compute_images.debian.images[0].name}" +} +`, universeDomain, image_project, zone, image_project) +} + func testAccCheckComputeDiskDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for name, rs := range s.RootModule().Resources { diff --git a/mmv1/third_party/terraform/services/compute/image.go b/mmv1/third_party/terraform/services/compute/image.go index e35942046e24..bc72330267f6 100644 --- a/mmv1/third_party/terraform/services/compute/image.go +++ b/mmv1/third_party/terraform/services/compute/image.go @@ -106,6 +106,9 @@ func ResolveImage(c *transport_tpg.Config, project, name, userAgent string) (str break } } + if c.UniverseDomain != "" && c.UniverseDomain != "googleapis.com" { + resolveImageLink = regexp.MustCompile(fmt.Sprintf("^https://compute.%s/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", c.UniverseDomain, verify.ProjectRegex, resolveImageImageRegex)) + } switch { case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz return name, nil From 40c0c275976f3d87b1dd20b6a72b03cdf1399e64 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 15 May 2025 16:26:10 -0700 Subject: [PATCH 151/884] Revert "Removed cloud identity group membership set_computed_name post_create" (#13983) --- mmv1/products/cloudidentity/GroupMembership.yaml | 1 + 1 file changed, 1 
insertion(+) diff --git a/mmv1/products/cloudidentity/GroupMembership.yaml b/mmv1/products/cloudidentity/GroupMembership.yaml index 835595d2b12a..9b0376a33642 100644 --- a/mmv1/products/cloudidentity/GroupMembership.yaml +++ b/mmv1/products/cloudidentity/GroupMembership.yaml @@ -37,6 +37,7 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: + post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_update: 'templates/terraform/custom_update/cloud_identity_group_membership.go.tmpl' post_import: 'templates/terraform/post_import/cloud_identity_group_membership.go.tmpl' exclude_sweeper: true From 0ba8dbde09fe95dc57e72a30c8a0cb8ffabb80b4 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 15 May 2025 16:39:21 -0700 Subject: [PATCH 152/884] Manage go.mod and go.sum of tgc (#13976) --- mmv1/third_party/tgc_next/go.mod | 119 +++++++++ mmv1/third_party/tgc_next/go.sum | 409 +++++++++++++++++++++++++++++++ 2 files changed, 528 insertions(+) create mode 100644 mmv1/third_party/tgc_next/go.mod create mode 100644 mmv1/third_party/tgc_next/go.sum diff --git a/mmv1/third_party/tgc_next/go.mod b/mmv1/third_party/tgc_next/go.mod new file mode 100644 index 000000000000..6ea26054c30c --- /dev/null +++ b/mmv1/third_party/tgc_next/go.mod @@ -0,0 +1,119 @@ +module github.com/GoogleCloudPlatform/terraform-google-conversion/v6 + +go 1.23.0 + +toolchain go1.23.5 + +require ( + github.com/apparentlymart/go-cidr v1.1.0 + github.com/google/go-cmp v0.7.0 + github.com/hashicorp/errwrap v1.1.0 + github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 + github.com/hashicorp/hcl v1.0.0 + github.com/hashicorp/hcl/v2 v2.23.0 + github.com/hashicorp/terraform-json v0.24.0 + github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 + github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250515195612-fa096fe771b5 + github.com/mitchellh/go-homedir v1.1.0 + github.com/pkg/errors v0.9.1 + github.com/stretchr/testify v1.10.0 + github.com/zclconf/go-cty v1.16.2 
+ go.uber.org/zap v1.27.0 + google.golang.org/api v0.229.0 +) + +require ( + cloud.google.com/go/bigtable v1.37.0 + github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 + github.com/hashicorp/go-cleanhttp v0.5.2 + github.com/hashicorp/terraform-plugin-framework v1.13.0 + github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 + github.com/sirupsen/logrus v1.9.3 + github.com/spf13/cobra v1.8.1 + golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 + golang.org/x/oauth2 v0.29.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e + google.golang.org/grpc v1.71.1 +) + +require ( + bitbucket.org/creachadair/stringset v0.0.11 // indirect + cel.dev/expr v0.19.2 // indirect + cloud.google.com/go v0.120.0 // indirect + cloud.google.com/go/auth v0.16.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.6.0 // indirect + cloud.google.com/go/iam v1.5.0 // indirect + cloud.google.com/go/longrunning v0.6.6 // indirect + cloud.google.com/go/monitoring v1.24.1 // indirect + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 // indirect + github.com/agext/levenshtein v1.2.3 // indirect + github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect + github.com/cenkalti/backoff v2.2.1+incompatible // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect + github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect + github.com/fatih/color v1.16.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gammazero/deque v0.2.1 // indirect + github.com/gammazero/workerpool v1.1.3 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/golang/glog v1.2.4 // indirect + github.com/golang/protobuf v1.5.4 
// indirect + github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/gax-go/v2 v2.14.1 // indirect + github.com/hashicorp/go-hclog v1.6.3 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/hashicorp/go-version v1.7.0 // indirect + github.com/hashicorp/logutils v1.0.0 // indirect + github.com/hashicorp/terraform-plugin-go v0.26.0 // indirect + github.com/hashicorp/terraform-plugin-log v0.9.0 // indirect + github.com/hashicorp/terraform-plugin-testing v1.5.1 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/kylelemons/godebug v1.1.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect + github.com/mitchellh/hashstructure v1.1.0 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/mitchellh/reflectwalk v1.0.2 // indirect + github.com/oklog/run v1.1.0 // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect + github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect + go.opentelemetry.io/otel v1.35.0 // indirect + 
go.opentelemetry.io/otel/metric v1.35.0 // indirect + go.opentelemetry.io/otel/sdk v1.35.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect + go.opentelemetry.io/otel/trace v1.35.0 // indirect + go.uber.org/multierr v1.10.0 // indirect + go4.org/netipx v0.0.0-20231129151722-fdeea329fbba // indirect + golang.org/x/crypto v0.37.0 // indirect + golang.org/x/mod v0.22.0 // indirect + golang.org/x/net v0.39.0 // indirect + golang.org/x/sync v0.13.0 // indirect + golang.org/x/sys v0.32.0 // indirect + golang.org/x/text v0.24.0 // indirect + golang.org/x/time v0.11.0 // indirect + golang.org/x/tools v0.22.0 // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/mmv1/third_party/tgc_next/go.sum b/mmv1/third_party/tgc_next/go.sum new file mode 100644 index 000000000000..5bcb088ceb12 --- /dev/null +++ b/mmv1/third_party/tgc_next/go.sum @@ -0,0 +1,409 @@ +bitbucket.org/creachadair/stringset v0.0.11 h1:6Sv4CCv14Wm+OipW4f3tWOb0SQVpBDLW0knnJqUnmZ8= +bitbucket.org/creachadair/stringset v0.0.11/go.mod h1:wh0BHewFe+j0HrzWz7KcGbSNpFzWwnpmgPRlB57U5jU= +cel.dev/expr v0.19.2 h1:V354PbqIXr9IQdwy4SYA4xa0HXaWq1BUPAGzugBY5V4= +cel.dev/expr v0.19.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= +cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= +cloud.google.com/go/auth v0.16.0 h1:Pd8P1s9WkcrBE2n/PhAwKsdrR35V3Sg2II9B+ndM3CU= +cloud.google.com/go/auth v0.16.0/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= +cloud.google.com/go/auth/oauth2adapt v0.2.8 
h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= +cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= +cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= +cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/iam v1.5.0 h1:QlLcVMhbLGOjRcGe6VTGGTyQib8dRLK2B/kYNV0+2xs= +cloud.google.com/go/iam v1.5.0/go.mod h1:U+DOtKQltF/LxPEtcDLoobcsZMilSRwR7mgNL7knOpo= +cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= +cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= +cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= +cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= +github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= +github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= +github.com/apparentlymart/go-textseg/v12 
v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= +github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= +github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= +github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dnaeon/go-vcr v1.0.1 h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY= +github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= 
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= +github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= +github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= +github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/gammazero/deque v0.2.1 h1:qSdsbG6pgp6nL7A0+K/B7s12mcCY/5l5SIUpMOl+dC0= +github.com/gammazero/deque v0.2.1/go.mod h1:LFroj8x4cMYCukHJDbxFCkT+r9AndaJnFMuZDV34tuU= +github.com/gammazero/workerpool v1.1.3 h1:WixN4xzukFoN0XSeXF6puqEqFTl2mECI9S6W44HWy9Q= +github.com/gammazero/workerpool 
v1.1.3/go.mod h1:wPjyBLDbyKnUn2XwwyD3EEwo9dHutia9/fwNmSHWACc= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= +github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= 
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= +github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= +github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= +github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= +github.com/hashicorp/errwrap v1.0.0/go.mod 
h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= +github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= +github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog= +github.com/hashicorp/go-plugin v1.6.2/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hc-install v0.9.1 
h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ= +github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= +github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= +github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64= +github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ= +github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= +github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= +github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= +github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= +github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 h1:HOjBuMbOEzl7snOdOoUfE2Jgeto6JOjLVQ39Ls2nksc= +github.com/hashicorp/terraform-plugin-framework-validators v0.12.0/go.mod h1:jfHGE/gzjxYz6XoUwi/aYiiKrJDeutQNUtGQXkaHklg= +github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M= +github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY= +github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= +github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= +github.com/hashicorp/terraform-plugin-mux v0.17.0 
h1:/J3vv3Ps2ISkbLPiZOLspFcIZ0v5ycUXCEQScudGCCw= +github.com/hashicorp/terraform-plugin-mux v0.17.0/go.mod h1:yWuM9U1Jg8DryNfvCp+lH70WcYv6D8aooQxxxIzFDsE= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 h1:7/iejAPyCRBhqAg3jOx+4UcAhY0A+Sg8B+0+d/GxSfM= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0/go.mod h1:TiQwXAjFrgBf5tg5rvBRz8/ubPULpU0HjSaVi5UoJf8= +github.com/hashicorp/terraform-plugin-testing v1.5.1 h1:T4aQh9JAhmWo4+t1A7x+rnxAJHCDIYW9kXyo4sVO92c= +github.com/hashicorp/terraform-plugin-testing v1.5.1/go.mod h1:dg8clO6K59rZ8w9EshBmDp1CxTIPu3yA4iaDpX1h5u0= +github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250515195612-fa096fe771b5 h1:PY+Jbw367pf9I5cfUQhhDla9pmoysVMQZMWljyqL1aw= +github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250515195612-fa096fe771b5/go.mod h1:5a610wnrgp3L1ejcgWda9y9WBVJ1QbcjZSX7OChO2uU= +github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA= +github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU= +github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= +github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= +github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= +github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0/go.mod 
h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= 
+github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= +github.com/mitchellh/hashstructure v1.1.0 h1:P6P1hdjqAAknpY/M1CGipelZgp+4y9ja9kmUZPXP+H0= +github.com/mitchellh/hashstructure v1.1.0/go.mod h1:xUDAozZz0Wmdiufv0uyhnHkUTN6/6d8ulp4AwfLKrmA= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= +github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.13.1 
h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= +github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack/v5 v5.4.1 
h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= +github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70= +github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= +github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= +go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= +go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= 
+go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= +go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= +go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= +go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= +go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= +go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= +go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +go4.org/netipx v0.0.0-20231129151722-fdeea329fbba h1:0b9z3AuHCjxk0x/opv64kcgZLBseWJUpBw5I82+2U4M= +go4.org/netipx v0.0.0-20231129151722-fdeea329fbba/go.mod h1:PLyyIXexvUFg3Owu6p/WfdlivPbZJsZdgWZlrGope/Y= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= 
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 h1:ESSUROHIBHg7USnszlcdmjBEwdMj9VUvU+OPk4yl2mc= +golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= +golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term 
v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= +golang.org/x/tools v0.22.0/go.mod 
h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.229.0 h1:p98ymMtqeJ5i3lIBMj5MpR9kzIIgzpHHh8vQ+vgAzx8= +google.golang.org/api v0.229.0/go.mod h1:wyDfmq5g1wYJWn29O22FDWN48P7Xcz0xz+LBpptYvB0= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= +google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e h1:UdXH7Kzbj+Vzastr5nVfccbmFsmYNygVLSPk1pEfDoY= +google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e/go.mod h1:085qFyf2+XaZlRdCgKNCIZ3afY2p4HHZdoIRpId8F4A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.71.1 h1:ffsFWr7ygTUscGPI0KKK6TLrGz0476KUvvsbqWK0rPI= +google.golang.org/grpc v1.71.1/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 
v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= From 2e121e0c1f59ee355c366e922e6a977a7d90bbd7 Mon Sep 17 00:00:00 2001 From: uaditya70 Date: Fri, 16 May 2025 21:23:29 +0530 Subject: [PATCH 153/884] Main (#13943) --- mmv1/products/alloydb/Instance.yaml | 16 ++++ .../alloydb/resource_alloydb_instance_test.go | 93 +++++++++++++++++++ 2 files changed, 109 insertions(+) diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index 08c12b14c454..5457b33c4df1 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -200,6 +200,22 @@ properties: - 'AVAILABILITY_TYPE_UNSPECIFIED' - 'ZONAL' - 'REGIONAL' + - name: 'activationPolicy' + type: Enum + description: | + 'Specifies whether an instance needs to spin up. Once the instance is + active, the activation policy can be updated to the `NEVER` to stop the + instance. Likewise, the activation policy can be updated to `ALWAYS` to + start the instance. + There are restrictions around when an instance can/cannot be activated (for + example, a read pool instance should be stopped before stopping primary + etc.). Please refer to the API documentation for more details. + Possible values are: `ACTIVATION_POLICY_UNSPECIFIED`, `ALWAYS`, `NEVER`.' 
+ default_from_api: true + enum_values: + - 'ACTIVATION_POLICY_UNSPECIFIED' + - 'ALWAYS' + - 'NEVER' - name: 'instanceType' type: Enum description: | diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index e7ae2142d3ba..d138851f7d33 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -128,6 +128,99 @@ func TestAccAlloydbInstance_createInstanceWithMandatoryFields(t *testing.T) { }) } +// This test passes if we are able to create a primary instance STOP it and then START it back again +func TestAccAlloydbInstance_stopstart(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-clientconnectionconfig") + + context := map[string]interface{}{ + "random_suffix": suffix, + "network_name": networkName, + } + + contextStop := map[string]interface{}{ + "random_suffix": suffix, + "network_name": networkName, + "activation_policy": "NEVER", + } + + contextStart := map[string]interface{}{ + "random_suffix": suffix, + "network_name": networkName, + "activation_policy": "ALWAYS", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckAlloydbInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccAlloydbInstance_createInstanceWithMandatoryFields(context), + }, + { + ResourceName: "google_alloydb_instance.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "instance_id", "reconciling", "update_time"}, + }, + { + Config: testAccAlloydbInstance_updateActivationPolicy(contextStop), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr("google_alloydb_instance.default", "activation_policy", "NEVER"), + resource.TestCheckResourceAttr("google_alloydb_instance.default", "state", "STOPPED"), + ), + }, + { + ResourceName: "google_alloydb_instance.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "instance_id", "reconciling", "update_time", "labels", "terraform_labels"}, + }, + { + Config: testAccAlloydbInstance_updateActivationPolicy(contextStart), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_alloydb_instance.default", "activation_policy", "ALWAYS"), + resource.TestCheckResourceAttr("google_alloydb_instance.default", "state", "READY"), + ), + }, + { + ResourceName: "google_alloydb_instance.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "instance_id", "reconciling", "update_time", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccAlloydbInstance_updateActivationPolicy(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_alloydb_instance" "default" { + cluster = google_alloydb_cluster.default.name + instance_id = "tf-test-alloydb-instance%{random_suffix}" + instance_type = "PRIMARY" + activation_policy = "%{activation_policy}" +} + +resource "google_alloydb_cluster" "default" { + cluster_id = "tf-test-alloydb-cluster%{random_suffix}" + location = "us-central1" + network_config { + network = data.google_compute_network.default.id + } +} + +data "google_project" "project" {} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} + func testAccAlloydbInstance_createInstanceWithMandatoryFields(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_alloydb_instance" "default" { From 8b1c33b8e3ba8ad0a81c0393f087bf90db00c087 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 16 May 2025 10:14:20 -0700 
Subject: [PATCH 154/884] Removed unused template (#13978) --- mmv1/templates/terraform/post_create/group.tmpl | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 mmv1/templates/terraform/post_create/group.tmpl diff --git a/mmv1/templates/terraform/post_create/group.tmpl b/mmv1/templates/terraform/post_create/group.tmpl deleted file mode 100644 index 158d4b5021e6..000000000000 --- a/mmv1/templates/terraform/post_create/group.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -d.SetId(flattenMonitoringGroupName(res["name"]).(string)) -if err := d.Set("name", flattenMonitoringGroupName(res["name"])); err != nil { - return fmt.Errorf("Error setting name: %s", err) -} \ No newline at end of file From 751e847252ee9384e1520e230ce94512544a312b Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Fri, 16 May 2025 10:16:30 -0700 Subject: [PATCH 155/884] Fixed permadiff on `product` in `google_gemini_logging_setting_binding` (#13973) --- mmv1/products/gemini/LoggingSettingBinding.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/gemini/LoggingSettingBinding.yaml b/mmv1/products/gemini/LoggingSettingBinding.yaml index 57b29f646e9c..1793330954de 100644 --- a/mmv1/products/gemini/LoggingSettingBinding.yaml +++ b/mmv1/products/gemini/LoggingSettingBinding.yaml @@ -91,6 +91,7 @@ properties: Product type of the setting binding. 
enum_values: - 'GEMINI_CODE_ASSIST' + default_from_api: true - name: name type: String description: |- From dec40a5473fa4af1f6ac70a24b44b8f0edeca36b Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Fri, 16 May 2025 11:28:56 -0700 Subject: [PATCH 156/884] Create stub product.go files for products (#13980) --- mmv1/api/product.go | 8 +++++++ mmv1/provider/template_data.go | 9 +++++++- mmv1/provider/terraform.go | 16 ++++++++++++++ mmv1/provider/terraform_tgc.go | 1 + mmv1/provider/terraform_tgc_cai2hcl.go | 1 + mmv1/provider/terraform_tgc_next.go | 1 + mmv1/templates/terraform/product.go.tmpl | 22 +++++++++++++++++++ .../provider/provider_mmv1_resources.go.tmpl | 11 ++++++++++ 8 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/product.go.tmpl diff --git a/mmv1/api/product.go b/mmv1/api/product.go index 0a42267653a1..10b93a67bf88 100644 --- a/mmv1/api/product.go +++ b/mmv1/api/product.go @@ -14,6 +14,7 @@ package api import ( + "fmt" "log" "reflect" "regexp" @@ -68,6 +69,9 @@ type Product struct { LegacyName string `yaml:"legacy_name,omitempty"` ClientName string `yaml:"client_name,omitempty"` + + // The compiler to generate the downstream files, for example "terraformgoogleconversion-codegen". 
+ Compiler string `yaml:"-"` } func (p *Product) UnmarshalYAML(unmarshal func(any) error) error { @@ -133,6 +137,10 @@ func (p *Product) SetDisplayName() { } } +func (p *Product) SetCompiler(t string) { + p.Compiler = fmt.Sprintf("%s-codegen", strings.ToLower(t)) +} + // ==================== // Version-related methods // ==================== diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 5db15f6e51fc..79c51dff50ce 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -24,7 +24,6 @@ import ( "path/filepath" "strings" "sync" - "text/template" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" @@ -94,6 +93,14 @@ func (td *TemplateData) GenerateMetadataFile(filePath string, resource api.Resou td.GenerateFile(filePath, templatePath, resource, false, templates...) } +func (td *TemplateData) GenerateProductFile(filePath string, product api.Product) { + templatePath := "templates/terraform/product.go.tmpl" + templates := []string{ + templatePath, + } + td.GenerateFile(filePath, templatePath, product, true, templates...) 
+} + func (td *TemplateData) GenerateOperationFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/operation.go.tmpl" templates := []string{ diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 877207535aac..cc6123e1b734 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -61,6 +61,7 @@ func NewTerraform(product *api.Product, versionName string, startTime time.Time) } t.Product.SetPropertiesBasedOnVersion(&t.Version) + t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) @@ -77,6 +78,7 @@ func (t Terraform) Generate(outputFolder, productPath, resourceToGenerate string t.GenerateObjects(outputFolder, resourceToGenerate, generateCode, generateDocs) if generateCode { + t.GenerateProduct(outputFolder) t.GenerateOperation(outputFolder) } } @@ -186,6 +188,20 @@ func (t *Terraform) GenerateResourceSweeper(object api.Resource, templateData Te templateData.GenerateSweeperFile(targetFilePath, object) } +// GenerateProduct creates the product.go file for a given service directory. +// This will be used to seed the directory and add a package-level comment +// specific to the product. 
+func (t *Terraform) GenerateProduct(outputFolder string) { + targetFolder := path.Join(outputFolder, t.FolderName(), "services", t.Product.ApiName) + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + + targetFilePath := path.Join(targetFolder, "product.go") + templateData := NewTemplateData(outputFolder, t.TargetVersionName) + templateData.GenerateProductFile(targetFilePath, *t.Product) +} + func (t *Terraform) GenerateOperation(outputFolder string) { asyncObjects := google.Select(t.Product.Objects, func(o *api.Resource) bool { return o.AutogenAsync diff --git a/mmv1/provider/terraform_tgc.go b/mmv1/provider/terraform_tgc.go index 2b6a38e9dba4..3312dddb4474 100644 --- a/mmv1/provider/terraform_tgc.go +++ b/mmv1/provider/terraform_tgc.go @@ -60,6 +60,7 @@ func NewTerraformGoogleConversion(product *api.Product, versionName string, star } t.Product.SetPropertiesBasedOnVersion(&t.Version) + t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) diff --git a/mmv1/provider/terraform_tgc_cai2hcl.go b/mmv1/provider/terraform_tgc_cai2hcl.go index 91fd12498cbc..780344278f2c 100644 --- a/mmv1/provider/terraform_tgc_cai2hcl.go +++ b/mmv1/provider/terraform_tgc_cai2hcl.go @@ -44,6 +44,7 @@ func NewCaiToTerraformConversion(product *api.Product, versionName string, start } t.Product.SetPropertiesBasedOnVersion(&t.Version) + t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 25b5ca70b334..663c1959e196 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -50,6 +50,7 @@ func NewTerraformGoogleConversionNext(product *api.Product, versionName 
string, } t.Product.SetPropertiesBasedOnVersion(&t.Version) + t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) diff --git a/mmv1/templates/terraform/product.go.tmpl b/mmv1/templates/terraform/product.go.tmpl new file mode 100644 index 000000000000..98f1033ace91 --- /dev/null +++ b/mmv1/templates/terraform/product.go.tmpl @@ -0,0 +1,22 @@ +{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 +{{ end }} +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +// Package {{ lower $.Name }} contains resources, datasources, etc. for the {{ lower $.DisplayName }} service. 
+package {{ lower $.Name }} + +const ProductName = "{{ lower $.Name }}" diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index d7d322ab93d8..5e601cd64c54 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -508,3 +508,14 @@ var handwrittenIAMResources = map[string]*schema.Resource{ "google_service_account_iam_policy": tpgiamresource.ResourceIamPolicy(resourcemanager.IamServiceAccountSchema, resourcemanager.NewServiceAccountIamUpdater, resourcemanager.ServiceAccountIdParseFunc), // ####### END non-generated IAM resources ########### } + +// UseGeneratedProducts uses every generated product to avoid "imported and not used" errors. +// This allows developers to define a product without any resources, datasources, or other files. +// +// This avoids goimports which takes a very long time, or needing to know if there are any other +// files in a given package when generating this file (which is done in parallel) +func UseGeneratedProducts() { + {{- range $service := $.GetMmv1ServicesInVersion $.Products }} + var _ = {{$service}}.ProductName + {{- end }} +} From 70363f733382dad9c0195464e17924b32bb3fec2 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Fri, 16 May 2025 13:45:45 -0700 Subject: [PATCH 157/884] rename GOOGLE_PROJECT_PREFIX (#13988) --- mmv1/third_party/terraform/envvar/envvar_utils.go | 4 ++-- .../provider/universe/universe_domain_compute_test.go | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/envvar/envvar_utils.go b/mmv1/third_party/terraform/envvar/envvar_utils.go index dbd6b4707167..ff0856ee38e2 100644 --- a/mmv1/third_party/terraform/envvar/envvar_utils.go +++ b/mmv1/third_party/terraform/envvar/envvar_utils.go @@ -87,7 +87,7 @@ var UniverseDomainEnvVars = 
[]string{ } var ProjectPrefixEnvVars = []string{ - "GOOGLE_PROJECT_PREFIX", + "GOOGLE_UNIVERSE_PROJECT_PREFIX", } // This is the billing account that will be charged for the infrastructure used during testing. For @@ -151,7 +151,7 @@ func GetTestUniverseDomainFromEnv(t *testing.T) string { } // Project Prefix of different universes -func GetProjectPrefixFromEnv() string { +func GetUniverseProjectPrefixFromEnv() string { return transport_tpg.MultiEnvSearch(ProjectPrefixEnvVars) } diff --git a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go index 89d1d5d39bcd..ad223abc2916 100644 --- a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go +++ b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go @@ -37,7 +37,7 @@ func TestAccUniverseDomainDiskImage(t *testing.T) { universeDomain := envvar.GetTestUniverseDomainFromEnv(t) zone := envvar.GetTestZoneFromEnv() - prefix := envvar.GetProjectPrefixFromEnv() + prefix := envvar.GetUniverseProjectPrefixFromEnv() image_project := "" if prefix != "" { From 11db0294f645f3a022d73264e1514d711d21de01 Mon Sep 17 00:00:00 2001 From: pujawadare Date: Fri, 16 May 2025 21:16:26 +0000 Subject: [PATCH 158/884] fix(storage): Remove flaky test for bucket's location change (#13912) --- .../storage/resource_storage_bucket_test.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go index 4760f072487f..ec66e8ea875d 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go @@ -695,21 +695,6 @@ func TestAccStorageBucket_storageClass(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: 
[]string{"force_destroy"}, }, - { - Config: testAccStorageBucket_storageClass(bucketName, "REGIONAL", "US-CENTRAL1"), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageBucketExists( - t, "google_storage_bucket.bucket", bucketName, &updated), - // Location change causes recreate - testAccCheckStorageBucketWasRecreated(&updated, &bucket), - ), - }, - { - ResourceName: "google_storage_bucket.bucket", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy"}, - }, }, }) } From 17fcd66e3416029f3f758e7d0803f34f9c65c0cf Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 16 May 2025 14:18:29 -0700 Subject: [PATCH 159/884] Removed post_create for resourcemanager lien (#13982) --- mmv1/products/resourcemanager/Lien.yaml | 1 - mmv1/templates/terraform/post_create/lien.tmpl | 11 ----------- 2 files changed, 12 deletions(-) delete mode 100644 mmv1/templates/terraform/post_create/lien.tmpl diff --git a/mmv1/products/resourcemanager/Lien.yaml b/mmv1/products/resourcemanager/Lien.yaml index 8a1467851126..17b24e49ee45 100644 --- a/mmv1/products/resourcemanager/Lien.yaml +++ b/mmv1/products/resourcemanager/Lien.yaml @@ -45,7 +45,6 @@ nested_query: modify_by_patch: false custom_code: decoder: 'templates/terraform/decoders/avoid_meaningless_project_update.tmpl' - post_create: 'templates/terraform/post_create/lien.tmpl' pre_delete: 'templates/terraform/pre_delete/modify_delete_url.tmpl' post_import: 'templates/terraform/post_import/lien_import.tmpl' examples: diff --git a/mmv1/templates/terraform/post_create/lien.tmpl b/mmv1/templates/terraform/post_create/lien.tmpl deleted file mode 100644 index 466d699b1129..000000000000 --- a/mmv1/templates/terraform/post_create/lien.tmpl +++ /dev/null @@ -1,11 +0,0 @@ -// This resource is unusual - instead of returning an Operation from -// Create, it returns the created object itself. We don't parse -// any of the values there, preferring to centralize that logic in -// Read(). 
In this resource, Read is also unusual - it requires -// us to know the server-side generated name of the object we're -// trying to fetch, and the only way to know that is to capture -// it here. The following two lines do that. -d.SetId(flattenNestedResourceManagerLienName(res["name"], d, config).(string)) -if err := d.Set("name", flattenNestedResourceManagerLienName(res["name"], d, config)); err != nil { - return fmt.Errorf("Error setting name: %s", err) -} From afe5b7866d5b8848bbe3e88718f33bdfee952dd7 Mon Sep 17 00:00:00 2001 From: Sachin_R Date: Sat, 17 May 2025 02:50:47 +0530 Subject: [PATCH 160/884] Terraform support for Category and Term Resources (#13923) --- mmv1/products/dataplex/Glossary.yaml | 6 +- mmv1/products/dataplex/GlossaryCategory.yaml | 99 +++++++++++++++++++ mmv1/products/dataplex/GlossaryTerm.yaml | 99 +++++++++++++++++++ .../dataplex_glossary_category_basic.tf.tmpl | 11 +++ .../dataplex_glossary_category_full.tf.tmpl | 15 +++ .../dataplex_glossary_term_basic.tf.tmpl | 11 +++ .../dataplex_glossary_term_full.tf.tmpl | 15 +++ ...esource_dataplex_glossary_category_test.go | 83 ++++++++++++++++ .../resource_dataplex_glossary_term_test.go | 83 ++++++++++++++++ 9 files changed, 419 insertions(+), 3 deletions(-) create mode 100644 mmv1/products/dataplex/GlossaryCategory.yaml create mode 100644 mmv1/products/dataplex/GlossaryTerm.yaml create mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_category_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_category_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_term_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_term_full.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_category_test.go create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_term_test.go diff --git a/mmv1/products/dataplex/Glossary.yaml 
b/mmv1/products/dataplex/Glossary.yaml index a47c435ee8ca..ab539228dc4c 100644 --- a/mmv1/products/dataplex/Glossary.yaml +++ b/mmv1/products/dataplex/Glossary.yaml @@ -20,9 +20,9 @@ create_url: 'projects/{{project}}/locations/{{location}}/glossaries?glossary_id= update_verb: 'PATCH' update_mask: true timeouts: - insert_minutes: 5 - update_minutes: 5 - delete_minutes: 5 + insert_minutes: 15 + update_minutes: 15 + delete_minutes: 15 autogen_async: true import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}'] async: diff --git a/mmv1/products/dataplex/GlossaryCategory.yaml b/mmv1/products/dataplex/GlossaryCategory.yaml new file mode 100644 index 000000000000..9a8b13dbcbf1 --- /dev/null +++ b/mmv1/products/dataplex/GlossaryCategory.yaml @@ -0,0 +1,99 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: 'GlossaryCategory' +description: | + Represents a collection of categories and terms within a Glossary that are related to each other. 
+base_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories/{{category_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories/{{category_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories?category_id={{category_id}}' +update_verb: 'PATCH' +update_mask: true +timeouts: + insert_minutes: 15 + update_minutes: 15 + delete_minutes: 15 +autogen_async: true +import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories/{{category_id}}'] +examples: + - name: 'dataplex_glossary_category_basic' + primary_resource_id: 'category_test_id' + primary_resource_name: 'fmt.Sprintf("tf-test-category-basic%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' + vars: + category_name: 'category-basic' + - name: 'dataplex_glossary_category_full' + primary_resource_id: 'category_test_id_full' + primary_resource_name: 'fmt.Sprintf("tf-test-category-full%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' + vars: + category_name: 'category-full' +parameters: + - name: 'location' + type: String + description: | + The location where the glossary category should reside. + url_param_only: true + required: true + immutable: true + - name: 'glossaryId' + type: String + description: | + The glossary id for creation. + url_param_only: true + immutable: true + - name: 'categoryId' + type: String + description: | + The category id for creation. + url_param_only: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The resource name of the GlossaryCategory. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/categories/{categoryId} + output: true + - name: 'displayName' + type: String + description: | + User friendly display name of the GlossaryCategory. This is user-mutable. 
This will be same as the categoryId, if not specified. + required: false + - name: 'description' + type: String + description: | + The user-mutable description of the GlossaryCategory. + required: false + - name: 'labels' + type: KeyValueLabels + description: | + User-defined labels for the GlossaryCategory. + required: false + - name: 'uid' + type: String + description: | + System generated unique id for the GlossaryCategory. This ID will be different if the GlossaryCategory is deleted and re-created with the same name. + output: true + - name: 'createTime' + type: Timestamp + description: | + The time at which the GlossaryCategory was created. + output: true + - name: 'updateTime' + type: Timestamp + description: | + The time at which the GlossaryCategory was last updated. + output: true + - name: 'parent' + type: String + description: | + The immediate parent of the GlossaryCategory in the resource-hierarchy. It can either be a Glossary or a Category. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId} OR projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/categories/{categoryId} + required: true diff --git a/mmv1/products/dataplex/GlossaryTerm.yaml b/mmv1/products/dataplex/GlossaryTerm.yaml new file mode 100644 index 000000000000..e133549ed345 --- /dev/null +++ b/mmv1/products/dataplex/GlossaryTerm.yaml @@ -0,0 +1,99 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: 'GlossaryTerm' +description: | + Represents a collection of terms within a Glossary that are related to each other. +base_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms/{{term_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms/{{term_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms?term_id={{term_id}}' +update_verb: 'PATCH' +update_mask: true +timeouts: + insert_minutes: 15 + update_minutes: 15 + delete_minutes: 15 +autogen_async: true +import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms/{{term_id}}'] +examples: + - name: 'dataplex_glossary_term_basic' + primary_resource_id: 'term_test_id' + primary_resource_name: 'fmt.Sprintf("tf-test-term-basic%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' + vars: + term_name: 'term-basic' + - name: 'dataplex_glossary_term_full' + primary_resource_id: 'term_test_id_full' + primary_resource_name: 'fmt.Sprintf("tf-test-term-full%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' + vars: + term_name: 'term-full' +parameters: + - name: 'location' + type: String + description: | + The location where the glossary term should reside. + url_param_only: true + required: true + immutable: true + - name: 'glossaryId' + type: String + description: | + The glossary id for creation. + url_param_only: true + immutable: true + - name: 'termId' + type: String + description: | + The term id for creation. + url_param_only: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The resource name of the GlossaryTerm. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/terms/{termId} + output: true + - name: 'displayName' + type: String + description: | + User friendly display name of the GlossaryTerm. This is user-mutable. 
This will be same as the termId, if not specified. + required: false + - name: 'description' + type: String + description: | + The user-mutable description of the GlossaryTerm. + required: false + - name: 'labels' + type: KeyValueLabels + description: | + User-defined labels for the GlossaryTerm. + required: false + - name: 'uid' + type: String + description: | + System generated unique id for the GlossaryTerm. This ID will be different if the GlossaryTerm is deleted and re-created with the same name. + output: true + - name: 'createTime' + type: Timestamp + description: | + The time at which the GlossaryTerm was created. + output: true + - name: 'updateTime' + type: Timestamp + description: | + The time at which the GlossaryTerm was last updated. + output: true + - name: 'parent' + type: String + description: | + The immediate parent of the GlossaryTerm in the resource-hierarchy. It can either be a Glossary or a Term. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId} OR projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/terms/{termId} + required: true diff --git a/mmv1/templates/terraform/examples/dataplex_glossary_category_basic.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_glossary_category_basic.tf.tmpl new file mode 100644 index 000000000000..33603e802fb7 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_glossary_category_basic.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_dataplex_glossary" "{{$.PrimaryResourceId}}" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} + +resource "google_dataplex_glossary_category" "{{$.PrimaryResourceId}}" { + parent = "projects/${google_dataplex_glossary.category_test_id.project}/locations/us-central1/glossaries/${google_dataplex_glossary.category_test_id.glossary_id}" + glossary_id = google_dataplex_glossary.category_test_id.glossary_id + location = "us-central1" + category_id = "tf-test-category-basic%{random_suffix}" +} diff --git 
a/mmv1/templates/terraform/examples/dataplex_glossary_category_full.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_glossary_category_full.tf.tmpl new file mode 100644 index 000000000000..2ead91ef0a47 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_glossary_category_full.tf.tmpl @@ -0,0 +1,15 @@ +resource "google_dataplex_glossary" "{{$.PrimaryResourceId}}" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} + +resource "google_dataplex_glossary_category" "{{$.PrimaryResourceId}}" { + parent = "projects/${google_dataplex_glossary.category_test_id_full.project}/locations/us-central1/glossaries/${google_dataplex_glossary.category_test_id_full.glossary_id}" + glossary_id = google_dataplex_glossary.category_test_id_full.glossary_id + location = "us-central1" + category_id = "tf-test-category-full%{random_suffix}" + + labels = { "tag": "test-tf" } + display_name = "terraform category" + description = "category created by Terraform" +} diff --git a/mmv1/templates/terraform/examples/dataplex_glossary_term_basic.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_glossary_term_basic.tf.tmpl new file mode 100644 index 000000000000..645cc127fb3f --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_glossary_term_basic.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_dataplex_glossary" "{{$.PrimaryResourceId}}" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} + +resource "google_dataplex_glossary_term" "{{$.PrimaryResourceId}}" { + parent = "projects/${google_dataplex_glossary.term_test_id.project}/locations/us-central1/glossaries/${google_dataplex_glossary.term_test_id.glossary_id}" + glossary_id = google_dataplex_glossary.term_test_id.glossary_id + location = "us-central1" + term_id = "tf-test-term-basic%{random_suffix}" +} diff --git a/mmv1/templates/terraform/examples/dataplex_glossary_term_full.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_glossary_term_full.tf.tmpl new file 
mode 100644 index 000000000000..9fda7a3f31f2 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_glossary_term_full.tf.tmpl @@ -0,0 +1,15 @@ +resource "google_dataplex_glossary" "{{$.PrimaryResourceId}}" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} + +resource "google_dataplex_glossary_term" "{{$.PrimaryResourceId}}" { + parent = "projects/${google_dataplex_glossary.term_test_id_full.project}/locations/us-central1/glossaries/${google_dataplex_glossary.term_test_id_full.glossary_id}" + glossary_id = google_dataplex_glossary.term_test_id_full.glossary_id + location = "us-central1" + term_id = "tf-test-term-full%{random_suffix}" + + labels = { "tag": "test-tf" } + display_name = "terraform term" + description = "term created by Terraform" +} diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_category_test.go b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_category_test.go new file mode 100644 index 000000000000..863fedcb8570 --- /dev/null +++ b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_category_test.go @@ -0,0 +1,83 @@ +package dataplex_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataplexGlossaryCategory_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexGlossaryCategory_dataplexGlossaryCategoryFull(context), + }, + { + ResourceName: "google_dataplex_glossary_category.category_test_id_full", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"category_id", "glossary_id", "labels", "location", "terraform_labels"}, + }, + { + Config: testAccDataplexGlossaryCategory_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_dataplex_glossary_category.category_test_id_full", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_dataplex_glossary_category.category_test_id_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"category_id", "glossary_id", "labels", "location", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataplexGlossaryCategory_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataplex_glossary" "category_test_id_full" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} +resource "google_dataplex_glossary_category" "category_test_id_full" { + parent = "projects/${google_dataplex_glossary.category_test_id_full.project}/locations/us-central1/glossaries/${google_dataplex_glossary.category_test_id_full.glossary_id}" + glossary_id = google_dataplex_glossary.category_test_id_full.glossary_id + location = "us-central1" + category_id = "tf-test-category-full%{random_suffix}" + display_name = "terraform category updated" + description = "category updated by Terraform" +} +`, context) +} + +func testAccDataplexGlossaryCategory_dataplexGlossaryCategoryFull(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataplex_glossary" "category_test_id_full" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} +resource "google_dataplex_glossary_category" "category_test_id_full" { + parent = "projects/${google_dataplex_glossary.category_test_id_full.project}/locations/us-central1/glossaries/${google_dataplex_glossary.category_test_id_full.glossary_id}" + glossary_id = 
google_dataplex_glossary.category_test_id_full.glossary_id + location = "us-central1" + category_id = "tf-test-category-full%{random_suffix}" + labels = { "tag": "test-tf" } + display_name = "terraform category" + description = "category created by Terraform" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_term_test.go b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_term_test.go new file mode 100644 index 000000000000..557297d051e6 --- /dev/null +++ b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_term_test.go @@ -0,0 +1,83 @@ +package dataplex_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataplexGlossaryTerm_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexGlossaryTerm_dataplexGlossaryTermFull(context), + }, + { + ResourceName: "google_dataplex_glossary_term.term_test_id_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"glossary_id", "labels", "location", "term_id", "terraform_labels"}, + }, + { + Config: testAccDataplexGlossaryTerm_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_dataplex_glossary_term.term_test_id_full", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_dataplex_glossary_term.term_test_id_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"glossary_id", 
"labels", "location", "term_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataplexGlossaryTerm_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataplex_glossary" "term_test_id_full" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} +resource "google_dataplex_glossary_term" "term_test_id_full" { + parent = "projects/${google_dataplex_glossary.term_test_id_full.project}/locations/us-central1/glossaries/${google_dataplex_glossary.term_test_id_full.glossary_id}" + glossary_id = google_dataplex_glossary.term_test_id_full.glossary_id + location = "us-central1" + term_id = "tf-test-term-full%{random_suffix}" + display_name = "terraform term updated" + description = "term created by Terraform updated" +} +`, context) +} + +func testAccDataplexGlossaryTerm_dataplexGlossaryTermFull(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataplex_glossary" "term_test_id_full" { + glossary_id = "tf-test-glossary%{random_suffix}" + location = "us-central1" +} +resource "google_dataplex_glossary_term" "term_test_id_full" { + parent = "projects/${google_dataplex_glossary.term_test_id_full.project}/locations/us-central1/glossaries/${google_dataplex_glossary.term_test_id_full.glossary_id}" + glossary_id = google_dataplex_glossary.term_test_id_full.glossary_id + location = "us-central1" + term_id = "tf-test-term-full%{random_suffix}" + labels = { "tag": "test-tf" } + display_name = "terraform term" + description = "term created by Terraform" +} +`, context) +} From c4858a7bd9329ef87c714d2cb2db9aa2232eee28 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 16 May 2025 14:24:02 -0700 Subject: [PATCH 161/884] tgc-revival: modify compute instance tfplan2cai converter (#13992) --- .../services/compute/compute_instance.go | 252 ++++++++++++++++-- 1 file changed, 225 insertions(+), 27 deletions(-) diff --git 
a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go index 3ec7d7d3d4cf..cdc5a13051ec 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go @@ -5,9 +5,8 @@ import ( "fmt" "strings" - "google.golang.org/api/googleapi" - compute "google.golang.org/api/compute/v0.beta" + "google.golang.org/api/googleapi" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" @@ -1501,7 +1500,6 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, } // Build up the list of disks - disks := []*compute.AttachedDisk{} if _, hasBootDisk := d.GetOk("boot_disk"); hasBootDisk { bootDisk, err := expandBootDisk(d, config, project) @@ -1531,23 +1529,9 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, disks = append(disks, disk) } - sch := d.Get("scheduling").([]interface{}) - var scheduling *compute.Scheduling - if len(sch) == 0 { - // TF doesn't do anything about defaults inside of nested objects, so if - // scheduling hasn't been set, then send it with its default values. 
- scheduling = &compute.Scheduling{ - AutomaticRestart: googleapi.Bool(true), - } - } else { - prefix := "scheduling.0" - scheduling = &compute.Scheduling{ - AutomaticRestart: googleapi.Bool(d.Get(prefix + ".automatic_restart").(bool)), - Preemptible: d.Get(prefix + ".preemptible").(bool), - OnHostMaintenance: d.Get(prefix + ".on_host_maintenance").(string), - ProvisioningModel: d.Get(prefix + ".provisioning_model").(string), - ForceSendFields: []string{"AutomaticRestart", "Preemptible"}, - } + scheduling, err := expandSchedulingTgc(d.Get("scheduling")) + if err != nil { + return nil, fmt.Errorf("error creating scheduling: %s", err) } params, err := expandParams(d) @@ -1560,12 +1544,12 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, return nil, fmt.Errorf("Error creating metadata: %s", err) } - PartnerMetadata, err := resourceInstancePartnerMetadata(d) + partnerMetadata, err := resourceInstancePartnerMetadata(d) if err != nil { return nil, fmt.Errorf("Error creating partner metadata: %s", err) } - networkInterfaces, err := expandNetworkInterfaces(d, config) + networkInterfaces, err := expandNetworkInterfacesTgc(d, config) if err != nil { return nil, fmt.Errorf("Error creating network interfaces: %s", err) } @@ -1592,7 +1576,7 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, Disks: disks, MachineType: machineTypeUrl, Metadata: metadata, - PartnerMetadata: PartnerMetadata, + PartnerMetadata: partnerMetadata, Name: d.Get("name").(string), Zone: d.Get("zone").(string), NetworkInterfaces: networkInterfaces, @@ -1606,7 +1590,6 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, Scheduling: scheduling, DeletionProtection: d.Get("deletion_protection").(bool), Hostname: d.Get("hostname").(string), - ForceSendFields: []string{"CanIpForward", "DeletionProtection"}, ConfidentialInstanceConfig: expandConfidentialInstanceConfig(d), AdvancedMachineFeatures: 
expandAdvancedMachineFeatures(d), ShieldedInstanceConfig: expandShieldedVmConfigs(d), @@ -1614,6 +1597,7 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, ResourcePolicies: tpgresource.ConvertStringArr(d.Get("resource_policies").([]interface{})), ReservationAffinity: reservationAffinity, KeyRevocationActionType: d.Get("key_revocation_action_type").(string), + InstanceEncryptionKey: expandComputeInstanceEncryptionKey(d), }, nil } @@ -1637,7 +1621,7 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.Terrafo } disk := &compute.AttachedDisk{ - Source: sourceLink, + Source: fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", sourceLink), } if v, ok := diskConfig["mode"]; ok { @@ -1657,6 +1641,15 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.Terrafo } } + keyValue, keyOk = diskConfig["disk_encryption_key_rsa"] + if keyOk { + if keyValue != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + RsaEncryptedKey: keyValue.(string), + } + } + } + kmsValue, kmsOk := diskConfig["kms_key_self_link"] if kmsOk { if keyOk && keyValue != "" && kmsValue != "" { @@ -1668,6 +1661,18 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.Terrafo } } } + + kmsServiceAccount, kmsServiceAccountOk := diskConfig["disk_encryption_service_account"] + if kmsServiceAccountOk { + if kmsServiceAccount != "" { + if disk.DiskEncryptionKey == nil { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + KmsKeyServiceAccount: kmsServiceAccount.(string), + } + } + disk.DiskEncryptionKey.KmsKeyServiceAccount = kmsServiceAccount.(string) + } + } return disk, nil } @@ -1719,6 +1724,14 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C disk.DeviceName = v.(string) } + if v, ok := d.GetOk("boot_disk.0.interface"); ok { + disk.Interface = v.(string) + } + + if v, ok := d.GetOk("boot_disk.0.guest_os_features"); ok { + disk.GuestOsFeatures = 
expandComputeInstanceGuestOsFeatures(v) + } + if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_raw"); ok { if v != "" { disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ @@ -1727,6 +1740,14 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C } } + if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_rsa"); ok { + if v != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + RsaEncryptedKey: v.(string), + } + } + } + if v, ok := d.GetOk("boot_disk.0.kms_key_self_link"); ok { if v != "" { disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ @@ -1735,12 +1756,28 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C } } + if v, ok := d.GetOk("boot_disk.0.disk_encryption_service_account"); ok { + if v != "" { + disk.DiskEncryptionKey.KmsKeyServiceAccount = v.(string) + } + } + + // disk_encryption_key_sha256 is computed, so it is not converted. + if v, ok := d.GetOk("boot_disk.0.source"); ok { - source, err := tpgresource.ParseDiskFieldValue(v.(string), d, config) + var err error + var source interface { + RelativeLink() string + } + if strings.Contains(v.(string), "regions/") { + source, err = tpgresource.ParseRegionDiskFieldValue(v.(string), d, config) + } else { + source, err = tpgresource.ParseDiskFieldValue(v.(string), d, config) + } if err != nil { return nil, err } - disk.Source = source.RelativeLink() + disk.Source = fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", source.RelativeLink()) } if _, ok := d.GetOk("boot_disk.0.initialize_params"); ok { @@ -1749,6 +1786,10 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C } } + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.architecture"); ok { + disk.Architecture = v.(string) + } + if v, ok := d.GetOk("boot_disk.0.mode"); ok { disk.Mode = v.(string) } @@ -1769,6 +1810,8 @@ func expandScratchDisks(d tpgresource.TerraformResourceData, config *transport_t AutoDelete: true, Type: 
"SCRATCH", Interface: d.Get(fmt.Sprintf("scratch_disk.%d.interface", i)).(string), + DeviceName: d.Get(fmt.Sprintf("scratch_disk.%d.device_name", i)).(string), + DiskSizeGb: int64(d.Get(fmt.Sprintf("scratch_disk.%d.size", i)).(int)), InitializeParams: &compute.AttachedDiskInitializeParams{ DiskType: diskType.RelativeLink(), }, @@ -1864,3 +1907,158 @@ func GetComputeDiskData(d tpgresource.TerraformResourceData, config *transport_t return diskDetails, nil } + +func expandNetworkInterfacesTgc(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]*compute.NetworkInterface, error) { + configs := d.Get("network_interface").([]interface{}) + ifaces := make([]*compute.NetworkInterface, len(configs)) + for i, raw := range configs { + data := raw.(map[string]interface{}) + + var networkAttachment = "" + network := data["network"].(string) + subnetwork := data["subnetwork"].(string) + if networkAttachmentObj, ok := data["network_attachment"]; ok { + networkAttachment = networkAttachmentObj.(string) + } + // Checks if networkAttachment is not specified in resource, network or subnetwork have to be specified. 
+ if networkAttachment == "" && network == "" && subnetwork == "" { + return nil, fmt.Errorf("exactly one of network, subnetwork, or network_attachment must be provided") + } + + ifaces[i] = &compute.NetworkInterface{ + NetworkIP: data["network_ip"].(string), + Network: network, + NetworkAttachment: networkAttachment, + Subnetwork: subnetwork, + AccessConfigs: expandAccessConfigs(data["access_config"].([]interface{})), + AliasIpRanges: expandAliasIpRanges(data["alias_ip_range"].([]interface{})), + NicType: data["nic_type"].(string), + StackType: data["stack_type"].(string), + QueueCount: int64(data["queue_count"].(int)), + Ipv6AccessConfigs: expandIpv6AccessConfigs(data["ipv6_access_config"].([]interface{})), + Ipv6Address: data["ipv6_address"].(string), + InternalIpv6PrefixLength: int64(data["internal_ipv6_prefix_length"].(int)), + } + } + return ifaces, nil +} + +func expandSchedulingTgc(v interface{}) (*compute.Scheduling, error) { + if v == nil { + // We can't set default values for lists. 
+ return &compute.Scheduling{ + AutomaticRestart: googleapi.Bool(true), + }, nil + } + + ls := v.([]interface{}) + if len(ls) == 0 { + // We can't set default values for lists + return &compute.Scheduling{ + AutomaticRestart: googleapi.Bool(true), + }, nil + } + + if len(ls) > 1 || ls[0] == nil { + return nil, fmt.Errorf("expected exactly one scheduling block") + } + + original := ls[0].(map[string]interface{}) + scheduling := &compute.Scheduling{ + ForceSendFields: make([]string, 0, 4), + } + + if v, ok := original["automatic_restart"]; ok { + scheduling.AutomaticRestart = googleapi.Bool(v.(bool)) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "AutomaticRestart") + } + + if v, ok := original["preemptible"]; ok { + scheduling.Preemptible = v.(bool) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "Preemptible") + } + + if v, ok := original["on_host_maintenance"]; ok { + scheduling.OnHostMaintenance = v.(string) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnHostMaintenance") + } + + if v, ok := original["node_affinities"]; ok && v != nil { + naSet := v.(*schema.Set).List() + scheduling.NodeAffinities = make([]*compute.SchedulingNodeAffinity, 0) + for _, nodeAffRaw := range naSet { + if nodeAffRaw == nil { + continue + } + nodeAff := nodeAffRaw.(map[string]interface{}) + transformed := &compute.SchedulingNodeAffinity{ + Key: nodeAff["key"].(string), + Operator: nodeAff["operator"].(string), + Values: tpgresource.ConvertStringArr(nodeAff["values"].(*schema.Set).List()), + } + scheduling.NodeAffinities = append(scheduling.NodeAffinities, transformed) + } + } + + if v, ok := original["min_node_cpus"]; ok { + scheduling.MinNodeCpus = int64(v.(int)) + } + if v, ok := original["provisioning_model"]; ok { + scheduling.ProvisioningModel = v.(string) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "ProvisioningModel") + } + if v, ok := original["instance_termination_action"]; ok { + 
scheduling.InstanceTerminationAction = v.(string) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "InstanceTerminationAction") + } + if v, ok := original["availability_domain"]; ok && v != nil { + scheduling.AvailabilityDomain = int64(v.(int)) + } + if v, ok := original["max_run_duration"]; ok { + transformedMaxRunDuration, err := expandComputeMaxRunDuration(v) + if err != nil { + return nil, err + } + scheduling.MaxRunDuration = transformedMaxRunDuration + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "MaxRunDuration") + } + + if v, ok := original["on_instance_stop_action"]; ok { + transformedOnInstanceStopAction, err := expandComputeOnInstanceStopAction(v) + if err != nil { + return nil, err + } + scheduling.OnInstanceStopAction = transformedOnInstanceStopAction + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnInstanceStopAction") + } + if v, ok := original["host_error_timeout_seconds"]; ok { + if v != nil && v != 0 { + scheduling.HostErrorTimeoutSeconds = int64(v.(int)) + } + } + + if v, ok := original["maintenance_interval"]; ok { + scheduling.MaintenanceInterval = v.(string) + } + + if v, ok := original["graceful_shutdown"]; ok { + transformedGracefulShutdown, err := expandGracefulShutdown(v) + if err != nil { + return nil, err + } + scheduling.GracefulShutdown = transformedGracefulShutdown + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "GracefulShutdown") + } + if v, ok := original["local_ssd_recovery_timeout"]; ok { + transformedLocalSsdRecoveryTimeout, err := expandComputeLocalSsdRecoveryTimeout(v) + if err != nil { + return nil, err + } + scheduling.LocalSsdRecoveryTimeout = transformedLocalSsdRecoveryTimeout + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "LocalSsdRecoveryTimeout") + } + if v, ok := original["termination_time"]; ok { + scheduling.TerminationTime = v.(string) + } + return scheduling, nil +} From a2cf1b9f30aa119301e7dc77867cf1e4e79e7b85 Mon Sep 17 
00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 16 May 2025 14:40:14 -0700 Subject: [PATCH 162/884] Removed firestore index post_create (#13981) --- mmv1/products/firestore/Index.yaml | 1 - .../terraform/post_create/index.go.tmpl | 21 ------------------- 2 files changed, 22 deletions(-) delete mode 100644 mmv1/templates/terraform/post_create/index.go.tmpl diff --git a/mmv1/products/firestore/Index.yaml b/mmv1/products/firestore/Index.yaml index 026cb9b3594d..438ebf4ec651 100644 --- a/mmv1/products/firestore/Index.yaml +++ b/mmv1/products/firestore/Index.yaml @@ -50,7 +50,6 @@ async: custom_code: constants: 'templates/terraform/constants/firestore_index.go.tmpl' encoder: 'templates/terraform/encoders/index.go.tmpl' - post_create: 'templates/terraform/post_create/index.go.tmpl' custom_import: 'templates/terraform/custom_import/index_self_link_as_name_set_project.go.tmpl' error_retry_predicates: diff --git a/mmv1/templates/terraform/post_create/index.go.tmpl b/mmv1/templates/terraform/post_create/index.go.tmpl deleted file mode 100644 index be0711cb05a8..000000000000 --- a/mmv1/templates/terraform/post_create/index.go.tmpl +++ /dev/null @@ -1,21 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} -// The operation for this resource contains the generated name that we need -// in order to perform a READ. 
-metadata := res["metadata"].(map[string]interface{}) -name := metadata["index"].(string) -log.Printf("[DEBUG] Setting Index name, id to %s", name) -if err := d.Set("name", name); err != nil { - return fmt.Errorf("Error setting name: %s", err) -} -d.SetId(name) From 69c903f4bc6acc08fbf8f5b6dbbc388028e11c41 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Fri, 16 May 2025 14:51:21 -0700 Subject: [PATCH 163/884] Suggest alternative measure for pr size (#13990) --- .github/workflows/basic-pr-checks.yml | 59 ++++++++++++++++++++------- 1 file changed, 45 insertions(+), 14 deletions(-) diff --git a/.github/workflows/basic-pr-checks.yml b/.github/workflows/basic-pr-checks.yml index 23d676229e7b..ac43d8bb581a 100644 --- a/.github/workflows/basic-pr-checks.yml +++ b/.github/workflows/basic-pr-checks.yml @@ -1,4 +1,4 @@ -name: "Basic PR checks" +name: "pull-request" permissions: read-all on: @@ -23,16 +23,47 @@ jobs: disallow-large-prs: runs-on: ubuntu-22.04 steps: - - name: Check PR size - run: | - response=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/pulls/${{github.event.pull_request.number}}") - additions=$(echo "$response" | jq -r '.additions') - deletions=$(echo "$response" | jq -r '.deletions') - total=$(( $additions + $deletions )) - echo "$additions lines added; $deletions lines deleted" - if (( $total > 500 )); then - echo "This PR changed $total lines of code, which is above the recommended limit of 500. Your reviewer may ask you to break it into multiple PRs." - exit 1 - else - echo "This PR changed $total lines of code, which meets the recommended limit of 500." 
- fi + - name: Check PR size + shell: bash + run: | + # Get PR details + pr_data=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/pulls/${{github.event.pull_request.number}}") + + # Get list of files in the PR + pr_files=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/pulls/${{github.event.pull_request.number}}/files") + + # Calculate additions and deletions excluding test files + total_additions=0 + total_deletions=0 + + # Use jq to filter out test and documentation files and calculate totals + filtered_stats=$(echo "$pr_files" | jq '[ + .[] | + select( + (.filename | endswith("_test.go") | not) and + (.filename | endswith("test.go.tmpl") | not) and + (.filename | endswith(".md") | not) and + (.filename | endswith(".md.tmpl") | not) and + (.filename | endswith(".html.markdown") | not) + ) | + {additions: .additions, deletions: .deletions} + ] | + reduce .[] as $item ( + {"additions": 0, "deletions": 0}; + .additions += $item.additions | + .deletions += $item.deletions + )') + + total_additions=$(echo "$filtered_stats" | jq -r '.additions') + total_deletions=$(echo "$filtered_stats" | jq -r '.deletions') + total=$((total_additions + total_deletions)) + + echo "Excluding test and documentation files:" + echo "$total_additions lines added; $total_deletions lines deleted" + + if (( total > 500 )); then + echo "This PR changed $total lines of code (excluding test and documentation files), which is above the recommended limit of 500. Your reviewer may ask you to break it into multiple PRs." + exit 1 + else + echo "This PR changed $total lines of code (excluding test and documentation files), which meets the recommended limit of 500." 
+ fi \ No newline at end of file From a7f0e070782d945f3b36440415adbe60d54dd352 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Fri, 16 May 2025 14:52:33 -0700 Subject: [PATCH 164/884] go-github set and get function conversion (#13969) --- .ci/magician/github/README.md | 24 ++ .ci/magician/github/get.go | 159 ++++++---- .ci/magician/github/integration_test.go | 310 ++++++++++++++++++++ .ci/magician/github/interface_conversion.go | 70 +++++ .ci/magician/github/set.go | 91 +++--- .ci/magician/utility/utils.go | 33 ++- 6 files changed, 589 insertions(+), 98 deletions(-) create mode 100644 .ci/magician/github/README.md create mode 100644 .ci/magician/github/integration_test.go create mode 100644 .ci/magician/github/interface_conversion.go diff --git a/.ci/magician/github/README.md b/.ci/magician/github/README.md new file mode 100644 index 000000000000..987b87408af2 --- /dev/null +++ b/.ci/magician/github/README.md @@ -0,0 +1,24 @@ +# GitHub Integration Tests + +## Overview +This directory contains an interface for the GitHub client that make real API calls to GitHub's API. The tests in `integration_test.go` are isolated with build tags to prevent accidental execution. 
+ +## Build Tags +This file uses Go build tags (`//go:build integration`) which: +- Exclude these tests from normal test execution (`go test ./...`) +- Require explicit opt-in (`go test -tags=integration`) +- Prevent accidental execution of tests that make real API calls and may have side effects + +## Usage + +### Requirements +- GitHub API token with appropriate permissions +- Token set as environment variable: `GITHUB_API_TOKEN` + +### Running Tests +```bash +# Run all integration tests +GITHUB_API_TOKEN=your_token_here go test -v -tags=integration ./github + +# Run specific test +GITHUB_API_TOKEN=your_token_here go test -v -tags=integration -run TestIntegrationGetPullRequest ./github \ No newline at end of file diff --git a/.ci/magician/github/get.go b/.ci/magician/github/get.go index e39dfe25ddc3..667c8b498f0d 100644 --- a/.ci/magician/github/get.go +++ b/.ci/magician/github/get.go @@ -18,9 +18,18 @@ package github import ( "fmt" utils "magician/utility" + "strconv" "time" + + gh "github.com/google/go-github/v68/github" +) + +const ( + defaultOwner = "GoogleCloudPlatform" + defaultRepo = "magic-modules" ) +// Types for external interface compatibility type User struct { Login string `json:"login"` } @@ -47,26 +56,44 @@ type PullRequestComment struct { CreatedAt time.Time `json:"created_at"` } -func (gh *Client) GetPullRequest(prNumber string) (PullRequest, error) { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s", prNumber) - - var pullRequest PullRequest +// GetPullRequest fetches a single pull request +func (c *Client) GetPullRequest(prNumber string) (PullRequest, error) { + num, err := strconv.Atoi(prNumber) + if err != nil { + return PullRequest{}, err + } - err := utils.RequestCallWithRetry(url, "GET", gh.token, &pullRequest, nil) + pr, _, err := c.gh.PullRequests.Get(c.ctx, defaultOwner, defaultRepo, num) + if err != nil { + return PullRequest{}, err + } - return pullRequest, err + return 
convertGHPullRequest(pr), nil } -func (gh *Client) GetPullRequests(state, base, sort, direction string) ([]PullRequest, error) { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls?state=%s&base=%s&sort=%s&direction=%s", state, base, sort, direction) +// GetPullRequests fetches multiple pull requests +func (c *Client) GetPullRequests(state, base, sort, direction string) ([]PullRequest, error) { + opts := &gh.PullRequestListOptions{ + State: state, + Base: base, + Sort: sort, + Direction: direction, + } - var pullRequests []PullRequest + prs, _, err := c.gh.PullRequests.List(c.ctx, defaultOwner, defaultRepo, opts) + if err != nil { + return nil, err + } - err := utils.RequestCallWithRetry(url, "GET", gh.token, &pullRequests, nil) + result := make([]PullRequest, len(prs)) + for i, pr := range prs { + result[i] = convertGHPullRequest(pr) + } - return pullRequests, err + return result, nil } +// GetPullRequestRequestedReviewers gets requested reviewers for a PR func (gh *Client) GetPullRequestRequestedReviewers(prNumber string) ([]User, error) { url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) @@ -82,6 +109,7 @@ func (gh *Client) GetPullRequestRequestedReviewers(prNumber string) ([]User, err return requestedReviewers.Users, nil } +// GetPullRequestPreviousReviewers gets previous reviewers for a PR func (gh *Client) GetPullRequestPreviousReviewers(prNumber string) ([]User, error) { url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/reviews", prNumber) @@ -107,66 +135,97 @@ func (gh *Client) GetPullRequestPreviousReviewers(prNumber string) ([]User, erro return result, nil } -func (gh *Client) GetCommitMessage(owner, repo, sha string) (string, error) { - url := fmt.Sprintf("https://api.github.com/repos/%s/%s/commits/%s", owner, repo, sha) - - var commit struct { - Commit struct { - Message string `json:"message"` - } 
`json:"commit"` - } - - err := utils.RequestCall(url, "GET", gh.token, &commit, nil) +// GetCommitMessage gets a commit message +func (c *Client) GetCommitMessage(owner, repo, sha string) (string, error) { + commit, _, err := c.gh.Repositories.GetCommit(c.ctx, owner, repo, sha, nil) if err != nil { return "", err } - return commit.Commit.Message, nil -} + if commit.Commit != nil && commit.Commit.Message != nil { + return *commit.Commit.Message, nil + } -func (gh *Client) GetPullRequestComments(prNumber string) ([]PullRequestComment, error) { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/comments", prNumber) + return "", fmt.Errorf("no commit message found") +} - var comments []PullRequestComment - err := utils.RequestCallWithRetry(url, "GET", gh.token, &comments, nil) +// GetPullRequestComments gets all comments on a PR, handling pagination +func (c *Client) GetPullRequestComments(prNumber string) ([]PullRequestComment, error) { + num, err := strconv.Atoi(prNumber) if err != nil { return nil, err } - return comments, nil -} -func (gh *Client) GetTeamMembers(organization, team string) ([]User, error) { - url := fmt.Sprintf("https://api.github.com/orgs/%s/teams/%s/members", organization, team) + var allComments []*gh.IssueComment + opts := &gh.IssueListCommentsOptions{ + ListOptions: gh.ListOptions{ + PerPage: 100, + }, + } - var members []User - err := utils.RequestCallWithRetry(url, "GET", gh.token, &members, nil) - if err != nil { - return nil, err + for { + comments, resp, err := c.gh.Issues.ListComments(c.ctx, defaultOwner, defaultRepo, num, opts) + if err != nil { + return nil, err + } + + allComments = append(allComments, comments...) 
+ + if resp.NextPage == 0 { + break // No more pages + } + + // Set up for the next page + opts.Page = resp.NextPage } - return members, nil + + return convertGHComments(allComments), nil } -func (gh *Client) IsOrgMember(author, org string) bool { - url := fmt.Sprintf("https://api.github.com/orgs/%s/members/%s", org, author) - err := utils.RequestCallWithRetry(url, "GET", gh.token, nil, nil) - return err == nil +// GetTeamMembers gets all members of a team, handling pagination +func (c *Client) GetTeamMembers(organization, team string) ([]User, error) { + var allMembers []*gh.User + opts := &gh.TeamListTeamMembersOptions{ + ListOptions: gh.ListOptions{ + PerPage: 100, + }, + } + + for { + members, resp, err := c.gh.Teams.ListTeamMembersBySlug(c.ctx, organization, team, opts) + if err != nil { + return nil, err + } + + allMembers = append(allMembers, members...) + + if resp.NextPage == 0 { + break // No more pages + } + + // Set up for the next page + opts.Page = resp.NextPage + } + + return convertGHUsers(allMembers), nil } -func (gh *Client) IsTeamMember(organization, teamSlug, username string) bool { - type TeamMembership struct { - URL string `json:"url"` - Role string `json:"role"` - State string `json:"state"` +// IsOrgMember checks if a user is a member of an organization +func (c *Client) IsOrgMember(username, org string) bool { + isMember, _, err := c.gh.Organizations.IsMember(c.ctx, org, username) + if err != nil { + return false } - url := fmt.Sprintf("https://api.github.com/orgs/%s/teams/%s/memberships/%s", organization, teamSlug, username) - var membership TeamMembership - err := utils.RequestCallWithRetry(url, "GET", gh.token, &membership, nil) + return isMember +} +// IsTeamMember checks if a user is a member of a team +func (c *Client) IsTeamMember(organization, teamSlug, username string) bool { + membership, _, err := c.gh.Teams.GetTeamMembershipBySlug(c.ctx, organization, teamSlug, username) if err != nil { return false } - // User is considered a 
member if state is "active" - return membership.State == "active" + return membership != nil && membership.State != nil && *membership.State == "active" } diff --git a/.ci/magician/github/integration_test.go b/.ci/magician/github/integration_test.go new file mode 100644 index 000000000000..bf5a649f27a1 --- /dev/null +++ b/.ci/magician/github/integration_test.go @@ -0,0 +1,310 @@ +//go:build integration +// +build integration + +/* +* Copyright 2025 Google LLC. All Rights Reserved. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +* +* Integration tests - makes real GitHub API calls. +* NOT run during normal test execution (go test). 
+* Requires: +* - GITHUB_API_TOKEN environment variable +* - Run with: go test -tags=integration + */ + +package github + +import ( + "fmt" + "os" + "testing" + "time" +) + +// https://github.com/GoogleCloudPlatform/magic-modules +const ( + testNonMember = "bananaman5000" + testRepo = "magic-modules" + testOwner = "GoogleCloudPlatform" + testPRNumber = "13969" // replace this with an actual PR Number + testPRCommitSha = "4a8409686810551655eea2533e939cc5344e83e2" // replace this with an actual SHA + testMainCommitSha = "fd910977cf24595d2c04e3f0a369a82c79fdb8f8" // replace this with an actual SHA + testLabel = "terraform-3.0" + testOrg = "GoogleCloudPlatform" + testTeam = "terraform" + workflowFileName = "test-tpg.yml" +) + +func skipIfNoToken(t *testing.T) *Client { + token := os.Getenv("GITHUB_API_TOKEN") + if token == "" { + t.Skip("Skipping integration test: GITHUB_API_TOKEN environment variable not set") + } + + return NewClient(token) +} + +func TestIntegrationGetPullRequest(t *testing.T) { + client := skipIfNoToken(t) + + pr, err := client.GetPullRequest(testPRNumber) + if err != nil { + t.Fatalf("GetPullRequest failed: %v", err) + } + + t.Logf("PR Title: %s", pr.Title) + if pr.Number == 0 { + t.Error("Expected PR number to be non-zero") + } + if pr.Title == "" { + t.Error("Expected PR title to be non-empty") + } +} + +func TestIntegrationGetPullRequests(t *testing.T) { + client := skipIfNoToken(t) + + prs, err := client.GetPullRequests("open", "main", "created", "desc") + if err != nil { + t.Fatalf("GetPullRequests failed: %v", err) + } + + t.Logf("Found %d PRs", len(prs)) +} + +func TestIntegrationGetCommitMessage(t *testing.T) { + client := skipIfNoToken(t) + + // You'll need a valid commit SHA for this test + if testMainCommitSha == "HEAD" { + t.Skip("Skipping GetCommitMessage test: need a valid commit SHA") + } + + message, err := client.GetCommitMessage(testOwner, testRepo, testMainCommitSha) + if err != nil { + t.Fatalf("GetCommitMessage failed: %v", 
err) + } + + t.Logf("Commit message: %s", message) + if message == "" { + t.Error("Expected commit message to be non-empty") + } +} + +func TestIntegrationGetPullRequestComments(t *testing.T) { + client := skipIfNoToken(t) + + comments, err := client.GetPullRequestComments(testPRNumber) + if err != nil { + t.Fatalf("GetPullRequestComments failed: %v", err) + } + + t.Logf("Found %d comments", len(comments)) + for i, comment := range comments { + t.Logf("Comment %d: %s by %s", i+1, comment.Body[:min(len(comment.Body), 50)], comment.User.Login) + } +} + +func TestIntegrationGetTeamMembers(t *testing.T) { + client := skipIfNoToken(t) + + members, err := client.GetTeamMembers(testOrg, testTeam) + if err != nil { + t.Logf("GetTeamMembers failed: %v", err) + t.Skip("Skipping team member test - might not have access to the specified team") + } + + t.Logf("Found %d team members", len(members)) + for i, member := range members { + t.Logf("Member %d: %s", i+1, member.Login) + } +} + +func TestIntegrationIsOrgMember(t *testing.T) { + client := skipIfNoToken(t) + + isMember := client.IsOrgMember(testOwner, testOrg) + t.Logf("Is %s a member of %s: %v", testOwner, testOrg, isMember) + + if !isMember { + t.Errorf("Note: Expected %s to be a member of %s, but they're not", testOwner, testOrg) + } + + isMember = client.IsOrgMember(testNonMember, testOrg) + if isMember { + t.Errorf("Expected %s to not be a member of %s, but they are", testNonMember, testOrg) + } else { + t.Logf("Is %s not a member of %s: %v", testNonMember, testOrg, isMember) + } +} + +func TestIntegrationIsTeamMember(t *testing.T) { + client := skipIfNoToken(t) + + isMember := client.IsTeamMember(testOrg, testTeam, testOwner) + if !isMember { + t.Errorf("Expected %s to be a member of team %s in org %s, but they're not", testOwner, testTeam, testOrg) + } else { + t.Logf("Is %s a member of team %s in org %s: %v", testOwner, testTeam, testOrg, isMember) + } + + isMember = client.IsTeamMember(testOrg, testTeam, 
testNonMember) + if isMember { + t.Errorf("Expected %s to not be a member of team %s in org %s, but they are", testNonMember, testTeam, testOrg) + } else { + t.Logf("Is %s not a member of team %s in org %s: %v", testNonMember, testTeam, testOrg, isMember) + } +} + +func TestIntegrationPostAndUpdateComment(t *testing.T) { + client := skipIfNoToken(t) + + // First post a comment + comment := fmt.Sprintf("Test comment from integration test at %s", time.Now().Format(time.RFC3339)) + err := client.PostComment(testPRNumber, comment) + if err != nil { + t.Fatalf("PostComment failed: %v", err) + } + + // Get the comment ID + comments, err := client.GetPullRequestComments(testPRNumber) + if err != nil { + t.Fatalf("GetPullRequestComments failed: %v", err) + } + + var commentID int + for _, c := range comments { + if c.Body == comment { + commentID = c.ID + break + } + } + + if commentID == 0 { + t.Fatal("Could not find the comment we just posted") + } + + // Update the comment + updatedComment := fmt.Sprintf("Updated test comment from integration test at %s", time.Now().Format(time.RFC3339)) + err = client.UpdateComment(testPRNumber, updatedComment, commentID) + if err != nil { + t.Fatalf("UpdateComment failed: %v", err) + } + + t.Logf("Successfully posted and updated comment with ID: %d", commentID) +} + +func TestIntegrationAddAndRemoveLabels(t *testing.T) { + client := skipIfNoToken(t) + + err := client.AddLabels(testPRNumber, []string{testLabel}) + if err != nil { + t.Fatalf("AddLabels failed: %v", err) + } + + // Then remove the label + err = client.RemoveLabel(testPRNumber, testLabel) + if err != nil { + t.Fatalf("RemoveLabel failed: %v", err) + } + + t.Logf("Successfully added and removed label: %s", testLabel) +} + +func TestIntegrationPostBuildStatus(t *testing.T) { + client := skipIfNoToken(t) + + // You'll need a valid commit SHA for this test + if testPRCommitSha == "HEAD" { + t.Skip("Skipping PostBuildStatus test: need a valid commit SHA") + } + + err := 
client.PostBuildStatus( + testPRNumber, + "integration-test", + "success", + "https://example.com/integration-test", + testPRCommitSha, + ) + if err != nil { + t.Errorf("PostBuildStatus failed: %v", err) + } + + err = client.PostBuildStatus( + testPRNumber, + "integration-test-failed", + "failure", + "https://example.com/integration-test-fail", + testPRCommitSha, + ) + if err != nil { + t.Errorf("PostBuildStatus failed: %v", err) + } + + t.Logf("Successfully posted build status") +} + +func TestIntegrationCreateWorkflowDispatchEvent(t *testing.T) { + client := skipIfNoToken(t) + + // Skip this test by default as it can have side effects + if os.Getenv("RUN_WORKFLOW_DISPATCH_TEST") != "true" { + t.Skip("Skipping workflow dispatch test: set RUN_WORKFLOW_DISPATCH_TEST=true to run") + } + + if err := client.CreateWorkflowDispatchEvent("test-tpg.yml", map[string]any{ + "owner": "modular-magician", + "repo": testRepo, + "branch": "main", + "pr-number": testPRNumber, + "sha": testPRCommitSha, + }); err != nil { + t.Errorf("error creating workflow dispatch event: %v", err) + } + + t.Logf("Successfully triggered workflow dispatch event") +} + +// TestIntegrationMergePullRequest is commented out as it has permanent effects +// Uncomment and run only if you're sure you want to merge the PR +/* + func TestIntegrationMergePullRequest(t *testing.T) { + client := skipIfNoToken(t) + + // Skip this test by default as it has permanent effects + if os.Getenv("RUN_MERGE_PR_TEST") != "true" { + t.Skip("Skipping merge PR test: set RUN_MERGE_PR_TEST=true to run") + } + + // You'll need a valid commit SHA for this test + if testPRCommitSha == "HEAD" { + t.Skip("Skipping MergePullRequest test: need a valid commit SHA") + } + + err := client.MergePullRequest(testOwner, testRepo, testPRNumber, testPRCommitSha) + if err != nil { + t.Fatalf("MergePullRequest failed: %v", err) + } + + t.Logf("Successfully merged pull request") + } +*/ + +// Helper function to get minimum of two integers +func 
min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/.ci/magician/github/interface_conversion.go b/.ci/magician/github/interface_conversion.go new file mode 100644 index 000000000000..3ca9f35a1ba2 --- /dev/null +++ b/.ci/magician/github/interface_conversion.go @@ -0,0 +1,70 @@ +package github + +import ( + gh "github.com/google/go-github/v68/github" +) + +// Convert from GitHub types to our types +func convertGHPullRequest(pr *gh.PullRequest) PullRequest { + if pr == nil { + return PullRequest{} + } + + var labels []Label + if pr.Labels != nil { + for _, l := range pr.Labels { + if l.Name != nil { + labels = append(labels, Label{Name: *l.Name}) + } + } + } + + return PullRequest{ + HTMLUrl: pr.GetHTMLURL(), + Number: pr.GetNumber(), + Title: pr.GetTitle(), + User: User{Login: pr.GetUser().GetLogin()}, + Body: pr.GetBody(), + Labels: labels, + MergeCommitSha: pr.GetMergeCommitSHA(), + Merged: pr.GetMerged(), + } +} + +func convertGHUser(user *gh.User) User { + if user == nil { + return User{} + } + return User{ + Login: user.GetLogin(), + } +} + +func convertGHUsers(users []*gh.User) []User { + result := make([]User, len(users)) + for i, u := range users { + result[i] = convertGHUser(u) + } + return result +} + +func convertGHComment(comment *gh.IssueComment) PullRequestComment { + if comment == nil { + return PullRequestComment{} + } + + return PullRequestComment{ + User: convertGHUser(comment.User), + Body: comment.GetBody(), + ID: int(comment.GetID()), + CreatedAt: comment.GetCreatedAt().Time, + } +} + +func convertGHComments(comments []*gh.IssueComment) []PullRequestComment { + result := make([]PullRequestComment, len(comments)) + for i, c := range comments { + result[i] = convertGHComment(c) + } + return result +} diff --git a/.ci/magician/github/set.go b/.ci/magician/github/set.go index 15ac9996ca22..e56b5a03f5b5 100644 --- a/.ci/magician/github/set.go +++ b/.ci/magician/github/set.go @@ -17,61 +17,64 @@ package github import ( "fmt" - 
utils "magician/utility" + "strconv" "strings" "time" -) -func (gh *Client) PostBuildStatus(prNumber, title, state, targetURL, commitSha string) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/%s", commitSha) + utils "magician/utility" - postBody := map[string]string{ - "context": title, - "state": state, - "target_url": targetURL, + gh "github.com/google/go-github/v68/github" +) + +// PostBuildStatus creates a commit status for a specific SHA +func (c *Client) PostBuildStatus(prNumber, title, state, targetURL, commitSha string) error { + repoStatus := &gh.RepoStatus{ + Context: gh.Ptr(title), + State: gh.Ptr(state), + TargetURL: gh.Ptr(targetURL), } - err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, postBody) + _, _, err := c.gh.Repositories.CreateStatus(c.ctx, defaultOwner, defaultRepo, commitSha, repoStatus) if err != nil { return err } fmt.Printf("Successfully posted build status to pull request %s\n", prNumber) - return nil } -func (gh *Client) PostComment(prNumber, comment string) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/comments", prNumber) +// PostComment adds a comment to a pull request +func (c *Client) PostComment(prNumber, comment string) error { + num, err := strconv.Atoi(prNumber) + if err != nil { + return err + } - body := map[string]string{ - "body": comment, + issueComment := &gh.IssueComment{ + Body: gh.Ptr(comment), } - err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, body) + _, _, err = c.gh.Issues.CreateComment(c.ctx, defaultOwner, defaultRepo, num, issueComment) if err != nil { return err } fmt.Printf("Successfully posted comment to pull request %s\n", prNumber) - return nil } -func (gh *Client) UpdateComment(prNumber, comment string, id int) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/comments/%d", id) - - body := map[string]string{ - "body": 
comment, +// UpdateComment updates an existing comment +func (c *Client) UpdateComment(prNumber, comment string, id int) error { + issueComment := &gh.IssueComment{ + Body: gh.Ptr(comment), } - err := utils.RequestCallWithRetry(url, "PATCH", gh.token, nil, body) + _, _, err := c.gh.Issues.EditComment(c.ctx, defaultOwner, defaultRepo, int64(id), issueComment) if err != nil { return err } fmt.Printf("Successfully updated comment %d in pull request %s\n", id, prNumber) - return nil } @@ -111,26 +114,29 @@ func (gh *Client) RemovePullRequestReviewers(prNumber string, reviewers []string return nil } -func (gh *Client) AddLabels(prNumber string, labels []string) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/labels", prNumber) - - body := map[string][]string{ - "labels": labels, +// AddLabels adds labels to an issue or pull request +func (c *Client) AddLabels(prNumber string, labels []string) error { + num, err := strconv.Atoi(prNumber) + if err != nil { + return err } - err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, body) + _, _, err = c.gh.Issues.AddLabelsToIssue(c.ctx, defaultOwner, defaultRepo, num, labels) if err != nil { return fmt.Errorf("failed to add %q labels: %s", labels, err) } return nil - } -func (gh *Client) RemoveLabel(prNumber, label string) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/labels/%s", prNumber, label) - err := utils.RequestCallWithRetry(url, "DELETE", gh.token, nil, nil) +// RemoveLabel removes a label from an issue or pull request +func (c *Client) RemoveLabel(prNumber, label string) error { + num, err := strconv.Atoi(prNumber) + if err != nil { + return err + } + _, err = c.gh.Issues.RemoveLabelForIssue(c.ctx, defaultOwner, defaultRepo, num, label) if err != nil { return fmt.Errorf("failed to remove %s label: %s", label, err) } @@ -138,19 +144,24 @@ func (gh *Client) RemoveLabel(prNumber, label string) error { 
return nil } -func (gh *Client) CreateWorkflowDispatchEvent(workflowFileName string, inputs map[string]any) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/actions/workflows/%s/dispatches", workflowFileName) - err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, map[string]any{ - "ref": "main", - "inputs": inputs, - }) +// CreateWorkflowDispatchEvent triggers a workflow run +func (c *Client) CreateWorkflowDispatchEvent(workflowFileName string, inputs map[string]any) error { + stringInputs := make(map[string]interface{}) + for k, v := range inputs { + stringInputs[k] = v + } + event := gh.CreateWorkflowDispatchEventRequest{ + Ref: "main", + Inputs: stringInputs, + } + + _, err := c.gh.Actions.CreateWorkflowDispatchEventByFileName(c.ctx, defaultOwner, defaultRepo, workflowFileName, event) if err != nil { return fmt.Errorf("failed to create workflow dispatch event: %s", err) } fmt.Printf("Successfully created workflow dispatch event for %s with inputs %v\n", workflowFileName, inputs) - return nil } diff --git a/.ci/magician/utility/utils.go b/.ci/magician/utility/utils.go index 0325c1090d85..1c55326bc84a 100644 --- a/.ci/magician/utility/utils.go +++ b/.ci/magician/utility/utils.go @@ -23,6 +23,7 @@ import ( "math" "net/http" "os" + "strings" "time" "golang.org/x/exp/slices" @@ -51,21 +52,37 @@ func defaultRetryConfig() retryConfig { // makeHTTPRequest performs the actual HTTP request and returns the response func makeHTTPRequest(url, method, credentials string, body any) (*http.Response, []byte, error) { client := &http.Client{} - jsonBody, err := json.Marshal(body) - if err != nil { - return nil, nil, fmt.Errorf("error marshaling JSON: %s", err) + + fmt.Println("") + fmt.Println("request url: ", url) + + var reqBody io.Reader + if body != nil { + switch v := body.(type) { + case []byte: + // Body is already serialized, use directly + reqBody = bytes.NewBuffer(v) + rbString := strings.TrimSpace(string(v)) + 
fmt.Println("request body (raw bytes): ", rbString) + default: + // Body needs serialization + jsonBody, err := json.Marshal(body) + if err != nil { + return nil, nil, fmt.Errorf("error marshaling JSON: %s", err) + } + reqBody = bytes.NewBuffer(jsonBody) + fmt.Println("request body (serialized): ", string(jsonBody)) + } } - req, err := http.NewRequest(method, url, bytes.NewBuffer(jsonBody)) + + req, err := http.NewRequest(method, url, reqBody) if err != nil { return nil, nil, fmt.Errorf("error creating request: %s", err) } + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", credentials)) req.Header.Set("Content-Type", "application/json") req.Header.Set("Accept", "application/json") - - fmt.Println("") - fmt.Println("request url: ", url) - fmt.Println("request body: ", string(jsonBody)) fmt.Println("") resp, err := client.Do(req) From c808557950de2c7566a83a0824cce41a56519312 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 16 May 2025 15:38:33 -0700 Subject: [PATCH 165/884] removing `required_with` tag from `min`/`max_instances` fields in vpcaccess connector (#13989) --- mmv1/products/vpcaccess/Connector.yaml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/mmv1/products/vpcaccess/Connector.yaml b/mmv1/products/vpcaccess/Connector.yaml index 964d5b9a727a..2ec3d20f6cc6 100644 --- a/mmv1/products/vpcaccess/Connector.yaml +++ b/mmv1/products/vpcaccess/Connector.yaml @@ -129,22 +129,18 @@ properties: type: Integer description: | Minimum value of instances in autoscaling group underlying the connector. Value must be between 2 and 9, inclusive. Must be - lower than the value specified by max_instances. + lower than the value specified by max_instances. Required alongside `max_instances` if not using `min_throughput`/`max_throughput`. 
default_from_api: true conflicts: - min_throughput - required_with: - - max_instances - name: 'maxInstances' type: Integer description: | Maximum value of instances in autoscaling group underlying the connector. Value must be between 3 and 10, inclusive. Must be - higher than the value specified by min_instances. + higher than the value specified by min_instances. Required alongside `min_instances` if not using `min_throughput`/`max_throughput`. default_from_api: true conflicts: - max_throughput - required_with: - - min_instances - name: 'maxThroughput' type: Integer description: | From 3a9d270f8d6c0da8d55fd9688abc1bc8fc227f38 Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Fri, 16 May 2025 18:53:51 -0400 Subject: [PATCH 166/884] container: add docs for user_managed_keys_config (#13996) Signed-off-by: drfaust92 --- .../website/docs/r/container_cluster.html.markdown | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 3c9a97e5c801..5f7087d27c69 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -304,6 +304,8 @@ region are guaranteed to support the same version. [Google Groups for GKE](https://cloud.google.com/kubernetes-engine/docs/how-to/role-based-access-control#groups-setup-gsuite) feature. Structure is [documented below](#nested_authenticator_groups_config). +* `user_managed_keys_config` - (Optional) The custom keys configuration of the cluster Structure is [documented below](#nested_control_plane_endpoints_config). + * `control_plane_endpoints_config` - (Optional) Configuration for all of the cluster's control plane endpoints. Structure is [documented below](#nested_control_plane_endpoints_config). 
@@ -1220,6 +1222,17 @@ notification_config {
 
 * `enabled` (Required) - Enable the Secret Manager add-on for this cluster.
 
+The `user_managed_keys_config` block supports:
+
+* `cluster_ca` - (Optional) The Certificate Authority Service caPool to use for the cluster CA in this cluster.
+* `etcd_api_ca` - (Optional) The Certificate Authority Service caPool to use for the etcd API CA in this cluster.
+* `etcd_peer_ca` - (Optional) The Certificate Authority Service caPool to use for the etcd peer CA in this cluster.
+* `aggregation_ca` - (Optional) The Certificate Authority Service caPool to use for the aggregation CA in this cluster.
+* `service_account_signing_keys` - (Optional) The Cloud KMS cryptoKeyVersions to use for signing service account JWTs issued by this cluster.
+* `service_account_verification_keys` - (Optional) The Cloud KMS cryptoKeyVersions to use for verifying service account JWTs issued by this cluster.
+* `control_plane_disk_encryption_key` - (Optional) The Cloud KMS cryptoKey to use for Confidential Hyperdisk on the control plane nodes.
+* `gkeops_etcd_backup_encryption_key` - (Optional) Resource path of the Cloud KMS cryptoKey to use for encryption of internal etcd backups.
+
 The `control_plane_endpoints_config` block supports:
 
 * `dns_endpoint_config` - (Optional) DNS endpoint configuration.
From 06c25569a31d168dedc5d8055c58b52f636c357b Mon Sep 17 00:00:00 2001 From: Jaylon McShan Date: Mon, 19 May 2025 09:03:17 -0600 Subject: [PATCH 167/884] Add FutureReservation resource to Compute (#13317) --- mmv1/products/compute/FutureReservation.yaml | 547 ++++++++++++++++++ .../examples/future_reservation_basic.tf.tmpl | 19 + .../shared_future_reservation.tf.tmpl | 53 ++ .../update_encoder/future_reservation.go.tmpl | 17 + ...ce_compute_future_reservation_test.go.tmpl | 101 ++++ 5 files changed, 737 insertions(+) create mode 100644 mmv1/products/compute/FutureReservation.yaml create mode 100644 mmv1/templates/terraform/examples/future_reservation_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl create mode 100644 mmv1/templates/terraform/update_encoder/future_reservation.go.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl diff --git a/mmv1/products/compute/FutureReservation.yaml b/mmv1/products/compute/FutureReservation.yaml new file mode 100644 index 000000000000..e05fe8eac7a6 --- /dev/null +++ b/mmv1/products/compute/FutureReservation.yaml @@ -0,0 +1,547 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'FutureReservation' +description: | + Represents a future reservation resource in Compute Engine. 
Future reservations allow users + to reserve capacity for a specified time window, ensuring that resources are available + when needed. + + Reservations apply only to Compute Engine, Cloud Dataproc, and Google + Kubernetes Engine VM usage.Reservations do not apply to `f1-micro` or + `g1-small` machine types, preemptible VMs, sole tenant nodes, or other + services not listed above + like Cloud SQL and Dataflow. +references: + guides: + 'Future Reservations Guide': 'https://cloud.google.com/compute/docs/instances/future-reservations-overview' + api: https://cloud.google.com/compute/docs/reference/rest/v1/futureReservations +docs: +base_url: 'projects/{{project}}/zones/{{zone}}/futureReservations' +has_self_link: true +update_url: 'projects/{{project}}/zones/{{zone}}/futureReservations/{{name}}' +update_verb: 'PATCH' +update_mask: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +collection_url_key: 'items' +custom_code: + update_encoder: 'templates/terraform/update_encoder/future_reservation.go.tmpl' +min_version: beta +examples: + - name: 'future_reservation_basic' + primary_resource_id: 'gce_future_reservation' + vars: + future_reservation_name: 'gce-future-reservation' + test_env_vars: + project: 'PROJECT_NAME' + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + - name: 'shared_future_reservation' + primary_resource_id: 'gce_future_reservation' + vars: + future_reservation_name: 'gce-shared-future-reservation' + test_env_vars: + project: 'PROJECT_NAME' + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + exclude_docs: true + skip_vcr: true +parameters: + - name: 'name' + type: String + description: | + Name of the resource. Provided by the client when the resource is + created. The name must be 1-63 characters long, and comply with + RFC1035. 
Specifically, the name must be 1-63 characters long and match
+ the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
+ first character must be a lowercase letter, and all following
+ characters must be a dash, lowercase letter, or digit, except the last
+ character, which cannot be a dash.
+ required: true
+ immutable: true
+properties:
+ - name: 'zone'
+ type: String
+ description: |
+ URL of the Zone where this future reservation resides.
+ output: true
+ - name: 'creationTimestamp'
+ type: Time
+ description: |
+ The creation timestamp for this future reservation in RFC3339 text format.
+ output: true
+ - name: 'selfLinkWithId'
+ type: String
+ description: |
+ Server-defined URL for this resource with the resource id.
+ output: true
+ - name: 'description'
+ type: String
+ description: |
+ An optional description of this resource.
+ - name: 'timeWindow'
+ type: NestedObject
+ description: |
+ Time window for this Future Reservation.
+ required: true
+ properties:
+ - name: 'startTime'
+ type: String
+ description: |
+ Start time of the future reservation in RFC3339 format.
+ required: true
+ - name: 'endTime'
+ type: String
+ description: |
+ End time of the future reservation in RFC3339 format.
+ - name: 'duration'
+ type: NestedObject
+ description: |
+ Duration of the future reservation
+ properties:
+ - name: 'seconds'
+ type: String
+ description: |
+ Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
+ - name: 'nanos'
+ type: Integer
+ description: |
+ Span of time that's a fraction of a second at nanosecond resolution.
+ - name: 'shareSettings'
+ type: NestedObject
+ description: |
+ Settings for sharing the future reservation
+ properties:
+ - name: 'shareType'
+ type: Enum
+ description: |
+ Type of sharing for this future reservation.
+ immutable: true + default_from_api: true + enum_values: + - 'LOCAL' + - 'SPECIFIC_PROJECTS' + - name: 'projects' + type: Array + description: | + list of Project names to specify consumer projects for this shared-reservation. This is only valid when shareType's value is SPECIFIC_PROJECTS. + item_type: + type: String + - name: 'projectMap' + type: Map + description: | + A map of project id and project config. This is only valid when shareType's value is SPECIFIC_PROJECTS. + key_name: 'id' + key_description: | + The project id/number which is deleting or adding to the project list. + value_type: + name: 'projectConfig' + type: NestedObject + properties: + - name: 'projectId' + type: String + description: | + The project ID, should be same as the key of this project config in the parent map. + - name: 'namePrefix' + type: String + description: | + Name prefix for the reservations to be created at the time of delivery. The name prefix must comply with RFC1035. Maximum allowed length for name prefix is 20. Automatically created reservations name format will be -date-####. + - name: 'status' + type: NestedObject + description: | + [Output only] Status of the Future Reservation + output: true + properties: + - name: 'procurementStatus' + type: Enum + description: | + Current state of this Future Reservation + enum_values: + - 'APPROVED' + - 'CANCELLED' + - 'COMMITTED' + - 'DECLINED' + - 'DRAFTING' + - 'FAILED' + - 'FAILED_PARTIALLY_FULFILLED' + - 'FULFILLED' + - 'PENDING_AMENDMENT_APPROVAL' + - 'PENDING_APPROVAL' + - 'PROCURING' + - 'PROVISIONING' + - name: 'lockTime' + type: String + description: | + Time when Future Reservation would become LOCKED, after which no modifications to Future Reservation will be allowed. Applicable only after the Future Reservation is in the APPROVED state. The lockTime is an RFC3339 string. The procurementStatus will transition to PROCURING state at this time. 
+ - name: 'autoCreatedReservations' + type: Array + description: | + Fully qualified urls of the automatically created reservations at startTime. + item_type: + type: String + - name: 'fulfilledCount' + type: String + description: | + This count indicates the fulfilled capacity so far. This is set during "PROVISIONING" state. This count also includes capacity delivered as part of existing matching reservations. + - name: 'specificSkuProperties' + type: NestedObject + description: | + Instance properties related to the Future Reservation. + properties: + - name: 'sourceInstanceTemplateId' + type: String + description: | + ID of the instance template used to populate the Future Reservation properties. + - name: 'amendmentStatus' + type: Enum + description: | + The current status of the requested amendment. + - name: 'lastKnownGoodState' + type: NestedObject + description: | + This field represents the future reservation before an amendment was requested. If the amendment is declined, the Future Reservation will be reverted to the last known good state. The last known good state is not set when updating a future reservation whose Procurement Status is DRAFTING. + properties: + - name: 'futureReservationSpecs' + type: NestedObject + description: | + The previous instance-related properties of the Future Reservation. + properties: + - name: 'specificSkuProperties' + type: NestedObject + description: | + The previous instance related properties of the Future Reservation. + properties: + - name: 'instanceProperties' + type: NestedObject + description: | + Properties of the SKU instances being reserved. + properties: + - name: 'machineType' + type: String + description: | + Specifies type of machine (name only) which has fixed number of vCPUs and fixed amount of memory. This also includes specifying custom machine type following custom-NUMBER_OF_CPUS-AMOUNT_OF_MEMORY pattern. 
+ - name: 'guestAccelerators' + type: Array + description: | + Specifies accelerator type and count. + item_type: + type: NestedObject + properties: + - name: 'acceleratorType' + type: String + description: | + Full or partial URL of the accelerator type resource to attach to this instance. + - name: 'acceleratorCount' + type: Integer + description: | + The number of the guest accelerator cards exposed to this instance. + - name: 'minCpuPlatform' + type: String + description: | + Minimum CPU platform for the reservation. + - name: 'localSsds' + type: Array + description: | + Specifies amount of local ssd to reserve with each instance. The type of disk is local-ssd. + item_type: + type: NestedObject + properties: + - name: 'diskSizeGb' + type: String + description: | + Specifies the size of the disk in base-2 GB. + - name: 'interface' + type: Enum + description: | + Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. + enum_values: + - 'SCSI' + - 'NVME' + - name: 'maintenanceFreezeDurationHours' + type: Integer + description: | + Specifies the number of hours after reservation creation where instances using the reservation won't be scheduled for maintenance. + - name: 'locationHint' + type: String + description: | + An opaque location hint used to place the allocation close to other resources. This field is for use by internal tools that use the public API. + - name: 'maintenanceInterval' + type: Enum + description: | + Specifies the frequency of planned maintenance events. The accepted values are: PERIODIC. + enum_values: + - 'PERIODIC' + - name: 'totalCount' + type: String + description: | + Total number of instances for which capacity assurance is requested at a future time period. 
+ - name: 'sourceInstanceTemplate' + type: String + description: | + The instance template that will be used to populate the ReservedInstanceProperties of the future reservation + - name: 'timeWindow' + type: NestedObject + description: | + [Output Only] The previous time window of the Future Reservation. + properties: + - name: 'startTime' + type: String + description: | + Start time of the Future Reservation. The startTime is an RFC3339 string. + - name: 'endTime' + type: String + description: | + End time of the Future Reservation in RFC3339 format. + - name: 'duration' + type: NestedObject + description: | + Specifies the duration of the reservation. + properties: + - name: 'seconds' + type: String + description: | + Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. + - name: 'nanos' + type: Integer + description: | + Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'shareSettings' + type: NestedObject + description: | + The previous share settings of the Future Reservation. + properties: + - name: 'shareType' + type: Enum + description: | + Type of sharing for this shared-reservation + enum_values: + - 'LOCAL' + - 'ORGANIZATION' + - 'SPECIFIC_PROJECTS' + - name: 'projects' + type: Array + description: | + A futureReservations.list of Project names to specify consumer projects for this shared-reservation. This is only valid when shareType's value is SPECIFIC_PROJECTS. + item_type: + type: String + - name: 'projectMap' + type: Map + key_name: project + key_description: | + The project ID, used as the key in this map. + description: | + A map of project id and project config. This is only valid when shareType's value is SPECIFIC_PROJECTS. 
+ value_type: + name: 'projectConfig' + type: NestedObject + properties: + - name: 'projectId' + type: String + description: | + The project ID, should be same as the key of this project config in the parent map. + - name: 'procurementStatus' + type: Enum + description: | + The status of the last known good state for the Future Reservation + - name: 'namePrefix' + type: String + description: | + The name prefix of the Future Reservation before an amendment was requested. + - name: 'description' + type: String + description: | + The description of the FutureReservation before an amendment was requested. + - name: 'lockTime' + type: String + description: | + The lock time of the FutureReservation before an amendment was requested. + - name: 'existingMatchingUsageInfo' + type: NestedObject + description: | + Represents the matching usage for the future reservation before an amendment was requested. + properties: + - name: 'count' + type: String + description: | + Count representing minimum(FR totalCount, matching_reserved_capacity+matching_unreserved_instances). + - name: 'timeStamp' + type: String + description: | + Timestamp when the matching usage was calculated. + - name: 'planningStatus' + type: Enum + description: | + Planning state before being submitted for evaluation + enum_values: + - 'DRAFT' + - 'SUBMITTED' + default_from_api: true + - name: 'autoDeleteAutoCreatedReservations' + type: Boolean + ignore_read: true + description: | + Setting for enabling or disabling automatic deletion for auto-created reservation. If set to true, auto-created reservations will be deleted at Future Reservation's end time (default) or at user's defined timestamp if any of the [autoCreatedReservationsDeleteTime, autoCreatedReservationsDuration] values is specified. For keeping auto-created reservation indefinitely, this value should be set to false. 
+ - name: 'specificReservationRequired' + type: Boolean + description: | + Indicates whether the auto-created reservation can be consumed by VMs with affinity for "any" reservation. If the field is set, then only VMs that target the reservation by name can consume from the delivered reservation. + - name: 'reservationName' + type: String + description: | + Name of reservations where the capacity is provisioned at the time of delivery of future reservations. If the reservation with the given name does not exist already, it is created automatically at the time of Approval with INACTIVE state till specified start-time. Either provide the reservationName or a namePrefix. + - name: 'deploymentType' + type: Enum + description: | + Type of the deployment requested as part of future reservation. + enum_values: + - 'DENSE' + - 'FLEXIBLE' + - name: 'reservationMode' + type: Enum + description: | + The reservation mode which determines reservation-termination behavior and expected pricing. + enum_values: + - 'CALENDAR' + - 'DEFAULT' + - name: 'commitmentInfo' + type: NestedObject + description: | + If not present, then FR will not deliver a new commitment or update an existing commitment. + properties: + - name: 'commitmentPlan' + type: Enum + description: | + Indicates if a Commitment needs to be created as part of FR delivery. If this field is not present, then no commitment needs to be created. + enum_values: + - 'INVALID' + - 'THIRTY_SIX_MONTH' + - 'TWELVE_MONTH' + - name: 'commitmentName' + type: String + description: | + name of the commitment where capacity is being delivered to. + - name: 'previousCommitmentTerms' + type: Enum + description: | + Only applicable if FR is delivering to the same reservation. If set, all parent commitments will be extended to match the end date of the plan for this commitment. 
+ enum_values: + - 'EXTEND' + - name: 'schedulingType' + type: Enum + description: | + Maintenance information for this reservation + enum_values: + - 'GROUPED' + - 'INDEPENDENT' + - name: 'specificSkuProperties' + type: NestedObject + description: | + Future Reservation configuration to indicate instance properties and total count. + properties: + - name: 'instanceProperties' + type: NestedObject + description: | + Properties of the SKU instances being reserved. + properties: + - name: 'machineType' + type: String + description: | + Specifies type of machine (name only) which has fixed number of vCPUs and fixed amount of memory. This also includes specifying custom machine type following custom-NUMBER_OF_CPUS-AMOUNT_OF_MEMORY pattern. + - name: 'guestAccelerators' + type: Array + description: | + Specifies accelerator type and count. + item_type: + type: NestedObject + properties: + - name: 'acceleratorType' + type: String + description: | + Full or partial URL of the accelerator type resource to attach to this instance. + - name: 'acceleratorCount' + type: Integer + description: | + The number of the guest accelerator cards exposed to this instance. + - name: 'minCpuPlatform' + type: String + description: | + Minimum cpu platform the reservation. + - name: 'localSsds' + type: Array + description: | + Specifies amount of local ssd to reserve with each instance. The type of disk is local-ssd. + item_type: + type: NestedObject + properties: + - name: 'diskSizeGb' + type: String + description: | + Specifies the size of the disk in base-2 GB. + - name: 'interface' + type: Enum + description: | + Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. + enum_values: + - 'SCSI' + - 'NVME' + - name: 'maintenanceFreezeDurationHours' + type: Integer + description: | + Specifies the number of hours after reservation creation where instances using the reservation won't be scheduled for maintenance. 
+ - name: 'locationHint' + type: String + description: | + An opaque location hint used to place the allocation close to other resources. This field is for use by internal tools that use the public API. + - name: 'maintenanceInterval' + type: Enum + description: | + Specifies the frequency of planned maintenance events. The accepted values are: PERIODIC + enum_values: + - 'PERIODIC' + - name: 'totalCount' + type: String + description: | + Total number of instances for which capacity assurance is requested at a future time period. + - name: 'sourceInstanceTemplate' + type: String + description: | + The instance template that will be used to populate the ReservedInstanceProperties of the future reservation + - name: 'autoCreatedReservationsDeleteTime' + type: String + ignore_read: true + description: | + Future timestamp when the FR auto-created reservations will be deleted by Compute Engine. + - name: 'autoCreatedReservationsDuration' + type: NestedObject + description: | + Specifies the duration of auto-created reservations. It represents relative time to future reservation startTime when auto-created reservations will be automatically deleted by Compute Engine. Duration time unit is represented as a count of seconds and fractions of seconds at nanosecond resolution. + properties: + - name: 'seconds' + type: String + description: | + Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. + - name: 'nanos' + type: Integer + description: | + Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. 
diff --git a/mmv1/templates/terraform/examples/future_reservation_basic.tf.tmpl b/mmv1/templates/terraform/examples/future_reservation_basic.tf.tmpl new file mode 100644 index 000000000000..84a9019ea603 --- /dev/null +++ b/mmv1/templates/terraform/examples/future_reservation_basic.tf.tmpl @@ -0,0 +1,19 @@ +resource "google_compute_future_reservation" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "future_reservation_name"}}" + project = "{{index $.TestEnvVars "project"}}" + auto_delete_auto_created_reservations = true + planning_status = "DRAFT" + name_prefix = "fr-basic" + time_window { + start_time = "2025-11-01T00:00:00Z" + end_time = "2025-11-02T00:00:00Z" + } + specific_sku_properties { + total_count = "1" + + instance_properties { + machine_type = "e2-standard-2" + } + } +} diff --git a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl new file mode 100644 index 000000000000..c88b3d4bcf02 --- /dev/null +++ b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl @@ -0,0 +1,53 @@ +resource "google_project" "owner_project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "compute" { + project = google_project.owner_project.project_id + service = "compute.googleapis.com" + disable_on_destroy = false +} + +resource "google_project" "guest_project" { + project_id = "tf-test-2%{random_suffix}" + name = "tf-test-2%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + deletion_policy = "DELETE" +} + +resource "google_organization_policy" "shared_future_reservation_org_policy" { + org_id = "{{index $.TestEnvVars "org_id"}}" + constraint = "constraints/compute.sharedReservationsOwnerProjects" + list_policy { + allow { + 
values = ["projects/${google_project.owner_project.number}"] + } + } +} + +resource "google_compute_future_reservation" "{{$.PrimaryResourceId}}" { + project = google_project.owner_project.project_id + name = "{{index $.Vars "future_reservation_name"}}" + time_window { + start_time = "2025-08-01T00:00:00Z" + end_time = "2025-08-02T00:00:00Z" + } + + share_settings { + share_type = "SPECIFIC_PROJECTS" + project_map { + id = google_project.guest_project.project_id + project_id = google_project.guest_project.project_id + } + } + + + depends_on = [ + google_organization_policy.shared_future_reservation_org_policy, + google_project_service.compute + ] +} \ No newline at end of file diff --git a/mmv1/templates/terraform/update_encoder/future_reservation.go.tmpl b/mmv1/templates/terraform/update_encoder/future_reservation.go.tmpl new file mode 100644 index 000000000000..03e161230e59 --- /dev/null +++ b/mmv1/templates/terraform/update_encoder/future_reservation.go.tmpl @@ -0,0 +1,17 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} +nameProp := d.Get("name") +if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp +} +return obj, nil diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl new file mode 100644 index 000000000000..411c3d35dcd0 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl @@ -0,0 +1,101 @@ +{{- if ne $.TargetVersionName "ga" -}} +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/plancheck" +) + +func TestAccComputeFutureReservation_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": envvar.GetTestProjectFromEnv(), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFutureReservation_full(context), + }, + { + ResourceName: "google_compute_future_reservation.gce_future_reservation", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_delete_auto_created_reservations"}, + }, + { + Config: testAccComputeFutureReservation_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_future_reservation.gce_future_reservation", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_future_reservation.gce_future_reservation", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_delete_auto_created_reservations"}, + }, + }, + }) +} + +func testAccComputeFutureReservation_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_future_reservation" "gce_future_reservation" { + provider = google-beta + name = "tf-fr%{random_suffix}" + name_prefix = "fr-%{random_suffix}" + project = "%{project}" + planning_status = "DRAFT" + auto_delete_auto_created_reservations = true + description = "test future reservation" + time_window { + start_time = "2025-11-01T00:00:00Z" + end_time = "2025-11-02T00:00:00Z" + } + + specific_sku_properties { + total_count = "1" + instance_properties { + machine_type = "e2-standard-2" + } + } +} +`, context) +} + +func testAccComputeFutureReservation_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_future_reservation" "gce_future_reservation" { + provider = google-beta + name = "tf-fr%{random_suffix}" + name_prefix = "fru-%{random_suffix}" + project = "%{project}" + planning_status = "SUBMITTED" + auto_delete_auto_created_reservations = false + description = "test updated future reservation" + time_window { + start_time = "2025-11-01T00:00:00Z" + end_time = "2025-11-02T00:00:00Z" + } + specific_sku_properties { + total_count = "1" + instance_properties { + machine_type = "e2-standard-2" + } + } +} +`, context) +} + +{{- end }} \ No newline at end of file From d38310b48b470e11d193d4d772d6003b0407e039 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 09:36:50 -0700 Subject: [PATCH 168/884] fix TestAccDataLossPreventionDiscoveryConfig_dlpDiscoveryConfigOrgFolderPausedExample (#14011) --- mmv1/products/dlp/DiscoveryConfig.yaml | 1 + .../examples/dlp_discovery_config_org_folder_paused.tf.tmpl | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git 
a/mmv1/products/dlp/DiscoveryConfig.yaml b/mmv1/products/dlp/DiscoveryConfig.yaml index 6e0493f05907..df8330e5bf74 100644 --- a/mmv1/products/dlp/DiscoveryConfig.yaml +++ b/mmv1/products/dlp/DiscoveryConfig.yaml @@ -62,6 +62,7 @@ examples: test_env_vars: project: 'PROJECT_NAME' organization: 'ORG_ID' + location: 'REGION' - name: 'dlp_discovery_config_conditions_cadence' primary_resource_id: 'conditions_cadence' test_env_vars: diff --git a/mmv1/templates/terraform/examples/dlp_discovery_config_org_folder_paused.tf.tmpl b/mmv1/templates/terraform/examples/dlp_discovery_config_org_folder_paused.tf.tmpl index b6764b8e1ee8..82ebca669602 100644 --- a/mmv1/templates/terraform/examples/dlp_discovery_config_org_folder_paused.tf.tmpl +++ b/mmv1/templates/terraform/examples/dlp_discovery_config_org_folder_paused.tf.tmpl @@ -1,6 +1,6 @@ resource "google_data_loss_prevention_discovery_config" "{{$.PrimaryResourceId}}" { - parent = "organizations/{{index $.TestEnvVars "organization"}}/locations/us" - location = "us" + parent = "organizations/{{index $.TestEnvVars "organization"}}/locations/{{index $.TestEnvVars "location"}}" + location = "{{index $.TestEnvVars "location"}}" targets { big_query_target { From 540843adc4bfc4b3b59422c95c9aa4fe44ede105 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 09:36:59 -0700 Subject: [PATCH 169/884] Fix apigee data residency test and example (#14003) --- ...gee_organization_cloud_basic_data_residency.tf.tmpl | 8 +++++--- ...rganization_cloud_basic_data_residency_test.tf.tmpl | 10 ++++++---- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl index 148874ab1633..64aa8f4cdb8a 100644 --- a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl +++ 
b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl @@ -5,8 +5,10 @@ provider "google" { data "google_client_config" "current" {} resource "google_apigee_organization" "org" { - description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." - project_id = data.google_client_config.current.project - disable_vpc_peering = true + description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." + project_id = data.google_client_config.current.project + api_consumer_data_location = "europe-west1" + billing_type = "PAYG" + disable_vpc_peering = true } diff --git a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl index 22996438f7b8..399ece85e1b1 100644 --- a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl @@ -16,10 +16,12 @@ resource "google_project_service" "apigee" { } resource "google_apigee_organization" "{{$.PrimaryResourceId}}" { - description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." - project_id = google_project.project.project_id - disable_vpc_peering = true - depends_on = [ + description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." 
+ project_id = google_project.project.project_id + api_consumer_data_location = "europe-west1" + billing_type = "PAYG" + disable_vpc_peering = true + depends_on = [ google_project_service.apigee, ] } From 2c23537341d2d3e1832f8c6e0a6303b8ae9ab6ab Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 09:37:09 -0700 Subject: [PATCH 170/884] Fix TestAccDataplexTaskDataplexTask_update (#14012) --- .../services/dataplex/resource_dataplex_task_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_task_test.go b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_task_test.go index 6b1a078e8d71..181574475772 100644 --- a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_task_test.go +++ b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_task_test.go @@ -29,7 +29,7 @@ func TestAccDataplexTaskDataplexTask_update(t *testing.T) { ResourceName: "google_dataplex_task.example", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "lake", "task_id"}, + ImportStateVerifyIgnore: []string{"location", "lake", "task_id", "execution_status"}, }, { Config: testAccDataplexTask_dataplexTaskPrimaryUpdate(context), @@ -38,7 +38,7 @@ func TestAccDataplexTaskDataplexTask_update(t *testing.T) { ResourceName: "google_dataplex_task.example", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "lake", "task_id"}, + ImportStateVerifyIgnore: []string{"location", "lake", "task_id", "execution_status"}, }, }, }) From 36f596b9f8b55effedaeb9a1c65225483fd7d0fa Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 09:37:28 -0700 Subject: [PATCH 171/884] Skip private_service_connect in TestAccCloudbuildWorkerPool_basic (#14007) --- .../resource_cloudbuild_worker_pool_test.go.tmpl | 9 +++++---- 1 file 
changed, 5 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl index 6a32bb6f629a..9451c9c5c2ce 100644 --- a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl @@ -116,10 +116,11 @@ resource "google_cloudbuild_worker_pool" "pool" { no_external_ip = true } - private_service_connect { - network_attachment = "%{network_attachment}" - route_all_traffic = false - } + // private_service_connect feature is not supported yet. b/394920388 + // private_service_connect { + // network_attachment = "%{network_attachment}" + // route_all_traffic = false + // } } `, context) } From 3c135f668d6bff9ca2bcbe8a6d146cc8370c502c Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 09:37:48 -0700 Subject: [PATCH 172/884] Fix TestAccDataprocMetastoreService_dataprocMetastoreServiceCmekTestExample (#14013) --- mmv1/products/metastore/Service.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/products/metastore/Service.yaml b/mmv1/products/metastore/Service.yaml index bf19008718d4..af8aab692d4d 100644 --- a/mmv1/products/metastore/Service.yaml +++ b/mmv1/products/metastore/Service.yaml @@ -71,6 +71,8 @@ examples: primary_resource_id: 'default' vars: metastore_service_name: 'example-service' + kms_key_name: 'example-key' + test_vars_overrides: 'kms_key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-metastore-service-key1").CryptoKey.Name' exclude_docs: true skip_vcr: true From 04621254ffe1b4220a5b1896bc8386ab23af2261 Mon Sep 17 00:00:00 2001 From: tulika-aakriti Date: Mon, 19 May 2025 22:50:11 +0530 Subject: [PATCH 173/884] Update project ID for 
oracledatabase's resources tests (#14027) --- mmv1/products/oracledatabase/AutonomousDatabase.yaml | 4 ++-- mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml | 4 ++-- mmv1/products/oracledatabase/CloudVmCluster.yaml | 4 ++-- .../data_source_oracle_database_autonomous_database_test.go | 2 +- .../data_source_oracle_database_autonomous_databases_test.go | 2 +- ...ource_oracle_database_cloud_exadata_infrastructure_test.go | 4 ++-- ...urce_oracle_database_cloud_exadata_infrastructures_test.go | 2 +- .../data_source_oracle_database_cloud_vm_cluster_test.go | 2 +- .../data_source_oracle_database_cloud_vm_clusters_test.go | 2 +- .../data_source_oracle_database_db_nodes_test.go | 2 +- .../data_source_oracle_database_db_servers_test.go | 2 +- 11 files changed, 15 insertions(+), 15 deletions(-) diff --git a/mmv1/products/oracledatabase/AutonomousDatabase.yaml b/mmv1/products/oracledatabase/AutonomousDatabase.yaml index e3b225c4149a..d22aacbbe4bc 100644 --- a/mmv1/products/oracledatabase/AutonomousDatabase.yaml +++ b/mmv1/products/oracledatabase/AutonomousDatabase.yaml @@ -56,7 +56,7 @@ examples: - 'deletion_protection' test_vars_overrides: deletion_protection: 'false' - project: '"oci-terraform-testing"' + project: '"oci-terraform-testing-prod"' database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' - name: 'oracledatabase_autonomous_database_full' primary_resource_id: 'myADB' @@ -69,7 +69,7 @@ examples: ignore_read_extra: - 'deletion_protection' test_vars_overrides: - project: '"oci-terraform-testing"' + project: '"oci-terraform-testing-prod"' deletion_protection: 'false' database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' endpoint_name: 'fmt.Sprintf("tftestendpoint%s", acctest.RandString(t, 10))' diff --git a/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml b/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml index ac66ce05f378..2976447ffaf5 100644 --- 
a/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml +++ b/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml @@ -54,7 +54,7 @@ examples: ignore_read_extra: - 'deletion_protection' test_vars_overrides: - 'project': '"oci-terraform-testing"' + 'project': '"oci-terraform-testing-prod"' 'deletion_protection': 'false' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 @@ -70,7 +70,7 @@ examples: ignore_read_extra: - 'deletion_protection' test_vars_overrides: - 'project': '"oci-terraform-testing"' + 'project': '"oci-terraform-testing-prod"' 'deletion_protection': 'false' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 diff --git a/mmv1/products/oracledatabase/CloudVmCluster.yaml b/mmv1/products/oracledatabase/CloudVmCluster.yaml index 34d8b6a7bb14..da816972400a 100644 --- a/mmv1/products/oracledatabase/CloudVmCluster.yaml +++ b/mmv1/products/oracledatabase/CloudVmCluster.yaml @@ -58,7 +58,7 @@ examples: - 'deletion_protection' test_vars_overrides: 'deletion_protection': 'false' - 'project': '"oci-terraform-testing"' + 'project': '"oci-terraform-testing-prod"' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 # As a result these resources are not sweepable @@ -76,7 +76,7 @@ examples: - 'deletion_protection' test_vars_overrides: 'deletion_protection': 'false' - 'project': '"oci-terraform-testing"' + 'project': '"oci-terraform-testing-prod"' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 # As a result these resources are not sweepable diff --git 
a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go index fce8fe50cf32..7cf17999d4e4 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go @@ -34,7 +34,7 @@ func testAccOracleDatabaseAutonomousDatabase_basic() string { data "google_oracle_database_autonomous_database" "my-adb"{ autonomous_database_id = "do-not-delete-tf-adb" location = "us-east4" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go index 5048247fe538..2ee722c0252c 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go @@ -35,7 +35,7 @@ func testAccOracleDatabaseAutonomousDatabases_basic() string { return fmt.Sprintf(` data "google_oracle_database_autonomous_databases" "my-adbs"{ location = "us-east4" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go index af95923d4a8d..c2dcb5dc6e17 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go +++ 
b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go @@ -21,7 +21,7 @@ func TestAccOracleDatabaseCloudExadataInfrastructure_basic(t *testing.T) { resource.TestCheckResourceAttrSet("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "gcp_oracle_zone"), resource.TestCheckResourceAttrSet("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.#"), resource.TestCheckResourceAttrSet("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.0.compute_count"), - resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "display_name", "ofake-do-not-delete-tf-exadata display name"), + resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "display_name", "ofake-do-not-delete-tf-exadata"), resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "gcp_oracle_zone", "us-east4-b-r1"), resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.0.state", "AVAILABLE"), resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.0.shape", "Exadata.X9M"), @@ -35,7 +35,7 @@ func testAccOracleDatabaseCloudExadataInfrastructure_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_exadata_infrastructure" "my-exadata"{ cloud_exadata_infrastructure_id = "ofake-do-not-delete-tf-exadata" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" location = "us-east4" } `) diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go index 46c1b272d08d..5a9752323426 100644 --- 
a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go @@ -36,7 +36,7 @@ func testAccOracleDatabaseCloudExadataInfrastructures_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_exadata_infrastructures" "my_cloud_exadatas"{ location = "us-east4" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go index 4d5f55afba42..5c1a37bd38ad 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go @@ -37,7 +37,7 @@ func testAccOracleDatabaseCloudVmCluster_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_vm_cluster" "my-vmcluster"{ cloud_vm_cluster_id = "ofake-do-not-delete-tf-vmcluster" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" location = "us-east4" } `) diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go index 4a38d480cbbf..db9df1244d32 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go @@ -30,7 +30,7 @@ func testAccOracleDatabaseCloudVmClusters_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_vm_clusters" "my_vmclusters"{ 
location = "us-east4" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go index fce3340670aa..7cb74bf8ffb6 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go @@ -34,7 +34,7 @@ func testAccOracleDatabaseDbNodesConfig() string { return fmt.Sprintf(` data "google_oracle_database_db_nodes" "my_db_nodes"{ location = "us-east4" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" cloud_vm_cluster = "ofake-do-not-delete-tf-vmcluster" } `) diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go index fc91c97ef3b1..e81972c4c9b2 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go @@ -33,7 +33,7 @@ func TestAccOracleDatabaseDbServers_basic(t *testing.T) { const testAccOracleDatabaseDbServers_basic = ` data "google_oracle_database_db_servers" "my_db_servers"{ location = "us-east4" - project = "oci-terraform-testing" + project = "oci-terraform-testing-prod" cloud_exadata_infrastructure = "ofake-do-not-delete-tf-exadata" } ` From fd2735649faf3835e4c13dd98cc9423581a15c8b Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Mon, 19 May 2025 10:41:21 -0700 Subject: [PATCH 174/884] expand tgc metadata coverage to 100% of testcases (#13955) --- .../terraform/acctest/tgc_utils.go | 281 +++++- .../terraform/acctest/vcr_utils.go | 2 + 
.../resource_compute_instance_test.go.tmpl | 840 ++++++------------ .../resource_google_project_test.go | 31 +- 4 files changed, 525 insertions(+), 629 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 033a8f84432e..4b8f4adcf1b5 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -1,15 +1,38 @@ package acctest import ( + "encoding/base64" + "encoding/json" "fmt" "log" "regexp" "strings" + "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" ) +type ResourceMetadata struct { + CaiAssetName string `json:"cai_asset_name"` + ResourceType string `json:"resource_type"` + ResourceAddress string `json:"resource_address"` + ImportMetadata ImportMetadata `json:"import_metadata,omitempty"` + Service string `json:"service"` +} + +type ImportMetadata struct { + Id string `json:"id,omitempty"` + IgnoredFields []string `json:"ignored_fields,omitempty"` +} + +type TgcMetadataPayload struct { + TestName string `json:"test_name"` + RawConfig string `json:"raw_config"` + ResourceMetadata map[string]ResourceMetadata `json:"resource_metadata"` + PrimaryResource string `json:"primary_resource"` +} + // Hardcode the Terraform resource name -> API service name mapping temporarily. // TODO: [tgc] read the mapping from the resource metadata files. 
var ApiServiceNames = map[string]string{ @@ -17,48 +40,238 @@ var ApiServiceNames = map[string]string{ "google_project": "cloudresourcemanager.googleapis.com", } -// Gets the test metadata for tgc: -// - test config -// - cai asset name -// For example: //compute.googleapis.com/projects/ci-test-188019/zones/us-central1-a/instances/tf-test-mi3fqaucf8 -func GetTestMetadataForTgc(service, address, rawConfig string) resource.TestCheckFunc { +// encodeToBase64JSON converts a struct to base64-encoded JSON +func encodeToBase64JSON(data interface{}) (string, error) { + jsonData, err := json.Marshal(data) + if err != nil { + return "", fmt.Errorf("error marshalling data to JSON: %v", err) + } + + return base64.StdEncoding.EncodeToString(jsonData), nil +} + +// CollectAllTgcMetadata collects metadata for all resources in a test step +func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc { return func(s *terraform.State) error { - splits := strings.Split(address, ".") - if splits == nil || len(splits) < 2 { - return fmt.Errorf("The resource address %s is invalid.", address) - } - resourceType := splits[0] - resourceName := splits[1] + // Process each resource to get CAI asset names and resolve auto IDs + for address, metadata := range tgcPayload.ResourceMetadata { + // If there is import metadata update our primary resource + if metadata.ImportMetadata.Id != "" { + tgcPayload.PrimaryResource = address + } + + rState := s.RootModule().Resources[address] + if rState == nil || rState.Primary == nil { + log.Printf("[DEBUG]TGC Terraform error: resource state unavailable for %s, skipping", address) + continue + } + + // Resolve the CAI asset name + if apiServiceName, ok := ApiServiceNames[metadata.ResourceType]; ok { + var rName string + switch metadata.ResourceType { + case "google_project": + rName = fmt.Sprintf("projects/%s", rState.Primary.Attributes["number"]) + default: + rName = rState.Primary.ID + } + metadata.CaiAssetName = fmt.Sprintf("//%s/%s", 
apiServiceName, rName) + } else { + metadata.CaiAssetName = "unknown" + } + + // Resolve auto IDs in import metadata + if metadata.ImportMetadata.Id != "" { + metadata.ImportMetadata.Id = strings.Replace(metadata.ImportMetadata.Id, "", rState.Primary.ID, 1) + } - rState := s.RootModule().Resources[address] - if rState == nil || rState.Primary == nil { - return fmt.Errorf("The resource state is unavailable. Please check if the address %s.%s is correct.", resourceType, resourceName) + // Update the metadata in the map + tgcPayload.ResourceMetadata[address] = metadata } - // Convert the resource ID into CAI asset name - // and then print out the CAI asset name in the logs - if apiServiceName, ok := ApiServiceNames[resourceType]; !ok { - return fmt.Errorf("The Cai product backend name for resource %s doesn't exist.", resourceType) + // Encode the entire payload to base64 JSON + encodedData, err := encodeToBase64JSON(tgcPayload) + if err != nil { + log.Printf("[DEBUG]TGC Terraform error: %v", err) } else { - var rName string - switch resourceType { - case "google_project": - rName = fmt.Sprintf("projects/%s", rState.Primary.Attributes["number"]) - default: - rName = rState.Primary.ID + log.Printf("[DEBUG]TGC Terraform metadata: %s", encodedData) + } + + return nil + } +} + +// parseResources extracts all resources from a Terraform configuration string +func parseResources(config string) []string { + // This regex matches resource blocks in Terraform configurations + resourceRegex := regexp.MustCompile(`resource\s+"([^"]+)"\s+"([^"]+)"`) + matches := resourceRegex.FindAllStringSubmatch(config, -1) + + var resources []string + for _, match := range matches { + if len(match) >= 3 { + // Combine resource type and name to form the address + resources = append(resources, fmt.Sprintf("%s.%s", match[1], match[2])) + } + } + + return resources +} + +// getServicePackage determines the service package for a resource type +func getServicePackage(resourceType string) string { + var 
ServicePackages = map[string]string{ + "google_compute_": "compute", + "google_storage_": "storage", + "google_sql_": "sql", + "google_container_": "container", + "google_bigquery_": "bigquery", + "google_project": "resourcemanager", + "google_cloud_run_": "cloudrun", + } + + // Check for exact matches first + if service, ok := ServicePackages[resourceType]; ok { + return service + } + + // Check for prefix matches + for prefix, service := range ServicePackages { + if strings.HasPrefix(resourceType, prefix) { + return service + } + } + + // Default to "unknown" if no match found + return "unknown" +} + +// determineImportMetadata checks if the next step is an import step and extracts all import metadata +func determineImportMetadata(steps []resource.TestStep, currentStepIndex int, resourceName string) ImportMetadata { + var metadata ImportMetadata + + // Check if there's a next step and if it's an import step + if currentStepIndex+1 < len(steps) { + nextStep := steps[currentStepIndex+1] + + // Check if it's an import step for our resource + if nextStep.ImportState && (nextStep.ResourceName == resourceName || + strings.HasSuffix(nextStep.ResourceName, "."+strings.Split(resourceName, ".")[1])) { + // Capture ignored fields if present + if nextStep.ImportStateVerify && len(nextStep.ImportStateVerifyIgnore) > 0 { + metadata.IgnoredFields = nextStep.ImportStateVerifyIgnore + } + + // If ImportStateId is explicitly set, use that + if nextStep.ImportStateId != "" { + metadata.Id = nextStep.ImportStateId + return metadata + } + + // If ImportStateIdPrefix is set, note it + if nextStep.ImportStateIdPrefix != "" { + metadata.Id = fmt.Sprintf("%s", nextStep.ImportStateIdPrefix) + return metadata } - caiAssetName := fmt.Sprintf("//%s/%s", apiServiceName, rName) - log.Printf("[DEBUG]TGC CAI asset names start\n%s\nEnd of TGC CAI asset names", caiAssetName) + + // If ImportStateIdFunc is set, get function info + if nextStep.ImportStateIdFunc != nil { + metadata.Id = "" + return 
metadata + } + + // Default case - the ID will be automatically determined + metadata.Id = "" + return metadata } + } - // The acceptance tests names will be also used for the tgc tests. - // "service" is logged and will be used to put the tgc tests into specific service packages. - log.Printf("[DEBUG]TGC Terraform service: %s", service) - log.Printf("[DEBUG]TGC Terraform resource: %s", address) + return metadata +} - re := regexp.MustCompile(`\"(tf[-_]?test[-_]?.*?)([a-z0-9]+)\"`) - rawConfig = re.ReplaceAllString(rawConfig, `"${1}tgc"`) - log.Printf("[DEBUG]TGC raw_config starts %sEnd of TGC raw_config", rawConfig) - return nil +// extendWithTGCData adds TGC metadata check function to the last non-plan config entry +func extendWithTGCData(t *testing.T, c resource.TestCase) resource.TestCase { + var updatedSteps []resource.TestStep + + // Find the last non-plan config step + lastNonPlanConfigStep := -1 + for i := len(c.Steps) - 1; i >= 0; i-- { + step := c.Steps[i] + if step.Config != "" && !step.PlanOnly { + lastNonPlanConfigStep = i + break + } + } + + // Process all steps + for i, step := range c.Steps { + // If this is the last non-plan config step, add our TGC check + if i == lastNonPlanConfigStep { + // Parse resources from the config + resources := parseResources(step.Config) + + // Skip if no resources found + if len(resources) == 0 { + updatedSteps = append(updatedSteps, step) + continue + } + + // Determine the service package from the first resource + firstResource := resources[0] + parts := strings.Split(firstResource, ".") + if len(parts) < 2 { + updatedSteps = append(updatedSteps, step) + continue + } + + // Collect metadata for all resources + resourceMetadata := make(map[string]ResourceMetadata) + + // Create the consolidated TGC payload + tgcPayload := TgcMetadataPayload{ + TestName: t.Name(), + RawConfig: step.Config, + ResourceMetadata: resourceMetadata, + } + + for _, res := range resources { + parts := strings.Split(res, ".") + if len(parts) 
>= 2 { + resourceType := parts[0] + + // Determine import metadata if the next step is an import step + importMeta := determineImportMetadata(c.Steps, i, res) + + // Create metadata for this resource + resourceMetadata[res] = ResourceMetadata{ + ResourceType: resourceType, + ResourceAddress: res, + ImportMetadata: importMeta, + Service: getServicePackage(resourceType), + // CaiAssetName will be populated at runtime in the check function + } + } + } + + // Add a single consolidated TGC check for all resources + tgcCheck := CollectAllTgcMetadata(tgcPayload) + + // If there's an existing check function, wrap it with our consolidated check + if step.Check != nil { + existingCheck := step.Check + step.Check = resource.ComposeTestCheckFunc( + existingCheck, + tgcCheck, + ) + } else { + // Otherwise, just use our consolidated check + step.Check = tgcCheck + } + } + + updatedSteps = append(updatedSteps, step) } + + c.Steps = updatedSteps + return c } diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index 5e55ca88cb2d..d670c637fe2c 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -148,6 +148,8 @@ func VcrTest(t *testing.T, c resource.TestCase) { c = initializeReleaseDiffTest(c, t.Name()) } + c = extendWithTGCData(t, c) + // terraform_labels is a computed field to which "goog-terraform-provisioned": "true" is always // added by the provider. 
ImportStateVerify "checks for strict equality and does not respect // DiffSuppressFunc or CustomizeDiff" so any test using ImportStateVerify must ignore diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index a4ab90e4fa85..e74e165215cd 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -193,7 +193,7 @@ func computeInstanceImportStep(zone, instanceName string, additionalImportIgnore // metadata is only read into state if set in the config // importing doesn't know whether metadata.startup_script vs metadata_startup_script is set in the config, // it always takes metadata.startup-script - ignores := []string{"metadata.%", "metadata.startup-script", "metadata_startup_script", "boot_disk.0.initialize_params.0.resource_manager_tags.%", "params.0.resource_manager_tags.%"} + ignores := []string{"metadata.%", "metadata.startup-script", "metadata_startup_script", "boot_disk.0.initialize_params.0.resource_manager_tags.%", "params.0.resource_manager_tags.%"} return resource.TestStep{ ResourceName: "google_compute_instance.foobar", @@ -211,9 +211,9 @@ func TestAccComputeInstance_basic1(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -233,9 +233,6 @@ func TestAccComputeInstance_basic1(t *testing.T) { // instance resource without an explicit deletion_protection = true 
declaration. // Other tests check explicit true/false configs: TestAccComputeInstance_deletionProtectionExplicit[True | False] testAccCheckComputeInstanceHasConfiguredDeletionProtection(&instance, false), - - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"metadata.baz", "metadata.foo", "desired_status", "current_status", "labels", "terraform_labels"}), @@ -250,9 +247,9 @@ func TestAccComputeInstance_basic2(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -262,8 +259,6 @@ func TestAccComputeInstance_basic2(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic2(instanceName)), ), }, }, @@ -277,9 +272,9 @@ func TestAccComputeInstance_basic3(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic3(instanceName), 
@@ -289,8 +284,6 @@ func TestAccComputeInstance_basic3(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic3(instanceName)), ), }, }, @@ -304,9 +297,9 @@ func TestAccComputeInstance_basic4(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic4(instanceName), @@ -316,8 +309,6 @@ func TestAccComputeInstance_basic4(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic4(instanceName)), ), }, }, @@ -331,9 +322,9 @@ func TestAccComputeInstance_basic5(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic5(instanceName), @@ -343,8 +334,6 @@ func TestAccComputeInstance_basic5(t *testing.T) { testAccCheckComputeInstanceTag(&instance, 
"foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic5(instanceName)), ), }, }, @@ -356,16 +345,16 @@ func TestAccComputeInstance_resourceManagerTags(t *testing.T) { var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ + context := map[string]interface{}{ "project": envvar.GetTestProjectFromEnv(), "random_suffix": acctest.RandString(t, 10), "instance_name": instanceName, } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_resourceManagerTags(context), @@ -378,8 +367,6 @@ func TestAccComputeInstance_resourceManagerTags(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_resourceManagerTagsUpdate(context)), ), }, }, @@ -412,8 +399,6 @@ func TestAccComputeInstance_diskResourcePolicies(t *testing.T) { Config: testAccComputeInstance_diskResourcePoliciesOnePolicyUpdate(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_diskResourcePoliciesOnePolicyUpdate(context)), ), }, { @@ -470,8 +455,6 @@ func TestAccComputeInstance_diskResourcePolicies_attachmentDiff(t *testing.T) { Config: 
testAccComputeInstance_diskResourcePoliciesOnePolicy(context_1), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_diskResourcePoliciesOnePolicy(context_1)), ), }, { @@ -501,8 +484,6 @@ func TestAccComputeInstance_machineTypeUrl(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "description", "old_desc"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType(instanceName, machineTypeUrl)), ), }, }, @@ -516,9 +497,9 @@ func TestAccComputeInstance_descriptionUpdate(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_description(instanceName), @@ -534,8 +515,6 @@ func TestAccComputeInstance_descriptionUpdate(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "description", "new_desc"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_descriptionUpdate(instanceName)), ), }, }, @@ -550,19 +529,16 @@ func TestAccComputeInstance_IP(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_ip(ipName, instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - testAccCheckComputeInstanceAccessConfigHasNatIP(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_ip(ipName, instanceName)), - ), + testAccCheckComputeInstanceAccessConfigHasNatIP(&instance)), }, }, }) @@ -577,9 +553,9 @@ func TestAccComputeInstance_IPv6(t *testing.T) { var ptrName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_ipv6(ipName, instanceName, ptrName), @@ -587,8 +563,6 @@ func TestAccComputeInstance_IPv6(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceIpv6AccessConfigHasExternalIPv6(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_ipv6(ipName, instanceName, ptrName)), ), }, { @@ -616,8 +590,6 @@ func TestAccComputeInstance_ipv6ExternalReservation(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_ipv6ExternalReservation(instanceName)), ), }, 
computeInstanceImportStep("us-west2-a", instanceName, []string{}), @@ -626,56 +598,52 @@ func TestAccComputeInstance_ipv6ExternalReservation(t *testing.T) { } func TestAccComputeInstance_internalIPv6(t *testing.T) { - t.Parallel() - - var instance compute.Instance - var ipName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_internalIpv6(ipName, instanceName), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_internalIpv6(ipName, instanceName)), - ), - }, - computeInstanceImportStep("us-west2-a", instanceName, []string{}), - }, - }) + t.Parallel() + + var instance compute.Instance + var ipName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_internalIpv6(ipName, instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), + ), + }, + computeInstanceImportStep("us-west2-a", instanceName, []string{}), + }, + }) } func 
TestAccComputeInstance_internalIPv6PrefixLength(t *testing.T) { - t.Parallel() - - var instance compute.Instance - var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_internalIpv6PrefixLength("96", instanceName), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_internalIpv6PrefixLength("96", instanceName)), - ), - }, - computeInstanceImportStep("us-west2-a", instanceName, []string{"allow_stopping_for_update"}), - }, - }) + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_internalIpv6PrefixLength("96", instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), + ), + }, + computeInstanceImportStep("us-west2-a", instanceName, []string{"allow_stopping_for_update"}), + }, + }) } func TestAccComputeInstance_PTRRecord(t *testing.T) { @@ -687,9 +655,9 @@ func TestAccComputeInstance_PTRRecord(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, 
resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_PTRRecord(ptrName, instanceName), @@ -706,8 +674,6 @@ func TestAccComputeInstance_PTRRecord(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceAccessConfigHasNatIP(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_ip(ipName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"metadata.baz", "metadata.foo"}), @@ -720,9 +686,9 @@ func TestAccComputeInstance_networkTier(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_networkTier(instanceName), @@ -731,8 +697,6 @@ func TestAccComputeInstance_networkTier(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceAccessConfigHasNatIP(&instance), testAccCheckComputeInstanceHasAssignedNatIP, - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_networkTier(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -763,9 +727,9 @@ func TestAccComputeInstance_diskEncryption(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_disks_encryption(bootEncryptionKey, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10)), @@ -773,8 +737,6 @@ func TestAccComputeInstance_diskEncryption(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDiskEncryptionKey("google_compute_instance.foobar", &instance, bootEncryptionKeyHash, diskNameToEncryptionKey), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_disks_encryption(bootEncryptionKey, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10))), ), }, }, @@ -796,9 +758,9 @@ func TestAccComputeInstance_diskEncryptionRestart(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_disks_encryption_restart(bootEncryptionKey, diskNameToEncryptionKey, instanceName), @@ -814,8 +776,6 @@ func TestAccComputeInstance_diskEncryptionRestart(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDiskEncryptionKey("google_compute_instance.foobar", &instance, bootEncryptionKeyHash, diskNameToEncryptionKey), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_disks_encryption_restartUpdate(bootEncryptionKey, diskNameToEncryptionKey, 
instanceName)), ), }, }, @@ -845,22 +805,20 @@ func TestAccComputeInstance_kmsDiskEncryption(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@compute-system.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_disks_kms(bootKmsKeyName, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10)), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDiskKmsEncryptionKey("google_compute_instance.foobar", &instance, bootKmsKeyName, diskNameToEncryptionKey), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_disks_kms(bootKmsKeyName, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10))), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -886,8 +844,6 @@ func TestAccComputeInstance_rsaBootDiskEncryption(t *testing.T) { Config: testAccComputeInstance_rsaBootDiskEncryption(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_rsaBootDiskEncryption(context)), ), }, }, @@ -950,8 +906,6 @@ func TestAccComputeInstance_instanceEncryption(t *testing.T) { Config: testAccComputeInstance_instanceEncryption_SelfLinkServiceAccount(context_3), Check: resource.ComposeTestCheckFunc( 
testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_instanceEncryption_SelfLinkServiceAccount(context_3)), ), }, }, @@ -980,8 +934,6 @@ func TestAccComputeInstance_snapshot(t *testing.T) { Config: testAccComputeInstance_snapshot(context), //create from snapshot Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_snapshot(context)), ), }, }, @@ -1023,8 +975,6 @@ func TestAccComputeInstance_snapshotEncryption(t *testing.T) { Config: testAccComputeInstance_snapshotEncryption_RsaKey(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_snapshotEncryption_RsaKey(context)), ), }, }, @@ -1066,8 +1016,6 @@ func TestAccComputeInstance_imageEncryption(t *testing.T) { Config: testAccComputeInstance_imageEncryption_RsaKey(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_imageEncryption_RsaKey(context)), ), }, }, @@ -1093,8 +1041,6 @@ func TestAccComputeInstance_attachedDisk_RSAencryption(t *testing.T) { Config: testAccComputeInstance_attachedDisk_RSAencryption(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attachedDisk_RSAencryption(context)), ), }, }, @@ -1110,9 +1056,9 @@ func TestAccComputeInstance_resourcePolicyUpdate(t *testing.T) { 
var scheduleName2 = fmt.Sprintf("tf-tests-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_instanceSchedule(instanceName, scheduleName1), @@ -1147,8 +1093,6 @@ func TestAccComputeInstance_resourcePolicyUpdate(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeResourcePolicy(&instance, "", 0), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_removeResourcePolicy(instanceName, scheduleName1, scheduleName2)), ), }, }, @@ -1163,9 +1107,9 @@ func TestAccComputeInstance_attachedDisk(t *testing.T) { var diskName = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk(diskName, instanceName), @@ -1173,8 +1117,6 @@ func TestAccComputeInstance_attachedDisk(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attachedDisk(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1190,9 +1132,9 @@ func 
TestAccComputeInstance_attachedDisk_sourceUrl(t *testing.T) { var diskName = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk_sourceUrl(diskName, instanceName), @@ -1200,8 +1142,6 @@ func TestAccComputeInstance_attachedDisk_sourceUrl(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attachedDisk_sourceUrl(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1217,9 +1157,9 @@ func TestAccComputeInstance_attachedDisk_modeRo(t *testing.T) { var diskName = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk_modeRo(diskName, instanceName), @@ -1227,8 +1167,6 @@ func TestAccComputeInstance_attachedDisk_modeRo(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attachedDisk_modeRo(diskName, 
instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1291,9 +1229,9 @@ func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { var diskName2 = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk(diskName, instanceName), @@ -1329,8 +1267,6 @@ func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_updateAttachedDiskEncryptionKey(diskName, instanceName)), ), }, }, @@ -1345,9 +1281,9 @@ func TestAccComputeInstance_bootDisk_source(t *testing.T) { var diskName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_source(diskName, instanceName), @@ -1355,8 +1291,6 @@ func TestAccComputeInstance_bootDisk_source(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceBootDisk(&instance, diskName), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - 
testAccComputeInstance_bootDisk_source(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1372,9 +1306,9 @@ func TestAccComputeInstance_bootDisk_sourceUrl(t *testing.T) { var diskName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_sourceUrl(diskName, instanceName), @@ -1382,8 +1316,6 @@ func TestAccComputeInstance_bootDisk_sourceUrl(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceBootDisk(&instance, diskName), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_bootDisk_sourceUrl(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1399,9 +1331,9 @@ func TestAccComputeInstance_bootDisk_type(t *testing.T) { var diskType = "pd-ssd" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_type(instanceName, diskType), @@ -1409,8 +1341,6 @@ func TestAccComputeInstance_bootDisk_type(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceBootDiskType(t, instanceName, diskType), - acctest.GetTestMetadataForTgc("compute", 
"google_compute_instance.foobar", - testAccComputeInstance_bootDisk_type(instanceName, diskType)), ), }, }, @@ -1424,16 +1354,12 @@ func TestAccComputeInstance_bootDisk_mode(t *testing.T) { var diskMode = "READ_WRITE" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_mode(instanceName, diskMode), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_bootDisk_mode(instanceName, diskMode)), - ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), }, @@ -1493,9 +1419,9 @@ func TestAccComputeInstance_with375GbScratchDisk(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_with375GbScratchDisk(instanceName), @@ -1510,16 +1436,14 @@ func TestAccComputeInstance_with375GbScratchDisk(t *testing.T) { "interface": "SCSI", }, { - "interface": "NVME", + "interface": "NVME", "deviceName": "nvme-local-ssd", }, { - "interface": "SCSI", + "interface": "SCSI", "deviceName": "scsi-local-ssd", }, }), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_with375GbScratchDisk(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ 
-1537,9 +1461,9 @@ func TestAccComputeInstance_with18TbScratchDisk(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_with18TbScratchDisk(instanceName), @@ -1566,8 +1490,6 @@ func TestAccComputeInstance_with18TbScratchDisk(t *testing.T) { "interface": "NVME", }, }), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_with18TbScratchDisk(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1582,9 +1504,9 @@ func TestAccComputeInstance_forceNewAndChangeMetadata(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -1600,8 +1522,6 @@ func TestAccComputeInstance_forceNewAndChangeMetadata(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMetadata( &instance, "qux", "true"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_forceNewAndChangeMetadata(instanceName)), ), }, }, @@ -1615,9 +1535,9 @@ func TestAccComputeInstance_update(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) 
acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -1636,8 +1556,6 @@ func TestAccComputeInstance_update(t *testing.T) { testAccCheckComputeInstanceLabel(&instance, "only_me", "nothing_else"), testAccCheckComputeInstanceTag(&instance, "baz"), testAccCheckComputeInstanceAccessConfig(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_update(instanceName)), ), }, }, @@ -1651,9 +1569,9 @@ func TestAccComputeInstance_stopInstanceToUpdate(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ // Set fields that require stopping the instance { @@ -1679,8 +1597,6 @@ func TestAccComputeInstance_stopInstanceToUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_stopInstanceToUpdate3(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1695,9 +1611,9 @@ func TestAccComputeInstance_serviceAccount(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, 
resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_serviceAccount(instanceName), @@ -1710,8 +1626,6 @@ func TestAccComputeInstance_serviceAccount(t *testing.T) { "https://www.googleapis.com/auth/devstorage.read_only"), testAccCheckComputeInstanceServiceAccount(&instance, "https://www.googleapis.com/auth/userinfo.email"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_serviceAccount(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1736,8 +1650,6 @@ func TestAccComputeInstance_noServiceAccount(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceNoServiceAccount(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_noServiceAccount(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1762,8 +1674,6 @@ func TestAccComputeInstance_serviceAccountEmail_0scopes(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_serviceAccountEmail_0scopes(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1778,9 +1688,9 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - 
PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_serviceAccount_update0(instanceName), @@ -1819,8 +1729,6 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 3), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_serviceAccount_update3(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1835,9 +1743,9 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_serviceAccount_update01(instanceName), @@ -1866,8 +1774,6 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_serviceAccount_update01(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, 
[]string{"allow_stopping_for_update"}), @@ -1882,9 +1788,9 @@ func TestAccComputeInstance_scheduling(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -1899,8 +1805,6 @@ func TestAccComputeInstance_scheduling(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_schedulingUpdated(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1956,8 +1860,6 @@ func TestAccComputeInstance_schedulingTerminationTime(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_TerminationTimeDeleted(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1972,9 +1874,9 @@ func TestAccComputeInstance_advancedMachineFeatures(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { 
Config: testAccComputeInstance_advancedMachineFeatures(instanceName), @@ -1989,8 +1891,6 @@ func TestAccComputeInstance_advancedMachineFeatures(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_advancedMachineFeaturesUpdated(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -2044,8 +1944,6 @@ func TestAccComputeInstance_performanceMonitoringUnit(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ARCHITECTURAL"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_performanceMonitoringUnit(context_3)), ), }, }, @@ -2057,7 +1955,7 @@ func TestAccComputeInstance_enableUefiNetworking(t *testing.T) { var instance compute.Instance context_1 := map[string]interface{}{ - "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), "enable_uefi_networking": "true", } @@ -2072,8 +1970,6 @@ func TestAccComputeInstance_enableUefiNetworking(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "advanced_machine_features.0.enable_uefi_networking", "true"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_enableUefiNetworking(context_1)), ), }, computeInstanceImportStep("us-central1-a", context_1["instance_name"].(string), []string{}), @@ -2089,9 +1985,9 @@ func TestAccComputeInstance_soleTenantNodeAffinities(t *testing.T) { var groupName = 
fmt.Sprintf("tf-test-nodegroup-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_withoutNodeAffinities(instanceName, templateName, groupName), @@ -2107,17 +2003,12 @@ func TestAccComputeInstance_soleTenantNodeAffinities(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_soleTenantNodeAffinitiesReduced(instanceName, templateName, groupName), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_soleTenantNodeAffinitiesReduced(instanceName, templateName, groupName)), - ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, }) } - func TestAccComputeInstance_reservationAffinities(t *testing.T) { t.Parallel() @@ -2125,9 +2016,9 @@ func TestAccComputeInstance_reservationAffinities(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-resaffinity-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_reservationAffinity_nonSpecificReservationConfig(instanceName, "NO_RESERVATION"), @@ -2150,8 +2041,6 @@ func TestAccComputeInstance_reservationAffinities(t *testing.T) { Check: resource.ComposeTestCheckFunc( 
testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasReservationAffinity(&instance, "SPECIFIC_RESERVATION", instanceName), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_reservationAffinity_specificReservationConfig(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2230,8 +2119,6 @@ func TestAccComputeInstance_hostErrorTimeoutSecconds(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.host_error_timeout_seconds", "90"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_hostErrorTimeoutSeconds(context_1)), ), }, computeInstanceImportStep(context_1["zone"].(string), context_1["instance_name"].(string), []string{}), @@ -2247,9 +2134,9 @@ func TestAccComputeInstance_subnet_auto(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnet_auto(acctest.RandString(t, 10), instanceName), @@ -2257,8 +2144,6 @@ func TestAccComputeInstance_subnet_auto(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasSubnet(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_subnet_auto(acctest.RandString(t, 10), instanceName)), ), }, 
computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2273,9 +2158,9 @@ func TestAccComputeInstance_subnet_custom(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnet_custom(acctest.RandString(t, 10), instanceName), @@ -2283,8 +2168,6 @@ func TestAccComputeInstance_subnet_custom(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasSubnet(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_subnet_custom(acctest.RandString(t, 10), instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2304,9 +2187,9 @@ func TestAccComputeInstance_subnet_xpn(t *testing.T) { projectName := fmt.Sprintf("tf-test-xpn-%d", time.Now().Unix()) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnet_xpn(org, billingId, projectName, instanceName, acctest.RandString(t, 10)), @@ -2315,8 +2198,6 @@ func TestAccComputeInstance_subnet_xpn(t *testing.T) { t, "google_compute_instance.foobar", fmt.Sprintf("%s-service", projectName), &instance), testAccCheckComputeInstanceHasSubnet(&instance), - acctest.GetTestMetadataForTgc("compute", 
"google_compute_instance.foobar", - testAccComputeInstance_subnet_xpn(org, billingId, projectName, instanceName, acctest.RandString(t, 10))), ), }, }, @@ -2330,9 +2211,9 @@ func TestAccComputeInstance_networkIPAuto(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_networkIPAuto(acctest.RandString(t, 10), instanceName), @@ -2340,8 +2221,6 @@ func TestAccComputeInstance_networkIPAuto(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAnyNetworkIP(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_networkIPAuto(acctest.RandString(t, 10), instanceName)), ), }, }, @@ -2355,9 +2234,9 @@ func TestAccComputeInstance_network_ip_custom(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) var ipAddress = "10.0.200.200" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_network_ip_custom(acctest.RandString(t, 10), instanceName, ipAddress), @@ -2365,8 +2244,6 @@ func TestAccComputeInstance_network_ip_custom(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceHasNetworkIP(&instance, ipAddress), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_network_ip_custom(acctest.RandString(t, 10), instanceName, ipAddress)), ), }, }, @@ -2382,17 +2259,15 @@ func TestAccComputeInstance_private_image_family(t *testing.T) { var familyName = fmt.Sprintf("tf-testf-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_private_image_family(diskName, familyName, instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_private_image_family(diskName, familyName, instanceName)), ), }, }, @@ -2408,9 +2283,9 @@ func TestAccComputeInstance_networkPerformanceConfig(t *testing.T) { var imageName = fmt.Sprintf("tf-testf-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_networkPerformanceConfig(imageName, diskName, instanceName), @@ -2418,8 +2293,6 @@ func TestAccComputeInstance_networkPerformanceConfig(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkPerformanceConfig(&instance, 
"DEFAULT"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_networkPerformanceConfig(imageName, diskName, instanceName)), ), }, }, @@ -2433,17 +2306,15 @@ func TestAccComputeInstance_forceChangeMachineTypeManually(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceUpdateMachineType(t, "google_compute_instance.foobar"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic(instanceName)), ), ExpectNonEmptyPlan: true, }, @@ -2461,17 +2332,15 @@ func TestAccComputeInstance_multiNic(t *testing.T) { subnetworkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_multiNic(instanceName, networkName, subnetworkName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMultiNic(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - 
testAccComputeInstance_multiNic(instanceName, networkName, subnetworkName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2486,9 +2355,9 @@ func TestAccComputeInstance_nictype_update(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_nictype(instanceName, "GVNIC"), @@ -2502,8 +2371,6 @@ func TestAccComputeInstance_nictype_update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_nictype(instanceName, "VIRTIO_NET")), ), }, }, @@ -2517,17 +2384,15 @@ func TestAccComputeInstance_guestAccelerator(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_guestAccelerator(instanceName, 1), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasGuestAccelerator(&instance, "nvidia-tesla-t4", 1), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_guestAccelerator(instanceName, 1)), 
), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"metadata.baz", "metadata.foo"}), @@ -2543,17 +2408,15 @@ func TestAccComputeInstance_guestAcceleratorSkip(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_guestAccelerator(instanceName, 0), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLacksGuestAccelerator(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_guestAccelerator(instanceName, 0)), ), }, }, @@ -2568,9 +2431,9 @@ func TestAccComputeInstance_minCpuPlatform(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_minCpuPlatform(instanceName), @@ -2585,8 +2448,6 @@ func TestAccComputeInstance_minCpuPlatform(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMinCpuPlatform(&instance, ""), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_minCpuPlatform_remove(instanceName)), ), }, 
computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update"}), @@ -2601,9 +2462,9 @@ func TestAccComputeInstance_deletionProtectionExplicitFalse(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic_deletionProtectionFalse(instanceName), @@ -2611,8 +2472,6 @@ func TestAccComputeInstance_deletionProtectionExplicitFalse(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasConfiguredDeletionProtection(&instance, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic_deletionProtectionFalse(instanceName)), ), }, }, @@ -2626,9 +2485,9 @@ func TestAccComputeInstance_deletionProtectionExplicitTrueAndUpdateFalse(t *test var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic_deletionProtectionTrue(instanceName), @@ -2646,8 +2505,6 @@ func TestAccComputeInstance_deletionProtectionExplicitTrueAndUpdateFalse(t *test testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasConfiguredDeletionProtection(&instance, false), - 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic_deletionProtectionFalse(instanceName)), ), }, }, @@ -2661,17 +2518,15 @@ func TestAccComputeInstance_primaryAliasIpRange(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_primaryAliasIpRange(instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAliasIpRange(&instance, "", "/24"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_primaryAliasIpRange(instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{}), @@ -2688,9 +2543,9 @@ func TestAccComputeInstance_secondaryAliasIpRange(t *testing.T) { subnetName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_secondaryAliasIpRange(networkName, subnetName, instanceName), @@ -2705,8 +2560,6 @@ func TestAccComputeInstance_secondaryAliasIpRange(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceHasAliasIpRange(&instance, "", "10.0.1.0/24"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_secondaryAliasIpRangeUpdate(networkName, subnetName, instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), @@ -2751,8 +2604,6 @@ func TestAccComputeInstance_aliasIpRangeCommonAddresses(t *testing.T) { testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAliasIpRange(&instance, "inst-test-secondary", "172.16.1.0/24"), testAccCheckComputeInstanceHasAliasIpRange(&instance, "inst-test-tertiary", "10.1.3.0/24"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_secondaryAliasIpRangeUpdateWithCommonAddressDifferentRanges(networkName, subnetName, instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), @@ -2767,17 +2618,15 @@ func TestAccComputeInstance_hostname(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_hostname(instanceName), Check: 
resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_compute_instance.foobar", "hostname"), testAccCheckComputeInstanceLacksShieldedVmConfig(&instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_hostname(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2792,9 +2641,9 @@ func TestAccComputeInstance_shieldedVmConfig(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_shieldedVmConfig(instanceName, true, true, true), @@ -2809,8 +2658,6 @@ func TestAccComputeInstance_shieldedVmConfig(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasShieldedVmConfig(&instance, true, true, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_shieldedVmConfig(instanceName, true, true, false)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -2830,9 +2677,9 @@ func TestAccComputeInstanceConfidentialInstanceConfigMain(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstanceConfidentialInstanceConfigEnable(instanceName, "SEV"), @@ -2863,8 +2710,6 @@ func TestAccComputeInstanceConfidentialInstanceConfigMain(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar5", &instance), testAccCheckComputeInstanceHasConfidentialInstanceConfig(&instance, false, "TDX"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar5", - testAccComputeInstanceConfidentialInstanceConfigEnableTdx(instanceName, "TDX")), ), }, }, @@ -2908,10 +2753,6 @@ func TestAccComputeInstance_confidentialHyperDiskBootDisk(t *testing.T) { computeInstanceImportStep(context_1["zone"].(string), context_1["instance_name"].(string), []string{"allow_stopping_for_update"}), { Config: testAccComputeInstanceConfidentialHyperDiskBootDisk(context_2), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstanceConfidentialHyperDiskBootDisk(context_2)), - ), }, computeInstanceImportStep(context_2["zone"].(string), context_2["instance_name"].(string), []string{"allow_stopping_for_update"}), }, @@ -2922,9 +2763,9 @@ func TestAccComputeInstance_hyperdiskBootDisk_provisioned_iops_throughput(t *tes t.Parallel() context := map[string]interface{}{ - "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), - "zone": "us-central1-a", - "provisioned_iops": 12000, + "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "zone": "us-central1-a", + "provisioned_iops": 12000, "provisioned_throughput": 200, } @@ -2935,10 +2776,6 @@ func TestAccComputeInstance_hyperdiskBootDisk_provisioned_iops_throughput(t *tes Steps: []resource.TestStep{ { Config: testAccComputeInstanceHyperDiskBootDiskProvisionedIopsThroughput(context), - Check: resource.ComposeTestCheckFunc( - 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstanceHyperDiskBootDiskProvisionedIopsThroughput(context)), - ), }, computeInstanceImportStep(context["zone"].(string), context["instance_name"].(string), []string{"allow_stopping_for_update"}), }, @@ -2951,9 +2788,9 @@ func TestAccComputeInstance_enableDisplay(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_enableDisplay(instanceName), @@ -2965,10 +2802,6 @@ func TestAccComputeInstance_enableDisplay(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_enableDisplay(instanceName), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_enableDisplay(instanceName)), - ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, @@ -3009,8 +2842,6 @@ func TestAccComputeInstance_desiredStatusTerminatedOnCreation(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, context_2["desired_status"].(string)), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_desiredStatusOnCreation(context_2)), ), }, }, @@ -3051,8 +2882,6 @@ func TestAccComputeInstance_desiredStatusSuspendedOnCreation(t *testing.T) { Check: resource.ComposeTestCheckFunc( 
testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, context_2["desired_status"].(string)), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_desiredStatusOnCreation(context_2)), ), }, }, @@ -3066,9 +2895,9 @@ func TestAccComputeInstance_desiredStatusUpdateBasic(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3107,8 +2936,6 @@ func TestAccComputeInstance_desiredStatusUpdateBasic(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-medium", "RUNNING", false)), ), }, }, @@ -3122,9 +2949,9 @@ func TestAccComputeInstance_desiredStatusTerminatedUpdateFields(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3151,8 +2978,6 @@ func 
TestAccComputeInstance_desiredStatusTerminatedUpdateFields(t *testing.T) { testAccCheckComputeInstanceLabel(&instance, "only_me", "nothing_else"), testAccCheckComputeInstanceTag(&instance, "baz"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_desiredStatusTerminatedUpdate(instanceName)), ), }, }, @@ -3166,9 +2991,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_allowStoppingForU var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3185,8 +3010,6 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_allowStoppingForU t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "RUNNING", true)), ), }, }, @@ -3202,9 +3025,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusNotSet_notAllowStoppingFo var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3212,8 +3035,6 @@ func TestAccComputeInstance_updateRunning_desiredStatusNotSet_notAllowStoppingFo testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic2(instanceName)), ), }, { @@ -3231,9 +3052,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_notAllowStoppingF var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3241,8 +3062,6 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_notAllowStoppingF testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_basic2(instanceName)), ), }, { @@ -3260,9 +3079,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_allowStoppingF var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: 
[]resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3279,8 +3098,6 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_allowStoppingF t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "TERMINATED", true)), ), }, }, @@ -3294,9 +3111,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_notAllowStoppi var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3313,8 +3130,6 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_notAllowStoppi t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "TERMINATED", false)), ), }, }, @@ -3328,9 +3143,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_allowStoppingFo var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3355,8 +3170,6 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_allowStoppingFo t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "", true)), ), }, }, @@ -3370,9 +3183,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusTerminated_allowStoppi var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3397,8 +3210,6 @@ func TestAccComputeInstance_updateTerminated_desiredStatusTerminated_allowStoppi t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "TERMINATED", true)), ), }, }, @@ -3412,9 +3223,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_notAllowStoppin var instanceName = fmt.Sprintf("tf-test-%s", 
acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3439,8 +3250,6 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_notAllowStoppin t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "", false)), ), }, }, @@ -3454,9 +3263,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusTerminated_notAllowSto var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3494,9 +3303,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_allowStoppingF var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3521,8 +3330,6 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_allowStoppingF t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "RUNNING", true)), ), }, }, @@ -3536,9 +3343,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_notAllowStoppi var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3563,8 +3370,6 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_notAllowStoppi t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "RUNNING", false)), ), }, }, @@ -3611,8 +3416,6 @@ func TestAccComputeInstance_desiredStatus_suspended(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), // this mimics resume method behavior - 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_desiredStatus_suspended(context_1)), ), }, }, @@ -3625,16 +3428,12 @@ func TestAccComputeInstance_resourcePolicyCollocate(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_resourcePolicyCollocate(instanceName, acctest.RandString(t, 10)), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_resourcePolicyCollocate(instanceName, acctest.RandString(t, 10))), - ), }, computeInstanceImportStep("us-east4-b", instanceName, []string{"allow_stopping_for_update"}), }, @@ -3648,9 +3447,9 @@ func TestAccComputeInstance_resourcePolicySpread(t *testing.T) { var instance compute.Instance acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_resourcePolicySpread(instanceName, acctest.RandString(t, 10)), @@ -3659,8 +3458,6 @@ func TestAccComputeInstance_resourcePolicySpread(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), testAccCheckComputeInstanceHasAvailabilityDomain(&instance, 3), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - 
testAccComputeInstance_resourcePolicySpread(instanceName, acctest.RandString(t, 10))), ), }, computeInstanceImportStep("us-east4-b", instanceName, []string{"allow_stopping_for_update"}), @@ -3674,9 +3471,9 @@ func TestAccComputeInstance_subnetworkUpdate(t *testing.T) { suffix := fmt.Sprintf("%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnetworkUpdate(suffix, instanceName), @@ -3688,10 +3485,6 @@ func TestAccComputeInstance_subnetworkUpdate(t *testing.T) { computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update", "network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), { Config: testAccComputeInstance_subnetworkUpdate(suffix, instanceName), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_subnetworkUpdate(suffix, instanceName)), - ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update", "network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), }, @@ -3703,12 +3496,12 @@ func TestAccComputeInstance_subnetworkProjectMustMatchError(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) suffix := fmt.Sprintf("%s", acctest.RandString(t, 10)) 
acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeInstance_subnetworkProjectExpectError(suffix, instanceName), + Config: testAccComputeInstance_subnetworkProjectExpectError(suffix, instanceName), ExpectError: regexp.MustCompile("must match subnetwork_project"), }, }, @@ -3748,8 +3541,6 @@ func TestAccComputeInstance_networkIpUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkIP(&instance, "10.3.0.5"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_networkIpUpdateWithComputeAddress(suffix, instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{}), @@ -3762,40 +3553,33 @@ func TestAccComputeInstance_queueCount(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_queueCountSet(instanceName), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_queueCountSet(instanceName)), - ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update"}), }, }) } - func TestAccComputeInstance_spotVM(t 
*testing.T) { t.Parallel() var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_spotVM(instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_spotVM(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3803,16 +3587,15 @@ func TestAccComputeInstance_spotVM(t *testing.T) { }) } - func TestAccComputeInstance_spotVM_update(t *testing.T) { t.Parallel() var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -3827,8 +3610,6 @@ func TestAccComputeInstance_spotVM_update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_spotVM(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3847,9 +3628,9 @@ func 
TestAccComputeInstance_maxRunDuration_update(t *testing.T) { expectedMaxRunDuration.Seconds = 60 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_standardVM_maxRunDuration(instanceName, "STOP"), @@ -3862,8 +3643,6 @@ func TestAccComputeInstance_maxRunDuration_update(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, "STOP"), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_standardVM_maxRunDurationUpdated(instanceName, "STOP")), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -3883,9 +3662,9 @@ func TestAccComputeInstance_standardVM_maxRunDuration_stopTerminationAction(t *t var instanceTerminationAction = "STOP" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction), @@ -3894,8 +3673,6 @@ func TestAccComputeInstance_standardVM_maxRunDuration_stopTerminationAction(t *t t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), - 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3915,9 +3692,9 @@ func TestAccComputeInstance_localSsdVM_maxRunDuration_stopTerminationAction(t *t var instanceTerminationAction = "STOP" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_localSsdVM_maxRunDuration(instanceName, instanceTerminationAction), @@ -3926,8 +3703,6 @@ func TestAccComputeInstance_localSsdVM_maxRunDuration_stopTerminationAction(t *t t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_localSsdVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3947,9 +3722,9 @@ func TestAccComputeInstance_spotVM_maxRunDuration_deleteTerminationAction(t *tes var instanceTerminationAction = "DELETE" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_spotVM_maxRunDuration(instanceName, 
instanceTerminationAction), @@ -3958,8 +3733,6 @@ func TestAccComputeInstance_spotVM_maxRunDuration_deleteTerminationAction(t *tes t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_spotVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3979,9 +3752,9 @@ func TestAccComputeInstance_standardVM_maxRunDuration_deleteTerminationAction(t var instanceTerminationAction = "DELETE" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction), @@ -3990,8 +3763,6 @@ func TestAccComputeInstance_standardVM_maxRunDuration_deleteTerminationAction(t t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -4009,9 +3780,9 @@ func TestAccComputeInstance_spotVM_maxRunDuration_update(t *testing.T) { expectedMaxRunDuration.Nanos = 123 expectedMaxRunDuration.Seconds = 60 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) 
}, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -4027,8 +3798,6 @@ func TestAccComputeInstance_spotVM_maxRunDuration_update(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_spotVM_maxRunDuration(instanceName, "DELETE")), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -4047,9 +3816,9 @@ func TestAccComputeInstance_localSsdRecoveryTimeout(t *testing.T) { expectedLocalSsdRecoveryTimeout.Seconds = 3600 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_localSsdRecoveryTimeout(instanceName), @@ -4057,8 +3826,6 @@ func TestAccComputeInstance_localSsdRecoveryTimeout(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLocalSsdRecoveryTimeout(&instance, expectedLocalSsdRecoveryTimeout), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_localSsdRecoveryTimeout(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -4076,9 +3843,9 @@ func TestAccComputeInstance_localSsdRecoveryTimeout_update(t *testing.T) { 
expectedLocalSsdRecoveryTimeout.Nanos = 0 expectedLocalSsdRecoveryTimeout.Seconds = 3600 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -4094,8 +3861,6 @@ func TestAccComputeInstance_localSsdRecoveryTimeout_update(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLocalSsdRecoveryTimeout(&instance, expectedLocalSsdRecoveryTimeout), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_localSsdRecoveryTimeout(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -4125,8 +3890,6 @@ func TestAccComputeInstance_partnerMetadata(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstancePartnerMetadata(&instance, expectedPartnerMetadata), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_partnerMetadata(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{fmt.Sprintf("partner_metadata.%s", namespace)}), @@ -4163,8 +3926,6 @@ func TestAccComputeInstance_partnerMetadata_update(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstancePartnerMetadata(&instance, expectedPartnerMetadata), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_partnerMetadata(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{fmt.Sprintf("partner_metadata.%s", 
namespace)}), @@ -4201,8 +3962,6 @@ func TestAccComputeInstance_partnerMetadata_deletePartnerMetadata(t *testing.T) Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_partnerMetadata_empty(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{fmt.Sprintf("partner_metadata.%s", namespace)}), @@ -4218,9 +3977,9 @@ func TestAccComputeInstance_metadataStartupScript_update(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_metadataStartupScript(instanceName, "e2-medium", "abc"), @@ -4234,8 +3993,6 @@ func TestAccComputeInstance_metadataStartupScript_update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_metadataStartupScript(instanceName, "e2-standard-4", "xyz")), ), }, }, @@ -4273,8 +4030,6 @@ func TestAccComputeInstance_metadataStartupScript_gracefulSwitch(t *testing.T) { &instance, "foo", "abc"), testAccCheckComputeInstanceMetadata( &instance, "startup-script", "echo hi > /test.txt"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_metadataStartupScript_gracefulSwitch(instanceName, "e2-medium", "abc")), ), }, }, @@ -4290,9 +4045,9 @@ func TestAccComputeInstance_regionBootDisk(t *testing.T) { var suffix = 
acctest.RandString(t, 10) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_regionBootDisk(instanceName, diskName, suffix), @@ -4300,10 +4055,7 @@ func TestAccComputeInstance_regionBootDisk(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.regional_vm_instance", &instance), testAccCheckComputeInstanceBootDisk(&instance, diskName), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.regional_vm_instance", - testAccComputeInstance_regionBootDisk(instanceName, diskName, suffix)), ), - }, }, }) @@ -4336,8 +4088,6 @@ func TestAccComputeInstance_creationOnlyAttributionLabel(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLabel(&instance, "user_label", "bar"), testAccCheckComputeInstanceAttributionLabel(&instance, true), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attributionLabelUpdate(instanceName, "true", "CREATION_ONLY")), ), }, }, @@ -4373,8 +4123,6 @@ func TestAccComputeInstance_creationOnlyAttributionLabelConfiguredOnUpdate(t *te t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLabel(&instance, "user_label", "bar"), testAccCheckComputeInstanceAttributionLabel(&instance, false), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attributionLabelUpdate(instanceName, "true", "CREATION_ONLY")), ), }, }, @@ -4410,8 +4158,6 @@ func TestAccComputeInstance_proactiveAttributionLabel(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLabel(&instance, "user_label", "bar"), 
testAccCheckComputeInstanceAttributionLabel(&instance, true), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_attributionLabelUpdate(instanceName, "true", "PROACTIVE")), ), }, }, @@ -4455,8 +4201,6 @@ func TestAccComputeInstance_autoDeleteUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.auto_delete", "true"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_autoDeleteUpdate(context_1)), ), }, }, @@ -4515,8 +4259,6 @@ func TestAccComputeInstance_keyRevocationActionType(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "key_revocation_action_type", "STOP"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_keyRevocationActionType(context_2)), ), }, }, @@ -4622,8 +4364,6 @@ func TestAccComputeInstance_GracefulShutdownWithResetUpdate(t *testing.T) { testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "allow_stopping_for_update", "true"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.graceful_shutdown.0.max_duration.0.seconds", "100"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_GracefulShutdownUpdate(acceptableByApi_3)), ), }, { @@ -4727,8 +4467,6 @@ func TestAccComputeInstance_GracefulShutdownWithoutResetUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", 
"scheduling.0.graceful_shutdown.0.max_duration.0.seconds", "100"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_GracefulShutdownUpdate(acceptableByApi_3)), ), }, { @@ -4901,7 +4639,7 @@ func testAccComputeInstance_nic_securityPolicyCreateWithTwoNicsAndTwoAccessConfi ImportStateVerify: true, }, { - Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoNicsAndTwoAccessConfigsUpdateTwoPoliciesRemoveAccessConfig(suffix, policyName, policyName2, instanceName, "google_compute_region_security_policy.policyforinstance.self_link", "google_compute_region_security_policy.policyforinstance.self_link"), + Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoNicsAndTwoAccessConfigsUpdateTwoPoliciesRemoveAccessConfig(suffix, policyName, policyName2, instanceName, "google_compute_region_security_policy.policyforinstance.self_link", "google_compute_region_security_policy.policyforinstance.self_link"), ExpectError: regexp.MustCompile(errorDeleteAccessConfigWithSecPolicy), }, { @@ -5007,7 +4745,7 @@ func testAccComputeInstance_nic_securityPolicyCreateWithAccessConfigUpdateAccess ImportStateVerify: true, }, { - Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoAccessConfigsUpdateAccessConfig(suffix, policyName, instanceName), + Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoAccessConfigsUpdateAccessConfig(suffix, policyName, instanceName), ExpectError: regexp.MustCompile(errorDeleteAccessConfigWithSecPolicy), }, { @@ -5121,7 +4859,7 @@ func testAccCheckComputeInstanceUpdateMachineType(t *testing.T, n string) resour func TestAccComputeInstance_NetworkAttachment(t *testing.T) { t.Parallel() suffix := fmt.Sprintf("%s", acctest.RandString(t, 10)) - envRegion := envvar.GetTestRegionFromEnv() + envRegion := envvar.GetTestRegionFromEnv() var instance compute.Instance {{ if eq $.TargetVersionName `ga` }} @@ -5138,7 +4876,7 @@ func TestAccComputeInstance_NetworkAttachment(t *testing.T) { 
context := map[string]interface{}{ "suffix": (acctest.RandString(t, 10)), "network_attachment_name": testNetworkAttachmentName, - "region": envRegion, + "region": envRegion, } acctest.VcrTest(t, resource.TestCase{ @@ -5152,8 +4890,6 @@ func TestAccComputeInstance_NetworkAttachment(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkAttachment(&instance, fmt.Sprintf("https://www.googleapis.com/compute/%s/%s", providerVersion, fullFormNetworkAttachmentName)), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_networkAttachment(context)), ), }, }, @@ -5184,10 +4920,6 @@ func TestAccComputeInstance_NetworkAttachmentUpdate(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_networkAttachmentUpdate(networkAttachmentSelflink1, envRegion, suffix), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_networkAttachmentUpdate(networkAttachmentSelflink1, envRegion, suffix)), - ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, @@ -5215,10 +4947,6 @@ func TestAccComputeInstance_NicStackTypeUpdate(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_nicStackTypeUpdate(suffix, envRegion, "IPV4_ONLY", instanceName), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_nicStackTypeUpdate(suffix, envRegion, "IPV4_ONLY", instanceName)), - ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, @@ -5240,10 +4968,6 @@ func TestAccComputeInstance_NicStackType_IPV6(t *testing.T) { Steps: 
[]resource.TestStep{ { Config: testAccComputeInstance_nicStackTypeUpdate_ipv6(context), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_nicStackTypeUpdate_ipv6(context)), - ), }, }, }) @@ -5270,15 +4994,12 @@ func TestAccComputeInstance_guestOsFeatures(t *testing.T) { resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.guest_os_features.1", "VIRTIO_SCSI_MULTIQUEUE"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.guest_os_features.2", "GVNIC"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.guest_os_features.3", "IDPF"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_guestOsFeatures(context_1)), ), }, }, }) } - func testAccCheckComputeInstanceDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -5405,15 +5126,15 @@ func testAccCheckComputeInstanceIpv6AccessConfigHasExternalIPv6(instance *comput } func testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(instance *compute.Instance) resource.TestCheckFunc { - return func(s *terraform.State) error { - for _, i := range instance.NetworkInterfaces { - if i.Ipv6Address == "" { - return fmt.Errorf("no internal IPv6 address") - } - } + return func(s *terraform.State) error { + for _, i := range instance.NetworkInterfaces { + if i.Ipv6Address == "" { + return fmt.Errorf("no internal IPv6 address") + } + } - return nil - } + return nil + } } func testAccCheckComputeInstanceAccessConfigHasPTR(instance *compute.Instance) resource.TestCheckFunc { @@ -5455,7 +5176,7 @@ func testAccCheckComputeInstanceMaxRunDuration(instance *compute.Instance, insta } if !reflect.DeepEqual(*instance.Scheduling.MaxRunDuration, instanceMaxRunDurationWant) { - return fmt.Errorf("got the wrong instance max run duration 
action: have: %#v; want: %#v",instance.Scheduling.MaxRunDuration, instanceMaxRunDurationWant) + return fmt.Errorf("got the wrong instance max run duration action: have: %#v; want: %#v", instance.Scheduling.MaxRunDuration, instanceMaxRunDurationWant) } return nil @@ -5489,7 +5210,7 @@ func testAccCheckComputeInstanceLocalSsdRecoveryTimeout(instance *compute.Instan } if !reflect.DeepEqual(*instance.Scheduling.LocalSsdRecoveryTimeout, instanceLocalSsdRecoveryTiemoutWant) { - return fmt.Errorf("got the wrong instance local ssd recovery timeout action: have: %#v; want: %#v",instance.Scheduling.LocalSsdRecoveryTimeout, instanceLocalSsdRecoveryTiemoutWant) + return fmt.Errorf("got the wrong instance local ssd recovery timeout action: have: %#v; want: %#v", instance.Scheduling.LocalSsdRecoveryTimeout, instanceLocalSsdRecoveryTiemoutWant) } return nil @@ -5536,7 +5257,7 @@ func testAccCheckComputeInstanceTerminationAction(instance *compute.Instance, in } if instance.Scheduling.InstanceTerminationAction != instanceTerminationActionWant { - return fmt.Errorf("got the wrong instance termniation action: have: %s; want: %s",instance.Scheduling.InstanceTerminationAction, instanceTerminationActionWant) + return fmt.Errorf("got the wrong instance termniation action: have: %s; want: %s", instance.Scheduling.InstanceTerminationAction, instanceTerminationActionWant) } return nil @@ -5632,7 +5353,7 @@ func testAccCheckComputeInstanceScratchDisk(instance *compute.Instance, interfac if deviceName, ok := interfaces[i]["deviceName"]; ok { if disk.DeviceName != deviceName { return fmt.Errorf("Mismatched device name on scratch disk #%d, expected: %q, found: %q", - i, deviceName, disk.DeviceName) + i, deviceName, disk.DeviceName) } } @@ -6908,7 +6629,7 @@ resource "google_compute_instance" "foobar" { } func testAccComputeInstance_internalIpv6(ip, instance string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` data "google_compute_image" "my_image" { family = "debian-11" project = 
"debian-cloud" @@ -11182,7 +10903,6 @@ resource "google_compute_instance" "foobar" { `, instance, instanceTerminationAction) } - func testAccComputeInstance_spotVM_maxRunDuration(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -12424,11 +12144,7 @@ func TestAccComputeInstance_bootDisk_storagePoolSpecified(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv()), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv())), - ), + Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv()), }, { ResourceName: "google_compute_instance.foobar", @@ -12450,11 +12166,7 @@ func TestAccComputeInstance_bootDisk_storagePoolSpecified_nameOnly(t *testing.T) ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv()), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv())), - ), + Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv()), }, { ResourceName: "google_compute_instance.foobar", @@ -12496,40 +12208,38 @@ resource "google_compute_instance" "foobar" { } func 
TestAccComputeInstance_bootAndAttachedDisk_interface(t *testing.T) { - t.Parallel() - - instanceName1 := fmt.Sprintf("tf-test-vm1-%s", acctest.RandString(t, 10)) - diskName1 := fmt.Sprintf("tf-test-disk1-%s", acctest.RandString(t, 10)) - instanceName2 := fmt.Sprintf("tf-test-vm2-%s", acctest.RandString(t, 10)) - diskName2 := fmt.Sprintf("tf-test-disk2-%s", acctest.RandString(t, 10)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName1, diskName1, envvar.GetTestZoneFromEnv(), "c3-standard-22", "NVME", false), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "NVME"), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "c3-standard-22"), - ), - }, - //computeInstanceImportStep("us-central1-a", instanceName1, []string{"desired_status","allow_stopping_for_update"}), - { - Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName2, diskName2, envvar.GetTestZoneFromEnv(), "n2-standard-8", "SCSI", true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "SCSI"), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "n2-standard-8"), - acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", - testAccComputeInstance_bootAndAttachedDisk_interface(instanceName2, diskName2, envvar.GetTestZoneFromEnv(), "n2-standard-8", "SCSI", true)), - ), - }, - //computeInstanceImportStep("us-central1-a", instanceName2, []string{"desired_status","allow_stopping_for_update"}), - }, - }) + t.Parallel() + + instanceName1 := fmt.Sprintf("tf-test-vm1-%s", acctest.RandString(t, 10)) + diskName1 := fmt.Sprintf("tf-test-disk1-%s", 
acctest.RandString(t, 10)) + instanceName2 := fmt.Sprintf("tf-test-vm2-%s", acctest.RandString(t, 10)) + diskName2 := fmt.Sprintf("tf-test-disk2-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName1, diskName1, envvar.GetTestZoneFromEnv(), "c3-standard-22", "NVME", false), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "NVME"), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "c3-standard-22"), + ), + }, + //computeInstanceImportStep("us-central1-a", instanceName1, []string{"desired_status","allow_stopping_for_update"}), + { + Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName2, diskName2, envvar.GetTestZoneFromEnv(), "n2-standard-8", "SCSI", true), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "SCSI"), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "n2-standard-8"), + ), + }, + //computeInstanceImportStep("us-central1-a", instanceName2, []string{"desired_status","allow_stopping_for_update"}), + }, + }) } func testAccComputeInstance_bootAndAttachedDisk_interface(instanceName, diskName, zone, machineType, bootDiskInterface string, allowStoppingForUpdate bool) string { - return fmt.Sprintf(` + return fmt.Sprintf(` data "google_compute_image" "my_image" { family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" @@ -13183,4 +12893,4 @@ resource "google_compute_instance" "foobar" { data "google_compute_default_service_account" "default" { } `, context) -} \ No newline at end of file +} diff --git 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go index 1c9b854f2cf8..f64fb0c36fa6 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go @@ -63,8 +63,6 @@ func TestAccProject_create(t *testing.T) { Config: testAccProject(pid, org), Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectExists("google_project.acceptance", pid), - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject(pid, org)), ), }, }, @@ -112,8 +110,6 @@ func TestAccProject_billing(t *testing.T) { Config: testAccProject(pid, org), Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectHasBillingAccount(t, "google_project.acceptance", pid, ""), - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject(pid, org)), ), }, }, @@ -163,8 +159,6 @@ func TestAccProject_labels(t *testing.T) { Config: testAccProject_labels(pid, org, "label", "label-value"), Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectHasLabels(t, "google_project.acceptance", pid, map[string]string{"label": "label-value"}), - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject_labels(pid, org, "label", "label-value")), ), }, }, @@ -183,10 +177,6 @@ func TestAccProject_deleteDefaultNetwork(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccProject_deleteDefaultNetwork(pid, org, billingId), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject_deleteDefaultNetwork(pid, org, billingId)), - ), }, }, }) @@ -204,10 +194,6 @@ func TestAccProject_parentFolder(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccProject_parentFolder(pid, 
folderDisplayName, org), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject_parentFolder(pid, folderDisplayName, org)), - ), }, }, }) @@ -243,10 +229,6 @@ func TestAccProject_migrateParent(t *testing.T) { }, { Config: testAccProject_migrateParentFolder(pid, folderDisplayName, org), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject_migrateParentFolder(pid, folderDisplayName, org)), - ), }, { ResourceName: "google_project.acceptance", @@ -298,10 +280,6 @@ func TestAccProject_tags(t *testing.T) { }, { Config: testAccProject_tagsAllowDestroy(context), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject_tagsAllowDestroy(context)), - ), }, }, }) @@ -449,10 +427,6 @@ func TestAccProject_noAllowDestroy(t *testing.T) { }, { Config: testAccProject(pid, org), - Check: resource.ComposeTestCheckFunc( - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject(pid, org)), - ), }, }, }) @@ -480,10 +454,7 @@ func TestAccProject_abandon(t *testing.T) { Config: testAccProject_abandon(pid, org), Destroy: true, Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleProjectExists("google_project.acceptance", pid), - acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", - testAccProject_abandon(pid, org)), - ), + testAccCheckGoogleProjectExists("google_project.acceptance", pid)), }, }, }) From 7e07165ffd021cab1c174a0c0c29e08ea5f41b23 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 10:47:00 -0700 Subject: [PATCH 175/884] Update data fusion instance version in tests (#14010) --- .../resource_data_fusion_instance_test.go | 60 +------------------ 1 file changed, 2 insertions(+), 58 deletions(-) diff --git 
a/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go b/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go index 9b5eee0700e7..503b7c96a820 100644 --- a/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go +++ b/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go @@ -47,7 +47,7 @@ resource "google_data_fusion_instance" "foobar" { region = "us-central1" type = "BASIC" # See supported versions here https://cloud.google.com/data-fusion/docs/support/version-support-policy - version = "6.9.1" + version = "6.10.0" # Mark for testing to avoid service networking connection usage that is not cleaned up options = { prober_test_run = "true" @@ -73,7 +73,7 @@ resource "google_data_fusion_instance" "foobar" { label1 = "value1" label2 = "value2" } - version = "6.9.2" + version = "6.10.1" accelerators { accelerator_type = "CCAI_INSIGHTS" @@ -154,62 +154,6 @@ resource "google_data_fusion_instance" "foobar" { `, instanceName) } -func TestAccDataFusionInstanceVersion_dataFusionInstanceUpdate(t *testing.T) { - t.Skip("https://github.com/hashicorp/terraform-provider-google/issues/20574") - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "version": "6.9.1", - } - - contextUpdate := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "version": "6.9.2", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataFusionInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataFusionInstanceVersion_dataFusionInstanceUpdate(context), - }, - { - ResourceName: "google_data_fusion_instance.basic_instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region"}, - }, - { - Config: 
testAccDataFusionInstanceVersion_dataFusionInstanceUpdate(contextUpdate), - }, - { - ResourceName: "google_data_fusion_instance.basic_instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region"}, - }, - }, - }) -} - -func testAccDataFusionInstanceVersion_dataFusionInstanceUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_data_fusion_instance" "basic_instance" { - name = "tf-test-my-instance%{random_suffix}" - region = "us-central1" - type = "BASIC" - # Mark for testing to avoid service networking connection usage that is not cleaned up - options = { - prober_test_run = "true" - } - version = "%{version}" -} -`, context) -} - func TestAccDatafusionInstance_tags(t *testing.T) { t.Parallel() From 28072a23d09affa03c3de4973970a0c5eff6bbe6 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 11:05:56 -0700 Subject: [PATCH 176/884] remove TestAccComputeHealthCheck_tcpAndSsl_shouldFail (#14009) --- ...resource_compute_health_check_test.go.tmpl | 40 ------------------ ...e_compute_region_health_check_test.go.tmpl | 41 ------------------- 2 files changed, 81 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl index 34fc29d8d96a..f53433d9e468 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl @@ -192,26 +192,6 @@ func TestAccComputeHealthCheck_typeTransition(t *testing.T) { }) } -func TestAccComputeHealthCheck_tcpAndSsl_shouldFail(t *testing.T) { - // No HTTP interactions, is a unit test - acctest.SkipIfVcr(t) - t.Parallel() - - hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: 
func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeHealthCheckDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeHealthCheck_tcpAndSsl_shouldFail(hckName), - ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,grpc_tls_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), - }, - }, - }) -} - {{ if ne $.TargetVersionName `ga` -}} func TestAccComputeHealthCheck_logConfigDisabled(t *testing.T) { t.Parallel() @@ -435,26 +415,6 @@ resource "google_compute_health_check" "foobar" { `, hckName) } -func testAccComputeHealthCheck_tcpAndSsl_shouldFail(hckName string) string { - return fmt.Sprintf(` -resource "google_compute_health_check" "foobar" { - check_interval_sec = 3 - description = "Resource created for Terraform acceptance testing" - healthy_threshold = 3 - name = "health-test-%s" - timeout_sec = 2 - unhealthy_threshold = 3 - - tcp_health_check { - port = 443 - } - ssl_health_check { - port = 443 - } -} -`, hckName) -} - {{ if ne $.TargetVersionName `ga` -}} func testAccComputeHealthCheck_logConfigDisabled(hckName string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl index 71b5924b894f..5e9a2d0629af 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl @@ -209,27 +209,6 @@ func TestAccComputeRegionHealthCheck_typeTransition(t *testing.T) { }) } -func TestAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(t *testing.T) { - // This is essentially a unit test, no interactions - 
acctest.SkipIfVcr(t) - t.Parallel() - - hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionHealthCheckDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(hckName), - ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,grpc_tls_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), - - }, - }, - }) -} - func TestAccComputeRegionHealthCheck_logConfigDisabled(t *testing.T) { t.Parallel() @@ -468,23 +447,3 @@ resource "google_compute_region_health_check" "foobar" { } `, hckName) } - -func testAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(hckName string) string { - return fmt.Sprintf(` -resource "google_compute_region_health_check" "foobar" { - check_interval_sec = 3 - description = "Resource created for Terraform acceptance testing" - healthy_threshold = 3 - name = "health-test-%s" - timeout_sec = 2 - unhealthy_threshold = 3 - - tcp_health_check { - port = 443 - } - ssl_health_check { - port = 443 - } -} -`, hckName) -} From 47547a82022c35f1cbcaf0eab77a5ee561a7fcd3 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 11:06:02 -0700 Subject: [PATCH 177/884] Update README.md (#13813) --- .ci/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ci/README.md b/.ci/README.md index 08160b06ca23..ad5fd3dd5276 100644 --- a/.ci/README.md +++ b/.ci/README.md @@ -22,7 +22,7 @@ The sync branches allow downstream generation for each downstream to wait until Run the following command to verify what commits the sync branches are pointing to: ``` -`git fetch origin && git rev-parse 
origin/tpg-sync origin/tpgb-sync origin/tf-oics-sync origin/tgc-sync` +git fetch origin && git rev-parse origin/tpg-sync origin/tpgb-sync origin/tf-oics-sync origin/tgc-sync ``` ### Transient GitHub failures From b011c5e89cc83846229884ce9226c81cb827df39 Mon Sep 17 00:00:00 2001 From: anthonyrtong Date: Mon, 19 May 2025 12:19:20 -0700 Subject: [PATCH 178/884] remove MANAGEMENT_AUTOMATIC from ACM tests (#14032) --- .../gkehub/resource_gke_hub_feature_membership_test.go.tmpl | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl index e839a1825884..2cea9bd3eff9 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl @@ -209,7 +209,6 @@ resource "google_gke_hub_feature_membership" "feature_member_1" { } } } - management = "MANAGEMENT_AUTOMATIC" } } From a6659b0aa2933ef7eaa33a2d4bc5c942cb15be93 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 19 May 2025 12:33:38 -0700 Subject: [PATCH 179/884] tgc-revival: tfplan2cai initial resource converter template (#13998) --- mmv1/provider/template_data.go | 5 +- mmv1/provider/terraform_tgc.go | 3 +- mmv1/provider/terraform_tgc_next.go | 48 +++++- .../tfplan2cai/resource_converter.go.tmpl | 149 ++++++++++++++++++ 4 files changed, 199 insertions(+), 6 deletions(-) create mode 100644 mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 79c51dff50ce..da29e65278c1 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -190,11 +190,12 @@ func (td *TemplateData) GenerateSweeperFile(filePath string, resource api.Resour td.GenerateFile(filePath, templatePath, resource, false, templates...) 
} -func (td *TemplateData) GenerateTGCResourceFile(filePath string, resource api.Resource) { - templatePath := "templates/tgc/resource_converter.go.tmpl" +func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, resource api.Resource) { templates := []string{ templatePath, "templates/terraform/expand_property_method.go.tmpl", + "templates/terraform/schema_property.go.tmpl", + "templates/terraform/schema_subresource.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) } diff --git a/mmv1/provider/terraform_tgc.go b/mmv1/provider/terraform_tgc.go index 3312dddb4474..3f0802052af0 100644 --- a/mmv1/provider/terraform_tgc.go +++ b/mmv1/provider/terraform_tgc.go @@ -124,8 +124,9 @@ func (tgc TerraformGoogleConversion) GenerateResource(object api.Resource, templ log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } + templatePath := "templates/tgc/resource_converter.go.tmpl" targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s.go", productName, google.Underscore(object.Name))) - templateData.GenerateTGCResourceFile(targetFilePath, object) + templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) } // Generate the IAM policy for this object. 
This is used to query and test diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 663c1959e196..f5d4d2df6963 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -21,11 +21,13 @@ import ( "fmt" "log" "os" + "path" "path/filepath" "time" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" "github.com/otiai10/copy" ) @@ -60,11 +62,51 @@ func NewTerraformGoogleConversionNext(product *api.Product, versionName string, } func (tgc TerraformGoogleConversionNext) Generate(outputFolder, productPath, resourceToGenerate string, generateCode, generateDocs bool) { - tgc.GenerateTfToCaiObjects(outputFolder, resourceToGenerate, generateCode, generateDocs) - tgc.GenerateCaiToHclObjects(outputFolder, resourceToGenerate, generateCode, generateDocs) + for _, object := range tgc.Product.Objects { + object.ExcludeIfNotInVersion(&tgc.Version) + + if resourceToGenerate != "" && object.Name != resourceToGenerate { + log.Printf("Excluding %s per user request", object.Name) + continue + } + + tgc.GenerateObject(*object, outputFolder, tgc.TargetVersionName, generateCode, generateDocs) + } } -func (tgc TerraformGoogleConversionNext) GenerateTfToCaiObjects(outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { +func (tgc TerraformGoogleConversionNext) GenerateObject(object api.Resource, outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { + if object.ExcludeTgc { + log.Printf("Skipping fine-grained resource %s", object.Name) + return + } + + // TODO: remove it after supporting most of resources. 
+ supportList := map[string]bool{ + "ComputeAddress": true, + } + + if ok := supportList[object.ResourceName()]; !ok { + return + } + + templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) + + if !object.IsExcluded() { + tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "tfplan2cai") + } +} + +func (tgc TerraformGoogleConversionNext) GenerateResource(object api.Resource, templateData TemplateData, outputFolder string, generateCode, generateDocs bool, converter string) { + productName := tgc.Product.ApiName + conveterFolder := fmt.Sprintf("pkg/%s/converters/services", converter) + targetFolder := path.Join(outputFolder, conveterFolder, productName) + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + + templatePath := fmt.Sprintf("templates/tgc_next/%s/resource_converter.go.tmpl", converter) + targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s.go", productName, google.Underscore(object.Name))) + templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) } func (tgc TerraformGoogleConversionNext) GenerateCaiToHclObjects(outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl new file mode 100644 index 000000000000..2a6941700d25 --- /dev/null +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -0,0 +1,149 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */ -}} +{{$.CodeHeader TemplatePath}} + +package {{ lower $.ProductMetadata.Name }} + +import ( +{{/* We list all the v2 imports here and unstable imports, because we run 'goimports' to guess the correct + set of imports, which will never guess the major version correctly. */ -}} + "github.com/apparentlymart/go-cidr/cidr" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" + "google.golang.org/api/bigtableadmin/v2" + "google.golang.org/api/googleapi" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" +) + +{{- $caiProductBaseUrl := $.CaiProductBaseUrl }} +{{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} +{{- 
$apiVersion := $.CaiApiVersion $productBackendName $caiProductBaseUrl}} + +{{if $.CustomCode.Constants -}} + {{- $.CustomTemplate $.CustomCode.Constants true -}} +{{- end}} + +const {{ $.ResourceName -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" + +func Resource{{ $.ResourceName -}}() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + {{- range $prop := $.OrderProperties $.AllUserProperties }} +{{template "SchemaFields" $prop -}} + {{- end }} + {{- range $prop := $.VirtualFields }} +{{template "SchemaFields" $prop -}} + {{- end }} +{{- if $.CustomCode.ExtraSchemaEntry }} + {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} +{{- end}} +{{ if $.HasProject -}} + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, +{{- end}} +{{- if $.HasSelfLink }} + "self_link": { + Type: schema.TypeString, + Computed: true, + }, +{{- end}} + }, + UseJSONNumber: true, + } +} + +{{- range $prop := $.AllUserProperties }} +{{template "SchemaSubResource" $prop}} +{{- end}} + +func ResourceConverter{{ $.ResourceName -}}() cai.ResourceConverter { + return cai.ResourceConverter{ + Convert: Get{{ $.ResourceName -}}CaiObject, + } +} + +func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { + name, err := cai.AssetName(d, config, "{{ $.CaiAssetNameTemplate $productBackendName }}") + if err != nil { + return []caiasset.Asset{}, err + } + if obj, err := Get{{ $.ResourceName -}}ApiObject(d, config); err == nil { + return []caiasset.Asset{{"{{"}} + Name: name, + Type: {{ $.ResourceName -}}AssetType, + Resource: &caiasset.AssetResource{ + Version: "{{ $apiVersion }}", + DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", + DiscoveryName: "{{ $.Name }}", + Data: obj, + }, + {{"}}"}}, nil + } else { + return []caiasset.Asset{}, err + } 
+} + +func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { + obj := make(map[string]interface{}) +{{- range $prop := $.SettableProperties }} +{{- if $prop.FlattenObject }} + {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) +{{- else }} + {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) +{{- end}} + if err != nil { + return nil, err +{{- if not $prop.SendEmptyValue }} + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { +{{- else }} + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { +{{- end }} + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + } +{{- end}} + +{{ if $.CustomCode.Encoder -}} + return resource{{ $.ResourceName -}}Encoder(d, config, obj) +{{- else }} + return obj, nil +{{- end}} +} + +{{if $.CustomCode.Encoder -}} +func resource{{ $.ResourceName -}}Encoder(d tpgresource.TerraformResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { +{{ $.CustomTemplate $.CustomCode.Encoder false -}} +} +{{- end}} + +{{ range $prop := $.SettableProperties }} + {{- template "expandPropertyMethod" $prop -}} +{{- end}} \ No newline at end of file From 192a90f6b9b0ac09e406fc24ecfc84728e20296b Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Mon, 19 May 2025 13:11:18 -0700 Subject: [PATCH 180/884] Adding desired_auto_created_endpoints Virtual field to Memorystore instance (#13819) --- mmv1/products/memorystore/Instance.yaml | 29 +++- .../decoders/memorystore_instance.go.tmpl | 126 +++++++++++++----- .../encoders/memorystore_instance.go.tmpl | 107 
++++++++++----- .../memorystore_instance_basic.tf.tmpl | 4 +- ...ed_user_and_auto_created_endpoints.tf.tmpl | 2 +- .../memorystore_instance_full.tf.tmpl | 6 +- ...morystore_instance_persistence_aof.tf.tmpl | 4 +- ...ystore_instance_secondary_instance.tf.tmpl | 4 +- ...morystore_instance_standalone_full.tf.tmpl | 4 +- .../data_source_memorystore_instance_test.go | 40 +----- .../resource_memorystore_instance_test.go | 75 ++++++++++- 11 files changed, 278 insertions(+), 123 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 7307a3e961e1..42c802d91f16 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -101,10 +101,35 @@ examples: 'secondary_instance_prevent_destroy': 'false' virtual_fields: - name: 'desired_psc_auto_connections' + description: "`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead." + deprecation_message: '`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead.' + type: Array + immutable: true + conflicts: + - desiredAutoCreatedEndpoints + item_type: + type: NestedObject + properties: + - type: String + name: project_id + description: + "Required. The consumer project_id where the forwarding rule is + created from. " + required: true + - type: String + name: network + description: + "Required. The consumer network where the IP address resides, in + the form of\nprojects/{project_id}/global/networks/{network_id}. " + required: true + - name: 'desired_auto_created_endpoints' description: "Immutable. User inputs for the auto-created - PSC connections. " + endpoints connections. " type: Array + # is_set: true immutable: true + conflicts: + - desiredPscAutoConnections item_type: type: NestedObject properties: @@ -257,6 +282,7 @@ properties: description: "Output only. Endpoints clients can connect to the instance through. Currently only one\ndiscovery endpoint is supported. 
" + deprecation_message: '`discovery_endpoints` is deprecated Use `endpoints` instead.' output: true item_type: type: NestedObject @@ -675,6 +701,7 @@ properties: description: "Output only. User inputs and resource details of the auto-created PSC connections. " + deprecation_message: '`psc_auto_connections` is deprecated Use `endpoints.connections.pscAutoConnections` instead.' output: true item_type: type: NestedObject diff --git a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl index f82edc817dc7..fcc04d550eaa 100644 --- a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl @@ -1,45 +1,97 @@ -// Retrieve pscAutoConnections from API response -v, ok := res["pscAutoConnections"] -if !ok { - if _, endpointsFound := res["endpoints"]; endpointsFound { - return res, nil // For Cluster Disabled instances, we would have 'endpoints' instead of 'pscAutoConnections' - } - return res, nil -} + // Retrieve endpoints.connections.pscAutoConnection from API response + v, ok := res["pscAutoConnections"] + if ok { -connections, ok := v.([]interface{}) -if !ok { - return nil, fmt.Errorf("pscAutoConnections is not an array") -} + connections, ok := v.([]interface{}) + if !ok { + return nil, fmt.Errorf("pscAutoConnections is not an array") + } -transformed := make([]interface{}, 0, len(connections)) -uniqueConnections := make(map[string]bool) // Track unique project+network combos + transformed := make([]interface{}, 0, len(connections)) + uniqueConnections := make(map[string]bool) // Track unique project+network combos -for _, raw := range connections { - connectionData, ok := raw.(map[string]interface{}) - if !ok || len(connectionData) < 1 { - return nil, fmt.Errorf("Invalid or empty psc connection data: %v", raw) - } + for _, raw := range connections { + connectionData, ok := raw.(map[string]interface{}) + if !ok || 
len(connectionData) < 1 { + return nil, fmt.Errorf("Invalid or empty psc connection data: %v", raw) + } - projectID, ok := connectionData["projectId"].(string) - if !ok { - return nil, fmt.Errorf("invalid project ID in psc connection: %v", connectionData) - } + projectID, ok := connectionData["projectId"].(string) + if !ok { + return nil, fmt.Errorf("invalid project ID in psc connection: %v", connectionData) + } - networkID, ok := connectionData["network"].(string) - if !ok { - return nil, fmt.Errorf("invalid network ID in psc connection: %v", connectionData) - } + networkID, ok := connectionData["network"].(string) + if !ok { + return nil, fmt.Errorf("invalid network ID in psc connection: %v", connectionData) + } + + uniqueKey := projectID + networkID + if !uniqueConnections[uniqueKey] { // Check for uniqueness + uniqueConnections[uniqueKey] = true + transformed = append(transformed, map[string]interface{}{ + "project_id": projectID, + "network": networkID, + }) + } + } + d.Set("desired_psc_auto_connections", transformed) + log.Printf("[DEBUG] You are setting desired_psc_auto_connections in decoder %#v", transformed) + + // Retrieve pscAutoConnections from API response + } else if v, ok := res["endpoints"]; ok { + + endpointsArray, ok := v.([]interface{}) + if !ok || len(endpointsArray) == 0 { + // No endpoints or empty array, nothing to process + } else { + transformed := make([]interface{}, 0) + uniqueEndpoints := make(map[string]bool) // Track unique project+network combos + + for _, endpoint := range endpointsArray { + endpointData, ok := endpoint.(map[string]interface{}) + if !ok { + continue + } + + connections, ok := endpointData["connections"].([]interface{}) + if !ok { + continue + } + + for _, connection := range connections { + connectionData, ok := connection.(map[string]interface{}) + if !ok { + continue + } + + pscAutoConnection, ok := connectionData["pscAutoConnection"].(map[string]interface{}) + if !ok { + continue + } + + projectID, projectOk := 
pscAutoConnection["projectId"].(string) + networkID, networkOk := pscAutoConnection["network"].(string) + + if projectOk && networkOk { + uniqueKey := projectID + networkID + if !uniqueEndpoints[uniqueKey] { // Check for uniqueness + uniqueEndpoints[uniqueKey] = true + transformed = append(transformed, map[string]interface{}{ + "project_id": projectID, + "network": networkID, + }) + } + } + } + } + if len(transformed) > 0 { + d.Set("desired_auto_created_endpoints", transformed) + log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder for %#v", transformed) + + } + } - uniqueKey := projectID + networkID - if !uniqueConnections[uniqueKey] { // Check for uniqueness - uniqueConnections[uniqueKey] = true - transformed = append(transformed, map[string]interface{}{ - "project_id": projectID, - "network": networkID, - }) } -} -d.Set("desired_psc_auto_connections", transformed) -return res, nil + return res, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl b/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl index e85836d19da1..acefdc55491c 100644 --- a/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl @@ -1,34 +1,73 @@ -v, ok := d.GetOk("desired_psc_auto_connections") -if !ok { - return obj, nil // No desired connections, nothing to update -} -l := v.([]interface{}) -req := make([]interface{}, 0, len(l)) -for _, raw := range l { - if raw == nil { - continue - } - desiredConnection := raw.(map[string]interface{}) - connectionReq := make(map[string]interface{}) - - projectId := desiredConnection["project_id"] - if val := reflect.ValueOf(projectId); val.IsValid() && !tpgresource.IsEmptyValue(val) { - connectionReq["projectId"] = projectId - } - - network := desiredConnection["network"] - if val := reflect.ValueOf(network); val.IsValid() && !tpgresource.IsEmptyValue(val) { - connectionReq["network"] = network - 
} - - req = append(req, connectionReq) -} - -obj["pscAutoConnections"] = req -// if the automated_backup_config is not defined, automatedBackupMode needs to be passed and set to DISABLED in the expand -if obj["automatedBackupConfig"] == nil { - config := meta.(*transport_tpg.Config) - automatedBackupConfigProp, _ := expandMemorystoreInstanceAutomatedBackupConfig(d.Get("automated_backup_config"), d, config) - obj["automatedBackupConfig"] = automatedBackupConfigProp -} -return obj, nil + // Handles desired_auto_created_endpoints virtual field + v, ok := d.GetOk("desired_auto_created_endpoints") + if ok { + l := v.([]interface{}) + if len(l) > 0 { + endpoints := make([]interface{}, 1) + endpointObj := make(map[string]interface{}) + connections := make([]interface{}, 0, len(l)) + + for _, raw := range l { + if raw == nil { + continue + } + desiredEndpoint := raw.(map[string]interface{}) + connectionObj := make(map[string]interface{}) + pscAutoConnection := make(map[string]interface{}) + + projectId := desiredEndpoint["project_id"] + if val := reflect.ValueOf(projectId); val.IsValid() && !tpgresource.IsEmptyValue(val) { + pscAutoConnection["projectId"] = projectId + } + + network := desiredEndpoint["network"] + if val := reflect.ValueOf(network); val.IsValid() && !tpgresource.IsEmptyValue(val) { + pscAutoConnection["network"] = network + } + + connectionObj["pscAutoConnection"] = pscAutoConnection + connections = append(connections, connectionObj) + } + + endpointObj["connections"] = connections + endpoints[0] = endpointObj + obj["endpoints"] = endpoints + log.Printf("[DEBUG] You are setting desired_auto_created_endpoints in encoder %#v", endpoints) + + } + // Handles desired_auto_created_endpoints virtual field + } else if v, ok := d.GetOk("desired_psc_auto_connections"); ok { + l := v.([]interface{}) + req := make([]interface{}, 0, len(l)) + for _, raw := range l { + if raw == nil { + continue + } + desiredConnection := raw.(map[string]interface{}) + connectionReq := 
make(map[string]interface{}) + + projectId := desiredConnection["project_id"] + if val := reflect.ValueOf(projectId); val.IsValid() && !tpgresource.IsEmptyValue(val) { + connectionReq["projectId"] = projectId + } + + network := desiredConnection["network"] + if val := reflect.ValueOf(network); val.IsValid() && !tpgresource.IsEmptyValue(val) { + connectionReq["network"] = network + } + + req = append(req, connectionReq) + } + + obj["pscAutoConnections"] = req + log.Printf("[DEBUG] You are setting desired_psc_auto_connections in encoder %#v", req) + + } + + // If the automated_backup_config is not defined, automatedBackupMode needs to be passed and set to DISABLED in the expand + if obj["automatedBackupConfig"] == nil { + config := meta.(*transport_tpg.Config) + automatedBackupConfigProp, _ := expandMemorystoreInstanceAutomatedBackupConfig(d.Get("automated_backup_config"), d, config) + obj["automatedBackupConfig"] = automatedBackupConfigProp + } + return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl index 7204c9fc9f0e..27a7491a5481 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl @@ -1,7 +1,7 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 3 - desired_psc_auto_connections { + shard_count = 1 + desired_auto_created_endpoints { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl index ae8016449e57..8533b2b5bb8c 100644 --- 
a/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl @@ -73,7 +73,7 @@ resource "google_compute_network" "network2" { resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" shard_count = 1 - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.network1.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl index caa6555c8ab3..5ec779d74e91 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl @@ -1,12 +1,12 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 3 - desired_psc_auto_connections { + shard_count = 1 + desired_auto_created_endpoints { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } location = "us-central1" - replica_count = 2 + replica_count = 1 node_type = "SHARED_CORE_NANO" transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" authorization_mode = "AUTH_DISABLED" diff --git a/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl index d4ec704a76dd..c3bb54bc7bc4 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl @@ -1,7 +1,7 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 3 - 
desired_psc_auto_connections { + shard_count = 1 + desired_auto_created_endpoints { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl index c6b950035efa..a40e5c126383 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl @@ -2,7 +2,7 @@ resource "google_memorystore_instance" "primary_instance" { instance_id = "{{index $.Vars "primary_instance_name"}}" shard_count = 1 - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.primary_producer_net.id project_id = data.google_project.project.project_id } @@ -63,7 +63,7 @@ resource "google_compute_network" "primary_producer_net" { resource "google_memorystore_instance" "secondary_instance" { instance_id = "{{index $.Vars "secondary_instance_name"}}" shard_count = 1 - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.secondary_producer_net.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl index 8c5b5853e982..4fa5b0fa76f4 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl @@ -2,12 +2,12 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { provider = google-beta instance_id = "{{index $.Vars "instance_name"}}" shard_count = 1 - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.producer_net.id project_id = 
data.google_project.project.project_id } location = "us-central1" - replica_count = 2 + replica_count = 1 node_type = "SHARED_CORE_NANO" transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" authorization_mode = "AUTH_DISABLED" diff --git a/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go index 4d7e1be646f1..ac36d32b96cb 100644 --- a/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go @@ -12,7 +12,6 @@ func TestAccMemorystoreInstanceDatasourceConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "memorystore-instance-ds"), } acctest.VcrTest(t, resource.TestCase{ @@ -22,6 +21,9 @@ func TestAccMemorystoreInstanceDatasourceConfig(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccMemorystoreInstanceDatasourceConfig(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_memorystore_instance.default", "google_memorystore_instance.instance-basic"), + ), }, }, }) @@ -31,48 +33,14 @@ func testAccMemorystoreInstanceDatasourceConfig(context map[string]interface{}) return acctest.Nprintf(` resource "google_memorystore_instance" "instance-basic" { instance_id = "tf-test-memorystore-instance%{random_suffix}" - shard_count = 3 - desired_psc_auto_connections { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } + shard_count = 1 location = "us-central1" deletion_protection_enabled = false - depends_on = [google_network_connectivity_service_connection_policy.default] - -} - -resource "google_network_connectivity_service_connection_policy" "default" { - name = 
"%{network_name}-policy" - location = "us-central1" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet.id] - } } - -resource "google_compute_subnetwork" "producer_subnet" { - name = "%{network_name}-sn" - ip_cidr_range = "10.0.0.248/29" - region = "us-central1" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "%{network_name}-vpc" - auto_create_subnetworks = false -} - - data "google_project" "project" { - } - data "google_memorystore_instance" "default" { instance_id = google_memorystore_instance.instance-basic.instance_id location = "us-central1" - } `, context) } diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go index 6de9a8508464..0e02d4c4b5af 100644 --- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go @@ -93,7 +93,7 @@ resource "google_memorystore_instance" "test_abc" { replica_count = 0 node_type = "SHARED_CORE_NANO" deletion_protection_enabled = false - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.primary_producer_net.id project_id = data.google_project.project.project_id } @@ -139,6 +139,75 @@ data "google_project" "project" { func testAccMemorystoreInstance_automatedBackupConfigWithout(context map[string]interface{}) string { return acctest.Nprintf(` // Primary instance +resource "google_memorystore_instance" "test_abc" { + instance_id = "tf-test-instance-abc-%{random_suffix}" + shard_count = 1 + location = "us-central1" + replica_count = 0 + node_type = "SHARED_CORE_NANO" + deletion_protection_enabled = false + 
desired_auto_created_endpoints { + network = google_compute_network.primary_producer_net.id + project_id = data.google_project.project.project_id + } + depends_on = [ google_network_connectivity_service_connection_policy.primary_policy ] +} + +resource "google_network_connectivity_service_connection_policy" "primary_policy" { + name = "tf-test-abc-policy-%{random_suffix}" + location = "us-central1" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.primary_producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.primary_producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "primary_producer_subnet" { + name = "tf-test-abc-%{random_suffix}" + ip_cidr_range = "10.0.4.0/29" + region = "us-central1" + network = google_compute_network.primary_producer_net.id +} + +resource "google_compute_network" "primary_producer_net" { + name = "tf-test-abc-net-%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func TestAccMemorystoreInstance_deprecatedDesiredPscAutoConnections(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckMemorystoreInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccMemorystoreInstance_deprecatedDesiredPscAutoConnections(context), + }, + { + ResourceName: "google_memorystore_instance.test_abc", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccMemorystoreInstance_deprecatedDesiredPscAutoConnections(context map[string]interface{}) string { + return acctest.Nprintf(` +// Primary instance resource "google_memorystore_instance" "test_abc" { instance_id = "tf-test-instance-abc-%{random_suffix}" 
shard_count = 1 @@ -1033,7 +1102,7 @@ resource "google_memorystore_instance" "test_secondary" { shard_count = %d node_type = "%s" location = "us-west2" - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } @@ -1328,7 +1397,7 @@ resource "google_memorystore_instance" "test" { shard_count = %d node_type = "%s" location = "us-west2" - desired_psc_auto_connections { + desired_auto_created_endpoints { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } From 63c122a5e8a50546d15713d46db55b7b61222850 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 13:38:53 -0700 Subject: [PATCH 181/884] Fix composer tests (#14006) --- .../resource_composer_environment_test.go | 574 +++++++++++------- 1 file changed, 341 insertions(+), 233 deletions(-) diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go index 0a9940f31fa8..92113ecf2156 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go @@ -402,6 +402,7 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -409,7 +410,7 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: 
testAccComposerEnvironment_composerV2(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -422,7 +423,7 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -435,6 +436,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -442,10 +444,10 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -458,7 +460,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), Check: 
testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -471,6 +473,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -478,10 +481,10 @@ func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -494,7 +497,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -507,6 +510,7 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, 
resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -514,7 +518,7 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -527,7 +531,7 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -540,6 +544,7 @@ func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -547,10 +552,10 @@ func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), }, { ResourceName: 
"google_composer_environment.test", @@ -563,7 +568,7 @@ func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -576,6 +581,7 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -583,10 +589,10 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), + Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -599,7 +605,7 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), + Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -1492,8 +1498,8 @@ resource "google_composer_environment" "test" { } } storage_config { - bucket = 
google_storage_bucket.test.name - } + bucket = google_storage_bucket.test.name + } } // use a separate network to avoid conflicts with other tests running in parallel @@ -1614,7 +1620,7 @@ resource "google_composer_environment" "test" { private_environment_config { enable_private_endpoint = true enable_privately_used_public_ips = true - } + } } } @@ -1657,7 +1663,7 @@ resource "google_composer_environment" "test" { connection_type = "VPC_PEERING" enable_private_endpoint = true enable_privately_used_public_ips = true - } + } } } @@ -1964,8 +1970,21 @@ resource "google_compute_subnetwork" "test" { `, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork string) string { +func testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-east1" @@ -1974,10 +1993,11 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" } - } + } software_config { image_version = "composer-2-airflow-2" @@ -2011,7 +2031,7 @@ resource "google_composer_environment" "test" { cloud_sql_ipv4_cidr_block = "10.3.224.0/20" } } - + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2027,73 +2047,87 @@ resource 
"google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2(envName, network, subnetwork string) string { +func testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" - } + config { + node_config { + service_account = google_service_account.test.name + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.0.0.0/16" } + } - software_config { - image_version = "composer-2-airflow-2" - cloud_data_lineage_integration { - enabled = true - } + software_config { + image_version = "composer-2-airflow-2" + cloud_data_lineage_integration { + enabled = true } + } - workloads_config { - scheduler { - cpu = 1.25 - memory_gb = 2.5 - storage_gb = 5.4 - count = 2 - } - web_server { - cpu = 1.75 - memory_gb = 3.0 - storage_gb = 4.4 - } - worker { - cpu = 0.5 - memory_gb = 2.0 - storage_gb = 3.4 - min_count = 2 - max_count = 5 - } - triggerer { - cpu = 0.5 - memory_gb = 2.0 - count = 1 - } + workloads_config { + scheduler { + cpu = 1.25 + memory_gb = 2.5 + storage_gb = 5.4 + count = 2 } - database_config { - zone = 
"us-east1-c" + web_server { + cpu = 1.75 + memory_gb = 3.0 + storage_gb = 4.4 } - environment_size = "ENVIRONMENT_SIZE_MEDIUM" - data_retention_config { - task_logs_retention_config { - storage_mode = "CLOUD_LOGGING_ONLY" - } + worker { + cpu = 0.5 + memory_gb = 2.0 + storage_gb = 3.4 + min_count = 2 + max_count = 5 } - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + triggerer { + cpu = 0.5 + memory_gb = 2.0 + count = 1 } } - + database_config { + zone = "us-east1-c" + } + environment_size = "ENVIRONMENT_SIZE_MEDIUM" + data_retention_config { + task_logs_retention_config { + storage_mode = "CLOUD_LOGGING_ONLY" + } + } + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } + } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2109,25 +2143,40 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork string) string { +func testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = 
"us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name + } - software_config { - image_version = "composer-2.10.0-airflow-2.10.2" - } + software_config { + image_version = "composer-2.10.0-airflow-2.10.2" } + } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2143,25 +2192,40 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork string) string { +func testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name + } - software_config { - image_version = "composer-2.10.1-airflow-2.10.2" - } + software_config { + image_version = "composer-2.10.1-airflow-2.10.2" } + } + depends_on = 
[google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2177,54 +2241,69 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork string) string { +func testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name + } - software_config { - image_version = "composer-2-airflow-2" - } + software_config { + image_version = "composer-2-airflow-2" + } - workloads_config { - scheduler { - cpu = 1.25 - memory_gb = 2.5 - storage_gb = 5.4 - count = 2 - } - web_server { - cpu = 1.75 - memory_gb = 3.0 - storage_gb = 4.4 - } - worker { - cpu = 0.5 - memory_gb = 2.0 - storage_gb = 3.4 - min_count = 2 - max_count = 5 - } - } - environment_size = "ENVIRONMENT_SIZE_MEDIUM" - resilience_mode = "HIGH_RESILIENCE" - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = 
"10.3.224.0/20" - } - } + workloads_config { + scheduler { + cpu = 1.25 + memory_gb = 2.5 + storage_gb = 5.4 + count = 2 + } + web_server { + cpu = 1.75 + memory_gb = 3.0 + storage_gb = 4.4 + } + worker { + cpu = 0.5 + memory_gb = 2.0 + storage_gb = 3.4 + min_count = 2 + max_count = 5 + } + } + environment_size = "ENVIRONMENT_SIZE_MEDIUM" + resilience_mode = "HIGH_RESILIENCE" + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } + } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2240,7 +2319,7 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } func testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork string) string { @@ -2249,19 +2328,19 @@ resource "google_composer_environment" "test" { name = "%s" region = "us-central1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + } - software_config { - image_version = "composer-2-airflow-2" - } + software_config { + image_version = "composer-2-airflow-2" + } private_environment_config { - cloud_composer_connection_subnetwork = google_compute_subnetwork.test.self_link - } + cloud_composer_connection_subnetwork = google_compute_subnetwork.test.self_link } + } } @@ -2325,51 +2404,66 @@ resource "google_compute_subnetwork" "test" { `, envName, compVersion, airflowVersion, network, subnetwork) } -func testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork string) string { +func 
testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - } - - software_config { - image_version = "composer-2-airflow-2" - } - - workloads_config { - scheduler { - cpu = 1.25 - memory_gb = 2.5 - storage_gb = 5.4 - count = 2 - } - web_server { - cpu = 1.75 - memory_gb = 3.0 - storage_gb = 4.4 - } - worker { - cpu = 0.5 - memory_gb = 2.0 - storage_gb = 3.4 - min_count = 2 - max_count = 5 - } - } - environment_size = "ENVIRONMENT_SIZE_MEDIUM" - resilience_mode = "STANDARD_RESILIENCE" - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name + } + + software_config { + image_version = "composer-2-airflow-2" + } + + workloads_config { + scheduler { + cpu = 1.25 + memory_gb = 2.5 + storage_gb = 5.4 + count = 2 + } + web_server { + cpu = 1.75 + memory_gb = 3.0 + storage_gb = 4.4 + } + worker { + cpu = 0.5 + memory_gb = 2.0 + storage_gb = 3.4 + min_count = 2 + max_count = 5 + } } + environment_size = "ENVIRONMENT_SIZE_MEDIUM" + resilience_mode = 
"STANDARD_RESILIENCE" + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } + } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2385,7 +2479,7 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } func testAccComposerEnvironment_MasterAuthNetworksUpdate(compVersion, airflowVersion, envName, network, subnetwork string) string { @@ -2429,67 +2523,81 @@ resource "google_compute_subnetwork" "test" { `, envName, compVersion, airflowVersion, network, subnetwork) } -func testAccComposerEnvironment_updateComposerV2(name, network, subnetwork string) string { +func testAccComposerEnvironment_updateComposerV2(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.0.0.0/16" } + } - software_config { - image_version = 
"composer-2-airflow-2" - cloud_data_lineage_integration { - enabled = false - } + software_config { + image_version = "composer-2-airflow-2" + cloud_data_lineage_integration { + enabled = false } + } - workloads_config { - scheduler { - cpu = 2.25 - memory_gb = 3.5 - storage_gb = 6.4 - count = 3 - } - web_server { - cpu = 2.75 - memory_gb = 4.0 - storage_gb = 5.4 - } - worker { - cpu = 1.5 - memory_gb = 3.0 - storage_gb = 4.4 - min_count = 3 - max_count = 6 - } - triggerer { - cpu = 0.75 - memory_gb = 2 - count = 1 - } + workloads_config { + scheduler { + cpu = 2.25 + memory_gb = 3.5 + storage_gb = 6.4 + count = 3 } - environment_size = "ENVIRONMENT_SIZE_LARGE" - data_retention_config { - task_logs_retention_config { - storage_mode = "CLOUD_LOGGING_AND_CLOUD_STORAGE" - } + web_server { + cpu = 2.75 + memory_gb = 4.0 + storage_gb = 5.4 } - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + worker { + cpu = 1.5 + memory_gb = 3.0 + storage_gb = 4.4 + min_count = 3 + max_count = 6 + } + triggerer { + cpu = 0.75 + memory_gb = 2 + count = 1 } } - + environment_size = "ENVIRONMENT_SIZE_LARGE" + data_retention_config { + task_logs_retention_config { + storage_mode = "CLOUD_LOGGING_AND_CLOUD_STORAGE" + } + } + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } + } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2504,7 +2612,7 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link private_ip_google_access = true } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } func testAccComposer2Environment_nodeCfg(environment, 
network, subnetwork, serviceAccount string) string { From ebd5b2c24452fcdb03aee89a7e7ee0357e707161 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 13:39:01 -0700 Subject: [PATCH 182/884] address notebooks_instance flaky tests (#14004) --- mmv1/products/notebooks/Instance.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/products/notebooks/Instance.yaml b/mmv1/products/notebooks/Instance.yaml index e5f82d4a64bb..c412d9f3c9f6 100644 --- a/mmv1/products/notebooks/Instance.yaml +++ b/mmv1/products/notebooks/Instance.yaml @@ -70,6 +70,8 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'notebooks-instance' + ignore_read_extra: + - 'update_time' - name: 'notebook_instance_basic_stopped' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' @@ -78,6 +80,7 @@ examples: instance_name: 'notebooks-instance' ignore_read_extra: - 'desired_state' + skip_test: https://github.com/hashicorp/terraform-provider-google/issues/17593#issuecomment-2888583933 - name: 'notebook_instance_basic_container' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' From 7c6e4126c05aa23d58ae156aa4b24183a1d35ca7 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 13:40:01 -0700 Subject: [PATCH 183/884] fix flaky test TestAccComputeInstanceSettings_update (#14008) --- .../compute/resource_compute_instance_settings_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go index b172d6e00f44..58d1a7abb5f9 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go @@ -55,7 +55,7 @@ func testAccComputeInstanceSettings_basic(context map[string]interface{}) string return acctest.Nprintf(` resource "google_compute_instance_settings" "gce_instance_settings" { - zone = "us-east7-b" + zone = "us-east5-c" metadata { items = { foo = "baz" @@ -70,7 +70,7 @@ func testAccComputeInstanceSettings_update(context map[string]interface{}) strin return acctest.Nprintf(` resource "google_compute_instance_settings" "gce_instance_settings" { - zone = "us-east7-b" + zone = "us-east5-c" metadata { items = { foo = "bar" @@ -86,7 +86,7 @@ func testAccComputeInstanceSettings_delete(context map[string]interface{}) strin return acctest.Nprintf(` resource "google_compute_instance_settings" "gce_instance_settings" { - zone = "us-east7-b" + zone = "us-east5-c" metadata { items = { baz = "qux" From 968e68a6a0df988ae06e671225858a6ce68e20d4 Mon Sep 17 00:00:00 2001 From: Chris Hawk Date: Mon, 19 May 2025 14:16:48 -0700 Subject: [PATCH 184/884] Disable siteverification domain test (#14033) --- .../resource_site_verification_web_resource_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go b/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go index bd11ab324023..494ba4868d44 100644 --- a/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go +++ b/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go @@ -15,7 +15,7 @@ import ( func TestAccSiteVerificationWebResource_siteVerificationDomain(t *testing.T) { // This test requires manual project configuration. - acctest.SkipIfVcr(t) + t.Skip() // This test needs to be able to create DNS records that are publicly // resolvable. 
To run, you'll need a registered domain with a GCP managed zone From 8ef45b91c6f00365308460535919efe8441cd067 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 19 May 2025 15:18:52 -0700 Subject: [PATCH 185/884] tgc-revival: copy compute_instance_helper.go for cai2hcl (#14028) --- mmv1/provider/terraform_tgc_next.go | 3 +- .../services/compute/compute_instance.go | 10 +- .../compute/compute_instance_helpers.go | 330 ------------------ .../compute/compute_instance_helpers_tgc.go | 194 ++++++++++ 4 files changed, 201 insertions(+), 336 deletions(-) delete mode 100644 mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go create mode 100644 mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index f5d4d2df6963..b6ffabe85afb 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -127,7 +127,8 @@ func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, "pkg/tfplan2cai/converters/services/compute/metadata.go": "third_party/terraform/services/compute/metadata.go.tmpl", // cai2hcl - "pkg/cai2hcl/converters/resource_converters.go": "templates/tgc_next/cai2hcl/resource_converters.go.tmpl", + "pkg/cai2hcl/converters/resource_converters.go": "templates/tgc_next/cai2hcl/resource_converters.go.tmpl", + "pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go": "third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", } templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go index e5b74582c8a9..ff6b00ac9260 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go +++ 
b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go @@ -67,7 +67,7 @@ func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*m hclData["network_performance_config"] = flattenNetworkPerformanceConfig(instance.NetworkPerformanceConfig) // Set the networks - networkInterfaces, _, _, err := flattenNetworkInterfaces(instance.NetworkInterfaces, project) + networkInterfaces, _, _, err := flattenNetworkInterfacesTgc(instance.NetworkInterfaces, project) if err != nil { return nil, err } @@ -78,7 +78,7 @@ func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*m } hclData["labels"] = utils.RemoveTerraformAttributionLabel(instance.Labels) - hclData["service_account"] = flattenServiceAccounts(instance.ServiceAccounts) + hclData["service_account"] = flattenServiceAccountsTgc(instance.ServiceAccounts) hclData["resource_policies"] = instance.ResourcePolicies bootDisk, ads, scratchDisks := flattenDisks(instance.Disks, instance.Name) @@ -86,8 +86,8 @@ func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*m hclData["attached_disk"] = ads hclData["scratch_disk"] = scratchDisks - hclData["scheduling"] = flattenScheduling(instance.Scheduling) - hclData["guest_accelerator"] = flattenGuestAccelerators(instance.GuestAccelerators) + hclData["scheduling"] = flattenSchedulingTgc(instance.Scheduling) + hclData["guest_accelerator"] = flattenGuestAcceleratorsTgc(instance.GuestAccelerators) hclData["shielded_instance_config"] = flattenShieldedVmConfig(instance.ShieldedInstanceConfig) hclData["enable_display"] = flattenEnableDisplay(instance.DisplayDevice) hclData["min_cpu_platform"] = instance.MinCpuPlatform @@ -102,7 +102,7 @@ func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*m hclData["hostname"] = instance.Hostname hclData["confidential_instance_config"] = flattenConfidentialInstanceConfig(instance.ConfidentialInstanceConfig) 
hclData["advanced_machine_features"] = flattenAdvancedMachineFeatures(instance.AdvancedMachineFeatures) - hclData["reservation_affinity"] = flattenReservationAffinity(instance.ReservationAffinity) + hclData["reservation_affinity"] = flattenReservationAffinityTgc(instance.ReservationAffinity) hclData["key_revocation_action_type"] = instance.KeyRevocationActionType // TODO: convert details from the boot disk assets (separate disk assets) into initialize_params in cai2hcl? diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go deleted file mode 100644 index e04a7ca608f9..000000000000 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go +++ /dev/null @@ -1,330 +0,0 @@ -package compute - -import ( - "strconv" - "strings" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - - compute "google.golang.org/api/compute/v0.beta" -) - -func flattenAliasIpRange(ranges []*compute.AliasIpRange) []map[string]interface{} { - rangesSchema := make([]map[string]interface{}, 0, len(ranges)) - for _, ipRange := range ranges { - rangesSchema = append(rangesSchema, map[string]interface{}{ - "ip_cidr_range": ipRange.IpCidrRange, - "subnetwork_range_name": ipRange.SubnetworkRangeName, - }) - } - return rangesSchema -} - -func flattenScheduling(resp *compute.Scheduling) []map[string]interface{} { - schedulingMap := make(map[string]interface{}, 0) - - if resp.InstanceTerminationAction != "" { - schedulingMap["instance_termination_action"] = resp.InstanceTerminationAction - } - - if resp.MinNodeCpus != 0 { - schedulingMap["min_node_cpus"] = resp.MinNodeCpus - } - - if resp.OnHostMaintenance != "MIGRATE" { - schedulingMap["on_host_maintenance"] = resp.OnHostMaintenance - } - - 
if resp.AutomaticRestart != nil && !*resp.AutomaticRestart { - schedulingMap["automatic_restart"] = *resp.AutomaticRestart - } - - if resp.Preemptible { - schedulingMap["preemptible"] = resp.Preemptible - } - - if resp.NodeAffinities != nil && len(resp.NodeAffinities) > 0 { - nodeAffinities := []map[string]interface{}{} - for _, na := range resp.NodeAffinities { - nodeAffinities = append(nodeAffinities, map[string]interface{}{ - "key": na.Key, - "operator": na.Operator, - "values": tpgresource.ConvertStringArrToInterface(na.Values), - }) - } - schedulingMap["node_affinities"] = nodeAffinities - } - - if resp.ProvisioningModel != "STANDARD" { - schedulingMap["provisioning_model"] = resp.ProvisioningModel - } - - if resp.AvailabilityDomain != 0 { - schedulingMap["availability_domain"] = resp.AvailabilityDomain - } - - if resp.MaxRunDuration != nil { - schedulingMap["max_run_duration"] = flattenComputeMaxRunDuration(resp.MaxRunDuration) - } - - if resp.OnInstanceStopAction != nil { - schedulingMap["on_instance_stop_action"] = flattenOnInstanceStopAction(resp.OnInstanceStopAction) - } - - if resp.HostErrorTimeoutSeconds != 0 { - schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds - } - - if resp.MaintenanceInterval != "" { - schedulingMap["maintenance_interval"] = resp.MaintenanceInterval - } - - if resp.LocalSsdRecoveryTimeout != nil { - schedulingMap["local_ssd_recovery_timeout"] = flattenComputeLocalSsdRecoveryTimeout(resp.LocalSsdRecoveryTimeout) - } - - if len(schedulingMap) == 0 { - return nil - } - - return []map[string]interface{}{schedulingMap} -} - -func flattenComputeMaxRunDuration(v *compute.Duration) []interface{} { - if v == nil { - return nil - } - transformed := make(map[string]interface{}) - transformed["nanos"] = v.Nanos - transformed["seconds"] = v.Seconds - return []interface{}{transformed} -} - -func flattenOnInstanceStopAction(v *compute.SchedulingOnInstanceStopAction) []interface{} { - if v == nil { - return nil - } - 
transformed := make(map[string]interface{}) - transformed["discard_local_ssd"] = v.DiscardLocalSsd - return []interface{}{transformed} -} - -func flattenComputeLocalSsdRecoveryTimeout(v *compute.Duration) []interface{} { - if v == nil { - return nil - } - transformed := make(map[string]interface{}) - transformed["nanos"] = v.Nanos - transformed["seconds"] = v.Seconds - return []interface{}{transformed} -} - -func flattenAccessConfigs(accessConfigs []*compute.AccessConfig) ([]map[string]interface{}, string) { - flattened := make([]map[string]interface{}, len(accessConfigs)) - natIP := "" - for i, ac := range accessConfigs { - flattened[i] = map[string]interface{}{ - "nat_ip": ac.NatIP, - "network_tier": ac.NetworkTier, - } - if ac.SetPublicPtr { - flattened[i]["public_ptr_domain_name"] = ac.PublicPtrDomainName - } - if natIP == "" { - natIP = ac.NatIP - } - if ac.SecurityPolicy != "" { - flattened[i]["security_policy"] = ac.SecurityPolicy - } - } - return flattened, natIP -} - -func flattenIpv6AccessConfigs(ipv6AccessConfigs []*compute.AccessConfig) []map[string]interface{} { - flattened := make([]map[string]interface{}, len(ipv6AccessConfigs)) - for i, ac := range ipv6AccessConfigs { - flattened[i] = map[string]interface{}{ - "network_tier": ac.NetworkTier, - } - flattened[i]["public_ptr_domain_name"] = ac.PublicPtrDomainName - flattened[i]["external_ipv6"] = ac.ExternalIpv6 - flattened[i]["external_ipv6_prefix_length"] = strconv.FormatInt(ac.ExternalIpv6PrefixLength, 10) - flattened[i]["name"] = ac.Name - if ac.SecurityPolicy != "" { - flattened[i]["security_policy"] = ac.SecurityPolicy - } - } - return flattened -} - -func flattenNetworkInterfaces(networkInterfaces []*compute.NetworkInterface, project string) ([]map[string]interface{}, string, string, error) { - flattened := make([]map[string]interface{}, len(networkInterfaces)) - var internalIP, externalIP string - - for i, iface := range networkInterfaces { - var ac []map[string]interface{} - ac, externalIP = 
flattenAccessConfigs(iface.AccessConfigs) - - flattened[i] = map[string]interface{}{ - "network_ip": iface.NetworkIP, - "access_config": ac, - "alias_ip_range": flattenAliasIpRange(iface.AliasIpRanges), - "nic_type": iface.NicType, - "ipv6_access_config": flattenIpv6AccessConfigs(iface.Ipv6AccessConfigs), - "ipv6_address": iface.Ipv6Address, - } - - if !strings.HasSuffix(iface.Network, "/default") { - flattened[i]["network"] = tpgresource.ConvertSelfLinkToV1(iface.Network) - } - - if !strings.HasSuffix(iface.Subnetwork, "/default") { - flattened[i]["subnetwork"] = tpgresource.ConvertSelfLinkToV1(iface.Subnetwork) - } - - subnetProject := utils.ParseFieldValue(iface.Subnetwork, "projects") - if subnetProject != project { - flattened[i]["subnetwork_project"] = subnetProject - } - - if iface.StackType != "IPV4_ONLY" { - flattened[i]["stack_type"] = iface.StackType - } - - if iface.QueueCount != 0 { - flattened[i]["queue_count"] = iface.QueueCount - } - - if internalIP == "" { - internalIP = iface.NetworkIP - } - - if iface.NetworkAttachment != "" { - networkAttachment, err := tpgresource.GetRelativePath(iface.NetworkAttachment) - if err != nil { - return nil, "", "", err - } - flattened[i]["network_attachment"] = networkAttachment - } - - // the security_policy for a network_interface is found in one of its accessConfigs. 
- if len(iface.AccessConfigs) > 0 && iface.AccessConfigs[0].SecurityPolicy != "" { - flattened[i]["security_policy"] = iface.AccessConfigs[0].SecurityPolicy - } else if len(iface.Ipv6AccessConfigs) > 0 && iface.Ipv6AccessConfigs[0].SecurityPolicy != "" { - flattened[i]["security_policy"] = iface.Ipv6AccessConfigs[0].SecurityPolicy - } - } - return flattened, internalIP, externalIP, nil -} - -func flattenServiceAccounts(serviceAccounts []*compute.ServiceAccount) []map[string]interface{} { - result := make([]map[string]interface{}, len(serviceAccounts)) - for i, serviceAccount := range serviceAccounts { - result[i] = map[string]interface{}{ - "email": serviceAccount.Email, - "scopes": serviceAccount.Scopes, - } - } - return result -} - -func flattenGuestAccelerators(accelerators []*compute.AcceleratorConfig) []map[string]interface{} { - acceleratorsSchema := make([]map[string]interface{}, len(accelerators)) - for i, accelerator := range accelerators { - acceleratorsSchema[i] = map[string]interface{}{ - "count": accelerator.AcceleratorCount, - "type": accelerator.AcceleratorType, - } - } - return acceleratorsSchema -} - -func flattenConfidentialInstanceConfig(ConfidentialInstanceConfig *compute.ConfidentialInstanceConfig) []map[string]interface{} { - if ConfidentialInstanceConfig == nil { - return nil - } - - return []map[string]interface{}{{ - "enable_confidential_compute": ConfidentialInstanceConfig.EnableConfidentialCompute, - "confidential_instance_type": ConfidentialInstanceConfig.ConfidentialInstanceType, - }} -} - -func flattenAdvancedMachineFeatures(AdvancedMachineFeatures *compute.AdvancedMachineFeatures) []map[string]interface{} { - if AdvancedMachineFeatures == nil { - return nil - } - return []map[string]interface{}{{ - "enable_nested_virtualization": AdvancedMachineFeatures.EnableNestedVirtualization, - "threads_per_core": AdvancedMachineFeatures.ThreadsPerCore, - "turbo_mode": AdvancedMachineFeatures.TurboMode, - "visible_core_count": 
AdvancedMachineFeatures.VisibleCoreCount, - "performance_monitoring_unit": AdvancedMachineFeatures.PerformanceMonitoringUnit, - "enable_uefi_networking": AdvancedMachineFeatures.EnableUefiNetworking, - }} -} - -func flattenShieldedVmConfig(shieldedVmConfig *compute.ShieldedInstanceConfig) []map[string]bool { - if shieldedVmConfig == nil { - return nil - } - - shieldedInstanceConfig := map[string]bool{} - - if shieldedVmConfig.EnableSecureBoot { - shieldedInstanceConfig["enable_secure_boot"] = shieldedVmConfig.EnableSecureBoot - } - - if !shieldedVmConfig.EnableVtpm { - shieldedInstanceConfig["enable_vtpm"] = shieldedVmConfig.EnableVtpm - } - - if !shieldedVmConfig.EnableIntegrityMonitoring { - shieldedInstanceConfig["enable_integrity_monitoring"] = shieldedVmConfig.EnableIntegrityMonitoring - } - - if len(shieldedInstanceConfig) == 0 { - return nil - } - - return []map[string]bool{shieldedInstanceConfig} -} - -func flattenEnableDisplay(displayDevice *compute.DisplayDevice) interface{} { - if displayDevice == nil { - return nil - } - - return displayDevice.EnableDisplay -} - -func flattenReservationAffinity(affinity *compute.ReservationAffinity) []map[string]interface{} { - if affinity == nil { - return nil - } - - flattened := map[string]interface{}{ - "type": affinity.ConsumeReservationType, - } - - if affinity.ConsumeReservationType == "SPECIFIC_RESERVATION" { - flattened["specific_reservation"] = []map[string]interface{}{{ - "key": affinity.Key, - "values": affinity.Values, - }} - } - - return []map[string]interface{}{flattened} -} - -func flattenNetworkPerformanceConfig(c *compute.NetworkPerformanceConfig) []map[string]interface{} { - if c == nil { - return nil - } - return []map[string]interface{}{ - { - "total_egress_bandwidth_tier": c.TotalEgressBandwidthTier, - }, - } -} diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go 
b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go new file mode 100644 index 000000000000..1e66d37cd40b --- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go @@ -0,0 +1,194 @@ +package compute + +import ( + "strings" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + + compute "google.golang.org/api/compute/v0.beta" +) + +func flattenAliasIpRangeTgc(ranges []*compute.AliasIpRange) []map[string]interface{} { + rangesSchema := make([]map[string]interface{}, 0, len(ranges)) + for _, ipRange := range ranges { + rangesSchema = append(rangesSchema, map[string]interface{}{ + "ip_cidr_range": ipRange.IpCidrRange, + "subnetwork_range_name": ipRange.SubnetworkRangeName, + }) + } + return rangesSchema +} + +func flattenSchedulingTgc(resp *compute.Scheduling) []map[string]interface{} { + schedulingMap := make(map[string]interface{}, 0) + + // gracefulShutdown is not in the cai asset, so graceful_shutdown is skipped. 
+ + if resp.InstanceTerminationAction != "" { + schedulingMap["instance_termination_action"] = resp.InstanceTerminationAction + } + + if resp.MinNodeCpus != 0 { + schedulingMap["min_node_cpus"] = resp.MinNodeCpus + } + + schedulingMap["on_host_maintenance"] = resp.OnHostMaintenance + + if resp.AutomaticRestart != nil && !*resp.AutomaticRestart { + schedulingMap["automatic_restart"] = *resp.AutomaticRestart + } + + if resp.Preemptible { + schedulingMap["preemptible"] = resp.Preemptible + } + + if resp.NodeAffinities != nil && len(resp.NodeAffinities) > 0 { + nodeAffinities := []map[string]interface{}{} + for _, na := range resp.NodeAffinities { + nodeAffinities = append(nodeAffinities, map[string]interface{}{ + "key": na.Key, + "operator": na.Operator, + "values": tpgresource.ConvertStringArrToInterface(na.Values), + }) + } + schedulingMap["node_affinities"] = nodeAffinities + } + + schedulingMap["provisioning_model"] = resp.ProvisioningModel + + if resp.AvailabilityDomain != 0 { + schedulingMap["availability_domain"] = resp.AvailabilityDomain + } + + if resp.MaxRunDuration != nil { + schedulingMap["max_run_duration"] = flattenComputeMaxRunDuration(resp.MaxRunDuration) + } + + if resp.OnInstanceStopAction != nil { + schedulingMap["on_instance_stop_action"] = flattenOnInstanceStopAction(resp.OnInstanceStopAction) + } + + if resp.HostErrorTimeoutSeconds != 0 { + schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds + } + + if resp.MaintenanceInterval != "" { + schedulingMap["maintenance_interval"] = resp.MaintenanceInterval + } + + if resp.LocalSsdRecoveryTimeout != nil { + schedulingMap["local_ssd_recovery_timeout"] = flattenComputeLocalSsdRecoveryTimeout(resp.LocalSsdRecoveryTimeout) + } + + if len(schedulingMap) == 0 { + return nil + } + + return []map[string]interface{}{schedulingMap} +} + +func flattenNetworkInterfacesTgc(networkInterfaces []*compute.NetworkInterface, project string) ([]map[string]interface{}, string, string, error) { + 
flattened := make([]map[string]interface{}, len(networkInterfaces)) + var internalIP, externalIP string + + for i, iface := range networkInterfaces { + var ac []map[string]interface{} + ac, externalIP = flattenAccessConfigs(iface.AccessConfigs) + + flattened[i] = map[string]interface{}{ + "network_ip": iface.NetworkIP, + "access_config": ac, + "alias_ip_range": flattenAliasIpRangeTgc(iface.AliasIpRanges), + "nic_type": iface.NicType, + "stack_type": iface.StackType, + "ipv6_access_config": flattenIpv6AccessConfigs(iface.Ipv6AccessConfigs), + "ipv6_address": iface.Ipv6Address, + "network": tpgresource.ConvertSelfLinkToV1(iface.Network), + "subnetwork": tpgresource.ConvertSelfLinkToV1(iface.Subnetwork), + "internal_ipv6_prefix_length": iface.InternalIpv6PrefixLength, + } + + subnetProject := utils.ParseFieldValue(iface.Subnetwork, "projects") + if subnetProject != project { + flattened[i]["subnetwork_project"] = subnetProject + } + + // The field name is computed, no it is not converted. + + if iface.StackType != "IPV4_ONLY" { + flattened[i]["stack_type"] = iface.StackType + } + + if iface.QueueCount != 0 { + flattened[i]["queue_count"] = iface.QueueCount + } + + if internalIP == "" { + internalIP = iface.NetworkIP + } + + if iface.NetworkAttachment != "" { + networkAttachment, err := tpgresource.GetRelativePath(iface.NetworkAttachment) + if err != nil { + return nil, "", "", err + } + flattened[i]["network_attachment"] = networkAttachment + } + + // the security_policy for a network_interface is found in one of its accessConfigs. 
+ if len(iface.AccessConfigs) > 0 && iface.AccessConfigs[0].SecurityPolicy != "" { + flattened[i]["security_policy"] = iface.AccessConfigs[0].SecurityPolicy + } else if len(iface.Ipv6AccessConfigs) > 0 && iface.Ipv6AccessConfigs[0].SecurityPolicy != "" { + flattened[i]["security_policy"] = iface.Ipv6AccessConfigs[0].SecurityPolicy + } + } + return flattened, internalIP, externalIP, nil +} + +func flattenServiceAccountsTgc(serviceAccounts []*compute.ServiceAccount) []map[string]interface{} { + result := make([]map[string]interface{}, len(serviceAccounts)) + for i, serviceAccount := range serviceAccounts { + scopes := serviceAccount.Scopes + if len(scopes) == 0 { + scopes = []string{} + } + result[i] = map[string]interface{}{ + "email": serviceAccount.Email, + "scopes": scopes, + } + } + return result +} + +func flattenGuestAcceleratorsTgc(accelerators []*compute.AcceleratorConfig) []map[string]interface{} { + acceleratorsSchema := make([]map[string]interface{}, len(accelerators)) + for i, accelerator := range accelerators { + acceleratorsSchema[i] = map[string]interface{}{ + "count": accelerator.AcceleratorCount, + "type": tpgresource.GetResourceNameFromSelfLink(accelerator.AcceleratorType), + } + } + return acceleratorsSchema +} + +func flattenReservationAffinityTgc(affinity *compute.ReservationAffinity) []map[string]interface{} { + if affinity == nil { + return nil + } + + // The values of ConsumeReservationType in cai assets are NO_ALLOCATION, SPECIFIC_ALLOCATION, ANY_ALLOCATION + crt := strings.ReplaceAll(affinity.ConsumeReservationType, "_ALLOCATION", "_RESERVATION") + flattened := map[string]interface{}{ + "type": crt, + } + + if crt == "SPECIFIC_RESERVATION" { + flattened["specific_reservation"] = []map[string]interface{}{{ + "key": affinity.Key, + "values": affinity.Values, + }} + } + + return []map[string]interface{}{flattened} +} From c4ba2b4690518b0fbdb23f93c071c523020bf811 Mon Sep 17 00:00:00 2001 From: Shuya Ma 
<87669292+shuyama1@users.noreply.github.com> Date: Mon, 19 May 2025 15:49:44 -0700 Subject: [PATCH 186/884] make `gen_app_builder_settings` o+c in `google_dialogflow_cx_agent` (#14014) --- mmv1/products/dialogflowcx/Agent.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml index cfdf2c37ba39..7ae0704d3b32 100644 --- a/mmv1/products/dialogflowcx/Agent.yaml +++ b/mmv1/products/dialogflowcx/Agent.yaml @@ -271,6 +271,7 @@ properties: type: NestedObject description: | Gen App Builder-related agent-level settings. + default_from_api: true properties: - name: 'engine' type: String From 788615192d0274d0788a646fe7534a8779e7235a Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Tue, 20 May 2025 02:59:34 -0400 Subject: [PATCH 187/884] container: allow updating stack_type in place (#14001) Signed-off-by: drfaust92 --- .../resource_container_cluster.go.tmpl | 19 +++++++++++++++++- .../resource_container_cluster_test.go.tmpl | 20 +++++++++++++++---- 2 files changed, 34 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index b7eedd22d084..18a8149f6005 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -1732,7 +1732,6 @@ func ResourceContainerCluster() *schema.Resource { "stack_type": { Type: schema.TypeString, Optional: true, - ForceNew: true, Default: "IPV4", ValidateFunc: validation.StringInSlice([]string{"IPV4", "IPV4_IPV6"}, false), Description: `The IP Stack type of the cluster. Choose between IPV4 and IPV4_IPV6. 
Default type is IPV4 Only if not set`, @@ -3423,6 +3422,24 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s's default enable private nodes has been updated to %v", d.Id(), enabled) } + if d.HasChange("ip_allocation_policy.0.stack_type") { + if stackType, ok := d.GetOk("ip_allocation_policy.0.stack_type"); ok { + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredStackType: stackType.(string), + }, + } + + updateF := updateFunc(req, "updating GKE cluster stack type") + // Call update serially. + if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s stack type has been updated", d.Id()) + } + } + if d.HasChange("addons_config") { if ac, ok := d.GetOk("addons_config"); ok { req := &container.UpdateClusterRequest{ diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index e841901d6ad7..8ae0024dc368 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -3409,7 +3409,7 @@ func TestAccContainerCluster_stackType_withDualStack(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName), + Config: testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName, "IPV4_IPV6"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "ip_allocation_policy.0.stack_type", "IPV4_IPV6"), ), @@ -3420,6 +3420,18 @@ func TestAccContainerCluster_stackType_withDualStack(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, }, + { + 
Config: testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName, "IPV4"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "ip_allocation_policy.0.stack_type", "IPV4"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, }, }) } @@ -10111,7 +10123,7 @@ resource "google_container_cluster" "with_ip_allocation_policy" { `, containerNetName, clusterName) } -func testAccContainerCluster_stackType_withDualStack(containerNetName string, clusterName string) string { +func testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName, stack string) string { return fmt.Sprintf(` resource "google_compute_network" "container_network" { name = "%s" @@ -10141,11 +10153,11 @@ resource "google_container_cluster" "with_stack_type" { ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" services_ipv4_cidr_block = "10.1.0.0/16" - stack_type = "IPV4_IPV6" + stack_type = "%s" } deletion_protection = false } -`, containerNetName, clusterName) +`, containerNetName, clusterName, stack) } func testAccContainerCluster_stackType_withSingleStack(containerNetName string, clusterName string) string { From 3f409b2ed2b39988c82f5553c4c30452e03306a0 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 20 May 2025 17:10:08 +0200 Subject: [PATCH 188/884] feat: implementation of spanner `instance_type` to be able to provision a `FREE_INSTANCE` via terraform (#13851) Co-authored-by: Sam Levenick --- mmv1/products/spanner/Instance.yaml | 43 ++++++++++--- .../encoders/spanner_instance.go.tmpl | 14 ++++- .../spanner/resource_spanner_instance_test.go | 61 ++++++++++++++++++- 3 files changed, 107 insertions(+), 11 deletions(-) diff --git a/mmv1/products/spanner/Instance.yaml b/mmv1/products/spanner/Instance.yaml index 63520a1f091c..5b453628b69b 100644 --- a/mmv1/products/spanner/Instance.yaml +++ 
b/mmv1/products/spanner/Instance.yaml @@ -108,24 +108,32 @@ properties: - name: 'num_nodes' type: Integer description: | - The number of nodes allocated to this instance. Exactly one of either node_count or processing_units - must be present in terraform. + The number of nodes allocated to this instance. Exactly one of either num_nodes, processing_units or + autoscaling_config must be present in terraform except when instance_type = FREE_INSTANCE. api_name: nodeCount default_from_api: true - exactly_one_of: + at_least_one_of: - 'num_nodes' - 'processing_units' - 'autoscaling_config' + - 'instance_type' + conflicts: + - 'processing_units' + - 'autoscaling_config' - name: 'processingUnits' type: Integer description: | - The number of processing units allocated to this instance. Exactly one of processing_units - or node_count must be present in terraform. + The number of processing units allocated to this instance. Exactly one of either num_nodes, + processing_units or autoscaling_config must be present in terraform except when instance_type = FREE_INSTANCE. default_from_api: true - exactly_one_of: + at_least_one_of: - 'num_nodes' - 'processing_units' - 'autoscaling_config' + - 'instance_type' + conflicts: + - 'num_nodes' + - 'autoscaling_config' - name: 'labels' type: KeyValueLabels description: | @@ -143,13 +151,19 @@ properties: type: NestedObject description: | The autoscaling configuration. Autoscaling is enabled if this field is set. + Exactly one of either num_nodes, processing_units or autoscaling_config must be + present in terraform except when instance_type = FREE_INSTANCE. When autoscaling is enabled, num_nodes and processing_units are treated as, OUTPUT_ONLY fields and reflect the current compute capacity allocated to the instance. 
- exactly_one_of: + at_least_one_of: - 'num_nodes' - 'processing_units' - 'autoscaling_config' + - 'instance_type' + conflicts: + - 'num_nodes' + - 'processing_units' properties: - name: 'autoscalingLimits' type: NestedObject @@ -261,6 +275,21 @@ properties: - 'STANDARD' - 'ENTERPRISE' - 'ENTERPRISE_PLUS' + - name: 'instanceType' + type: Enum + description: | + The type of this instance. The type can be used to distinguish product variants, that can affect aspects like: + usage restrictions, quotas and billing. Currently this is used to distinguish FREE_INSTANCE vs PROVISIONED instances. + When configured as FREE_INSTANCE, the field `edition` should not be configured. + default_from_api: true + at_least_one_of: + - 'num_nodes' + - 'processing_units' + - 'autoscaling_config' + - 'instance_type' + enum_values: + - 'PROVISIONED' + - 'FREE_INSTANCE' - name: 'defaultBackupScheduleType' type: Enum description: | diff --git a/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl b/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl index fdda9c430a47..25d1463ee8c9 100644 --- a/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl +++ b/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl @@ -1,7 +1,15 @@ -// Temp Logic to accommodate autoscaling_config, processing_units and num_nodes -if obj["processingUnits"] == nil && obj["nodeCount"] == nil && obj["autoscalingConfig"] == nil { - obj["nodeCount"] = 1 +if obj["instanceType"] == "FREE_INSTANCE" { + // when provisioning a FREE_INSTANCE, the following fields cannot be specified + if obj["nodeCount"] != nil || obj["processingUnits"] != nil || obj["autoscalingConfig"] != nil { + return nil, fmt.Errorf("`num_nodes`, `processing_units`, and `autoscaling_config` cannot be specified when instance_type is FREE_INSTANCE") + } +} else { + // Temp Logic to accommodate autoscaling_config, processing_units and num_nodes + if obj["processingUnits"] == nil && obj["nodeCount"] == nil && obj["autoscalingConfig"] == 
nil && obj["instanceType"] != "FREE_INSTANCE" { + obj["nodeCount"] = 1 + } } + newObj := make(map[string]interface{}) newObj["instance"] = obj if obj["name"] == nil { diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go index 854b5ca764fb..5277a16f4674 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go @@ -84,7 +84,7 @@ func TestAccSpannerInstance_noNodeCountSpecified(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccSpannerInstance_noNodeCountSpecified(idName), - ExpectError: regexp.MustCompile(".*one of `autoscaling_config,num_nodes,processing_units`\nmust be specified.*"), + ExpectError: regexp.MustCompile(".*one of\n`autoscaling_config,instance_type,num_nodes,processing_units` must be\nspecified.*"), }, }, }) @@ -497,6 +497,41 @@ func TestAccSpannerInstance_spannerInstanceWithAutoscaling(t *testing.T) { }) } +func TestAccSpannerInstance_freeInstanceBasicUpdate(t *testing.T) { + displayName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerInstance_freeInstanceBasic(displayName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_instance.main", "state"), + ), + }, + { + ResourceName: "google_spanner_instance.main", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccSpannerInstance_freeInstanceBasicUpdate(displayName), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttrSet("google_spanner_instance.main", "state"), + ), + }, + { + ResourceName: "google_spanner_instance.main", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + func testAccSpannerInstance_basic(name string) string { return fmt.Sprintf(` resource "google_spanner_instance" "basic" { @@ -793,3 +828,27 @@ resource "google_spanner_instance" "example" { } `, context) } + +func testAccSpannerInstance_freeInstanceBasic(name string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "main" { + name = "%s" + config = "regional-europe-west1" + display_name = "%s" + instance_type = "FREE_INSTANCE" +} +`, name, name) +} + +func testAccSpannerInstance_freeInstanceBasicUpdate(name string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "main" { + name = "%s" + config = "nam-eur-asia3" + display_name = "%s" + edition = "ENTERPRISE_PLUS" + instance_type = "PROVISIONED" + num_nodes = 1 +} +`, name, name) +} From 3afb982ad2493462e741feb3958cc21932dc9a14 Mon Sep 17 00:00:00 2001 From: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Date: Tue, 20 May 2025 21:29:54 +0530 Subject: [PATCH 189/884] Update go.mod (#14036) Co-authored-by: Shrishty Chandra --- mmv1/third_party/terraform/go.mod | 27 +- mmv1/third_party/terraform/go.sum | 557 ++---------------------------- 2 files changed, 38 insertions(+), 546 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 9001ab5e21bb..c0b3994665ce 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -30,19 +30,19 @@ require ( github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 - golang.org/x/net v0.39.0 - golang.org/x/oauth2 v0.29.0 - google.golang.org/api v0.229.0 - google.golang.org/genproto/googleapis/rpc 
v0.0.0-20250414145226-207652e42e2e - google.golang.org/grpc v1.71.1 + golang.org/x/net v0.40.0 + golang.org/x/oauth2 v0.30.0 + google.golang.org/api v0.233.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 + google.golang.org/grpc v1.72.0 google.golang.org/protobuf v1.36.6 ) require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect - cel.dev/expr v0.19.2 // indirect + cel.dev/expr v0.20.0 // indirect cloud.google.com/go v0.120.0 // indirect - cloud.google.com/go/auth v0.16.0 // indirect + cloud.google.com/go/auth v0.16.1 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/compute/metadata v0.6.0 // indirect cloud.google.com/go/iam v1.5.0 // indirect @@ -60,10 +60,10 @@ require ( github.com/fatih/color v1.16.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4 // indirect + github.com/go-jose/go-jose/v4 v4.0.4 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/glog v1.2.4 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/s2a-go v0.1.9 // indirect @@ -93,11 +93,12 @@ require ( github.com/oklog/run v1.0.0 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/zclconf/go-cty v1.16.2 // indirect - go.opencensus.io v0.24.0 // indirect + github.com/zeebo/errs v1.4.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect 
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect @@ -106,11 +107,11 @@ require ( go.opentelemetry.io/otel/sdk v1.35.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect go.opentelemetry.io/otel/trace v1.35.0 // indirect - golang.org/x/crypto v0.37.0 // indirect + golang.org/x/crypto v0.38.0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/sync v0.13.0 // indirect - golang.org/x/sys v0.32.0 // indirect - golang.org/x/text v0.24.0 // indirect + golang.org/x/sync v0.14.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.25.0 // indirect golang.org/x/time v0.11.0 // indirect golang.org/x/tools v0.22.0 // indirect google.golang.org/appengine v1.6.8 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 7a39295adb53..f6095302d9e2 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -1,111 +1,40 @@ bitbucket.org/creachadair/stringset v0.0.8 h1:gQqe4vs8XWgMyijfyKE6K8o4TcyGGrRXe0JvHgx5H+M= bitbucket.org/creachadair/stringset v0.0.8/go.mod h1:AgthVMyMxC/6FK1KBJ2ALdqkZObGN8hOetgpwXyMn34= -bitbucket.org/creachadair/stringset v0.0.14 h1:t1ejQyf8utS4GZV/4fM+1gvYucggZkfhb+tMobDxYOE= -bitbucket.org/creachadair/stringset v0.0.14/go.mod h1:Ej8fsr6rQvmeMDf6CCWMWGb14H9mz8kmDgPPTdiVT0w= -cel.dev/expr v0.19.1 h1:NciYrtDRIR0lNCnH1LFJegdjspNx9fI59O7TWcua/W4= -cel.dev/expr v0.19.1/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= -cel.dev/expr v0.19.2 h1:V354PbqIXr9IQdwy4SYA4xa0HXaWq1BUPAGzugBY5V4= -cel.dev/expr v0.19.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= -cel.dev/expr v0.23.1 h1:K4KOtPCJQjVggkARsjG9RWXP6O4R73aHeJMa/dmCQQg= -cel.dev/expr v0.23.1/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +cel.dev/expr v0.20.0 h1:OunBvVCfvpWlt4dN7zg3FM6TDkzOePe1+foGJ9AXeeI= +cel.dev/expr v0.20.0/go.mod 
h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE= -cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U= cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= -cloud.google.com/go v0.120.1 
h1:Z+5V7yd383+9617XDCyszmK5E4wJRJL+tquMfDj9hLM= -cloud.google.com/go v0.120.1/go.mod h1:56Vs7sf/i2jYM6ZL9NYlC82r04PThNcPS5YgFmb0rp8= -cloud.google.com/go/auth v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= -cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= -cloud.google.com/go/auth v0.16.0 h1:Pd8P1s9WkcrBE2n/PhAwKsdrR35V3Sg2II9B+ndM3CU= -cloud.google.com/go/auth v0.16.0/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= -cloud.google.com/go/auth/oauth2adapt v0.2.7 h1:/Lc7xODdqcEw8IrZ9SvwnlLX6j9FHQM74z6cBk9Rw6M= -cloud.google.com/go/auth/oauth2adapt v0.2.7/go.mod h1:NTbTTzfvPl1Y3V1nPpOgl2w6d/FjO7NNUQaWSox6ZMc= +cloud.google.com/go/auth v0.16.1 h1:XrXauHMd30LhQYVRHLGvJiYeczweKQXZxsTbV9TiguU= +cloud.google.com/go/auth v0.16.1/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/bigtable v1.33.0 h1:2BDaWLRAwXO14DJL/u8crbV2oUbMZkIa2eGq8Yao1bk= -cloud.google.com/go/bigtable v1.33.0/go.mod h1:HtpnH4g25VT1pejHRtInlFPnN5sjTxbQlsYBjh9t5l0= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= cloud.google.com/go/compute/metadata v0.6.0 
h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/iam v1.2.2 h1:ozUSofHUGf/F4tCNy/mu9tHLTaxZFLOUiKzjcgWHGIA= -cloud.google.com/go/iam v1.2.2/go.mod h1:0Ys8ccaZHdI1dEUilwzqng/6ps2YB6vRsjIe00/+6JY= cloud.google.com/go/iam v1.5.0 h1:QlLcVMhbLGOjRcGe6VTGGTyQib8dRLK2B/kYNV0+2xs= cloud.google.com/go/iam v1.5.0/go.mod h1:U+DOtKQltF/LxPEtcDLoobcsZMilSRwR7mgNL7knOpo= -cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= -cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= -cloud.google.com/go/longrunning v0.6.2 h1:xjDfh1pQcWPEvnfjZmwjKQEcHnpz6lHjfy7Fo0MK+hc= -cloud.google.com/go/longrunning v0.6.2/go.mod h1:k/vIs83RN4bE3YCswdXC5PFfWVILjm3hpEUlSko4PiI= cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= -cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= -cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= -cloud.google.com/go/monitoring v1.21.2 h1:FChwVtClH19E7pJ+e0xUhJPGksctZNVOk2UhMmblmdU= -cloud.google.com/go/monitoring v1.21.2/go.mod h1:hS3pXvaG8KgWTSz+dAdyzPrGUYmi2Q+WFX8g2hqVEZU= cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= -cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= -cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= -cloud.google.com/go/pubsub v1.0.1/go.mod 
h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.3 
h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= -github.com/ProtonMail/go-crypto v1.2.0 h1:+PhXXn4SPGd+qk76TlEePBfOfivE0zkWFenhGhFLzWs= -github.com/ProtonMail/go-crypto v1.2.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= -github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= -github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= @@ -113,23 +42,12 @@ github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod 
h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= -github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= -github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3 h1:boJj011Hh+874zpIySeApCX4GeOjPl9qhRF3QuIZq+Q= -github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= -github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= -github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/creachadair/staticfile v0.1.2/go.mod h1:a3qySzCIXEprDGxk6tSxSI+dBBdLzqeBOMhZ+o2d3pM= github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= github.com/cyphar/filepath-securejoin v0.2.5/go.mod 
h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= @@ -138,15 +56,11 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dnaeon/go-vcr v1.0.1 h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= -github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= -github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= @@ -159,33 +73,22 @@ github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2T github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= -github.com/fatih/color v1.18.0 
h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= -github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4 h1:R+19WKQClnfMXS60cP5BmMe1wjZ4u0evY2p2Ar0ZTXo= github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4/go.mod h1:GeIq9qoE43YdGnDXURnmKTnGg15pQz4mYkXSTChbneI= -github.com/gammazero/deque v1.0.0 h1:LTmimT8H7bXkkCy6gZX7zNLtkbz4NdS2z8LZuor3j34= -github.com/gammazero/deque v1.0.0/go.mod h1:iflpYvtGfM3U8S8j+sZEKIak3SAKYpA5/SQewgfXDKo= github.com/gammazero/workerpool v0.0.0-20181230203049-86a96b5d5d92 h1:EipXK6U05IQ2wtuFRn4k3h0+2lXypzItoXGVyf4r9Io= github.com/gammazero/workerpool v0.0.0-20181230203049-86a96b5d5d92/go.mod h1:w9RqFVO2BM3xwWEcAB8Fwp0OviTBBEiRmSBDfbXnd3w= -github.com/gammazero/workerpool v1.1.3 h1:WixN4xzukFoN0XSeXF6puqEqFTl2mECI9S6W44HWy9Q= -github.com/gammazero/workerpool v1.1.3/go.mod h1:wPjyBLDbyKnUn2XwwyD3EEwo9dHutia9/fwNmSHWACc= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cjio8= github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM= github.com/go-git/go-git/v5 v5.13.0 h1:vLn5wlGIh/X78El6r3Jr+30W16Blk0CTcxTYcYPWi5E= github.com/go-git/go-git/v5 v5.13.0/go.mod h1:Wjo7/JyVKtQgUNdXYXIepzWfJQkUEIGvkvVkiXRR/zw= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-jose/go-jose/v4 v4.0.4 h1:VsjPI33J0SB9vQM6PLmNjoHqMQNGPiZ0rHL7Ni7Q6/E= github.com/go-jose/go-jose/v4 v4.0.4/go.mod h1:NKb5HO1EZccyMpiZNbdUw/14tiXNyUJh188dfnMCAfc= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -199,94 +102,38 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= -github.com/golang/groupcache 
v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod 
h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= 
github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= -github.com/google/uuid v1.1.2/go.mod 
h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/enterprise-certificate-proxy v0.3.5 h1:VgzTY2jogw3xt39CusEnFJWm7rlsq5yL5q9XdLOuP5g= -github.com/googleapis/enterprise-certificate-proxy v0.3.5/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= -github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= -github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-checkpoint v0.5.0 
h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= @@ -294,16 +141,12 @@ github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9n github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= -github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g0= -github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog= github.com/hashicorp/go-plugin v1.6.2/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= -github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= -github.com/hashicorp/go-plugin v1.6.3/go.mod h1:MRobyh+Wc/nYy1V4KAXUiYfzxoYhs7V1mlH1Z7iY2h0= github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= @@ -311,72 +154,45 @@ github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/C github.com/hashicorp/go-uuid 
v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hc-install v0.9.1 h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ= github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0= -github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24= -github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I= github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64= github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ= -github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I= -github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= -github.com/hashicorp/terraform-plugin-framework v1.14.1 
h1:jaT1yvU/kEKEsxnbrn4ZHlgcxyIfjvZ41BLdlLk52fY= -github.com/hashicorp/terraform-plugin-framework v1.14.1/go.mod h1:xNUKmvTs6ldbwTuId5euAtg37dTxuyj3LHS3uj7BHQ4= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 h1:LYz4bXh3t7bTEydXOmPDPupRRnA480B/9+jV8yZvxBA= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0/go.mod h1:+BVERsnfdlhYR2YkXMBtPnmn9UsL19U3qUtSZ+Y/5MY= -github.com/hashicorp/terraform-plugin-framework-validators v0.17.0 h1:0uYQcqqgW3BMyyve07WJgpKorXST3zkpzvrOnf3mpbg= -github.com/hashicorp/terraform-plugin-framework-validators v0.17.0/go.mod h1:VwdfgE/5Zxm43flraNa0VjcvKQOGVrcO4X8peIri0T0= github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M= github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= github.com/hashicorp/terraform-plugin-mux v0.17.0 h1:/J3vv3Ps2ISkbLPiZOLspFcIZ0v5ycUXCEQScudGCCw= github.com/hashicorp/terraform-plugin-mux v0.17.0/go.mod h1:yWuM9U1Jg8DryNfvCp+lH70WcYv6D8aooQxxxIzFDsE= -github.com/hashicorp/terraform-plugin-mux v0.18.0 h1:7491JFSpWyAe0v9YqBT+kel7mzHAbO5EpxxT0cUL/Ms= -github.com/hashicorp/terraform-plugin-mux v0.18.0/go.mod h1:Ho1g4Rr8qv0qTJlcRKfjjXTIO67LNbDtM6r+zHUNHJQ= github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 h1:7/iejAPyCRBhqAg3jOx+4UcAhY0A+Sg8B+0+d/GxSfM= github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0/go.mod h1:TiQwXAjFrgBf5tg5rvBRz8/ubPULpU0HjSaVi5UoJf8= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1 h1:WNMsTLkZf/3ydlgsuXePa3jvZFwAJhruxTxP/c1Viuw= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1/go.mod h1:P6o64QS97plG44iFzSM6rAn6VJIC/Sy9a9IkEtl79K4= github.com/hashicorp/terraform-plugin-testing v1.5.1 h1:T4aQh9JAhmWo4+t1A7x+rnxAJHCDIYW9kXyo4sVO92c= 
github.com/hashicorp/terraform-plugin-testing v1.5.1/go.mod h1:dg8clO6K59rZ8w9EshBmDp1CxTIPu3yA4iaDpX1h5u0= -github.com/hashicorp/terraform-plugin-testing v1.12.0 h1:tpIe+T5KBkA1EO6aT704SPLedHUo55RenguLHcaSBdI= -github.com/hashicorp/terraform-plugin-testing v1.12.0/go.mod h1:jbDQUkT9XRjAh1Bvyufq+PEH1Xs4RqIdpOQumSgSXBM= github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA= github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU= -github.com/hashicorp/terraform-registry-address v0.2.5 h1:2GTftHqmUhVOeuu9CW3kwDkRe4pcBDq0uuK5VJngU1M= -github.com/hashicorp/terraform-registry-address v0.2.5/go.mod h1:PpzXWINwB5kuVS5CA7m1+eO2f1jKb5ZDIxrOPfpnGkg= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= -github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= -github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= -github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= -github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jhump/protoreflect v1.15.1 
h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -387,14 +203,10 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 h1:sIXJOMrYnQZJu7OB7ANSF4MYri2fTEGIsRLz6LwI4xE= -github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= 
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= -github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -408,32 +220,23 @@ github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJ github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= -github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= -github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/hashstructure v1.1.0 h1:P6P1hdjqAAknpY/M1CGipelZgp+4y9ja9kmUZPXP+H0= github.com/mitchellh/hashstructure v1.1.0/go.mod h1:xUDAozZz0Wmdiufv0uyhnHkUTN6/6d8ulp4AwfLKrmA= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5 h1:8Q0qkMVC/MmWkpIdlvZgcv2o2jrlF6zqVOh7W5YHdMA= -github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= 
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= -github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= @@ -441,27 +244,15 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.8.1 
h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= -github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= -github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= 
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= @@ -473,9 +264,7 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70= @@ -484,450 +273,152 @@ github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6 github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod 
h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= -go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 h1:rgMkmiGfix9vFJDcDi1PK8WEQP4FLQwLDfhp5ZLpFeE= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0/go.mod h1:ijPqXp5P6IRRByFVVg9DY8P5HkxkHE5ARIa+86aXPf4= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= -go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= -go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ= -go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= go.opentelemetry.io/otel/metric v1.35.0 
h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= -go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A= -go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= -go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk= -go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= -go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= -go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go4.org/netipx v0.0.0-20231129151722-fdeea329fbba h1:0b9z3AuHCjxk0x/opv64kcgZLBseWJUpBw5I82+2U4M= go4.org/netipx v0.0.0-20231129151722-fdeea329fbba/go.mod 
h1:PLyyIXexvUFg3Owu6p/WfdlivPbZJsZdgWZlrGope/Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp 
v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 h1:ESSUROHIBHg7USnszlcdmjBEwdMj9VUvU+OPk4yl2mc= golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= -golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= -golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod 
h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net 
v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= +golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.28.0 h1:CrgCKl8PPAVtLnU3c+EDw6x11699EWlsDeWNWKdIOkc= -golang.org/x/oauth2 v0.28.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= -golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= -golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= +golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= -golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= +golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod 
h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= +golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= 
-golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools 
v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools 
v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= -golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= -golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod 
h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.226.0 h1:9A29y1XUD+YRXfnHkO66KggxHBZWg9LsTGqm7TkUvtQ= -google.golang.org/api v0.226.0/go.mod h1:WP/0Xm4LVvMOCldfvOISnWquSRWbG2kArDZcg+W2DbY= -google.golang.org/api v0.229.0 h1:p98ymMtqeJ5i3lIBMj5MpR9kzIIgzpHHh8vQ+vgAzx8= -google.golang.org/api v0.229.0/go.mod h1:wyDfmq5g1wYJWn29O22FDWN48P7Xcz0xz+LBpptYvB0= +google.golang.org/api v0.233.0 h1:iGZfjXAJiUFSSaekVB7LzXl6tRfEKhUN7FkZN++07tI= +google.golang.org/api v0.233.0/go.mod h1:TCIVLLlcwunlMpZIhIp7Ltk77W+vUSdUKAAIlbxY44c= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod 
h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto 
v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= 
-google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28 h1:KJjNNclfpIkVqrZlTWcgOOaVQ00LdBnoEaRfkUx760s= -google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:mt9/MofW7AWQ+Gy179ChOnvmJatV8YHUmrcedo9CIFI= google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= -google.golang.org/genproto v0.0.0-20250414145226-207652e42e2e h1:mYHFv3iX85YMwhGSaZS4xpkM8WQDmJUovz7yqsFrwDk= -google.golang.org/genproto v0.0.0-20250414145226-207652e42e2e/go.mod h1:TQT1YpH/rlDCS5+EuFaqPIMqDfuNMFR1OI8EcZJGgAk= -google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24= -google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw= google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e h1:UdXH7Kzbj+Vzastr5nVfccbmFsmYNygVLSPk1pEfDoY= google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e/go.mod h1:085qFyf2+XaZlRdCgKNCIZ3afY2p4HHZdoIRpId8F4A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb 
h1:TLPQVbx1GJ8VKZxz52VAxl1EBgKXXbTiU9Fc5fZeLn4= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 h1:IqsN8hx+lWLqlN+Sc3DoMy/watjofWiU8sRFgQ8fhKM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= 
-google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg= -google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= -google.golang.org/grpc v1.71.1 h1:ffsFWr7ygTUscGPI0KKK6TLrGz0476KUvvsbqWK0rPI= -google.golang.org/grpc v1.71.1/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= -google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= 
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod 
h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= From fbafea95e4d70e8005c42339593d25b070d53a96 Mon Sep 17 00:00:00 2001 From: Sharan Teja M Date: Tue, 20 May 2025 21:53:22 +0530 Subject: [PATCH 190/884] added additionalPipelineOptions to google_dataflow_flex_template_job (#13963) --- ...esource_dataflow_flex_template_job.go.tmpl | 44 ++++-- ..._dataflow_flex_template_job_meta.yaml.tmpl | 1 + ...ce_dataflow_flex_template_job_test.go.tmpl | 139 ++++++++++++++++++ .../dataflow_flex_template_job.html.markdown | 2 + 4 files changed, 170 insertions(+), 16 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl index 3a77c6601e69..a49ae520a043 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl @@ -222,6 +222,15 @@ func ResourceDataflowFlexTemplateJob() *schema.Resource { }, }, + "additional_pipeline_options": { + Type: schema.TypeSet, + Optional: true, + Description: `List of pipeline options that should be used by the job. 
An example value is ["numberOfWorkerHarnessThreads=20"].`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "enable_streaming_engine": { Type: schema.TypeBool, Optional: true, @@ -313,6 +322,8 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t additionalExperiments := tpgresource.ConvertStringSet(d.Get("additional_experiments").(*schema.Set)) + additionalPipelineOptions := tpgresource.ConvertStringSet(d.Get("additional_pipeline_options").(*schema.Set)) + var autoscalingAlgorithm string autoscalingAlgorithm, updatedParameters = dataflowFlexJobTypeTransferVar("autoscaling_algorithm", "autoscalingAlgorithm", updatedParameters, d) @@ -383,22 +394,23 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t launcherMachineType, updatedParameters := dataflowFlexJobTypeTransferVar("launcher_machine_type", "launcherMachineType", updatedParameters, d) env := dataflow.FlexTemplateRuntimeEnvironment{ - AdditionalUserLabels: tpgresource.ExpandStringMap(d, "effective_labels"), - AutoscalingAlgorithm: autoscalingAlgorithm, - NumWorkers: int64(numWorkers), - MaxWorkers: int64(maxNumWorkers), - Network: network, - ServiceAccountEmail: serviceAccountEmail, - Subnetwork: subnetwork, - TempLocation: tempLocation, - StagingLocation: stagingLocation, - MachineType: machineType, - KmsKeyName: kmsKeyName, - IpConfiguration: ipConfiguration, - EnableStreamingEngine: enableStreamingEngine, - AdditionalExperiments: additionalExperiments, - SdkContainerImage: sdkContainerImage, - LauncherMachineType: launcherMachineType, + AdditionalUserLabels: tpgresource.ExpandStringMap(d, "effective_labels"), + AutoscalingAlgorithm: autoscalingAlgorithm, + NumWorkers: int64(numWorkers), + MaxWorkers: int64(maxNumWorkers), + Network: network, + ServiceAccountEmail: serviceAccountEmail, + Subnetwork: subnetwork, + TempLocation: tempLocation, + StagingLocation: stagingLocation, + MachineType: machineType, + KmsKeyName: kmsKeyName, + 
IpConfiguration: ipConfiguration, + EnableStreamingEngine: enableStreamingEngine, + AdditionalExperiments: additionalExperiments, + AdditionalPipelineOptions: additionalPipelineOptions, + SdkContainerImage: sdkContainerImage, + LauncherMachineType: launcherMachineType, } return env, updatedParameters, nil } diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl index dfc601232de8..05695e5a3529 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl @@ -6,6 +6,7 @@ api_version: 'v1beta3' api_resource_type_kind: 'Job' fields: - field: 'additional_experiments' + - field: 'additional_pipeline_options' - field: 'autoscaling_algorithm' - field: 'container_spec_gcs_path' - field: 'effective_labels' diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl index aec6c6971afd..cb7c2847a19d 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl @@ -2,6 +2,7 @@ package dataflow_test {{- if ne $.TargetVersionName "ga" }} import ( + "encoding/json" "fmt" "regexp" "strings" @@ -372,6 +373,40 @@ func TestAccDataflowFlexTemplateJob_withAdditionalExperiments(t *testing.T) { }) } +func TestAccDataflowFlexTemplateJob_withAdditionalPipelineOptions(t *testing.T) { + // Dataflow responses include serialized java classes and bash commands + // This makes body comparison infeasible + acctest.SkipIfVcr(t) + t.Parallel() + + randStr := acctest.RandString(t, 10) + job := "tf-test-dataflow-job-" + 
randStr + additionalPipelineOptions := []string{"numberOfWorkerHarnessThreads=200"} + bucket := "tf-test-dataflow-bucket-" + randStr + topic := "tf-test-topic" + randStr + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataflowJobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataflowFlexTemplateJob_additionalPipelineOptions(job, bucket, topic, additionalPipelineOptions), + Check: resource.ComposeTestCheckFunc( + testAccDataflowFlexJobExists(t, "google_dataflow_flex_template_job.flex_job_pipeline_options", false), + testAccDataflowFlexTemplateJobHasAdditionalPipelineOptions(t, "google_dataflow_flex_template_job.flex_job_pipeline_options", additionalPipelineOptions, false), + ), + }, + { + ResourceName: "google_dataflow_flex_template_job.flex_job_pipeline_options", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_pipeline_options", "container_spec_gcs_path", "labels", "terraform_labels"}, + }, + }, + }) +} + func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { // This resource uses custom retry logic that cannot be sped up without // modifying the actual resource @@ -675,6 +710,55 @@ func testAccDataflowFlexTemplateJobHasAdditionalExperiments(t *testing.T, res st } } +func testAccDataflowFlexTemplateJobHasAdditionalPipelineOptions(t *testing.T, res string, pipelineOptions []string, wait bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[res] + if !ok { + return fmt.Errorf("resource %q not found in state", res) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + config := acctest.GoogleProviderConfig(t) + + job, err := 
config.NewDataflowClient(config.UserAgent).Projects.Jobs.Get(config.Project, rs.Primary.ID).View("JOB_VIEW_ALL").Do() + if err != nil { + return fmt.Errorf("dataflow job does not exist") + } + + var sdkPipelineOptionsMap map[string]interface{} + // Unmarshal the SdkPipelineOptions + err = json.Unmarshal(job.Environment.SdkPipelineOptions, &sdkPipelineOptionsMap) + if err != nil { + return fmt.Errorf("Error unmarshaling SdkPipelineOptions: '%s'", err) + } + + //Capture the options inside SdkPipelineOptions + options, ok := sdkPipelineOptionsMap["options"] + if !ok { + return fmt.Errorf("Error: 'options' field not found within actualPipelineOptionsMap.") + } + + actualPipelineOptionsMap, isMap := options.(map[string]interface{}) + if !isMap { + return fmt.Errorf("Error: 'options' field is not a JSON object") + } + + // Check if each pipelineOption exists in SdkPipelineOptions.options + for _, expectedPipelineOption := range pipelineOptions { + pOption := strings.SplitN(expectedPipelineOption, "=", 2) + key := pOption[0] + _, ok := actualPipelineOptionsMap[key] + if !ok { + return fmt.Errorf("Expected pipeline option '%s' not found in SdkPipelineOptions", expectedPipelineOption) + } + } + + return nil + } +} + func testAccDataflowFlexTemplateGetGeneratedInstanceTemplate(t *testing.T, s *terraform.State, res string) (*compute.InstanceTemplate, error) { rs, ok := s.RootModule().Resources[res] if !ok { @@ -1419,6 +1503,61 @@ resource "google_dataflow_flex_template_job" "flex_job_experiments" { `, topicName, bucket, job, strings.Join(experiments, `", "`)) } +func testAccDataflowFlexTemplateJob_additionalPipelineOptions(job, bucket, topicName string, pipelineOptions []string) string { + return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_pubsub_topic" "example" { + name = "%s" +} + +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US-CENTRAL1" + force_destroy = true + uniform_bucket_level_access = true +} + +resource 
"google_storage_bucket_object" "schema" { + name = "schema.json" + bucket = google_storage_bucket.bucket.name + content = < Date: Tue, 20 May 2025 10:01:09 -0700 Subject: [PATCH 191/884] Removed unreleased resource from `google_beyondcorp_application` deprecation message (#13991) --- mmv1/products/beyondcorp/Application.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml index a4d4e862bbaf..7d82fb2672c8 100644 --- a/mmv1/products/beyondcorp/Application.yaml +++ b/mmv1/products/beyondcorp/Application.yaml @@ -13,7 +13,7 @@ --- name: Application -deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' +deprecation_message: '`google_beyondcorp_application` is deprecated.' description: Specifies application endpoint(s) to protect behind a Security Gateway. base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications update_mask: true From ad22d072337c517ddfc767e8fb13e596fcff7c76 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 20 May 2025 10:39:17 -0700 Subject: [PATCH 192/884] Removed post_create for identityplatform tenant (#13975) --- mmv1/products/identityplatform/Tenant.yaml | 2 -- .../terraform/post_create/gcip_tenant.go.tmpl | 14 -------------- 2 files changed, 16 deletions(-) delete mode 100644 mmv1/templates/terraform/post_create/gcip_tenant.go.tmpl diff --git a/mmv1/products/identityplatform/Tenant.yaml b/mmv1/products/identityplatform/Tenant.yaml index 97255b481867..316f37493103 100644 --- a/mmv1/products/identityplatform/Tenant.yaml +++ b/mmv1/products/identityplatform/Tenant.yaml @@ -31,8 +31,6 @@ timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 -custom_code: - post_create: 'templates/terraform/post_create/gcip_tenant.go.tmpl' examples: - name: 'identity_platform_tenant_basic' primary_resource_id: 
'tenant' diff --git a/mmv1/templates/terraform/post_create/gcip_tenant.go.tmpl b/mmv1/templates/terraform/post_create/gcip_tenant.go.tmpl deleted file mode 100644 index e9a0de49ec1d..000000000000 --- a/mmv1/templates/terraform/post_create/gcip_tenant.go.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -// `name` is autogenerated from the api so needs to be set post-create -name, ok := res["name"] -if !ok { - return fmt.Errorf("Create response didn't contain critical fields. Create may not have succeeded.") -} -if err := d.Set("name", tpgresource.GetResourceNameFromSelfLink(name.(string))); err != nil { - return fmt.Errorf("Error setting name: %s", err) -} -// Store the ID now that we have set the computed name -id, err = tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/tenants/{{"{{"}}name{{"}}"}}") -if err != nil { - return fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) From 819b8f1535ccc7767ac1567a74915bdf0d4325ea Mon Sep 17 00:00:00 2001 From: James Alseth Date: Tue, 20 May 2025 11:16:31 -0700 Subject: [PATCH 193/884] chore: Clean up logic in MMV1 binary (#13927) Signed-off-by: James Alseth Co-authored-by: Riley Karson --- mmv1/main.go | 41 ++++++-------------------- mmv1/provider/terraform_tgc_cai2hcl.go | 2 +- 2 files changed, 10 insertions(+), 33 deletions(-) diff --git a/mmv1/main.go b/mmv1/main.go index b8465d04c7d0..469fa990faba 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -140,46 +140,26 @@ func main() { log.Printf("Building %s version", *version) log.Printf("Building %s provider", providerName) - // Building compute takes a long time and can't be parallelized within the product - // so lets build it first - sort.Slice(allProductFiles, func(i int, j int) bool { - if allProductFiles[i] == "products/compute" { - return true - } - return false - }) - - var providerToGenerate provider.Provider - - productFileChannel := make(chan string, len(allProductFiles)) productsForVersionChannel := make(chan *api.Product, len(allProductFiles)) - 
for _, pf := range allProductFiles { - productFileChannel <- pf - } - - for i := 0; i < len(allProductFiles); i++ { + for _, productFile := range allProductFiles { wg.Add(1) - go GenerateProduct(productFileChannel, providerToGenerate, productsForVersionChannel, startTime, productsToGenerate, *resourceToGenerate, *overrideDirectory, generateCode, generateDocs) + go GenerateProduct(productFile, productsForVersionChannel, startTime, productsToGenerate, *resourceToGenerate, *overrideDirectory, generateCode, generateDocs) } wg.Wait() - close(productFileChannel) close(productsForVersionChannel) var productsForVersion []*api.Product - for p := range productsForVersionChannel { productsForVersion = append(productsForVersion, p) } - slices.SortFunc(productsForVersion, func(p1, p2 *api.Product) int { return strings.Compare(strings.ToLower(p1.Name), strings.ToLower(p2.Name)) }) // In order to only copy/compile files once per provider this must be called outside - // of the products loop. This will get called with the provider from the final iteration - // of the loop - providerToGenerate = setProvider(*forceProvider, *version, productsForVersion[0], startTime) + // of the products loop. Create an MMv1 provider with an arbitrary product (the first loaded). 
+ providerToGenerate := newProvider(*forceProvider, *version, productsForVersion[0], startTime) providerToGenerate.CopyCommonFiles(*outputPath, generateCode, generateDocs) if generateCode { @@ -189,10 +169,8 @@ func main() { provider.FixImports(*outputPath, *showImportDiffs) } -func GenerateProduct(productChannel chan string, providerToGenerate provider.Provider, productsForVersionChannel chan *api.Product, startTime time.Time, productsToGenerate []string, resourceToGenerate, overrideDirectory string, generateCode, generateDocs bool) { - +func GenerateProduct(productName string, productsForVersionChannel chan *api.Product, startTime time.Time, productsToGenerate []string, resourceToGenerate, overrideDirectory string, generateCode, generateDocs bool) { defer wg.Done() - productName := <-productChannel productYamlPath := path.Join(productName, "product.yaml") @@ -308,8 +286,6 @@ func GenerateProduct(productChannel chan string, providerToGenerate provider.Pro productApi.Objects = resources productApi.Validate() - providerToGenerate = setProvider(*forceProvider, *version, productApi, startTime) - productsForVersionChannel <- productApi if !slices.Contains(productsToGenerate, productName) { @@ -318,12 +294,13 @@ func GenerateProduct(productChannel chan string, providerToGenerate provider.Pro } log.Printf("%s: Generating files", productName) + + providerToGenerate := newProvider(*forceProvider, *version, productApi, startTime) providerToGenerate.Generate(*outputPath, productName, resourceToGenerate, generateCode, generateDocs) } -// Sets provider via flag -func setProvider(forceProvider, version string, productApi *api.Product, startTime time.Time) provider.Provider { - switch forceProvider { +func newProvider(providerName, version string, productApi *api.Product, startTime time.Time) provider.Provider { + switch providerName { case "tgc": return provider.NewTerraformGoogleConversion(productApi, version, startTime) case "tgc_cai2hcl": diff --git 
a/mmv1/provider/terraform_tgc_cai2hcl.go b/mmv1/provider/terraform_tgc_cai2hcl.go index 780344278f2c..6a0da4c838b0 100644 --- a/mmv1/provider/terraform_tgc_cai2hcl.go +++ b/mmv1/provider/terraform_tgc_cai2hcl.go @@ -63,7 +63,7 @@ func (cai2hcl CaiToTerraformConversion) CopyCommonFiles(outputFolder string, gen if !generateCode { return } - log.Printf("Coping cai2hcl common files") + log.Print("Copying cai2hcl common files") if err := os.MkdirAll(outputFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating output directory %v: %v", outputFolder, err)) From e8f91585c64bd5ce78ee74c1e0dec2e8d1b65162 Mon Sep 17 00:00:00 2001 From: stevenyang72 Date: Tue, 20 May 2025 11:26:39 -0700 Subject: [PATCH 194/884] Add workload identity pool namespace. (#13920) --- .../WorkloadIdentityPoolNamespace.yaml | 116 ++++++++++++++ ...m_workload_identity_pool_namespace.go.tmpl | 44 ++++++ ...m_workload_identity_pool_namespace.go.tmpl | 22 +++ ...load_identity_pool_namespace_basic.tf.tmpl | 13 ++ ...kload_identity_pool_namespace_full.tf.tmpl | 15 ++ ...ad_identity_pool_namespace_id_test.go.tmpl | 38 +++++ ...kload_identity_pool_namespace_test.go.tmpl | 148 ++++++++++++++++++ 7 files changed, 396 insertions(+) create mode 100644 mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml create mode 100644 mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl create mode 100644 mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl create mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl diff --git 
a/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml new file mode 100644 index 000000000000..fc89c4f2b9c8 --- /dev/null +++ b/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml @@ -0,0 +1,116 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WorkloadIdentityPoolNamespace' +description: | + Represents a namespace for a workload identity pool. Namespaces are used to segment identities + within the pool. 
+references: + guides: + 'Configure managed workload identity authentication for Compute Engine': 'https://cloud.google.com/iam/docs/create-managed-workload-identities' + 'Configure managed workload identity authentication for GKE': 'https://cloud.google.com/iam/docs/create-managed-workload-identities-gke' + api: 'https://cloud.google.com/iam/docs/reference/rest/v1/projects.locations.workloadIdentityPools.namespaces' +min_version: beta +base_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces' +self_link: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}' +create_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces?workloadIdentityPoolNamespaceId={{workload_identity_pool_namespace_id}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}' +autogen_async: true +custom_code: + constants: 'templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl' + decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' + test_check_destroy: 'templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl' +examples: + - name: 'iam_workload_identity_pool_namespace_basic' + primary_resource_id: 'example' + vars: + workload_identity_pool_id: 'example-pool' + workload_identity_pool_namespace_id: 'example-nmspc' + - name: 'iam_workload_identity_pool_namespace_full' + primary_resource_id: 'example' + vars: + workload_identity_pool_id: 'example-pool' + workload_identity_pool_namespace_id: 'example-nmspc' +parameters: + - name: 'workload_identity_pool_id' + type: String + required: true + immutable: true + url_param_only: true + description: | + The ID to use for the pool, which becomes 
the final component of the resource name. This + value should be 4-32 characters, and may contain the characters [a-z0-9-]. The prefix + `gcp-` is reserved for use by Google, and may not be specified. + - name: 'workload_identity_pool_namespace_id' + type: String + required: true + immutable: true + url_param_only: true + description: | + The ID to use for the namespace. This value must: + * contain at most 63 characters + * contain only lowercase alphanumeric characters or `-` + * start with an alphanumeric character + * end with an alphanumeric character + + + The prefix `gcp-` will be reserved for future uses. + validation: + function: 'ValidateWorkloadIdentityPoolNamespaceId' +properties: + - name: 'name' + type: String + description: | + The resource name of the namespace as + `projects/{project_number}/locations/global/workloadIdentityPools/{workload_identity_pool_id}/namespaces/{workload_identity_pool_namespace_id}`. + output: true + - name: 'description' + type: String + description: | + A description of the namespace. Cannot exceed 256 characters. + - name: 'state' + type: Enum + description: | + The current state of the namespace. + * `ACTIVE`: The namespace is active. + * `DELETED`: The namespace is soft-deleted. Soft-deleted namespaces are permanently deleted + after approximately 30 days. You can restore a soft-deleted namespace using + UndeleteWorkloadIdentityPoolNamespace. You cannot reuse the ID of a soft-deleted namespace + until it is permanently deleted. + output: true + enum_values: + - 'ACTIVE' + - 'DELETED' + - name: 'disabled' + type: Boolean + description: | + Whether the namespace is disabled. If disabled, credentials may no longer be issued for + identities within this namespace, however existing credentials will still be accepted until + they expire. + - name: 'ownerService' + type: NestedObject + description: | + Defines the owner that is allowed to mutate this resource. If present, this resource can only + be mutated by the owner. 
+ output: true + properties: + - name: 'principalSubject' + type: String + description: | + The service agent principal subject, e.g. + `serviceAccount:service-1234@gcp-sa-gkehub.iam.gserviceaccount.com`. + output: true diff --git a/mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl b/mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl new file mode 100644 index 000000000000..d811c515103b --- /dev/null +++ b/mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl @@ -0,0 +1,44 @@ +const workloadIdentityPoolNamespaceIdRegexp = `^[0-9a-z-]+$` + +func ValidateWorkloadIdentityPoolNamespaceId(v interface{}, k string) (ws []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile(workloadIdentityPoolNamespaceIdRegexp).MatchString(value) { + errors = append(errors, fmt.Errorf( + "%q must contain only lowercase letters (a-z), numbers (0-9), or dashes (-)", k)) + } + + if len(value) < 2 { + errors = append(errors, fmt.Errorf( + "%q cannot be less than 2 characters", k)) + return + } + + if len(value) > 63 { + errors = append(errors, fmt.Errorf( + "%q cannot be greater than 63 characters", k)) + } + + isLowerAlphaNumeric := func(r byte) bool { + return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'z') + } + + firstChar := value[0] + if !isLowerAlphaNumeric(firstChar) { + errors = append(errors, fmt.Errorf( + "%q must start with an alphanumeric character", k)) + } + + lastChar := value[len(value) - 1] + if !isLowerAlphaNumeric(lastChar) { + errors = append(errors, fmt.Errorf( + "%q must end with an alphanumeric character", k)) + } + + if strings.HasPrefix(value, "gcp-") { + errors = append(errors, fmt.Errorf( + "%q (%q) can not start with \"gcp-\"", k, value)) + } + + return +} diff --git a/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl new file 
mode 100644 index 000000000000..7207b1e052af --- /dev/null +++ b/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl @@ -0,0 +1,22 @@ +config := acctest.GoogleProviderConfig(t) + +url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}IAMBetaBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/global/workloadIdentityPools/{{"{{"}}workload_identity_pool_id{{"}}"}}/namespaces/{{"{{"}}workload_identity_pool_namespace_id{{"}}"}}") +if err != nil { + return err +} + +res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: url, + UserAgent: config.UserAgent, +}) +if err != nil { + return nil +} + +if v := res["state"]; v == "DELETED" { + return nil +} + +return fmt.Errorf("IAMBetaWorkloadIdentityPoolNamespace still exists at %s", url) diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl new file mode 100644 index 000000000000..76f9bdb23d27 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl @@ -0,0 +1,13 @@ +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "{{$.PrimaryResourceId}}" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl new file mode 100644 index 000000000000..6a8760508a29 --- /dev/null +++ 
b/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl @@ -0,0 +1,15 @@ +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "{{$.PrimaryResourceId}}" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" + description = "Example Namespace in a Workload Identity Pool" + disabled = true +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl new file mode 100644 index 000000000000..5b6b9b3ae8a9 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl @@ -0,0 +1,38 @@ +{{- if ne $.TargetVersionName "ga" -}} +package iambeta_test + +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iambeta" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateWorkloadIdentityPoolNamespaceId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "basic", Value: "foobar"}, + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foos"}, + {TestName: "long", Value: "12345678901234567890123456789012"}, + {TestName: "has a hyphen", Value: "foo-bar"}, + + // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: 
"foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: "too short", Value: "f", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 64), ExpectError: true}, + {TestName: "starts with non-alphanumeric", Value: "-foobar", ExpectError: true}, + {TestName: "ends with non-alphanumeric", Value: "foobar-", ExpectError: true}, + } + + es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolNamespaceId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkloadIdentityPoolNamespace names: %v", es) + } +} +{{- end -}} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl new file mode 100644 index 000000000000..c81bda7329c1 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl @@ -0,0 +1,148 @@ +{{- if ne $.TargetVersionName "ga" -}} +package iambeta_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccIAMBetaWorkloadIdentityPoolNamespace_minimal(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolNamespaceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPoolNamespace_minimal(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_namespace.example", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolNamespace_updated(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_iam_workload_identity_pool_namespace.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_iam_workload_identity_pool_namespace.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, + }, + }, + }) +} + +func TestAccIAMBetaWorkloadIdentityPoolNamespace_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolNamespaceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPoolNamespace_full(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_namespace.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolNamespace_updated(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_iam_workload_identity_pool_namespace.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_iam_workload_identity_pool_namespace.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, + }, + }, + }) +} + +func 
testAccIAMBetaWorkloadIdentityPoolNamespace_minimal(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "example" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "tf-test-example-nmspc%{random_suffix}" +} +`, context) +} + +func testAccIAMBetaWorkloadIdentityPoolNamespace_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "example" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "tf-test-example-nmspc%{random_suffix}" + description = "Example Namespace in a Workload Identity Pool" + disabled = true +} +`, context) +} + +func testAccIAMBetaWorkloadIdentityPoolNamespace_updated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "example" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "tf-test-example-nmspc%{random_suffix}" + description = "Updated Namespace in a Workload Identity Pool" + disabled = false +} +`, context) +} +{{- end -}} From 
a8ef65ad31982cb6bf516e1fcd149208fcc22421 Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Tue, 20 May 2025 14:28:26 -0400 Subject: [PATCH 195/884] container: allow updating enable_multi_networking (#14000) Signed-off-by: drfaust92 --- .../resource_container_cluster.go.tmpl | 18 +++- .../resource_container_node_pool_test.go.tmpl | 102 ++++++++++++++++++ 2 files changed, 119 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 18a8149f6005..0f8bef478bd6 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2172,7 +2172,6 @@ func ResourceContainerCluster() *schema.Resource { "enable_multi_networking": { Type: schema.TypeBool, Optional: true, - ForceNew: true, Description: `Whether multi-networking is enabled for this cluster.`, Default: false, }, @@ -3748,6 +3747,23 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s In-Transit Encryption Config has been updated to %v", d.Id(), inTransitConfig) } + if d.HasChange("enable_multi_networking") { + enabled := d.Get("enable_multi_networking").(bool) + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredEnableMultiNetworking: enabled, + ForceSendFields: []string{"DesiredEnableMultiNetworking"}, + + }, + } + updateF := updateFunc(req, "updating multi networking") + // Call update serially. 
+ if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s Multi Networking has been updated to %v", d.Id(), enabled) + } if d.HasChange("enable_fqdn_network_policy") { enabled := d.Get("enable_fqdn_network_policy").(bool) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index c373757996c1..bb9989e18ca6 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -817,6 +817,21 @@ func TestAccContainerNodePool_withMultiNicNetworkConfig(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccContainerNodePool_withMultiNicNetworkConfig(cluster, np, network), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.cluster", "enable_multi_networking", "true"), + ), + }, + { + ResourceName: "google_container_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"network_config.0.create_pod_range", "deletion_protection"}, + }, + { + Config: testAccContainerNodePool_withMultiNicNetworkConfigUpdate(cluster, np, network), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.cluster", "enable_multi_networking", "false"), + ), }, { ResourceName: "google_container_cluster.cluster", @@ -3825,6 +3840,93 @@ resource "google_container_node_pool" "with_multi_nic" { `, network, network, network, network, network, network, cluster, np) } +func testAccContainerNodePool_withMultiNicNetworkConfigUpdate(cluster, np, network string) string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "%s-1" + auto_create_subnetworks = false +} + +resource "google_compute_network" 
"addn_net_1" { + name = "%s-2" + auto_create_subnetworks = false +} + +resource "google_compute_network" "addn_net_2" { + name = "%s-3" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "%s-subnet-1" + network = google_compute_network.container_network.name + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } + + lifecycle { + ignore_changes = [ + # The auto nodepool creates a secondary range which diffs this resource. + secondary_ip_range, + ] + } +} + +resource "google_compute_subnetwork" "subnet1" { + name = "%s-subnet-2" + network = google_compute_network.addn_net_1.name + ip_cidr_range = "10.0.37.0/24" + region = "us-central1" +} + +resource "google_compute_subnetwork" "subnet2" { + name = "%s-subnet-3" + network = google_compute_network.addn_net_2.name + ip_cidr_range = "10.0.38.0/24" + region = "us-central1" + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.64.0/19" + } +} + +resource "google_container_cluster" "cluster" { + name = "%s" + location = "us-central1" + initial_node_count = 1 + + network = google_compute_network.container_network.name + subnetwork = google_compute_subnetwork.container_subnetwork.name + ip_allocation_policy { + cluster_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[0].range_name + services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name + } + private_cluster_config { + enable_private_nodes = true + master_ipv4_cidr_block = "10.42.0.0/28" + } + release_channel { + channel = "RAPID" + } + enable_multi_networking = false + datapath_provider = "ADVANCED_DATAPATH" + deletion_protection = false +} + +`, network, network, network, network, network, network, 
cluster) +} + {{ if not (or (eq $.TargetVersionName ``) (eq $.TargetVersionName `ga`)) }} func testAccContainerNodePool_withBootDiskKmsKey(cluster, np, networkName, subnetworkName string) string { return fmt.Sprintf(` From 1f42bf3f67f02b41f6e4a135a78aae77fae71035 Mon Sep 17 00:00:00 2001 From: oferhandel-google Date: Tue, 20 May 2025 12:41:15 -0700 Subject: [PATCH 196/884] Cloud DMS PrivateConnection support for create_without_validation (#13878) Co-authored-by: Nick Elliot --- .../databasemigrationservice/PrivateConnection.yaml | 11 ++++++++++- ...abase_migration_service_private_connection.tf.tmpl | 2 ++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/mmv1/products/databasemigrationservice/PrivateConnection.yaml b/mmv1/products/databasemigrationservice/PrivateConnection.yaml index 8b2187688ac6..7029eb22b30f 100644 --- a/mmv1/products/databasemigrationservice/PrivateConnection.yaml +++ b/mmv1/products/databasemigrationservice/PrivateConnection.yaml @@ -23,7 +23,7 @@ docs: id_format: 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' base_url: 'projects/{{project}}/locations/{{location}}/privateConnections' self_link: 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/privateConnections?privateConnectionId={{private_connection_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/privateConnections?privateConnectionId={{private_connection_id}}&skip_validation={{create_without_validation}}' immutable: true import_format: - 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' @@ -46,6 +46,7 @@ examples: vars: private_connection_id: 'my-connection' network_name: 'my-network' + create_without_validation: 'false' parameters: - name: 'privateConnectionId' type: String @@ -54,6 +55,14 @@ parameters: url_param_only: true required: true immutable: true + - name: 
'create_without_validation' + type: Boolean + description: |- + If set to true, will skip validations. + url_param_only: true + required: false + immutable: true + default_value: false - name: 'location' type: String description: | diff --git a/mmv1/templates/terraform/examples/database_migration_service_private_connection.tf.tmpl b/mmv1/templates/terraform/examples/database_migration_service_private_connection.tf.tmpl index 0293220ab696..fde2453a84c9 100644 --- a/mmv1/templates/terraform/examples/database_migration_service_private_connection.tf.tmpl +++ b/mmv1/templates/terraform/examples/database_migration_service_private_connection.tf.tmpl @@ -11,6 +11,8 @@ resource "google_database_migration_service_private_connection" "{{$.PrimaryReso vpc_name = resource.google_compute_network.default.id subnet = "10.0.0.0/29" } + + create_without_validation = false } resource "google_compute_network" "default" { From 1a4b73d9ee0feda65b3ff41bcfb16325beede863 Mon Sep 17 00:00:00 2001 From: Jatin Miglani Date: Tue, 20 May 2025 22:03:01 +0200 Subject: [PATCH 197/884] Removing folder policy orchestrator tests from ga provider (#14019) --- ...s_config_v2_policy_orchestrator_for_folder_test.go.tmpl} | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) rename mmv1/third_party/terraform/services/osconfigv2/{resource_os_config_v2_policy_orchestrator_for_folder_test.go => resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl} (99%) diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go rename to mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl index e67a0565fbb9..7f071363c4f3 100644 --- 
a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go +++ b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl @@ -1,5 +1,6 @@ package osconfigv2_test +{{ if ne $.TargetVersionName `ga` -}} import ( "testing" @@ -110,10 +111,10 @@ resource "google_os_config_v2_policy_orchestrator_for_folder" "policy_orchestrat policy_orchestrator_id = "tf-test-po-folder%{random_suffix}" folder_id = google_folder.my_folder.folder_id - + state = "ACTIVE" action = "UPSERT" - + orchestrated_resource { id = "tf-test-test-orchestrated-resource-folder%{random_suffix}" os_policy_assignment_v1_payload { @@ -288,3 +289,4 @@ resource "google_os_config_v2_policy_orchestrator_for_folder" "policy_orchestrat } `, context) } +{{- end }} From ad2ddce27ece71a699f553954aac88b1e98562e4 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 20 May 2025 13:10:27 -0700 Subject: [PATCH 198/884] Removed IAP client post_create (#13979) --- mmv1/products/iap/Client.yaml | 1 - mmv1/templates/terraform/post_create/iap_client.go.tmpl | 7 ------- 2 files changed, 8 deletions(-) delete mode 100644 mmv1/templates/terraform/post_create/iap_client.go.tmpl diff --git a/mmv1/products/iap/Client.yaml b/mmv1/products/iap/Client.yaml index 3aae432eee14..bb24408b25c5 100644 --- a/mmv1/products/iap/Client.yaml +++ b/mmv1/products/iap/Client.yaml @@ -36,7 +36,6 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: - post_create: 'templates/terraform/post_create/iap_client.go.tmpl' custom_import: 'templates/terraform/custom_import/iap_client.go.tmpl' exclude_sweeper: true error_retry_predicates: diff --git a/mmv1/templates/terraform/post_create/iap_client.go.tmpl b/mmv1/templates/terraform/post_create/iap_client.go.tmpl deleted file mode 100644 index 391bc3c6488c..000000000000 --- a/mmv1/templates/terraform/post_create/iap_client.go.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -brand := 
d.Get("brand") -clientId := flattenIapClientClientId(res["name"], d, config) - -if err := d.Set("client_id", clientId); err != nil { - return fmt.Errorf("Error setting client_id: %s", err) -} -d.SetId(fmt.Sprintf("%s/identityAwareProxyClients/%s", brand, clientId)) From cd7728b53ea5294c29611816534e02e551ab4dd2 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 20 May 2025 13:21:39 -0700 Subject: [PATCH 199/884] Fix TestAccDataSourceGoogleCloudBackupDRDataSource_basic (#13986) --- ...ata_source_backup_dr_data_source_test.go.tmpl | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl index 884120f87c30..fb5628955556 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl @@ -15,8 +15,15 @@ import ( func TestAccDataSourceGoogleCloudBackupDRDataSource_basic(t *testing.T) { t.Parallel() + + {{ if ne $.TargetVersionName "ga" -}} + data_source_id := "ds-test" + {{ else -}} + data_source_id := "56b93b14529b77d764b21b2251e1ea8f0006e8dd" + {{- end }} context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), + "data_source_id": data_source_id, } stepChecks := func(wantName string, wantState string) []resource.TestCheckFunc { @@ -27,7 +34,7 @@ func TestAccDataSourceGoogleCloudBackupDRDataSource_basic(t *testing.T) { return stepCheck } project := envvar.GetTestProjectFromEnv() - expectedName := fmt.Sprintf("projects/%s/locations/us-central1/backupVaults/bv-test/dataSources/ds-test", project) + expectedName := fmt.Sprintf("projects/%s/locations/us-central1/backupVaults/bv-test/dataSources/%s", project, data_source_id) expectedState := "ACTIVE" 
acctest.VcrTest(t, resource.TestCase{ @@ -51,12 +58,7 @@ data "google_backup_dr_data_source" "foo" { project = data.google_project.project.project_id location = "us-central1" backup_vault_id = "bv-test" - {{ if ne $.TargetVersionName "ga" -}} - data_source_id = "ds-test" - {{ else -}} - data_source_id = "56b93b14529b77d764b21b2251e1ea8f0006e8dd" - {{- end }} + data_source_id = "%{data_source_id}" } - `, context) } From 6b2c9c2cb39e394da11902eda803db352fbd9d9a Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 20 May 2025 13:22:03 -0700 Subject: [PATCH 200/884] Fixed permadiff on `product` in `google_gemini_release_channel_setting_binding` (#13972) --- mmv1/products/gemini/ReleaseChannelSettingBinding.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml b/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml index 5bef6995417d..93380e0e2cf2 100644 --- a/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml +++ b/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml @@ -105,3 +105,4 @@ properties: enum_values: - 'GEMINI_CLOUD_ASSIST' - 'GEMINI_CODE_ASSIST' + default_from_api: true From f821cfefbb13ec6c1201ecf5bf7045f94fd45cfb Mon Sep 17 00:00:00 2001 From: translucens Date: Wed, 21 May 2025 05:36:11 +0900 Subject: [PATCH 201/884] added IP address range for direct connectivity from GCE VM and IPv6 (#13783) --- .../data_source_google_netblock_ip_ranges.go | 44 ++++++++++++++- ...a_source_google_netblock_ip_ranges_test.go | 54 +++++++++++++++++-- .../docs/d/netblock_ip_ranges.html.markdown | 10 ++-- 3 files changed, 99 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go index 4a4f9ceebd3d..d5a45808bc43 100644 --- 
a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go @@ -124,23 +124,63 @@ func dataSourceGoogleNetblockIpRangesRead(d *schema.ResourceData, meta interface case "restricted-googleapis": // https://cloud.google.com/vpc/docs/private-access-options#domain-vips CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.4/30") - CidrBlocks["cidr_blocks"] = CidrBlocks["cidr_blocks_ipv4"] + CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:1000::/64") + CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) + + if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks: %s", err) + } + if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) + } + if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) + } + case "restricted-googleapis-with-directconnectivity": + // https://cloud.google.com/vpc/docs/configure-private-google-access#config-options + CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.4/30", "34.126.0.0/18") + CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:1000::/64", "2001:4860:8040::/42") + CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) 
+ if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { return fmt.Errorf("Error setting cidr_blocks: %s", err) } if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) } + if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) + } case "private-googleapis": // https://cloud.google.com/vpc/docs/private-access-options#domain-vips CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.8/30") - CidrBlocks["cidr_blocks"] = CidrBlocks["cidr_blocks_ipv4"] + CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:2000::/64") + CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) + if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { return fmt.Errorf("Error setting cidr_blocks: %s", err) } if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) } + if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) + } + case "private-googleapis-with-directconnectivity": + // https://cloud.google.com/vpc/docs/private-access-options#domain-vips + CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.8/30", "34.126.0.0/18") + CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:2000::/64", "2001:4860:8040::/42") + CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) 
+ + if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks: %s", err) + } + if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) + } + if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { + return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) + } case "dns-forwarders": // https://cloud.google.com/dns/zones/#creating-forwarding-zones CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "35.199.192.0/19") diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go index 1ac308f49fa9..cb1ccea75fe6 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go @@ -71,26 +71,60 @@ func TestAccDataSourceGoogleNetblockIpRanges_basic(t *testing.T) { Config: testAccNetblockIpRangesConfig_restricted, Check: resource.ComposeTestCheckFunc( // Private Google Access Restricted VIP - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.#", "1"), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.#", "2"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv4.#", "1"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv4.0", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), - 
resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv6.#", "0"), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv6.#", "1"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", + "cidr_blocks_ipv6.0", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), + ), + }, + { + Config: testAccNetblockIpRangesConfig_restricted_with_directconnectivity, + Check: resource.ComposeTestCheckFunc( + // Private Google Access Restricted VIP + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.#", "4"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", + "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv4.#", "2"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", + "cidr_blocks_ipv4.1", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv6.#", "2"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", + "cidr_blocks_ipv6.1", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), ), }, { Config: testAccNetblockIpRangesConfig_private, Check: resource.ComposeTestCheckFunc( // Private Google Access Unrestricted VIP - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.#", "1"), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.#", "2"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv4.#", "1"), 
resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv4.0", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv6.#", "0"), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv6.#", "1"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", + "cidr_blocks_ipv6.0", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), + ), + }, + { + Config: testAccNetblockIpRangesConfig_private_with_directconnectivity, + Check: resource.ComposeTestCheckFunc( + // Private Google Access Unrestricted VIP + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.#", "4"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", + "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv4.#", "2"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", + "cidr_blocks_ipv4.1", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv6.#", "2"), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", + "cidr_blocks_ipv6.1", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), ), }, { @@ -171,12 +205,24 @@ data "google_netblock_ip_ranges" "restricted" { } ` +const testAccNetblockIpRangesConfig_restricted_with_directconnectivity = ` +data "google_netblock_ip_ranges" "restricted" { + range_type = "restricted-googleapis-with-directconnectivity" +} +` + const testAccNetblockIpRangesConfig_private = ` data "google_netblock_ip_ranges" "private" { range_type = "private-googleapis" } ` +const testAccNetblockIpRangesConfig_private_with_directconnectivity = ` +data 
"google_netblock_ip_ranges" "private" { + range_type = "private-googleapis-with-directconnectivity" +} +` + const testAccNetblockIpRangesConfig_dns = ` data "google_netblock_ip_ranges" "dns" { range_type = "dns-forwarders" diff --git a/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown b/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown index a2e11867dc6b..6f96e17b2968 100644 --- a/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown @@ -63,9 +63,13 @@ The following arguments are supported: * `google-netblocks` - Corresponds to IP addresses used for Google services. [More details.](https://cloud.google.com/compute/docs/faq#where_can_i_find_product_name_short_ip_ranges) - * `restricted-googleapis` - Corresponds to the IP addresses used for Private Google Access only for services that support VPC Service Controls API access. [More details.](https://cloud.google.com/vpc/docs/private-access-options#domain-vips) + * `restricted-googleapis` - Corresponds to the IP addresses used for Private Google Access only for services that support VPC Service Controls API access. These ranges are for DNS configuration. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) - * `private-googleapis` - Corresponds to the IP addresses used for Private Google Access for services that do not support VPC Service Controls. [More details.](https://cloud.google.com/vpc/docs/private-access-options#domain-vips) + * `restricted-googleapis-with-directconnectivity` - Corresponds to the IP addresses used for Private Google Access only for services that support VPC Service Controls API access. These ranges are for routing and firewall configurations. 
[More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) + + * `private-googleapis` - Corresponds to the IP addresses used for Private Google Access, including services that do not support VPC Service Controls. These ranges are for DNS configuration. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) + + * `private-googleapis-with-directconnectivity` - Corresponds to the IP addresses used for Private Google Access, including services that do not support VPC Service Controls. These ranges are for routing and firewall configurations. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) * `dns-forwarders` - Corresponds to the IP addresses used to originate Cloud DNS outbound forwarding. [More details.](https://cloud.google.com/dns/zones/#creating-forwarding-zones) @@ -73,7 +77,7 @@ The following arguments are supported: * `health-checkers` - Corresponds to the IP addresses used for health checking in Cloud Load Balancing. [More details.](https://cloud.google.com/load-balancing/docs/health-checks) - * `legacy-health-checkers` - Corresponds to the IP addresses used for legacy style health checkers (used by Network Load Balancing). [ More details.](https://cloud.google.com/load-balancing/docs/health-checks) + * `legacy-health-checkers` - Corresponds to the IP addresses used for legacy style health checkers (used by Network Load Balancing). 
[More details.](https://cloud.google.com/load-balancing/docs/health-checks) ## Attributes Reference From bd7ed8a64fc16b42d7f4dcb868ed86c6b2708cce Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 20 May 2025 13:42:53 -0700 Subject: [PATCH 202/884] Removed logging metric set_computed_name post_create (#13634) --- mmv1/products/logging/Metric.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/products/logging/Metric.yaml b/mmv1/products/logging/Metric.yaml index 6689245b2927..04a748a71323 100644 --- a/mmv1/products/logging/Metric.yaml +++ b/mmv1/products/logging/Metric.yaml @@ -36,7 +36,6 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: - post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_import: 'templates/terraform/custom_import/self_link_as_name.tmpl' examples: - name: 'logging_metric_basic' From 8b666a35ac268045386fae8b2746108745d61f53 Mon Sep 17 00:00:00 2001 From: Sing Date: Tue, 20 May 2025 22:24:42 +0100 Subject: [PATCH 203/884] EdgeCacheService: Fixed `defaultTtl/maxTtl` Config validation failed error when switching `cache_mode` (#13964) --- .../networkservices/EdgeCacheService.yaml | 1 + ...etwork_services_edge_cache_service.go.tmpl | 53 +++++ ...etwork_services_edge_cache_service_test.go | 201 ++++++++++++++++++ 3 files changed, 255 insertions(+) create mode 100644 mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl diff --git a/mmv1/products/networkservices/EdgeCacheService.yaml b/mmv1/products/networkservices/EdgeCacheService.yaml index 33ad8ba9c796..7b0c5dd0f621 100644 --- a/mmv1/products/networkservices/EdgeCacheService.yaml +++ b/mmv1/products/networkservices/EdgeCacheService.yaml @@ -42,6 +42,7 @@ async: result: resource_inside_response: false custom_code: + encoder: 'templates/terraform/encoders/network_services_edge_cache_service.go.tmpl' examples: - name: 'network_services_edge_cache_service_basic' primary_resource_id: 'instance' diff --git 
a/mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl b/mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl new file mode 100644 index 000000000000..a668418ea16c --- /dev/null +++ b/mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl @@ -0,0 +1,53 @@ +// This encoder ensures TTL fields are handled correctly based on cache mode +routing, ok := obj["routing"].(map[string]interface{}) +if !ok { + return obj, nil +} + +pathMatchers, ok := routing["pathMatchers"].([]interface{}) +if !ok || len(pathMatchers) == 0 { + return obj, nil +} + +for _, pm := range pathMatchers { + pathMatcher, ok := pm.(map[string]interface{}) + if !ok { + continue + } + + routeRules, ok := pathMatcher["routeRules"].([]interface{}) + if !ok { + continue + } + + for _, rr := range routeRules { + routeRule, ok := rr.(map[string]interface{}) + if !ok { + continue + } + + routeAction, ok := routeRule["routeAction"].(map[string]interface{}) + if !ok { + continue + } + + cdnPolicy, ok := routeAction["cdnPolicy"].(map[string]interface{}) + if !ok { + continue + } + + // Handle TTL fields based on cache mode + if cacheMode, ok := cdnPolicy["cacheMode"].(string); ok { + switch cacheMode { + case "USE_ORIGIN_HEADERS", "BYPASS_CACHE": + delete(cdnPolicy, "clientTtl") + delete(cdnPolicy, "defaultTtl") + delete(cdnPolicy, "maxTtl") + case "FORCE_CACHE_ALL": + delete(cdnPolicy, "maxTtl") + } + } + } +} + +return obj, nil \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go index 82b3329e5889..269b10fb2cf6 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go @@ 
-147,3 +147,204 @@ resource "google_network_services_edge_cache_service" "served" { } `, bktName, originName, serviceName) } + +func TestAccNetworkServicesEdgeCacheService_cacheModeAndTtl(t *testing.T) { + t.Parallel() + namebkt := "tf-test-bucket-" + acctest.RandString(t, 10) + nameorigin := "tf-test-origin-" + acctest.RandString(t, 10) + nameservice := "tf-test-service-" + acctest.RandString(t, 10) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesEdgeCacheServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_0(namebkt, nameorigin, nameservice), + }, + { + ResourceName: "google_network_services_edge_cache_service.served", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_1(namebkt, nameorigin, nameservice), + }, + { + ResourceName: "google_network_services_edge_cache_service.served", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_2(namebkt, nameorigin, nameservice), + }, + { + ResourceName: "google_network_services_edge_cache_service.served", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_0(bktName, originName, serviceName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "dest" { + name = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true +} +resource "google_network_services_edge_cache_origin" "instance" { + name = "%s" + origin_address = google_storage_bucket.dest.url + description = "The default bucket for media edge test" + max_attempts = 2 + timeout { + connect_timeout = "10s" + } +} +resource "google_network_services_edge_cache_service" "served" { + name = 
"%s" + description = "some description" + routing { + host_rule { + description = "host rule description" + hosts = ["sslcert.tf-test.club"] + path_matcher = "routes" + } + path_matcher { + name = "routes" + route_rule { + description = "a route rule to match against" + priority = 1 + match_rule { + prefix_match = "/" + } + origin = google_network_services_edge_cache_origin.instance.name + route_action { + cdn_policy { + cache_mode = "CACHE_ALL_STATIC" + default_ttl = "1000s" + max_ttl = "2000s" + } + compression_mode = "AUTOMATIC" + } + header_action { + response_header_to_add { + header_name = "x-cache-status" + header_value = "{cdn_cache_status}" + } + } + } + } + } +} +`, bktName, originName, serviceName) +} + +func testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_1(bktName, originName, serviceName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "dest" { + name = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true +} +resource "google_network_services_edge_cache_origin" "instance" { + name = "%s" + origin_address = google_storage_bucket.dest.url + description = "The default bucket for media edge test" + max_attempts = 2 + timeout { + connect_timeout = "10s" + } +} +resource "google_network_services_edge_cache_service" "served" { + name = "%s" + description = "some description" + routing { + host_rule { + description = "host rule description" + hosts = ["sslcert.tf-test.club"] + path_matcher = "routes" + } + path_matcher { + name = "routes" + route_rule { + description = "a route rule to match against" + priority = 1 + match_rule { + prefix_match = "/" + } + origin = google_network_services_edge_cache_origin.instance.name + route_action { + cdn_policy { + cache_mode = "FORCE_CACHE_ALL" + default_ttl = "1100s" + } + } + header_action { + response_header_to_add { + header_name = "x-cache-status" + header_value = "{cdn_cache_status}" + } + } + } + } + } +} +`, bktName, originName, serviceName) +} + 
+func testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_2(bktName, originName, serviceName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "dest" { + name = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true +} +resource "google_network_services_edge_cache_origin" "instance" { + name = "%s" + origin_address = google_storage_bucket.dest.url + description = "The default bucket for media edge test" + max_attempts = 2 + timeout { + connect_timeout = "10s" + } +} +resource "google_network_services_edge_cache_service" "served" { + name = "%s" + description = "some description" + routing { + host_rule { + description = "host rule description" + hosts = ["sslcert.tf-test.club"] + path_matcher = "routes" + } + path_matcher { + name = "routes" + route_rule { + description = "a route rule to match against" + priority = 1 + match_rule { + prefix_match = "/" + } + origin = google_network_services_edge_cache_origin.instance.name + route_action { + cdn_policy { + cache_mode = "BYPASS_CACHE" + } + } + header_action { + response_header_to_add { + header_name = "x-cache-status" + header_value = "{cdn_cache_status}" + } + } + } + } + } +} +`, bktName, originName, serviceName) +} From c75b6ada590e8411ac2d33b5d5469d5983881f64 Mon Sep 17 00:00:00 2001 From: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Date: Tue, 20 May 2025 14:49:00 -0700 Subject: [PATCH 204/884] V2 worker pool (#13816) --- mmv1/products/cloudrunv2/WorkerPool.yaml | 818 ++++++++++++++++++ .../cloudrunv2_worker_pool_basic.tf.tmpl | 12 + ...runv2_worker_pool_custom_audiences.tf.tmpl | 13 + .../cloudrunv2_worker_pool_directvpc.tf.tmpl | 19 + .../cloudrunv2_worker_pool_gpu.tf.tmpl | 22 + .../cloudrunv2_worker_pool_limits.tf.tmpl | 18 + .../cloudrunv2_worker_pool_mount_gcs.tf.tmpl | 31 + .../cloudrunv2_worker_pool_mount_nfs.tf.tmpl | 48 + ...udrunv2_worker_pool_multicontainer.tf.tmpl | 33 + .../cloudrunv2_worker_pool_secret.tf.tmpl | 51 ++ 
.../cloudrunv2_worker_pool_sql.tf.tmpl | 77 ++ ...drunv2_worker_pool_deletion_policy.go.tmpl | 3 + .../provider/provider_mmv1_resources.go.tmpl | 1 + ..._source_google_cloud_run_v2_worker_pool.go | 52 ++ ...ce_google_cloud_run_v2_worker_pool_test.go | 65 ++ ...urce_cloud_run_v2_worker_pool_test.go.tmpl | 618 +++++++++++++ .../d/cloud_run_v2_worker_pool.html.markdown | 37 + 17 files changed, 1918 insertions(+) create mode 100644 mmv1/products/cloudrunv2/WorkerPool.yaml create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl create mode 100644 mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go create mode 100644 mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go create mode 100644 mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl create mode 100644 mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown diff --git 
a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml new file mode 100644 index 000000000000..37ecf07f8773 --- /dev/null +++ b/mmv1/products/cloudrunv2/WorkerPool.yaml @@ -0,0 +1,818 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WorkerPool' +description: | + WorkerPool acts as a top-level container that manages a set of configurations and revision templates which implement a pull-based workload. WorkerPool exists to provide a singular abstraction which can be access controlled, reasoned about, and which encapsulates software lifecycle decisions such as rollout policy and team resource ownership. 
+references: + guides: + 'Official Documentation': 'https://cloud.google.com/run/docs/' + api: 'https://cloud.google.com/run/docs/reference/rest/v2/projects.locations.workerPools' +docs: +id_format: 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' +base_url: 'projects/{{project}}/locations/{{location}}/workerPools' +self_link: 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' +create_url: 'projects/{{project}}/locations/{{location}}/workerPools?workerPoolId={{name}}' +update_verb: 'PATCH' +import_format: + - 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: true +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: true +iam_policy: + method_name_separator: ':' + parent_resource_attribute: 'name' + base_url: 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' + example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' + import_format: + - 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' + - '{{name}}' +custom_code: + pre_delete: 'templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl' +taint_resource_on_failed_create: true +examples: + - name: 'cloudrunv2_worker_pool_basic' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_limits' + primary_resource_id: 'default' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_sql' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' + vars: + 
cloud_run_worker_pool_name: 'cloudrun-worker-pool' + secret_id: 'secret-1' + cloud_run_sql_name: 'cloudrun-sql' + deletion_protection: 'true' + test_vars_overrides: + 'deletion_protection': 'false' + oics_vars_overrides: + 'deletion_protection': 'false' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_directvpc' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_gpu' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_secret' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + secret_id: 'secret-1' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_multicontainer' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' + - name: 'cloudrunv2_worker_pool_mount_gcs' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool-%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' + # Currently failing + skip_vcr: true + - name: 'cloudrunv2_worker_pool_mount_nfs' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool-%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + 
ignore_read_extra: + - 'deletion_protection' + # Currently failing + skip_vcr: true + - name: 'cloudrunv2_worker_pool_custom_audiences' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool-%s", context["random_suffix"])' + vars: + cloud_run_worker_pool_name: 'cloudrun-worker-pool' + ignore_read_extra: + - 'deletion_protection' +virtual_fields: + - name: 'deletion_protection' + description: | + Whether Terraform will be prevented from destroying the service. Defaults to true. + When a`terraform destroy` or `terraform apply` would delete the service, + the command will fail if this field is not set to false in Terraform state. + When the field is set to true or unset in Terraform state, a `terraform apply` + or `terraform destroy` that would delete the WorkerPool will fail. + When the field is set to false, deleting the WorkerPool is allowed. + type: Boolean + default_value: true +parameters: + - name: 'location' + type: String + description: The location of the cloud run worker pool + url_param_only: true + required: true + immutable: true +properties: + - name: 'name' + type: String + description: | + Name of the WorkerPool. + url_param_only: true + required: true + immutable: true + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + custom_expand: 'templates/terraform/custom_expand/resource_from_self_link.go.tmpl' + - name: 'description' + type: String + description: | + User-provided description of the WorkerPool. This field currently has a 512-character limit. + - name: 'uid' + type: String + description: | + Server assigned unique identifier for the trigger. The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted. + output: true + - name: 'generation' + type: String + description: | + A number that monotonically increases every time the user modifies the desired state. 
Please note that unlike v1, this is an int64 value. As with most Google APIs, its JSON representation will be a string instead of an integer. + output: true + - name: 'labels' + type: KeyValueLabels + description: |- + Unstructured key value map that can be used to organize and categorize objects. User-provided labels are shared with Google's billing system, so they can be used to filter, or break down billing charges by team, component, + environment, state, etc. For more information, visit https://cloud.google.com/resource-manager/docs/creating-managing-labels or https://cloud.google.com/run/docs/configuring/labels. + + Cloud Run API v2 does not support labels with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected. + All system labels in v1 now have a corresponding field in v2 WorkerPool. + - name: 'annotations' + type: KeyValueAnnotations + description: |- + Unstructured key value map that may be set by external tools to store and arbitrary metadata. They are not queryable and should be preserved when modifying objects. + + Cloud Run API v2 does not support annotations with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected in new resources. + All system annotations in v1 now have a corresponding field in v2 WorkerPool. + + This field follows Kubernetes annotations' namespacing, limits, and rules. + - name: 'createTime' + type: Time + description: |- + The creation time. + output: true + - name: 'updateTime' + type: Time + description: |- + The last-modified time. + output: true + - name: 'deleteTime' + type: Time + description: |- + The deletion time. + output: true + - name: 'expireTime' + type: Time + description: |- + For a deleted resource, the time after which it will be permanently deleted. 
+ output: true + - name: 'creator' + type: String + description: |- + Email address of the authenticated creator. + output: true + - name: 'lastModifier' + type: String + description: |- + Email address of the last authenticated modifier. + output: true + - name: 'client' + type: String + description: | + Arbitrary identifier for the API client. + - name: 'clientVersion' + type: String + description: | + Arbitrary version identifier for the API client. + - name: 'launchStage' + type: Enum + description: | + The launch stage as defined by [Google Cloud Platform Launch Stages](https://cloud.google.com/products#product-launch-stages). Cloud Run supports ALPHA, BETA, and GA. + If no value is specified, GA is assumed. Set the launch stage to a preview stage on input to allow use of preview features in that stage. On read (or output), describes whether the resource uses preview features. + + For example, if ALPHA is provided as input, but only BETA and GA-level features are used, this field will be BETA on output. + default_from_api: true + enum_values: + - 'UNIMPLEMENTED' + - 'PRELAUNCH' + - 'EARLY_ACCESS' + - 'ALPHA' + - 'BETA' + - 'GA' + - 'DEPRECATED' + - name: 'binaryAuthorization' + type: NestedObject + description: | + Settings for the Binary Authorization feature. + properties: + - name: 'breakglassJustification' + type: String + description: | + If present, indicates to use Breakglass using this justification. If useDefault is False, then it must be empty. For more information on breakglass, see https://cloud.google.com/binary-authorization/docs/using-breakglass + - name: 'useDefault' + type: Boolean + description: | + If True, indicates to use the default project's binary authorization policy. If False, binary authorization will be disabled. + conflicts: + - policy + - name: 'policy' + type: String + description: | + The path to a binary authorization policy. 
Format: projects/{project}/platforms/cloudRun/{policy-name} + conflicts: + - use_default + - name: 'customAudiences' + type: Array + description: | + One or more custom audiences that you want this worker pool to support. Specify each custom audience as the full URL in a string. The custom audiences are encoded in the token and used to authenticate requests. + For more information, see https://cloud.google.com/run/docs/configuring/custom-audiences. + item_type: + type: String + - name: 'scaling' + type: NestedObject + description: | + Scaling settings that apply to the worker pool. + default_from_api: true + properties: + - name: 'scalingMode' + type: Enum + description: | + The scaling mode for the worker pool. It defaults to MANUAL. + enum_values: + - 'AUTOMATIC' + - 'MANUAL' + - name: 'minInstanceCount' + type: Integer + description: | + The minimum count of instances distributed among revisions based on the specified instance split percentages. + - name: 'maxInstanceCount' + type: Integer + description: | + The maximum count of instances distributed among revisions based on the specified instance split percentages. + - name: 'manualInstanceCount' + type: Integer + description: | + The total number of instances in manual scaling mode. + - name: 'template' + type: NestedObject + description: | + The template used to create revisions for this WorkerPool. + required: true + properties: + - name: 'revision' + type: String + description: |- + The unique name for the revision. If this field is omitted, it will be automatically generated based on the WorkerPool name. + - name: 'labels' + type: KeyValuePairs + description: |- + Unstructured key value map that can be used to organize and categorize objects. User-provided labels are shared with Google's billing system, so they can be used to filter, or break down billing charges by team, component, environment, state, etc. 
+ For more information, visit https://cloud.google.com/resource-manager/docs/creating-managing-labels or https://cloud.google.com/run/docs/configuring/labels. + + Cloud Run API v2 does not support labels with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected. + All system labels in v1 now have a corresponding field in v2 WorkerPoolRevisionTemplate. + - name: 'annotations' + type: KeyValuePairs + description: |- + Unstructured key value map that may be set by external tools to store and arbitrary metadata. They are not queryable and should be preserved when modifying objects. + + Cloud Run API v2 does not support annotations with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected. + All system annotations in v1 now have a corresponding field in v2 WorkerPoolRevisionTemplate. + + This field follows Kubernetes annotations' namespacing, limits, and rules. + - name: 'vpcAccess' + type: NestedObject + description: |- + VPC Access configuration to use for this Revision. For more information, visit https://cloud.google.com/run/docs/configuring/connecting-vpc. + properties: + - name: 'egress' + type: Enum + description: |- + Traffic VPC egress settings. + default_from_api: true + enum_values: + - 'ALL_TRAFFIC' + - 'PRIVATE_RANGES_ONLY' + - name: 'networkInterfaces' + type: Array + description: |- + Direct VPC egress settings. Currently only single network interface is supported. + item_type: + type: NestedObject + properties: + - name: 'network' + type: String + description: |- + The VPC network that the Cloud Run resource will be able to send traffic to. At least one of network or subnetwork must be specified. If both + network and subnetwork are specified, the given VPC subnetwork must belong to the given VPC network. If network is not specified, it will be + looked up from the subnetwork. 
+ default_from_api: true + - name: 'subnetwork' + type: String + description: |- + The VPC subnetwork that the Cloud Run resource will get IPs from. At least one of network or subnetwork must be specified. If both + network and subnetwork are specified, the given VPC subnetwork must belong to the given VPC network. If subnetwork is not specified, the + subnetwork with the same name with the network will be used. + default_from_api: true + - name: 'tags' + type: Array + description: |- + Network tags applied to this Cloud Run WorkerPool. + item_type: + type: String + - name: 'serviceAccount' + type: String + description: |- + Email address of the IAM service account associated with the revision of the WorkerPool. The service account represents the identity of the running revision, and determines what permissions the revision has. If not provided, the revision will use the project's default service account. + default_from_api: true + - name: 'containers' + type: Array + description: |- + Holds the containers that define the unit of execution for this WorkerPool. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: |- + Name of the container specified as a DNS_LABEL. + - name: 'image' + type: String + description: |- + URL of the Container image in Google Container Registry or Google Artifact Registry. More info: https://kubernetes.io/docs/concepts/containers/images + required: true + - name: 'command' + type: Array + description: |- + Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell + item_type: + type: String + - name: 'args' + type: Array + description: |- + Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references are not supported in Cloud Run. + item_type: + type: String + - name: 'env' + type: Array + description: |- + List of environment variables to set in the container. + is_set: true + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: |- + Name of the environment variable. Must be a C_IDENTIFIER, and may not exceed 32768 characters. + required: true + - name: 'value' + type: String + # env is a set. + # The env.value has value "" in Terraform state, but it has value nil in Terraform plan, + # which causes the diffs for unchanged env. default_value: "" is to suppress the diffs. + default_value: "" + description: |- + Literal value of the environment variable. Defaults to "" and the maximum allowed length is 32768 characters. Variable references are not supported in Cloud Run. + # exactly_one_of: + # - template.0.containers.0.env.0.value + # - template.0.containers.0.env.0.valueSource + - name: 'valueSource' + type: NestedObject + description: |- + Source for the environment variable's value. + # exactly_one_of: + # - template.0.containers.0.env.0.value + # - template.0.containers.0.env.0.valueSource + properties: + - name: 'secretKeyRef' + type: NestedObject + description: |- + Selects a secret and a specific version from Cloud Secret Manager. + properties: + - name: 'secret' + type: String + description: |- + The name of the secret in Cloud Secret Manager. Format: {secretName} if the secret is in the same project. projects/{project}/secrets/{secretName} if the secret is in a different project. + required: true + - name: 'version' + type: String + description: |- + The Cloud Secret Manager secret version. 
Can be 'latest' for the latest value or an integer for a specific version. + - name: 'resources' + type: NestedObject + description: |- + Compute Resource requirements by this container. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources + default_from_api: true + properties: + - name: 'limits' + type: KeyValuePairs + description: |- + Only memory, CPU, and nvidia.com/gpu are supported. Use key `cpu` for CPU limit, `memory` for memory limit, `nvidia.com/gpu` for gpu limit. Note: The only supported values for CPU are '1', '2', '4', and '8'. Setting 4 CPU requires at least 2Gi of memory. The values of the map is string form of the 'quantity' k8s type: https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go + default_from_api: true + - name: 'volumeMounts' + type: Array + description: |- + Volume to mount into the container's filesystem. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: |- + This must match the Name of a Volume. + required: true + - name: 'mountPath' + type: String + description: |- + Path within the container at which the volume should be mounted. Must not contain ':'. For Cloud SQL volumes, it can be left empty, or must otherwise be /cloudsql. All instances defined in the Volume will be available as /cloudsql/[instance]. For more information on Cloud SQL volumes, visit https://cloud.google.com/sql/docs/mysql/connect-run + required: true + - name: 'workingDir' + type: String + description: |- + Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. + - name: 'dependsOn' + type: Array + description: |- + Containers which should be started before this container. If specified the container will wait to start until all containers with the listed names are healthy. 
+ item_type: + type: String + - name: 'volumes' + type: Array + description: |- + A list of Volumes to make available to containers. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: |- + Volume's name. + required: true + - name: 'secret' + type: NestedObject + description: |- + Secret represents a secret that should populate this volume. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret + # exactly_one_of: + # - template.0.volumes.0.secret + # - template.0.volumes.0.cloudSqlInstance + # - template.0.volumes.0.emptyDir + properties: + - name: 'secret' + type: String + description: |- + The name of the secret in Cloud Secret Manager. Format: {secret} if the secret is in the same project. projects/{project}/secrets/{secret} if the secret is in a different project. + required: true + - name: 'defaultMode' + type: Integer + description: |- + Integer representation of mode bits to use on created files by default. Must be a value between 0000 and 0777 (octal), defaulting to 0444. Directories within the path are not affected by this setting. + - name: 'items' + type: Array + description: |- + If unspecified, the volume will expose a file whose name is the secret, relative to VolumeMount.mount_path. If specified, the key will be used as the version to fetch from Cloud Secret Manager and the path will be the name of the file exposed in the volume. When items are defined, they must specify a path and a version. + item_type: + type: NestedObject + properties: + - name: 'path' + type: String + description: |- + The relative path of the secret in the container. + required: true + - name: 'version' + type: String + description: |- + The Cloud Secret Manager secret version. Can be 'latest' for the latest value or an integer for a specific version + - name: 'mode' + type: Integer + description: |- + Integer octal mode bits to use on this file, must be a value between 01 and 0777 (octal). 
If 0 or not set, the Volume's default mode will be used. + - name: 'cloudSqlInstance' + type: NestedObject + description: |- + For Cloud SQL volumes, contains the specific instances that should be mounted. Visit https://cloud.google.com/sql/docs/mysql/connect-run for more information on how to connect Cloud SQL and Cloud Run. + # exactly_one_of: + # - template.0.volumes.0.secret + # - template.0.volumes.0.cloudSqlInstance + # - template.0.volumes.0.emptyDir + # - template.0.volumes.0.gcs + properties: + - name: 'instances' + type: Array + description: |- + The Cloud SQL instance connection names, as can be found in https://console.cloud.google.com/sql/instances. Visit https://cloud.google.com/sql/docs/mysql/connect-run for more information on how to connect Cloud SQL and Cloud Run. Format: {project}:{location}:{instance} + is_set: true + item_type: + type: String + - name: 'emptyDir' + type: NestedObject + description: |- + Ephemeral storage used as a shared volume. + # exactly_one_of: + # - template.0.volumes.0.secret + # - template.0.volumes.0.cloudSqlInstance + # - template.0.volumes.0.emptyDir + # - template.0.volumes.0.gcs + properties: + - name: 'medium' + type: Enum + description: |- + The different types of medium supported for EmptyDir. + default_value: "MEMORY" + enum_values: + - 'MEMORY' + - name: 'sizeLimit' + type: String + description: |- + Limit on the storage usable by this EmptyDir volume. The size limit is also applicable for memory medium. The maximum usage on memory medium EmptyDir would be the minimum value between the SizeLimit specified here and the sum of memory limits of all containers in a pod. This field's values are of the 'Quantity' k8s type: https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/quantity/. The default is nil which means that the limit is undefined. More info: https://kubernetes.io/docs/concepts/storage/volumes/#emptydir. 
+ - name: 'gcs' + type: NestedObject + description: |- + Cloud Storage bucket mounted as a volume using GCSFuse. This feature is only supported in the gen2 execution environment. + # exactly_one_of: + # - template.0.volumes.0.secret + # - template.0.volumes.0.cloudSqlInstance + # - template.0.volumes.0.emptyDir + # - template.0.volumes.0.gcs + properties: + - name: 'bucket' + type: String + description: GCS Bucket name + required: true + - name: 'readOnly' + type: Boolean + description: If true, mount the GCS bucket as read-only + required: false + - name: 'mountOptions' + min_version: 'beta' + type: Array + description: | + A list of flags to pass to the gcsfuse command for configuring this volume. + Flags should be passed without leading dashes. + item_type: + type: String + - name: 'nfs' + type: NestedObject + description: Represents an NFS mount. + properties: + - name: 'server' + type: String + description: Hostname or IP address of the NFS server + required: true + - name: 'path' + type: String + description: Path that is exported by the NFS server. + required: true + - name: 'readOnly' + type: Boolean + description: If true, mount the NFS volume as read only + required: false + - name: 'encryptionKey' + type: String + description: |- + A reference to a customer managed encryption key (CMEK) to use to encrypt this container image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek + - name: 'encryptionKeyRevocationAction' + type: Enum + description: |- + The action to take if the encryption key is revoked. + enum_values: + - 'PREVENT_NEW' + - 'SHUTDOWN' + - name: 'encryptionKeyShutdownDuration' + type: String + description: |- + If encryptionKeyRevocationAction is SHUTDOWN, the duration before shutting down all instances. The minimum increment is 1 hour. + + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". 
+ - name: 'nodeSelector' + type: NestedObject + description: Node Selector describes the hardware requirements of the resources. + properties: + - name: 'accelerator' + type: String + description: + The GPU to attach to an instance. See https://cloud.google.com/run/docs/configuring/services/gpu for configuring GPU. + required: true + - name: 'gpuZonalRedundancyDisabled' + type: Boolean + description: True if GPU zonal redundancy is disabled on this revision. + - name: 'instanceSplits' + type: Array + description: |- + Specifies how to distribute instances over a collection of Revisions belonging to the WorkerPool. If instance split is empty or not provided, defaults to 100% instances assigned to the latest Ready Revision. + default_from_api: true + item_type: + type: NestedObject + properties: + - name: 'type' + type: Enum + description: | + The allocation type for this instance split. + enum_values: + - 'INSTANCE_SPLIT_ALLOCATION_TYPE_LATEST' + - 'INSTANCE_SPLIT_ALLOCATION_TYPE_REVISION' + - name: 'revision' + type: String + description: | + Revision to which to assign this portion of instances, if split allocation is by revision. + - name: 'percent' + type: Integer + description: | + Specifies percent of the instance split to this Revision. This defaults to zero if unspecified. + default_from_api: true + - name: 'observedGeneration' + type: String + description: | + The generation of this WorkerPool currently serving traffic. See comments in reconciling for additional information on reconciliation process in Cloud Run. Please note that unlike v1, this is an int64 value. As with most Google APIs, its JSON representation will be a string instead of an integer. + output: true + - name: 'terminalCondition' + type: NestedObject + description: | + The Condition of this WorkerPool, containing its readiness status, and detailed error information in case it did not reach a serving state. 
See comments in reconciling for additional information on reconciliation process in Cloud Run. + output: true + properties: + - name: 'type' + type: String + description: |- + type is used to communicate the status of the reconciliation process. See also: https://github.com/knative/serving/blob/main/docs/spec/errors.md#error-conditions-and-reporting Types common to all resources include: * "Ready": True when the Resource is ready. + output: true + - name: 'state' + type: String + description: |- + State of the condition. + output: true + - name: 'message' + type: String + description: |- + Human readable message indicating details about the current status. + output: true + - name: 'lastTransitionTime' + type: Time + description: |- + Last time the condition transitioned from one status to another. + output: true + - name: 'severity' + type: String + description: |- + How to interpret failures of this condition, one of Error, Warning, Info + output: true + - name: 'reason' + type: String + description: |- + A common (workerPool-level) reason for this condition. + output: true + - name: 'revisionReason' + type: String + description: |- + A reason for the revision condition. + output: true + - name: 'executionReason' + type: String + description: |- + A reason for the execution condition. + output: true + - name: 'conditions' + type: Array + description: |- + The Conditions of all other associated sub-resources. They contain additional diagnostics information in case the WorkerPool does not reach its Serving state. See comments in reconciling for additional information on reconciliation process in Cloud Run. + output: true + item_type: + type: NestedObject + properties: + - name: 'type' + type: String + description: |- + type is used to communicate the status of the reconciliation process. 
See also: https://github.com/knative/serving/blob/main/docs/spec/errors.md#error-conditions-and-reporting Types common to all resources include: * "Ready": True when the Resource is ready. + output: true + - name: 'state' + type: String + description: |- + State of the condition. + output: true + - name: 'message' + type: String + description: |- + Human readable message indicating details about the current status. + output: true + - name: 'lastTransitionTime' + type: Time + description: |- + Last time the condition transitioned from one status to another. + + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + output: true + - name: 'severity' + type: String + description: |- + How to interpret failures of this condition, one of Error, Warning, Info + output: true + - name: 'reason' + type: String + description: |- + A common (workerPool-level) reason for this condition. + output: true + - name: 'revisionReason' + type: String + description: |- + A reason for the revision condition. + output: true + - name: 'executionReason' + type: String + description: |- + A reason for the execution condition. + output: true + - name: 'latestReadyRevision' + type: String + description: | + Name of the latest revision that is serving traffic. See comments in reconciling for additional information on reconciliation process in Cloud Run. + output: true + - name: 'latestCreatedRevision' + type: String + description: | + Name of the last created revision. See comments in reconciling for additional information on reconciliation process in Cloud Run. + output: true + - name: 'instanceSplitStatuses' + type: Array + description: |- + Detailed status information for corresponding instance splits. See comments in reconciling for additional information on reconciliation process in Cloud Run. 
+ output: true + item_type: + type: NestedObject + properties: + - name: 'type' + type: String + description: |- + The allocation type for this instance split. + output: true + - name: 'revision' + type: String + description: |- + Revision to which this instance split is assigned. + output: true + - name: 'percent' + type: Integer + description: |- + Specifies percent of the instance split to this Revision. + output: true + - name: 'reconciling' + type: Boolean + description: | + Returns true if the WorkerPool is currently being acted upon by the system to bring it into the desired state. + + When a new WorkerPool is created, or an existing one is updated, Cloud Run will asynchronously perform all necessary steps to bring the WorkerPool to the desired serving state. This process is called reconciliation. While reconciliation is in process, observedGeneration, latest_ready_revision, trafficStatuses, and uri will have transient values that might mismatch the intended state: Once reconciliation is over (and this field is false), there are two possible outcomes: reconciliation succeeded and the serving state matches the WorkerPool, or there was an error, and reconciliation failed. This state can be found in terminalCondition.state. + + If reconciliation succeeded, the following fields will match: traffic and trafficStatuses, observedGeneration and generation, latestReadyRevision and latestCreatedRevision. + + If reconciliation failed, trafficStatuses, observedGeneration, and latestReadyRevision will have the state of the last serving revision, or empty for newly created WorkerPools. Additional information on the failure can be found in terminalCondition and conditions. + output: true + - name: 'etag' + type: String + description: | + A system-generated fingerprint for this version of the resource. May be used to detect modification conflict during updates. 
+ output: true diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl new file mode 100644 index 000000000000..18183202d8ba --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl new file mode 100644 index 000000000000..6c27c3837755 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl @@ -0,0 +1,13 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + custom_audiences = ["aud1"] + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl new file mode 100644 index 000000000000..722108cda5b0 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl @@ -0,0 +1,19 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = 
"us-docker.pkg.dev/cloudrun/container/worker-pool" + } + vpc_access{ + network_interfaces { + network = "default" + subnetwork = "default" + tags = ["tag1", "tag2", "tag3"] + } + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl new file mode 100644 index 000000000000..6e6031c10048 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl @@ -0,0 +1,22 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + resources { + limits = { + "cpu" = "4" + "memory" = "16Gi" + "nvidia.com/gpu" = "1" + } + } + } + node_selector { + accelerator = "nvidia-l4" + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl new file mode 100644 index 000000000000..0c23cab900ea --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl @@ -0,0 +1,18 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + resources { + limits = { + cpu = "2" + memory = "1024Mi" + } + } + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl new file mode 100644 index 000000000000..626028614c1a --- /dev/null +++ 
b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl @@ -0,0 +1,31 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + volume_mounts { + name = "bucket" + mount_path = "/var/www" + } + } + + volumes { + name = "bucket" + gcs { + bucket = google_storage_bucket.{{$.PrimaryResourceId}}.name + read_only = false + } + } + } +} + +resource "google_storage_bucket" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "US" + uniform_bucket_level_access = true +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl new file mode 100644 index 000000000000..18642072cd13 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl @@ -0,0 +1,48 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool:latest" + volume_mounts { + name = "nfs" + mount_path = "/mnt/nfs/filestore" + } + } + vpc_access { + network_interfaces { + network = "default" + subnetwork = "default" + } + } + + volumes { + name = "nfs" + nfs { + server = google_filestore_instance.default.networks[0].ip_addresses[0] + path = "/share1" + read_only = false + } + } + } +} + +resource "google_filestore_instance" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1-b" + tier = "BASIC_HDD" + + file_shares { + capacity_gb = 
1024 + name = "share1" + } + + networks { + network = "default" + modes = ["MODE_IPV4"] + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl new file mode 100644 index 000000000000..51330b8353ae --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl @@ -0,0 +1,33 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + name = "hello-1" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + depends_on = ["hello-2"] + volume_mounts { + name = "empty-dir-volume" + mount_path = "/mnt" + } + } + containers { + name = "hello-2" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + env { + name = "PORT" + value = "8081" + } + } + volumes { + name = "empty-dir-volume" + empty_dir { + medium = "MEMORY" + size_limit = "256Mi" + } + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl new file mode 100644 index 000000000000..0c3e925f0631 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl @@ -0,0 +1,51 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + volumes { + name = "a-volume" + secret { + secret = google_secret_manager_secret.secret.secret_id + default_mode = 292 # 0444 + items { + version = "1" + path = "my-secret" + mode = 0444 + } + } + } + containers { + image = 
"us-docker.pkg.dev/cloudrun/container/worker-pool" + volume_mounts { + name = "a-volume" + mount_path = "/secrets" + } + } + } + depends_on = [google_secret_manager_secret_version.secret-version-data] +} + +data "google_project" "project" { +} + +resource "google_secret_manager_secret" "secret" { + secret_id = "{{index $.Vars "secret_id"}}" + replication { + auto {} + } +} + +resource "google_secret_manager_secret_version" "secret-version-data" { + secret = google_secret_manager_secret.secret.name + secret_data = "secret-data" +} + +resource "google_secret_manager_secret_iam_member" "secret-access" { + secret_id = google_secret_manager_secret.secret.id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${data.google_project.project.number}-compute@developer.gserviceaccount.com" + depends_on = [google_secret_manager_secret.secret] +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl new file mode 100644 index 000000000000..ffdd851e93d4 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl @@ -0,0 +1,77 @@ +resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cloud_run_worker_pool_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + + volumes { + name = "cloudsql" + cloud_sql_instance { + instances = [google_sql_database_instance.instance.connection_name] + } + } + + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + + env { + name = "FOO" + value = "bar" + } + env { + name = "SECRET_ENV_VAR" + value_source { + secret_key_ref { + secret = google_secret_manager_secret.secret.secret_id + version = "1" + } + } + } + volume_mounts { + name = "cloudsql" + mount_path = "/cloudsql" + } + } + } + + instance_splits { + type = "INSTANCE_SPLIT_ALLOCATION_TYPE_LATEST" + 
percent = 100 + } + depends_on = [google_secret_manager_secret_version.secret-version-data] +} + +data "google_project" "project" { +} + +resource "google_secret_manager_secret" "secret" { + secret_id = "{{index $.Vars "secret_id"}}" + replication { + auto {} + } +} + +resource "google_secret_manager_secret_version" "secret-version-data" { + secret = google_secret_manager_secret.secret.name + secret_data = "secret-data" +} + +resource "google_secret_manager_secret_iam_member" "secret-access" { + secret_id = google_secret_manager_secret.secret.id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${data.google_project.project.number}-compute@developer.gserviceaccount.com" + depends_on = [google_secret_manager_secret.secret] +} + +resource "google_sql_database_instance" "instance" { + name = "{{index $.Vars "cloud_run_sql_name"}}" + region = "us-central1" + database_version = "MYSQL_5_7" + settings { + tier = "db-f1-micro" + } + + deletion_protection = {{index $.Vars "deletion_protection"}} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl b/mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl new file mode 100644 index 000000000000..c7ad64e13099 --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl @@ -0,0 +1,3 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy WorkerPool without setting deletion_protection=false and running `terraform apply`") +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 5e601cd64c54..9ab4ebf8a890 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -66,6 +66,7 @@ var handwrittenDatasources = 
map[string]*schema.Resource{ "google_cloud_run_service": cloudrun.DataSourceGoogleCloudRunService(), "google_cloud_run_v2_job": cloudrunv2.DataSourceGoogleCloudRunV2Job(), "google_cloud_run_v2_service": cloudrunv2.DataSourceGoogleCloudRunV2Service(), + "google_cloud_run_v2_worker_pool": cloudrunv2.DataSourceGoogleCloudRunV2WorkerPool(), "google_composer_environment": composer.DataSourceGoogleComposerEnvironment(), "google_composer_user_workloads_config_map": composer.DataSourceGoogleComposerUserWorkloadsConfigMap(), "google_composer_user_workloads_secret": composer.DataSourceGoogleComposerUserWorkloadsSecret(), diff --git a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go new file mode 100644 index 000000000000..3bd61fb84bec --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go @@ -0,0 +1,52 @@ +package cloudrunv2 + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleCloudRunV2WorkerPool() *schema.Resource { + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceCloudRunV2WorkerPool().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "location") + + // Set 'Optional' schema elements + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleCloudRunV2WorkerPoolRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleCloudRunV2WorkerPoolRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, 
"projects/{{project}}/locations/{{location}}/workerPools/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + + d.SetId(id) + + err = resourceCloudRunV2WorkerPoolRead(d, meta) + if err != nil { + return err + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if err := tpgresource.SetDataSourceAnnotations(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go new file mode 100644 index 000000000000..a8f0faad67d7 --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go @@ -0,0 +1,65 @@ +package cloudrunv2_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataSourceGoogleCloudRunV2WorkerPool_basic(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + + name := fmt.Sprintf("tf-test-cloud-run-v2-wp-%d", acctest.RandInt(t)) + location := "us-central1" + id := fmt.Sprintf("projects/%s/locations/%s/workerPools/%s", project, location, name) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleCloudRunV2WorkerPool_basic(name, location), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_cloud_run_v2_worker_pool.hello", "id", id), + resource.TestCheckResourceAttr("data.google_cloud_run_v2_worker_pool.hello", "name", name), + 
resource.TestCheckResourceAttr("data.google_cloud_run_v2_worker_pool.hello", "location", location), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleCloudRunV2WorkerPool_basic(name, location string) string { + return fmt.Sprintf(` +resource "google_cloud_run_v2_worker_pool" "hello" { + name = "%s" + location = "%s" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } + + labels = { + "key" = "value" + } + + annotations = { + "key" = "value" + } +} + +data "google_cloud_run_v2_worker_pool" "hello" { + name = google_cloud_run_v2_worker_pool.hello.name + location = google_cloud_run_v2_worker_pool.hello.location +} +`, name, location) +} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl new file mode 100644 index 000000000000..1f57cfc70c5c --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl @@ -0,0 +1,618 @@ +package cloudrunv2_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFullUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFull(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: 
true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels"}, + }, + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFullUpdate(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "deletion_protection"}, + }, + }, + }) +} + + +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFull(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + description = "description creating" + location = "us-central1" + launch_stage = "ALPHA" + annotations = { + generated-by = "magic-modules" + } + + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + labels = { + label-1 = "value-1" + } + service_account = google_service_account.service_account.email + annotations = { + generated-by = "magic-modules" + } + containers { + name = "container-1" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + env { + name = "SOURCE" + value = "remote" + } + env { + name = "TARGET" + value = "home" + } + resources { + limits = { + cpu = "4" + memory = "2Gi" + } + } + } + } +} + +resource "google_service_account" "service_account" { + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Test Service Account" +} +`, context) +} +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFullUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + description = "description updating" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + annotations = { + generated-by = 
"magic-modules-files" + } + + binary_authorization { + use_default = true + breakglass_justification = "Some justification" + } + labels = { + label-1 = "value-update" + } + client = "client-update" + client_version = "client-version-update" + + template { + revision = "tf-test-cloudrun-worker-pool%{random_suffix}-001" + labels = { + label-1 = "value-update" + } + service_account = google_service_account.service_account.email + annotations = { + generated-by = "magic-modules" + } + containers { + name = "container-update" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + args = ["arg1", "arg2"] + command = ["/bin/sh", "-c"] + env { + name = "SOURCE_UPDATE" + value = "remote-update" + } + env { + name = "TARGET_UPDATE" + value = "home-update" + } + resources { + limits = { + cpu = "2" + memory = "8Gi" + } + } + working_dir = "/home" + } + } + instance_splits { + type = "INSTANCE_SPLIT_ALLOCATION_TYPE_REVISION" + revision = "tf-test-cloudrun-worker-pool%{random_suffix}-001" + } +} + +resource "google_service_account" "service_account" { + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Test Service Account" +} +`, context) +} +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolGcsVolume(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolGcsVolume(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + 
+func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolGcsVolume(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + description = "description creating" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + + annotations = { + generated-by = "magic-modules" + } + + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + labels = { + label-1 = "value-1" + } + service_account = google_service_account.service_account.email + annotations = { + generated-by = "magic-modules" + } + volumes { + name = "gcs" + gcs { + bucket = "gcp-public-data-landsat" + read_only = true +{{ if ne $.TargetVersionName `ga` -}} + mount_options = ["log-severity=info"] +{{ end }} + } + } + containers { + name = "container-1" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + env { + name = "SOURCE" + value = "remote" + } + env { + name = "TARGET" + value = "home" + } + volume_mounts { + name = "gcs" + mount_path = "/mnt/landsat" + } + resources { + limits = { + cpu = "4" + memory = "2Gi" + } + } + } + } +} + +resource "google_service_account" "service_account" { + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Test Service Account" +} +`, context) +} + +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithDirectVPCUpdate(t *testing.T) { + t.Parallel() + + workerPoolName := fmt.Sprintf("tf-test-cloudrun-worker-pool%s", acctest.RandString(t, 10)) + context := map[string]interface{}{ + "service_name": workerPoolName, + "project": envvar.GetTestProjectFromEnv(), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPC(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPCAndNamedBinAuthPolicyUpdate(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPC(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "%{service_name}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + vpc_access { + egress = "ALL_TRAFFIC" + network_interfaces { + network = "default" + } + } + } +} +`, context) +} + +func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPCAndNamedBinAuthPolicyUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "%{service_name}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + binary_authorization { + policy = "projects/%{project}/platforms/cloudRun/policies/my-policy" + breakglass_justification = "Some justification" + } + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + vpc_access { + network_interfaces { + subnetwork = "default" + tags = ["foo", "bar"] + } + } + } +} +`, context) +} + +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolCustomAudienceUpdate(t *testing.T) { + t.Parallel() + + workerPoolName := fmt.Sprintf("tf-test-cloudrun-worker-pool%s", acctest.RandString(t, 
10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithCustomAudience(workerPoolName, "test"), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "launch_stage", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithCustomAudience(workerPoolName, "test_update"), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "launch_stage", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithoutCustomAudience(workerPoolName), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithoutCustomAudience(workerPoolName string) string { + return fmt.Sprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "%s" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} +`, workerPoolName) +} + +func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithCustomAudience(workerPoolName string, customAudience string) string { + return fmt.Sprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "%s" + location = "us-central1" + deletion_protection = false 
+ custom_audiences = ["%s"] + launch_stage = "ALPHA" + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} +`, workerPoolName, customAudience) +} + + +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolAttributionLabel(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "add_attribution": "true", + "attribution_strategy": "CREATION_ONLY", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabel(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.%", "1"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.user_label", "foo"), + + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.%", "2"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.user_label", "foo"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.goog-terraform-provisioned", "true"), + + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.%", "2"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.user_label", "foo"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.goog-terraform-provisioned", "true"), + ), + }, + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabelUpdate(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.%", "1"), + 
resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.user_label", "bar"), + + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.%", "2"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.user_label", "bar"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.goog-terraform-provisioned", "true"), + + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.%", "2"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.user_label", "bar"), + resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.goog-terraform-provisioned", "true"), + ), + }, + }, + }) +} + +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabel(context map[string]interface{}) string { + return acctest.Nprintf(` +provider "google" { + add_terraform_attribution_label = %{add_attribution} + terraform_attribution_label_addition_strategy = "%{attribution_strategy}" +} + +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + + labels = { + user_label = "foo" + } + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} +`, context) +} + +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabelUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +provider "google" { + add_terraform_attribution_label = %{add_attribution} + terraform_attribution_label_addition_strategy = "%{attribution_strategy}" +} + +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + + + 
labels = { + user_label = "bar" + } + + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} +`, context) +} + +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualScaling(t *testing.T) { + t.Parallel() + context := map[string]interface{} { + "random_suffix" : acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase { + PreCheck: func() { acctest.AccTestPreCheck(t)}, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualScaling(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolUpdateWithManualScaling(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + + }, + }) +} + +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualScaling(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + description = "description creating" + location = "us-central1" + deletion_protection = false + launch_stage = "ALPHA" + annotations = { + generated-by = "magic-modules" + } + scaling { + scaling_mode = "MANUAL" + manual_instance_count = 5 + } + + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + containers { + name = "container-1" + image = 
"us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} + +`, context) +} +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolUpdateWithManualScaling(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + description = "description creating" + location = "us-central1" + deletion_protection = false + annotations = { + generated-by = "magic-modules" + } + + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + launch_stage = "ALPHA" + scaling { + scaling_mode = "MANUAL" + manual_instance_count = 2 + } + template { + containers { + name = "container-1" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} + +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown new file mode 100644 index 000000000000..166d4ff9ad1a --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown @@ -0,0 +1,37 @@ +--- +subcategory: "Cloud Run (v2 API)" +description: |- + Get information about a Google Cloud Run v2 Worker Pool. +--- + +# google_cloud_run_v2_worker_pool + +Get information about a Google Cloud Run v2 Worker Pool. For more information see +the [official documentation](https://cloud.google.com/run/docs/) +and [API](https://cloud.google.com/run/docs/apis). + +## Example Usage + +```hcl +data "google_cloud_run_v2_worker_pool" "my_worker_pool" { + name = "my-worker-pool" + location = "us-central1" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) The name of the Cloud Run v2 Worker Pool. + +* `location` - (Required) The location of the instance. eg us-central1 + +- - - + +* `project` - (Optional) The project in which the resource belongs. 
If it + is not provided, the provider project is used. + +## Attributes Reference + +See [google_cloud_run_v2_worker_pool](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/cloud_run_v2_worker_pool#argument-reference) resource for details of the available attributes. From 1313b424c858899232efe9cb55234568148deaa6 Mon Sep 17 00:00:00 2001 From: Ronson Xaviour <50081163+ronsonx@users.noreply.github.com> Date: Wed, 21 May 2025 03:29:38 +0530 Subject: [PATCH 205/884] docs: Updated argument descriptions in apigee_addons_config resource to match with corresponding REST API specs. (#7405) Co-authored-by: Ronson Xaviour Co-authored-by: Thomas Rodgers --- mmv1/products/apigee/AddonsConfig.yaml | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/mmv1/products/apigee/AddonsConfig.yaml b/mmv1/products/apigee/AddonsConfig.yaml index 7a23d261e204..df67662ba025 100644 --- a/mmv1/products/apigee/AddonsConfig.yaml +++ b/mmv1/products/apigee/AddonsConfig.yaml @@ -67,7 +67,7 @@ properties: properties: - name: 'advancedApiOpsConfig' type: NestedObject - description: Configuration for the Monetization add-on. + description: Configuration for the Advanced API Ops add-on. properties: - name: 'enabled' type: Boolean @@ -76,12 +76,12 @@ properties: enabled. - name: 'integrationConfig' type: NestedObject - description: Configuration for the Monetization add-on. + description: Configuration for the Integration add-on. properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Advanced API Ops add-on is + Flag that specifies whether the Integration add-on is enabled. - name: 'monetizationConfig' type: NestedObject @@ -90,22 +90,20 @@ properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Advanced API Ops add-on is - enabled. + Flag that specifies whether the Monetization add-on is enabled. 
- name: 'apiSecurityConfig' type: NestedObject - description: Configuration for the Monetization add-on. + description: Configuration for the API Security add-on. properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Advanced API Ops add-on is - enabled. + Flag that specifies whether the API security add-on is enabled. - name: 'expiresAt' type: String description: - Flag that specifies whether the Advanced API Ops add-on is - enabled. + Time at which the API Security add-on expires in in milliseconds since epoch. + If unspecified, the add-on will never expire. output: true - name: 'connectorsPlatformConfig' type: NestedObject @@ -114,11 +112,10 @@ properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Advanced API Ops add-on is - enabled. + Flag that specifies whether the Connectors Platform add-on is enabled. - name: 'expiresAt' type: String description: - Flag that specifies whether the Advanced API Ops add-on is - enabled. + Time at which the Connectors Platform add-on expires in milliseconds since epoch. + If unspecified, the add-on will never expire. 
output: true From 94ac74769bfc017c8176ffe1a45772241d7fa302 Mon Sep 17 00:00:00 2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Wed, 21 May 2025 00:01:32 +0200 Subject: [PATCH 206/884] Feature gap: Add missed fields to `BackendService` and `RegionBackendService` (#13821) Signed-off-by: Cezary Sobczak --- mmv1/products/compute/BackendService.yaml | 54 +++++++++ .../compute/RegionBackendService.yaml | 34 ++++++ ...backend_service_dynamic_forwarding.tf.tmpl | 10 ++ ...backend_service_dynamic_forwarding.tf.tmpl | 11 ++ ...ource_compute_backend_service_test.go.tmpl | 105 ++++++++++++++++++ ...ompute_region_backend_service_test.go.tmpl | 42 ++++++- 6 files changed, 254 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/region_backend_service_dynamic_forwarding.tf.tmpl diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 450488f2d8b1..33a8c8e1bb01 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -142,6 +142,11 @@ examples: vars: url_map_name: 'url_map' exclude_test: true + - name: 'backend_service_dynamic_forwarding' + primary_resource_id: 'default' + min_version: 'beta' + vars: + backend_service_name: 'backend-service' parameters: properties: - name: 'affinityCookieTtlSec' @@ -1618,3 +1623,52 @@ properties: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'networkPassThroughLbTrafficPolicy' + type: NestedObject + description: | + Configures traffic steering properties of internal passthrough Network Load Balancers. 
+ min_version: beta + properties: + - name: 'zonalAffinity' + type: NestedObject + description: | + When configured, new connections are load balanced across healthy backend endpoints in the local zone. + properties: + - name: 'spillover' + type: Enum + description: | + This field indicates whether zonal affinity is enabled or not. + enum_values: + - 'ZONAL_AFFINITY_DISABLED' + - 'ZONAL_AFFINITY_SPILL_CROSS_ZONE' + - 'ZONAL_AFFINITY_STAY_WITHIN_ZONE' + default_value: 'ZONAL_AFFINITY_DISABLED' + min_version: beta + - name: 'spilloverRatio' + type: Double + description: | + The value of the field must be in [0, 1]. When the ratio of the count of healthy backend endpoints in a zone + to the count of backend endpoints in that same zone is equal to or above this threshold, the load balancer + distributes new connections to all healthy endpoints in the local zone only. When the ratio of the count + of healthy backend endpoints in a zone to the count of backend endpoints in that same zone is below this + threshold, the load balancer distributes all new connections to all healthy endpoints across all zones. + min_version: beta + - name: 'dynamicForwarding' + type: NestedObject + description: | + Dynamic forwarding configuration. This field is used to configure the backend service with dynamic forwarding + feature which together with Service Extension allows customized and complex routing logic. + min_version: beta + properties: + - name: 'ipPortSelection' + type: NestedObject + description: | + IP:PORT based dynamic forwarding configuration. + min_version: beta + properties: + - name: 'enabled' + type: Boolean + min_version: beta + description: | + A boolean flag enabling IP:PORT based dynamic forwarding. 
+ immutable: true diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index 5bc2bcb1df0f..a392909e9a68 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -139,6 +139,11 @@ examples: instance_group_name: 'instance_group' network_name: 'network' exclude_test: true + - name: 'region_backend_service_dynamic_forwarding' + primary_resource_id: 'default' + min_version: 'beta' + vars: + region_backend_service_name: 'region-service' parameters: - name: 'region' type: ResourceRef @@ -1438,3 +1443,32 @@ properties: required: true enum_values: - 'CONSISTENT_HASH_SUBSETTING' + - name: 'subsetSize' + type: Integer + description: | + The number of backends per backend group assigned to each proxy instance or each service mesh client. + An input parameter to the CONSISTENT_HASH_SUBSETTING algorithm. Can only be set if policy is set to + CONSISTENT_HASH_SUBSETTING. Can only be set if load balancing scheme is INTERNAL_MANAGED or INTERNAL_SELF_MANAGED. + subsetSize is optional for Internal HTTP(S) load balancing and required for Traffic Director. + If you do not provide this value, Cloud Load Balancing will calculate it dynamically to optimize the number + of proxies/clients visible to each backend and vice versa. + Must be greater than 0. If subsetSize is larger than the number of backends/endpoints, then subsetting is disabled. + - name: 'dynamicForwarding' + type: NestedObject + description: | + Dynamic forwarding configuration. This field is used to configure the backend service with dynamic forwarding + feature which together with Service Extension allows customized and complex routing logic. + min_version: beta + properties: + - name: 'ipPortSelection' + type: NestedObject + description: | + IP:PORT based dynamic forwarding configuration. 
+ min_version: beta + properties: + - name: 'enabled' + type: Boolean + min_version: beta + description: | + A boolean flag enabling IP:PORT based dynamic forwarding. + immutable: true diff --git a/mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl new file mode 100644 index 000000000000..f9c5ba1ad962 --- /dev/null +++ b/mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl @@ -0,0 +1,10 @@ +resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "backend_service_name"}}" + load_balancing_scheme = "INTERNAL_MANAGED" + dynamic_forwarding { + ip_port_selection { + enabled = true + } + } +} diff --git a/mmv1/templates/terraform/examples/region_backend_service_dynamic_forwarding.tf.tmpl b/mmv1/templates/terraform/examples/region_backend_service_dynamic_forwarding.tf.tmpl new file mode 100644 index 000000000000..8a71a1d7fb29 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_backend_service_dynamic_forwarding.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_compute_region_backend_service" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "region_backend_service_name"}}" + region = "us-central1" + load_balancing_scheme = "EXTERNAL_MANAGED" + dynamic_forwarding { + ip_port_selection { + enabled = true + } + } +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl index e8ab0657286c..c0e3c2dc9aac 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl @@ -2,6 +2,9 @@ package compute_test import ( "fmt" +{{ if ne $.TargetVersionName `ga` -}} + "regexp" +{{- end }} "testing" 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -1116,6 +1119,50 @@ func TestAccComputeBackendService_backendServiceMaxDuration(t *testing.T) { }) } +{{ if ne $.TargetVersionName `ga` -}} +func TestAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(t *testing.T) { + t.Parallel() + + namePrefix := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_DISABLED", 0.5), + }, + { + ResourceName: "google_compute_backend_service.nptlbtp", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_SPILL_CROSS_ZONE", 0.6), + }, + { + ResourceName: "google_compute_backend_service.nptlbtp", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_STAY_WITHIN_ZONE", 0.2), + }, + { + ResourceName: "google_compute_backend_service.nptlbtp", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_STAY_WITHIN_ZONE", 1.001), + ExpectError: regexp.MustCompile("Must be less than or equal to 1.0"), + }, + }, + }) +} +{{- end }} + func testAccComputeBackendService_trafficDirectorBasic(serviceName, checkName string) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { @@ -2867,3 +2914,61 @@ resource "google_compute_http_health_check" "zero" { } `, serviceName, description, percentage, checkName) } + +{{ if ne $.TargetVersionName `ga` -}} +func 
testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, spillover string, ratio float64) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "nptlbtp" { + provider = google-beta + name = "%s-backend" + description = "Hello World 1234" + protocol = "TCP" + health_checks = [google_compute_health_check.default.self_link] + + backend { + group = google_compute_network_endpoint_group.lb-neg.self_link + balancing_mode = "CONNECTION" + max_connections_per_endpoint = 1000 + } + + network_pass_through_lb_traffic_policy { + zonal_affinity { + spillover = "%s" + spillover_ratio = %f + } + } +} + +resource "google_compute_network_endpoint_group" "lb-neg" { + provider = google-beta + name = "%s-neg" + network = google_compute_network.default.self_link + subnetwork = google_compute_subnetwork.default.self_link + default_port = "90" + zone = "us-central1-a" +} + +resource "google_compute_network" "default" { + provider = google-beta + name = "%s-network" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "%s-subnetwork" + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + network = google_compute_network.default.self_link +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "%s-health-check" + tcp_health_check { + port = "110" + } +} +`, namePrefix, spillover, ratio, namePrefix, namePrefix, namePrefix, namePrefix) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index ee55e2bba82a..a8a46c54f711 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -385,6 +385,18 @@ func 
TestAccComputeRegionBackendService_subsettingUpdate(t *testing.T) { ImportState: true, ImportStateVerify: true, }, + { + Config: testAccComputeRegionBackendService_imlbWithSubsettingSubsetSize(backendName, checkName, 3), + }, + { + ResourceName: "google_compute_region_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeRegionBackendService_imlbWithSubsettingSubsetSize(backendName, checkName, -1), + ExpectError: regexp.MustCompile("Must be greater than or equal to 1"), + }, }, }) } @@ -1310,7 +1322,7 @@ resource "google_compute_region_backend_service" "foobar" { name = "%s" health_checks = [google_compute_health_check.health_check.self_link] protocol = "TCP" - load_balancing_scheme = "INTERNAL" + load_balancing_scheme = "INTERNAL" subsetting { policy = "CONSISTENT_HASH_SUBSETTING" } @@ -1331,7 +1343,7 @@ resource "google_compute_region_backend_service" "foobar" { name = "%s" health_checks = [google_compute_health_check.health_check.self_link] protocol = "TCP" - load_balancing_scheme = "INTERNAL" + load_balancing_scheme = "INTERNAL" } resource "google_compute_health_check" "health_check" { @@ -1342,6 +1354,32 @@ resource "google_compute_health_check" "health_check" { } `, serviceName, checkName) } + +func testAccComputeRegionBackendService_imlbWithSubsettingSubsetSize(serviceName, checkName string, subsetSize int64) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + name = "%s" + health_checks = [google_compute_region_health_check.zero.self_link] + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + subsetting { + policy = "CONSISTENT_HASH_SUBSETTING" + subset_size = %d + } +} + +resource "google_compute_region_health_check" "zero" { + name = "%s" + region = "us-central1" + check_interval_sec = 1 + timeout_sec = 1 + + http_health_check { + port = 80 + } +} +`, serviceName, subsetSize, checkName) +} {{- end }} {{ if ne $.TargetVersionName `ga` -}} From 
7a5d8547e16c988b7ed22872ce3ae41541377c1a Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 20 May 2025 15:32:28 -0700 Subject: [PATCH 207/884] Removed colab schedule post_create logic related to setting name (#13965) --- .../terraform/constants/router_nat.go.tmpl | 2 +- .../custom_flatten/name_from_self_link.tmpl | 2 +- .../terraform/post_create/colab_schedule.go.tmpl | 16 ---------------- .../services/compute/compute_forwarding_rule.go | 2 +- .../compute/compute_region_backend_service.go | 2 +- .../compute/compute_region_health_check.go | 2 +- .../resource_os_config_os_policy_assignment.go | 2 +- .../terraform/tpgresource/self_link_helpers.go | 8 -------- 8 files changed, 6 insertions(+), 30 deletions(-) diff --git a/mmv1/templates/terraform/constants/router_nat.go.tmpl b/mmv1/templates/terraform/constants/router_nat.go.tmpl index 78947b6c0d0f..ee99d52dc06f 100644 --- a/mmv1/templates/terraform/constants/router_nat.go.tmpl +++ b/mmv1/templates/terraform/constants/router_nat.go.tmpl @@ -84,7 +84,7 @@ func computeRouterNatSubnetworkHash(v interface{}) int { } } - return schema.HashString(tpgresource.NameFromSelfLinkStateFunc(name)) + sourceIpRangesHash + secondaryIpRangeHash + return schema.HashString(tpgresource.GetResourceNameFromSelfLink(name.(string))) + sourceIpRangesHash + secondaryIpRangeHash } func computeRouterNatIPsHash(v interface{}) int { diff --git a/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl b/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl index 24b2086d705e..9093852372a5 100644 --- a/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl +++ b/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl @@ -14,5 +14,5 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso if v == nil { return v } - return tpgresource.NameFromSelfLinkStateFunc(v) + return tpgresource.GetResourceNameFromSelfLink(v.(string)) } diff --git 
a/mmv1/templates/terraform/post_create/colab_schedule.go.tmpl b/mmv1/templates/terraform/post_create/colab_schedule.go.tmpl index a7b6748b995e..56d2db1e78e8 100644 --- a/mmv1/templates/terraform/post_create/colab_schedule.go.tmpl +++ b/mmv1/templates/terraform/post_create/colab_schedule.go.tmpl @@ -1,19 +1,3 @@ -// The response for create request contains the generated name generated name that we need -// in order to perform a READ. We need to access the object inside of it as -// a map[string]interface, so let's do that. - -longName := res["name"].(string) -name := tpgresource.GetResourceNameFromSelfLink(longName) -log.Printf("[DEBUG] Setting resource name to %s", name) -if err := d.Set("name", name); err != nil { - return fmt.Errorf("Error setting name: %s", err) -} - -parts := strings.Split(longName, "/") -parts[1] = project -updatedLongName := strings.Join(parts, "/") -d.SetId(updatedLongName) - if p, ok := d.GetOk("desired_state"); ok && p.(string) == "PAUSED" { _, err := modifyScheduleState(config, d, project, billingProject, userAgent, "pause") if err != nil { diff --git a/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go b/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go index ff66c9e2292a..674f633f6203 100644 --- a/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go +++ b/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go @@ -307,5 +307,5 @@ func flattenComputeForwardingRuleRegion(v interface{}, d *schema.ResourceData, c if v == nil { return v } - return tpgresource.NameFromSelfLinkStateFunc(v) + return tpgresource.GetResourceNameFromSelfLink(v.(string)) } diff --git a/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go b/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go index 56bcf640ccad..7e6a79dd03de 100644 --- a/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go +++ 
b/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go @@ -1394,5 +1394,5 @@ func flattenComputeRegionBackendServiceRegion(v interface{}, d *schema.ResourceD if v == nil { return v } - return tpgresource.NameFromSelfLinkStateFunc(v) + return tpgresource.GetResourceNameFromSelfLink(v.(string)) } diff --git a/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go b/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go index f816ccf3df65..ea66d50a8018 100644 --- a/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go +++ b/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go @@ -564,5 +564,5 @@ func flattenComputeRegionHealthCheckRegion(v interface{}, d *schema.ResourceData if v == nil { return v } - return tpgresource.NameFromSelfLinkStateFunc(v) + return tpgresource.GetResourceNameFromSelfLink(v.(string)) } diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go index 9ea349cdbce8..5ae751ff3792 100644 --- a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go +++ b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go @@ -1485,7 +1485,7 @@ func flattenOSConfigOSPolicyAssignmentName(v interface{}, d *schema.ResourceData if v == nil { return v } - return tpgresource.NameFromSelfLinkStateFunc(v) + return tpgresource.GetResourceNameFromSelfLink(v.(string)) } func flattenOSConfigOSPolicyAssignmentDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 6891254e3f46..0a1f3d9ff8be 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ 
b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -102,14 +102,6 @@ func GetResourceNameFromSelfLink(link string) string { return parts[len(parts)-1] } -func NameFromSelfLinkStateFunc(v interface{}) string { - return GetResourceNameFromSelfLink(v.(string)) -} - -func StoreResourceName(resourceLink interface{}) string { - return GetResourceNameFromSelfLink(resourceLink.(string)) -} - type LocationType int const ( From 71004285c47f0b301f84aa144e70dd753b35c9ac Mon Sep 17 00:00:00 2001 From: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Date: Tue, 20 May 2025 23:08:58 +0000 Subject: [PATCH 208/884] Add `google_beyondcorp_security_gateway_application` resource. (#13918) --- mmv1/products/beyondcorp/Application.yaml | 2 +- .../SecurityGatewayApplication.yaml | 151 ++++++++++++++++++ ...security_gateway_application_basic.tf.tmpl | 13 ++ ...p_security_gateway_application_vpc.tf.tmpl | 23 +++ ...dcorp_security_gateway_application_test.go | 98 ++++++++++++ 5 files changed, 286 insertions(+), 1 deletion(-) create mode 100644 mmv1/products/beyondcorp/SecurityGatewayApplication.yaml create mode 100644 mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml index 7d82fb2672c8..a4d4e862bbaf 100644 --- a/mmv1/products/beyondcorp/Application.yaml +++ b/mmv1/products/beyondcorp/Application.yaml @@ -13,7 +13,7 @@ --- name: Application -deprecation_message: '`google_beyondcorp_application` is deprecated.' +deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' 
description: Specifies application endpoint(s) to protect behind a Security Gateway. base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications update_mask: true diff --git a/mmv1/products/beyondcorp/SecurityGatewayApplication.yaml b/mmv1/products/beyondcorp/SecurityGatewayApplication.yaml new file mode 100644 index 000000000000..35cc5a00ee43 --- /dev/null +++ b/mmv1/products/beyondcorp/SecurityGatewayApplication.yaml @@ -0,0 +1,151 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: SecurityGatewayApplication +description: Specifies application endpoint(s) to protect behind a Security Gateway. 
+base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications +update_mask: true +self_link: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}} +create_url: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications?applicationId={{application_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}} +import_format: + - projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}} +iam_policy: + method_name_separator: ':' + iam_conditions_request_type: 'QUERY_PARAM_NESTED' + allowed_iam_role: 'roles/beyondcorp.securityGatewayUser' + parent_resource_attribute: 'application_id' + import_format: + - 'projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}}' + - '{{application_id}}' +examples: + - name: beyondcorp_security_gateway_application_basic + primary_resource_id: example + primary_resource_name: 'fmt.Sprintf("tf-test-default-sg%s", context["random_suffix"]), fmt.Sprintf("tf-test-google-sga%s", context["random_suffix"])' + vars: + security_gateway_name: default-sg + application_name: google-sga + - name: beyondcorp_security_gateway_application_vpc + primary_resource_id: example + primary_resource_name: 'fmt.Sprintf("tf-test-default-sg%s", context["random_suffix"]), fmt.Sprintf("tf-test-google-sga%s", context["random_suffix"])' + vars: + security_gateway_name: default-sg + application_name: my-vm-service2 +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + base_url: '{{op_id}}' + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: QXBwbGljYXRpb24= +parameters: + - name: securityGatewayId 
+ type: String + description: ID of the Security Gateway resource this belongs to. + immutable: true + url_param_only: true + required: true + - name: applicationId + type: String + description: |- + User-settable Application resource ID. + * Must start with a letter. + * Must contain between 4-63 characters from `/a-z-/`. + * Must end with a number or letter. + immutable: true + url_param_only: true + required: true +properties: + - name: createTime + type: String + description: Output only. Timestamp when the resource was created. + output: true + - name: displayName + type: String + description: |- + Optional. An arbitrary user-provided name for the Application resource. + Cannot exceed 64 characters. + - name: endpointMatchers + type: Array + description: |- + Required. Endpoint matchers associated with an application. + A combination of hostname and ports as endpoint matcher is used to match + the application. + Match conditions for OR logic. + An array of match conditions to allow for multiple matching criteria. + The rule is considered a match if one the conditions are met. + The conditions can be one of the following combination + (Hostname), (Hostname & Ports) + + EXAMPLES: + Hostname - ("*.abc.com"), ("xyz.abc.com") + Hostname and Ports - ("abc.com" and "22"), ("abc.com" and "22,33") etc + required: true + item_type: + type: NestedObject + properties: + - name: hostname + type: String + description: Required. Hostname of the application. + required: true + - name: ports + type: Array + description: Optional. Ports of the application. + item_type: + type: Integer + - name: upstreams + type: Array + description: Optional. List of which upstream resource(s) to forward traffic to. + item_type: + type: NestedObject + properties: + - name: egressPolicy + type: NestedObject + description: Optional. Routing policy information. + properties: + - name: regions + type: Array + description: Required. List of regions where the application sends traffic to. 
+ required: true + item_type: + type: String + - name: network + type: NestedObject + description: Network to forward traffic to. + properties: + - name: name + type: string + description: |- + Required. Network name is of the format: + `projects/{project}/global/networks/{network}` + required: true + - name: name + type: String + description: Identifier. Name of the resource. + output: true + - name: updateTime + type: String + description: Output only. Timestamp when the resource was last modified. + output: true diff --git a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl new file mode 100644 index 000000000000..6fe3273fde60 --- /dev/null +++ b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl @@ -0,0 +1,13 @@ +resource "google_beyondcorp_security_gateway" "default" { + security_gateway_id = "{{index $.Vars "security_gateway_name"}}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +resource "google_beyondcorp_security_gateway_application" "{{$.PrimaryResourceId}}" { + security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id + application_id = "{{index $.Vars "application_name"}}" + endpoint_matchers { + hostname = "google.com" + } +} diff --git a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl new file mode 100644 index 000000000000..155c7ea37088 --- /dev/null +++ b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl @@ -0,0 +1,23 @@ +data "google_project" "project" {} + +resource "google_beyondcorp_security_gateway" "default" { + security_gateway_id = "{{index $.Vars "security_gateway_name"}}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } 
+} + +resource "google_beyondcorp_security_gateway_application" "{{$.PrimaryResourceId}}" { + security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id + application_id = "{{index $.Vars "application_name"}}" + endpoint_matchers { + hostname = "my-vm-service.com" + } + upstreams { + egress_policy { + regions = ["us-central1"] + } + network { + name = "projects/${data.google_project.project.project_id}/global/networks/default" + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go new file mode 100644 index 000000000000..2175e9a51a16 --- /dev/null +++ b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go @@ -0,0 +1,98 @@ +package beyondcorp_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context), + }, + { + ResourceName: "google_beyondcorp_security_gateway_application.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"application_id", "security_gateway_id"}, + }, + { + Config: 
testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_beyondcorp_security_gateway_application.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_beyondcorp_security_gateway_application.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"application_id", "security_gateway_id"}, + }, + }, + }) +} + +func testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_beyondcorp_security_gateway" "default" { + security_gateway_id = "default-sg%{random_suffix}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +resource "google_beyondcorp_security_gateway_application" "example" { + security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id + application_id = "google-sga%{random_suffix}" + endpoint_matchers { + hostname = "google.com" + } +} +`, context) +} + +func testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_beyondcorp_security_gateway" "default" { + security_gateway_id = "default-sg%{random_suffix}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +resource "google_beyondcorp_security_gateway_application" "example" { + security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id + display_name = "Updated Name" + application_id = "google-sga%{random_suffix}" + endpoint_matchers { + hostname = "*.google.com" + } + endpoint_matchers { + hostname = "google.com" + 
ports = [443, 80] + } + upstreams { + network { + name = "projects/${data.google_project.project.project_id}/global/networks/default" + } + } +} +`, context) +} From f45ad8eef6a2c3118f0babc4e4fd0444b82d4b6e Mon Sep 17 00:00:00 2001 From: Iris Chen <10179943+iyabchen@users.noreply.github.com> Date: Tue, 20 May 2025 17:11:53 -0700 Subject: [PATCH 209/884] Add a copy file header to state the source file for handwritten files (#13725) --- mmv1/provider/terraform.go | 64 +++++++++++++++++++++++++++++++++++++- 1 file changed, 63 insertions(+), 1 deletion(-) diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index cc6123e1b734..1c0779ec1f57 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -423,7 +423,9 @@ func (t Terraform) CopyFileList(outputFolder string, files map[string]string, ge if filepath.Ext(target) == ".go" || (filepath.Ext(target) == ".mod" && generateCode) { t.replaceImportPath(outputFolder, target) } - + if filepath.Ext(target) == ".go" || filepath.Ext(target) == ".markdown" { + t.addCopyfileHeader(source, outputFolder, target) + } if filepath.Ext(target) == ".go" { t.addHashicorpCopyRightHeader(outputFolder, target) } @@ -523,10 +525,70 @@ func (t Terraform) CompileFileList(outputFolder string, files map[string]string, continue } t.replaceImportPath(outputFolder, target) + if filepath.Ext(targetFile) == ".go" || filepath.Ext(targetFile) == ".markdown" { + t.addCopyfileHeader(source, outputFolder, target) + } t.addHashicorpCopyRightHeader(outputFolder, target) } } +func (t Terraform) addCopyfileHeader(srcpath, outputFolder, target string) { + githubPrefix := "https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/" + if !strings.HasPrefix(srcpath, githubPrefix) { + srcpath = githubPrefix + srcpath + } + + targetFile := filepath.Join(outputFolder, target) + sourceByte, err := os.ReadFile(targetFile) + if err != nil { + log.Fatalf("Cannot read file %s to add copy file header: %s", targetFile, err) + } + 
+ srcStr := string(sourceByte) + if strings.Contains(srcStr, "*** AUTO GENERATED CODE *** Type: Handwritten ***") { + return + } + + templateFormat := `// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: Handwritten *** +// +// ---------------------------------------------------------------------------- +// +// This code is generated by Magic Modules using the following: +// +// Source file: %s +// +// DO NOT EDIT this file directly. Any changes made to this file will be +// overwritten during the next generation cycle. +// +// ---------------------------------------------------------------------------- +%s` + content := srcStr + if filepath.Ext(target) == ".markdown" { + // insert the header after --- + templateFormat = "---\n" + strings.Replace(templateFormat, "//", "#", -1) + content = strings.TrimPrefix(srcStr, "---\n") + } + + fileStr := fmt.Sprintf(templateFormat, srcpath, content) + + sourceByte = []byte(fileStr) + // format go file + if filepath.Ext(targetFile) == ".go" { + sourceByte, err = format.Source(sourceByte) + if err != nil { + log.Printf("error formatting %s: %s\n", targetFile, err) + return + } + } + + err = os.WriteFile(targetFile, sourceByte, 0644) + if err != nil { + log.Fatalf("Cannot write file %s to add copy file header: %s", target, err) + } +} + func (t Terraform) addHashicorpCopyRightHeader(outputFolder, target string) { if !expectedOutputFolder(outputFolder) { log.Printf("Unexpected output folder (%s) detected "+ From 5ccdabf8bc7fd692788e28d68139c30ade610100 Mon Sep 17 00:00:00 2001 From: Or Sela Date: Wed, 21 May 2025 16:20:18 +0300 Subject: [PATCH 210/884] Add PSC fields to Filestore instance in beta (#13883) --- mmv1/products/filestore/Instance.yaml | 22 +++ .../resource_filestore_instance_test.go.tmpl | 162 ++++++++++++++++++ 2 files changed, 184 insertions(+) diff --git a/mmv1/products/filestore/Instance.yaml b/mmv1/products/filestore/Instance.yaml index 
058fec6eac85..8c471e0d35b5 100644 --- a/mmv1/products/filestore/Instance.yaml +++ b/mmv1/products/filestore/Instance.yaml @@ -209,6 +209,12 @@ properties: An integer representing the anonymous group id with a default value of 65534. Anon_gid may only be set with squashMode of ROOT_SQUASH. An error will be returned if this field is specified for other squashMode settings. + - name: 'network' + type: String + min_version: beta + description: | + The source VPC network for `ip_ranges`. + Required for instances using Private Service Connect, optional otherwise. max_size: 10 max_size: 1 - name: 'networks' @@ -273,6 +279,22 @@ properties: enum_values: - 'DIRECT_PEERING' - 'PRIVATE_SERVICE_ACCESS' + - 'PRIVATE_SERVICE_CONNECT' + - name: 'pscConfig' + type: NestedObject + min_version: beta + description: | + Private Service Connect configuration. + Should only be set when connect_mode is PRIVATE_SERVICE_CONNECT. + properties: + - name: endpointProject + type: String + description: | + Consumer service project in which the Private Service Connect endpoint + would be set up. This is optional, and only relevant in case the network + is a shared VPC. If this is not specified, the endpoint would be set up + in the VPC host project. 
+ immutable: true min_size: 1 - name: 'etag' type: String diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl index 4c915b76595e..146fa0df13c6 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl @@ -592,4 +592,166 @@ resource "google_filestore_instance" "instance" { } `, name, location, tier) } + +{{- end }} +{{- if ne $.TargetVersionName "ga" }} + +func TestAccFilestoreInstance_psc(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "name": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), + "location": "us-central1", + "tier": "REGIONAL", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFilestoreInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFilestoreInstance_psc(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_filestore_instance.instance", "networks.0.connect_mode", "PRIVATE_SERVICE_CONNECT"), + ), + }, + { + ResourceName: "google_filestore_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"zone"}, + }, + }, + }) +} + +func testAccFilestoreInstance_psc(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_client_config" "current" { + provider = google-beta +} + +resource "google_compute_network" "psc_network" { + provider = google-beta + name = "%{name}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "psc_subnet" { + provider = google-beta + name = "%{name}" + ip_cidr_range = "10.2.0.0/16" + region = "%{location}" + network = 
google_compute_network.psc_network.id +} + +resource "google_network_connectivity_service_connection_policy" "default" { + provider = google-beta + name = "%{name}" + location = "%{location}" + service_class = "google-cloud-filestore" + network = google_compute_network.psc_network.id + psc_config { + subnetworks = [google_compute_subnetwork.psc_subnet.id] + } +} + +resource "google_filestore_instance" "instance" { + provider = google-beta + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] + name = "%{name}" + location = "%{location}" + tier = "%{tier}" + description = "An instance created during testing." + protocol = "NFS_V4_1" + + file_shares { + capacity_gb = 1024 + name = "share" + + nfs_export_options { + ip_ranges = ["70.0.0.1/24"] + network = google_compute_network.psc_network.name + } + } + + networks { + network = google_compute_network.psc_network.name + modes = ["MODE_IPV4"] + connect_mode = "PRIVATE_SERVICE_CONNECT" + psc_config { + endpoint_project = data.google_client_config.current.project + } + } +} +`, context) +} + +func TestAccFilestoreInstance_nfsExportOptionsNetwork_update(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + location := "us-central1-a" + tier := "ZONAL" + + // Currently, we can only alternate between an empty network and the instance network of non-PSC instances. 
+ acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFilestoreInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFilestoreInstance_nfsExportOptionsNetwork_update(name, location, tier, ""), + Check: resource.TestCheckResourceAttr("google_filestore_instance.instance", "file_shares.0.nfs_export_options.0.network", ""), + }, + { + ResourceName: "google_filestore_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"zone"}, + }, + { + Config: testAccFilestoreInstance_nfsExportOptionsNetwork_update(name, location, tier, "default"), + Check: resource.TestCheckResourceAttr("google_filestore_instance.instance", "file_shares.0.nfs_export_options.0.network", "default"), + }, + { + ResourceName: "google_filestore_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"zone"}, + }, + }, + }) +} + +func testAccFilestoreInstance_nfsExportOptionsNetwork_update(name, location, tier, network string) string { + return fmt.Sprintf(` +resource "google_filestore_instance" "instance" { + provider = google-beta + name = "%s" + zone = "%s" + tier = "%s" + description = "An instance created during testing." 
+ + file_shares { + capacity_gb = 1024 + name = "share" + + nfs_export_options { + ip_ranges = ["70.0.0.1/24"] + network = "%s" + } + } + + networks { + network = "default" + modes = ["MODE_IPV4"] + } +} +`, name, location, tier, network) +} + {{- end }} \ No newline at end of file From aac3e517bdc6d31168b191583da77fb3f7ee6131 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Wed, 21 May 2025 15:24:23 -0400 Subject: [PATCH 211/884] Update membership_data.go (#14044) --- .ci/magician/github/membership_data.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 28bcaba31972..ca596c162298 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -117,7 +117,12 @@ var ( }, }, "slevenick": { - vacations: []Vacation{}, + vacations: []Vacation{ + { + startDate: newDate(2025, 5, 22), + endDate: newDate(2025, 6, 7), + }, + }, }, "trodge": { vacations: []Vacation{}, From 192392142bf0701a81de40c6e37df90fa8c1f1a5 Mon Sep 17 00:00:00 2001 From: Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Date: Wed, 21 May 2025 17:01:05 -0300 Subject: [PATCH 212/884] Fix TestAccContainerNodePool_resourceManagerTags (#13994) --- .../acctest/bootstrap_test_utils.go.tmpl | 41 +++- .../resource_container_node_pool_test.go.tmpl | 180 +++++------------- .../resource_data_fusion_instance_test.go | 4 +- .../resource_filestore_backup_test.go | 4 +- .../resource_filestore_instance_test.go.tmpl | 4 +- .../resource_google_folder_test.go | 4 +- .../resource_google_project_test.go | 4 +- 7 files changed, 93 insertions(+), 148 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index b8f96e37a355..e1bdc3feb674 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ 
b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -2059,11 +2059,30 @@ func BootstrapSharedCodeRepositoryIndex(t *testing.T, codeRepositoryIndexId, loc } const sharedTagKeyPrefix = "tf-bootstrap-tagkey" +const sharedTagKeyParentErr = "Parent %q is not valid. Should be in format: 'organizations/123' OR 'projects/123'." -func BootstrapSharedTestTagKey(t *testing.T, testId string) string { +func BootstrapSharedTestProjectTagKey(t *testing.T, testId string, obj map[string]interface{}) string { + pid := envvar.GetTestProjectFromEnv() + return bootstrapSharedTestTagKey(t, testId, "projects/"+pid, obj) +} + +func BootstrapSharedTestOrganizationTagKey(t *testing.T, testId string, obj map[string]interface{}) string { org := envvar.GetTestOrgFromEnv(t) + return bootstrapSharedTestTagKey(t, testId, "organizations/"+org, obj) +} + +// parent should be in format: {"organization" OR "projects"}/{id} +func bootstrapSharedTestTagKey(t *testing.T, testId, parent string, obj map[string]interface{}) string { sharedTagKey := fmt.Sprintf("%s-%s", sharedTagKeyPrefix, testId) - tagKeyName := fmt.Sprintf("%s/%s", org, sharedTagKey) + + parentSplit := strings.Split(parent, "/") + if len(parentSplit) < 2 || (parentSplit[0] != "organizations" && parentSplit[0] != "projects") { + parentErr := fmt.Sprintf(sharedTagKeyParentErr, parent) + t.Fatalf("Error bootstrapping shared tag key %q: %s", sharedTagKey, parentErr) + } + + parentId := parentSplit[1] + tagKeyName := fmt.Sprintf("%s/%s", parentId, sharedTagKey) config := BootstrapConfig(t) if config == nil { @@ -2083,10 +2102,13 @@ func BootstrapSharedTestTagKey(t *testing.T, testId string) string { if err != nil && transport_tpg.IsGoogleApiErrorWithCode(err, 403) { log.Printf("[DEBUG] TagKey %q not found, bootstrapping", sharedTagKey) tagKeyObj := map[string]interface{}{ - "parent": "organizations/" + org, + "parent": parent, "shortName": sharedTagKey, "description": "Bootstrapped tag key for Terraform Acceptance testing", } 
+ if obj != nil { + maps.Insert(tagKeyObj, maps.All(obj)) + } res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, @@ -2131,10 +2153,19 @@ func BootstrapSharedTestTagKey(t *testing.T, testId string) string { const sharedTagValuePrefix = "tf-bootstrap-tagvalue" -func BootstrapSharedTestTagValue(t *testing.T, testId string, tagKey string) string { +func BootstrapSharedTestProjectTagValue(t *testing.T, testId string, tagKey string) string { + pid := envvar.GetTestProjectFromEnv() + return BootstrapSharedTestTagValue(t, testId, tagKey, pid) +} + +func BootstrapSharedTestOrganizationTagValue(t *testing.T, testId string, tagKey string) string { org := envvar.GetTestOrgFromEnv(t) + return BootstrapSharedTestTagValue(t, testId, tagKey, org) +} + +func BootstrapSharedTestTagValue(t *testing.T, testId string, tagKey, parentId string) string { sharedTagValue := fmt.Sprintf("%s-%s", sharedTagValuePrefix, testId) - tagKeyName := fmt.Sprintf("%s/%s", org, tagKey) + tagKeyName := fmt.Sprintf("%s/%s", parentId, tagKey) tagValueName := fmt.Sprintf("%s/%s", tagKeyName, sharedTagValue) config := BootstrapConfig(t) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index bb9989e18ca6..112febeeb259 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -41,11 +41,27 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { t.Parallel() pid := envvar.GetTestProjectFromEnv() - randomSuffix := acctest.RandString(t, 10) - clusterName := fmt.Sprintf("tf-test-cluster-%s", randomSuffix) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + tagData := map[string]interface{}{ + 
"purpose": "GCE_FIREWALL", + "purpose_data": map[string]interface{}{ + "network": pid + "/" + networkName, + }, + } + tagKey1 := acctest.BootstrapSharedTestProjectTagKey(t, "resourceManagerTags1", tagData) + tagKey2 := acctest.BootstrapSharedTestProjectTagKey(t, "resourceManagerTags2", tagData) + + context := map[string]interface{}{ + "pid": pid, + "org": envvar.GetTestOrgFromEnv(t), + "network": networkName, + "subnet": acctest.BootstrapSubnet(t, "gke-cluster", networkName), + "tagKey1": tagKey1, + "tagValue1": acctest.BootstrapSharedTestProjectTagValue(t, "resourceManagerTags1", tagKey1), + "tagKey2": tagKey2, + "tagValue2": acctest.BootstrapSharedTestProjectTagValue(t, "resourceManagerTags2", tagKey2), + "random_suffix": acctest.RandString(t, 10), + } bootstrapGkeTagManagerServiceAgents(t) @@ -58,7 +74,7 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_resourceManagerTags(pid, clusterName, networkName, subnetworkName, randomSuffix), + Config: testAccContainerNodePool_resourceManagerTags(context), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_container_node_pool.primary_nodes", "node_config.0.resource_manager_tags.%"), ), @@ -70,7 +86,7 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { ImportStateVerifyIgnore: []string{"min_master_version", "cluster"}, }, { - Config: testAccContainerNodePool_resourceManagerTagsUpdate1(pid, clusterName, networkName, subnetworkName, randomSuffix), + Config: testAccContainerNodePool_resourceManagerTagsUpdate1(context), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_container_node_pool.primary_nodes", "node_config.0.resource_manager_tags.%"), ), @@ -82,7 +98,7 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { ImportStateVerifyIgnore: []string{"min_master_version", "cluster"}, }, { - 
Config: testAccContainerNodePool_resourceManagerTagsUpdate2(pid, clusterName, networkName, subnetworkName, randomSuffix), + Config: testAccContainerNodePool_resourceManagerTagsUpdate2(context), }, { ResourceName: "google_container_node_pool.primary_nodes", @@ -4999,44 +5015,10 @@ resource "google_container_node_pool" "without_confidential_boot_disk" { `, cluster, networkName, subnetworkName, np) } -func testAccContainerNodePool_resourceManagerTags(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { - return fmt.Sprintf(` +func testAccContainerNodePool_resourceManagerTags(context map[string]interface{}) string { + return acctest.Nprintf(` data "google_project" "project" { - project_id = "%[1]s" -} - -resource "google_tags_tag_key" "key1" { - parent = "projects/%[1]s" - short_name = "foobarbaz1-%[2]s" - description = "For foo/bar1 resources" - purpose = "GCE_FIREWALL" - purpose_data = { - network = "%[1]s/%[4]s" - } -} - -resource "google_tags_tag_value" "value1" { - parent = google_tags_tag_key.key1.id - short_name = "foo1-%[2]s" - description = "For foo1 resources" -} - -resource "google_tags_tag_key" "key2" { - parent = "projects/%[1]s" - short_name = "foobarbaz2-%[2]s" - description = "For foo/bar2 resources" - purpose = "GCE_FIREWALL" - purpose_data = { - network = "%[1]s/%[4]s" - } - - depends_on = [google_tags_tag_key.key1] -} - -resource "google_tags_tag_value" "value2" { - parent = google_tags_tag_key.key2.id - short_name = "foo2-%[2]s" - description = "For foo2 resources" + project_id = "%{pid}" } data "google_container_engine_versions" "uscentral1a" { @@ -5044,7 +5026,7 @@ data "google_container_engine_versions" "uscentral1a" { } resource "google_container_cluster" "primary" { - name = "%[3]s" + name = "tf-test-cluster-%{random_suffix}" location = "us-central1-a" min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] @@ -5055,8 +5037,8 @@ resource 
"google_container_cluster" "primary" { initial_node_count = 1 deletion_protection = false - network = "%[4]s" - subnetwork = "%[5]s" + network = "%{network}" + subnetwork = "%{subnet}" timeouts { create = "30m" @@ -5078,51 +5060,17 @@ resource "google_container_node_pool" "primary_nodes" { disk_size_gb = 15 resource_manager_tags = { - (google_tags_tag_key.key1.id) = google_tags_tag_value.value1.id + "%{pid}/%{tagKey1}" = "%{tagValue1}" } } } -`, projectID, randomSuffix, clusterName, networkName, subnetworkName) +`, context) } -func testAccContainerNodePool_resourceManagerTagsUpdate1(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { - return fmt.Sprintf(` +func testAccContainerNodePool_resourceManagerTagsUpdate1(context map[string]interface{}) string { + return acctest.Nprintf(` data "google_project" "project" { - project_id = "%[1]s" -} - -resource "google_tags_tag_key" "key1" { - parent = "projects/%[1]s" - short_name = "foobarbaz1-%[2]s" - description = "For foo/bar1 resources" - purpose = "GCE_FIREWALL" - purpose_data = { - network = "%[1]s/%[4]s" - } -} - -resource "google_tags_tag_value" "value1" { - parent = google_tags_tag_key.key1.id - short_name = "foo1-%[2]s" - description = "For foo1 resources" -} - -resource "google_tags_tag_key" "key2" { - parent = "projects/%[1]s" - short_name = "foobarbaz2-%[2]s" - description = "For foo/bar2 resources" - purpose = "GCE_FIREWALL" - purpose_data = { - network = "%[1]s/%[4]s" - } - - depends_on = [google_tags_tag_key.key1] -} - -resource "google_tags_tag_value" "value2" { - parent = google_tags_tag_key.key2.id - short_name = "foo2-%[2]s" - description = "For foo2 resources" + project_id = "%{pid}" } data "google_container_engine_versions" "uscentral1a" { @@ -5130,7 +5078,7 @@ data "google_container_engine_versions" "uscentral1a" { } resource "google_container_cluster" "primary" { - name = "%[3]s" + name = "tf-test-cluster-%{random_suffix}" location = "us-central1-a" min_master_version = 
data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] @@ -5141,8 +5089,8 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 deletion_protection = false - network = "%[4]s" - subnetwork = "%[5]s" + network = "%{network}" + subnetwork = "%{subnet}" timeouts { create = "30m" @@ -5164,52 +5112,18 @@ resource "google_container_node_pool" "primary_nodes" { disk_size_gb = 15 resource_manager_tags = { - (google_tags_tag_key.key1.id) = google_tags_tag_value.value1.id - (google_tags_tag_key.key2.id) = google_tags_tag_value.value2.id + "%{pid}/%{tagKey1}" = "%{tagValue1}" + "%{pid}/%{tagKey2}" = "%{tagValue2}" } } } -`, projectID, randomSuffix, clusterName, networkName, subnetworkName) +`, context) } -func testAccContainerNodePool_resourceManagerTagsUpdate2(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { - return fmt.Sprintf(` +func testAccContainerNodePool_resourceManagerTagsUpdate2(context map[string]interface{}) string { + return acctest.Nprintf(` data "google_project" "project" { - project_id = "%[1]s" -} - -resource "google_tags_tag_key" "key1" { - parent = "projects/%[1]s" - short_name = "foobarbaz1-%[2]s" - description = "For foo/bar1 resources" - purpose = "GCE_FIREWALL" - purpose_data = { - network = "%[1]s/%[4]s" - } -} - -resource "google_tags_tag_value" "value1" { - parent = google_tags_tag_key.key1.id - short_name = "foo1-%[2]s" - description = "For foo1 resources" -} - -resource "google_tags_tag_key" "key2" { - parent = "projects/%[1]s" - short_name = "foobarbaz2-%[2]s" - description = "For foo/bar2 resources" - purpose = "GCE_FIREWALL" - purpose_data = { - network = "%[1]s/%[4]s" - } - - depends_on = [google_tags_tag_key.key1] -} - -resource "google_tags_tag_value" "value2" { - parent = google_tags_tag_key.key2.id - short_name = "foo2-%[2]s" - description = "For foo2 resources" + project_id = "%{pid}" } data "google_container_engine_versions" "uscentral1a" { @@ 
-5217,7 +5131,7 @@ data "google_container_engine_versions" "uscentral1a" { } resource "google_container_cluster" "primary" { - name = "%[3]s" + name = "tf-test-cluster-%{random_suffix}" location = "us-central1-a" min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] @@ -5228,8 +5142,8 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 deletion_protection = false - network = "%[4]s" - subnetwork = "%[5]s" + network = "%{network}" + subnetwork = "%{subnet}" timeouts { create = "30m" @@ -5251,7 +5165,7 @@ resource "google_container_node_pool" "primary_nodes" { disk_size_gb = 15 } } -`, projectID, randomSuffix, clusterName, networkName, subnetworkName) +`, context) } func TestAccContainerNodePool_privateRegistry(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go b/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go index 503b7c96a820..cf9bd08c117b 100644 --- a/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go +++ b/mmv1/third_party/terraform/services/datafusion/resource_data_fusion_instance_test.go @@ -157,11 +157,11 @@ resource "google_data_fusion_instance" "foobar" { func TestAccDatafusionInstance_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestTagKey(t, "datafusion-instances-tagkey") + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "datafusion-instances-tagkey", nil) context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "datafusion-instances-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "datafusion-instances-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go 
b/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go index abe465107b2c..b01cd9705ecc 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go @@ -118,11 +118,11 @@ resource "google_filestore_backup" "backup" { func TestAccFilestoreBackup_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestTagKey(t, "filestore-backups-tagkey") + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "filestore-backups-tagkey", nil) context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "filestore-backups-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "filestore-backups-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl index 146fa0df13c6..d490ec4c2fcb 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl @@ -409,11 +409,11 @@ resource "google_filestore_instance" "instance" { func TestAccFilestoreInstance_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestTagKey(t, "filestore-instances-tagkey") + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "filestore-instances-tagkey", nil) context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "filestore-instances-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "filestore-instances-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } diff --git 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go index d87e0d8034c1..ce89b683ef14 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go @@ -93,11 +93,11 @@ func TestAccFolder_moveParent(t *testing.T) { func TestAccFolder_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestTagKey(t, "crm-folder-tagkey") + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "crm-folder-tagkey", nil) context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "crm-folder-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "crm-folder-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go index f64fb0c36fa6..2c73b9988f42 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go @@ -245,12 +245,12 @@ func TestAccProject_tags(t *testing.T) { t.Parallel() pid := fmt.Sprintf("%s-%d", TestPrefix, acctest.RandInt(t)) - tagKey := acctest.BootstrapSharedTestTagKey(t, "crm-projects-tagkey") + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "crm-projects-tagkey", nil) context := map[string]interface{}{ "pid": pid, "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestTagValue(t, "crm-projects-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "crm-projects-tagvalue", tagKey), "random_suffix": 
acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ From 9afda4e79b027b292131a6dd88026e901924eab5 Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Wed, 21 May 2025 13:32:13 -0700 Subject: [PATCH 213/884] `resourceIdentity`: set `nightlyTestsEnabled` to `false` (#14055) --- .../feature_branches/FEATURE_BRANCH_resource_identity.kt | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt index ba81b22e0779..e2ff35042b96 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt @@ -32,6 +32,7 @@ fun featureBranchResourceIdentitySubProject(allConfig: AllContextParameters): Pr val trigger = NightlyTriggerConfiguration( branch = "refs/heads/$featureBranchResourceIdentity", // triggered builds must test the feature branch startHour = DefaultStartHour + 6, + nightlyTestsEnabled = false ) val vcrConfig = getVcrAcceptanceTestConfig(allConfig) // Reused below for both MM testing build configs val servicesToTest = arrayOf("secretmanager", "resourcemanager") From 0147bdaf57dfbb27213914099a3e9f50c68a4c4c Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 21 May 2025 13:56:30 -0700 Subject: [PATCH 214/884] Combine all post replay templates into one (#13958) --- .../templates/vcr/non_exercised_tests.tmpl | 13 - .../cmd/templates/vcr/post_replay.tmpl | 55 ++++ .../cmd/templates/vcr/test_analytics.tmpl | 20 -- .../vcr/with_replay_failed_tests.tmpl | 12 - .../vcr/without_replay_failed_tests.tmpl | 7 - .ci/magician/cmd/test_eap_vcr.go | 40 +-- .ci/magician/cmd/test_terraform_vcr.go | 116 ++------ 
.ci/magician/cmd/test_terraform_vcr_test.go | 259 ++++++++---------- 8 files changed, 213 insertions(+), 309 deletions(-) delete mode 100644 .ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl create mode 100644 .ci/magician/cmd/templates/vcr/post_replay.tmpl delete mode 100644 .ci/magician/cmd/templates/vcr/test_analytics.tmpl delete mode 100644 .ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl delete mode 100644 .ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl diff --git a/.ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl b/.ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl deleted file mode 100644 index 9efae64310a5..000000000000 --- a/.ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl +++ /dev/null @@ -1,13 +0,0 @@ -{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0)}} -#### Non-exercised tests - -{{if gt (len .NotRunBetaTests) 0 -}} -{{color "red" "Tests were added that are skipped in VCR:"}} -{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} -{{end}} - -{{if gt (len .NotRunGATests) 0 -}} -{{color "red" "Tests were added that are GA-only additions and require manual runs:"}} -{{range .NotRunGATests}}{{. | printf "- %s\n"}}{{end}} -{{end}} -{{end}} diff --git a/.ci/magician/cmd/templates/vcr/post_replay.tmpl b/.ci/magician/cmd/templates/vcr/post_replay.tmpl new file mode 100644 index 000000000000..e89973cca41e --- /dev/null +++ b/.ci/magician/cmd/templates/vcr/post_replay.tmpl @@ -0,0 +1,55 @@ +{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0)}} +#### Non-exercised tests + +{{if gt (len .NotRunBetaTests) 0 -}} +{{color "red" "Tests were added that are skipped in VCR:"}} +{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} +{{end}} + +{{if gt (len .NotRunGATests) 0 -}} +{{color "red" "Tests were added that are GA-only additions and require manual runs:"}} +{{range .NotRunGATests}}{{. 
| printf "- %s\n"}}{{end}} +{{end}} +{{end}} +#### Tests analytics +Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} +Passed tests: {{len .ReplayingResult.PassedTests}} +Skipped tests: {{len .ReplayingResult.SkippedTests}} +Affected tests: {{len .ReplayingResult.FailedTests}} + +
+Click here to see the affected service packages +
+{{if .RunFullVCR}} +All service packages are affected +{{else if gt (len .AffectedServices) 0}} +
    +{{range .AffectedServices}}{{. | printf "
  • %s
  • \n"}}{{end}} +
+{{else}} +None +{{end}} +
+
+{{ if gt (len .ReplayingResult.FailedTests) 0 -}} +#### Action taken +
+Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests + +
+
    +{{range .ReplayingResult.FailedTests}}{{. | printf "
  • %s
  • \n"}}{{end}} +
+
+
+ +[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/) +{{ else -}} +{{- if .ReplayingErr -}} +{{color "red" "Errors occurred during REPLAYING mode. Please fix them to complete your PR."}} +{{- else -}} +{{color "green" "All tests passed!"}} +{{- end}} + +View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) +{{- end}} \ No newline at end of file diff --git a/.ci/magician/cmd/templates/vcr/test_analytics.tmpl b/.ci/magician/cmd/templates/vcr/test_analytics.tmpl deleted file mode 100644 index 9c4006541946..000000000000 --- a/.ci/magician/cmd/templates/vcr/test_analytics.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -#### Tests analytics -Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} -Passed tests: {{len .ReplayingResult.PassedTests}} -Skipped tests: {{len .ReplayingResult.SkippedTests}} -Affected tests: {{len .ReplayingResult.FailedTests}} - -
-Click here to see the affected service packages -
-{{if .RunFullVCR}} -All service packages are affected -{{else if gt (len .AffectedServices) 0}} -
    -{{range .AffectedServices}}{{. | printf "
  • %s
  • \n"}}{{end}} -
-{{else}} -None -{{end}} -
-
diff --git a/.ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl b/.ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl deleted file mode 100644 index d6d14bb4175f..000000000000 --- a/.ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl +++ /dev/null @@ -1,12 +0,0 @@ -#### Action taken -
-Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests - -
-
    -{{range .ReplayingResult.FailedTests}}{{. | printf "
  • %s
  • \n"}}{{end}} -
-
-
- -[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/) diff --git a/.ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl b/.ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl deleted file mode 100644 index 256d1652fc4f..000000000000 --- a/.ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -{{- if .ReplayingErr -}} -{{color "red" "Errors occurred during REPLAYING mode. Please fix them to complete your PR."}} -{{- else -}} -{{color "green" "All tests passed!"}} -{{- end}} - -View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) diff --git a/.ci/magician/cmd/test_eap_vcr.go b/.ci/magician/cmd/test_eap_vcr.go index 867c1a7ac800..7534814914c7 100644 --- a/.ci/magician/cmd/test_eap_vcr.go +++ b/.ci/magician/cmd/test_eap_vcr.go @@ -141,29 +141,23 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath for s := range services { servicesArr = append(servicesArr, s) } - analyticsData := analytics{ - ReplayingResult: replayingResult, + postReplayData := postReplay{ RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), + ReplayingResult: replayingResult, + ReplayingErr: replayingErr, + LogBucket: "ci-vcr-logs", + Version: provider.Private.String(), + Head: head, } - testsAnalyticsComment, err := formatTestsAnalytics(analyticsData) + comment, err := formatPostReplay(postReplayData) if err != nil { - return fmt.Errorf("error formatting test_analytics comment: %w", err) + return fmt.Errorf("error formatting post replay comment: %w", err) + } + if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { + return fmt.Errorf("error posting comment: %w", err) } if len(replayingResult.FailedTests) > 0 { - withReplayFailedTestsData := withReplayFailedTests{ - ReplayingResult: 
replayingResult, - } - - withReplayFailedTestsComment, err := formatWithReplayFailedTests(withReplayFailedTestsData) - if err != nil { - return fmt.Errorf("error formatting action taken comment: %w", err) - } - comment := strings.Join([]string{testsAnalyticsComment, withReplayFailedTestsComment}, "\n") - if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { - return fmt.Errorf("error posting comment: %w", err) - } - recordingResult, recordingErr := vt.RunParallel(vcr.RunOptions{ Mode: vcr.Recording, Version: provider.Private, @@ -222,18 +216,6 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, recordReplayComment, rnr); err != nil { return fmt.Errorf("error posting comment: %w", err) } - } else { // len(replayingResult.FailedTests) == 0 - withoutReplayFailedTestsData := withoutReplayFailedTests{ - ReplayingErr: replayingErr, - } - withoutReplayFailedTestsComment, err := formatWithoutReplayFailedTests(withoutReplayFailedTestsData) - if err != nil { - return fmt.Errorf("error formatting action taken comment: %w", err) - } - comment := strings.Join([]string{testsAnalyticsComment, withoutReplayFailedTestsComment}, "\n") - if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { - return fmt.Errorf("error posting comment: %w", err) - } } return nil } diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 3dd30d9a3d1d..4c12b8b5755f 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -21,14 +21,8 @@ import ( ) var ( - //go:embed templates/vcr/test_analytics.tmpl - testsAnalyticsTmplText string - //go:embed templates/vcr/non_exercised_tests.tmpl - nonExercisedTestsTmplText string - //go:embed templates/vcr/with_replay_failed_tests.tmpl - withReplayFailedTestsTmplText string - //go:embed 
templates/vcr/without_replay_failed_tests.tmpl - withoutReplayFailedTestsTmplText string + //go:embed templates/vcr/post_replay.tmpl + postReplayTmplText string //go:embed templates/vcr/record_replay.tmpl recordReplayTmplText string ) @@ -60,27 +54,17 @@ var ttvOptionalEnvironmentVariables = [...]string{ "GOOGLE_VMWAREENGINE_PROJECT", } -type analytics struct { - ReplayingResult vcr.Result +type postReplay struct { RunFullVCR bool AffectedServices []string -} - -type nonExercisedTests struct { - NotRunBetaTests []string - NotRunGATests []string -} - -type withReplayFailedTests struct { - ReplayingResult vcr.Result -} - -type withoutReplayFailedTests struct { - ReplayingErr error - LogBucket string - Version string - Head string - BuildID string + NotRunBetaTests []string + NotRunGATests []string + ReplayingResult vcr.Result + ReplayingErr error + LogBucket string + Version string + Head string + BuildID string } type recordReplay struct { @@ -249,41 +233,29 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, for s := range services { servicesArr = append(servicesArr, s) } - analyticsData := analytics{ - ReplayingResult: replayingResult, + + notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) + postReplayData := postReplay{ RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), + NotRunBetaTests: notRunBeta, + NotRunGATests: notRunGa, + ReplayingResult: replayingResult, + ReplayingErr: replayingErr, + LogBucket: "ci-vcr-logs", + Version: provider.Beta.String(), + Head: newBranch, + BuildID: buildID, } - testsAnalyticsComment, err := formatTestsAnalytics(analyticsData) - if err != nil { - return fmt.Errorf("error formatting test_analytics comment: %w", err) - } - - notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) - nonExercisedTestsData := nonExercisedTests{ - NotRunBetaTests: notRunBeta, - NotRunGATests: 
notRunGa, - } - nonExercisedTestsComment, err := formatNonExercisedTests(nonExercisedTestsData) + comment, err := formatPostReplay(postReplayData) if err != nil { - return fmt.Errorf("error formatting non exercised tests comment: %w", err) + return fmt.Errorf("error formatting post replay comment: %w", err) + } + if err := gh.PostComment(prNumber, comment); err != nil { + return fmt.Errorf("error posting comment: %w", err) } - if len(replayingResult.FailedTests) > 0 { - withReplayFailedTestsData := withReplayFailedTests{ - ReplayingResult: replayingResult, - } - withReplayFailedTestsComment, err := formatWithReplayFailedTests(withReplayFailedTestsData) - if err != nil { - return fmt.Errorf("error formatting action taken comment: %w", err) - } - - comment := strings.Join([]string{testsAnalyticsComment, nonExercisedTestsComment, withReplayFailedTestsComment}, "\n") - if err := gh.PostComment(prNumber, comment); err != nil { - return fmt.Errorf("error posting comment: %w", err) - } - recordingResult, recordingErr := vt.RunParallel(vcr.RunOptions{ Mode: vcr.Recording, Version: provider.Beta, @@ -363,24 +335,6 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, if err := gh.PostComment(prNumber, recordReplayComment); err != nil { return fmt.Errorf("error posting comment: %w", err) } - - } else { // len(replayingResult.FailedTests) == 0 - withoutReplayFailedTestsData := withoutReplayFailedTests{ - ReplayingErr: replayingErr, - LogBucket: "ci-vcr-logs", - Version: provider.Beta.String(), - Head: newBranch, - BuildID: buildID, - } - withoutReplayFailedTestsComment, err := formatWithoutReplayFailedTests(withoutReplayFailedTestsData) - if err != nil { - return fmt.Errorf("error formatting action taken comment: %w", err) - } - - comment := strings.Join([]string{testsAnalyticsComment, nonExercisedTestsComment, withoutReplayFailedTestsComment}, "\n") - if err := gh.PostComment(prNumber, comment); err != nil { - return fmt.Errorf("error posting 
comment: %w", err) - } } if err := gh.PostBuildStatus(prNumber, "VCR-test", testState, buildStatusTargetURL, mmCommitSha); err != nil { @@ -530,20 +484,8 @@ func formatComment(fileName string, tmplText string, data any) (string, error) { return strings.TrimSpace(sb.String()), nil } -func formatTestsAnalytics(data analytics) (string, error) { - return formatComment("test_analytics.tmpl", testsAnalyticsTmplText, data) -} - -func formatNonExercisedTests(data nonExercisedTests) (string, error) { - return formatComment("non_exercised_tests.tmpl", nonExercisedTestsTmplText, data) -} - -func formatWithReplayFailedTests(data withReplayFailedTests) (string, error) { - return formatComment("with_replay_failed_tests.tmpl", withReplayFailedTestsTmplText, data) -} - -func formatWithoutReplayFailedTests(data withoutReplayFailedTests) (string, error) { - return formatComment("without_replay_failed_tests.tmpl", withoutReplayFailedTestsTmplText, data) +func formatPostReplay(data postReplay) (string, error) { + return formatComment("post_replay.tmpl", postReplayTmplText, data) } func formatRecordReplay(data recordReplay) (string, error) { diff --git a/.ci/magician/cmd/test_terraform_vcr_test.go b/.ci/magician/cmd/test_terraform_vcr_test.go index cf7e4b019327..835d9d302d32 100644 --- a/.ci/magician/cmd/test_terraform_vcr_test.go +++ b/.ci/magician/cmd/test_terraform_vcr_test.go @@ -6,7 +6,6 @@ import ( "strings" "testing" - "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "magician/provider" @@ -220,13 +219,13 @@ func TestNotRunTests(t *testing.T) { func TestAnalyticsComment(t *testing.T) { tests := []struct { - name string - data analytics - want string + name string + data postReplay + wantContains []string }{ { name: "run full vcr is false and no affected services", - data: analytics{ + data: postReplay{ ReplayingResult: vcr.Result{ PassedTests: []string{"a", "b", "c"}, SkippedTests: []string{"d", "e"}, @@ -235,29 +234,26 @@ func TestAnalyticsComment(t 
*testing.T) { RunFullVCR: false, AffectedServices: []string{}, }, - want: strings.Join( - []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "", - "
", - "Click here to see the affected service packages", - "
", - "", - "None", - "", - "
", - "
", - }, - "\n", - ), + wantContains: []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "None", + "", + "
", + "
", + }, }, { name: "run full vcr is false and has affected services", - data: analytics{ + data: postReplay{ ReplayingResult: vcr.Result{ PassedTests: []string{"a", "b", "c"}, SkippedTests: []string{"d", "e"}, @@ -266,33 +262,30 @@ func TestAnalyticsComment(t *testing.T) { RunFullVCR: false, AffectedServices: []string{"svc-a", "svc-b"}, }, - want: strings.Join( - []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "", - "
", - "Click here to see the affected service packages", - "
", - "", - "
    ", - "
  • svc-a
  • ", - "
  • svc-b
  • ", - "", - "
", - "", - "
", - "
", - }, - "\n", - ), + wantContains: []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "
    ", + "
  • svc-a
  • ", + "
  • svc-b
  • ", + "", + "
", + "", + "
", + "
", + }, }, { name: "run full vcr is true", - data: analytics{ + data: postReplay{ ReplayingResult: vcr.Result{ PassedTests: []string{"a", "b", "c"}, SkippedTests: []string{"d", "e"}, @@ -301,7 +294,7 @@ func TestAnalyticsComment(t *testing.T) { RunFullVCR: true, AffectedServices: []string{}, }, - want: strings.Join([]string{ + wantContains: []string{ "#### Tests analytics", "Total tests: 6", "Passed tests: 3", @@ -317,18 +310,18 @@ func TestAnalyticsComment(t *testing.T) { "", "", }, - "\n", - ), }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatTestsAnalytics(tc.data) + got, err := formatPostReplay(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } - if diff := cmp.Diff(tc.want, got); diff != "" { - t.Errorf("formatTestsAnalytics() returned unexpected difference (-want +got):\n%s", diff) + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) + } } }) } @@ -336,84 +329,69 @@ func TestAnalyticsComment(t *testing.T) { func TestNonExercisedTestsComment(t *testing.T) { tests := []struct { - name string - data nonExercisedTests - want string + name string + data postReplay + wantContains []string }{ - { - name: "without non exercised tests", - data: nonExercisedTests{}, - want: strings.Join( - []string{}, - "\n", - ), - }, { name: "with not run beta tests", - data: nonExercisedTests{ + data: postReplay{ NotRunBetaTests: []string{"beta-1", "beta-2"}, }, - want: strings.Join( - []string{ - "#### Non-exercised tests", - "", - color("red", "Tests were added that are skipped in VCR:"), - "- beta-1", - "- beta-2", - }, - "\n", - ), + wantContains: []string{ + "#### Non-exercised tests", + "", + color("red", "Tests were added that are skipped in VCR:"), + "- beta-1", + "- beta-2", + }, }, { name: "with not run ga tests", - data: nonExercisedTests{ + data: postReplay{ NotRunGATests: []string{"ga-1", "ga-2"}, }, - want: 
strings.Join( - []string{ - "#### Non-exercised tests", - "", - "", - "", - color("red", "Tests were added that are GA-only additions and require manual runs:"), - "- ga-1", - "- ga-2", - }, - "\n", - ), + wantContains: []string{ + "#### Non-exercised tests", + "", + "", + "", + color("red", "Tests were added that are GA-only additions and require manual runs:"), + "- ga-1", + "- ga-2", + }, }, { name: "with not run ga tests and not run beta tests", - data: nonExercisedTests{ + data: postReplay{ NotRunGATests: []string{"ga-1", "ga-2"}, NotRunBetaTests: []string{"beta-1", "beta-2"}, }, - want: strings.Join( - []string{ - "#### Non-exercised tests", - "", - color("red", "Tests were added that are skipped in VCR:"), - "- beta-1", - "- beta-2", - "", - "", - "", - color("red", "Tests were added that are GA-only additions and require manual runs:"), - "- ga-1", - "- ga-2", - }, - "\n", - ), + wantContains: []string{ + "#### Non-exercised tests", + "", + color("red", "Tests were added that are skipped in VCR:"), + "- beta-1", + "- beta-2", + "", + "", + "", + color("red", "Tests were added that are GA-only additions and require manual runs:"), + "- ga-1", + "- ga-2", + }, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatNonExercisedTests(tc.data) + got, err := formatPostReplay(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } - if diff := cmp.Diff(tc.want, got); diff != "" { - t.Errorf("formatNonExercisedTests() returned unexpected difference (-want +got):\n%s", diff) + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) + } } }) } @@ -421,46 +399,45 @@ func TestNonExercisedTestsComment(t *testing.T) { func TestWithReplayFailedTests(t *testing.T) { tests := []struct { - name string - data withReplayFailedTests - want string + name string + data postReplay + wantContains []string }{ { name: "with failed tests", - 
data: withReplayFailedTests{ + data: postReplay{ ReplayingResult: vcr.Result{ FailedTests: []string{"a", "b"}, }, }, - want: strings.Join( - []string{ - "#### Action taken", - "
", - "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests", - "", - "
", - "
    ", - "
  • a
  • ", - "
  • b
  • ", - "", // Empty line - "
", - "
", - "
", - "", - "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/)", - }, - "\n", - ), + wantContains: []string{ + "#### Action taken", + "
", + "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests", + "", + "
", + "
    ", + "
  • a
  • ", + "
  • b
  • ", + "", // Empty line + "
", + "
", + "
", + "", + "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/)", + }, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatWithReplayFailedTests(tc.data) + got, err := formatPostReplay(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } - if diff := cmp.Diff(tc.want, got); diff != "" { - t.Errorf("formatWithReplayFailedTests() returned unexpected difference (-want +got):\n%s", diff) + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) + } } }) } @@ -469,12 +446,12 @@ func TestWithReplayFailedTests(t *testing.T) { func TestWithoutReplayFailedTests(t *testing.T) { tests := []struct { name string - data withoutReplayFailedTests + data postReplay wantContains []string }{ { name: "with replay error", - data: withoutReplayFailedTests{ + data: postReplay{ ReplayingErr: fmt.Errorf("some error"), BuildID: "build-123", Head: "auto-pr-123", @@ -488,7 +465,7 @@ func TestWithoutReplayFailedTests(t *testing.T) { }, { name: "without replay error", - data: withoutReplayFailedTests{ + data: postReplay{ BuildID: "build-123", Head: "auto-pr-123", LogBucket: "ci-vcr-logs", @@ -502,13 +479,13 @@ func TestWithoutReplayFailedTests(t *testing.T) { } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatWithoutReplayFailedTests(tc.data) + got, err := formatPostReplay(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } for _, wc := range tc.wantContains { if !strings.Contains(got, wc) { - t.Errorf("formatWithoutReplayFailedTests() returned %q, which does not contain %q", got, wc) + t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) } } }) From 69af4c56adef760564406a1360600cc6b9b00631 Mon Sep 17 00:00:00 2001 From: kigesui Date: Wed, 21 May 2025 14:05:04 -0700 Subject: [PATCH 215/884] Adding Apigee Security 
Monitoring Condition resource support (#13714) Co-authored-by: Meng Yang --- .../apigee/SecurityMonitoringCondition.yaml | 132 ++++++++++++++++++ ...igee_security_monitoring_condition.go.tmpl | 32 +++++ ...ecurity_monitoring_condition_basic.tf.tmpl | 53 +++++++ ...ty_monitoring_condition_basic_test.tf.tmpl | 89 ++++++++++++ 4 files changed, 306 insertions(+) create mode 100644 mmv1/products/apigee/SecurityMonitoringCondition.yaml create mode 100644 mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl diff --git a/mmv1/products/apigee/SecurityMonitoringCondition.yaml b/mmv1/products/apigee/SecurityMonitoringCondition.yaml new file mode 100644 index 000000000000..fb4d5a459c6e --- /dev/null +++ b/mmv1/products/apigee/SecurityMonitoringCondition.yaml @@ -0,0 +1,132 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'SecurityMonitoringCondition' +description: | + Security monitoring condition for risk assessment version 2 in Apigee. 
+references: + guides: + 'Creating a security monitoring condition': 'https://cloud.google.com/apigee/docs/api-security/security-scores#monitoring-conditions' + api: 'https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.securityMonitoringConditions/create' +docs: +base_url: '{{org_id}}/securityMonitoringConditions' +self_link: '{{org_id}}/securityMonitoringConditions/{{condition_id}}' +create_url: '{{org_id}}/securityMonitoringConditions?security_monitoring_condition_id={{condition_id}}' +update_verb: 'PATCH' +delete_url: "{{org_id}}/securityMonitoringConditions/{{condition_id}}" +immutable: false +import_format: + - '{{org_id}}/securityMonitoringConditions/{{condition_id}}' + - '{{org_id}}/{{condition_id}}' +custom_code: + custom_import: "templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl" +examples: + - name: 'apigee_security_monitoring_condition_basic' + vars: + environment_name: 'my-environment' + security_monitoring_condition_id: 'my-condition' + exclude_test: true + - name: 'apigee_security_monitoring_condition_basic_test' + primary_resource_id: 'security_monitoring_condition' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + exclude_docs: true + skip_vcr: true + external_providers: ["time"] +parameters: + - name: 'orgId' + type: String + description: | + The Apigee Organization associated with the Apigee Security Monitoring Condition, + in the format `organizations/{{org_name}}`. + url_param_only: true + required: true + immutable: true + - name: 'conditionId' + type: String + description: | + Resource ID of the security monitoring condition. + immutable: true + url_param_only: true + required: true +properties: + - name: 'name' + type: String + description: | + Name of the security monitoring condition resource, + in the format `organizations/{{org_name}}/securityMonitoringConditions/{{condition_id}}`. 
+ output: true + - name: 'profile' + type: String + description: | + ID of security profile of the security monitoring condition. + required: true + - name: 'scope' + type: String + description: | + ID of the Apigee environment that the security monitoring condition applies to (the scope of the condition). + required: true + - name: 'includeAllResources' + type: NestedObject + exactly_one_of: + - include_all_resources + # TODO(hashicorp/terraform-provider-google#22581): add this block back + test once deployment is supported + # - include + properties: [] + send_empty_value: true + allow_empty_object: true + # TODO(hashicorp/terraform-provider-google#22581): add this block back + test once deployment is supported + # - name: 'include' + # type: NestedObject + # properties: + # - name: 'resources' + # type: Array + # required: true + # item_type: + # type: NestedObject + # properties: + # - name: 'type' + # type: Enum + # description: 'Type of this resource' + # enum_values: + # - 'API_PROXY' + # required: true + # - name: 'name' + # type: String + # description: 'Name of this resource' + # required: true + # exactly_one_of: + # - include_all_resources + # - include + - name: 'createTime' + type: String + description: | + The timestamp at which this profile was created. + output: true + - name: 'updateTime' + type: String + description: | + The timestamp at which this profile was most recently updated. + output: true + - name: 'totalMonitoredResources' + type: Integer + description: | + Total number of monitored resources within this condition. + output: true + - name: 'totalDeployedResources' + type: Integer + description: | + Total number of deployed resources within scope. 
+ output: true diff --git a/mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl b/mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl new file mode 100644 index 000000000000..2e9861432527 --- /dev/null +++ b/mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl @@ -0,0 +1,32 @@ +config := meta.(*transport_tpg.Config) + +// current import_formats cannot import fields with forward slashes in their value +if err := tpgresource.ParseImportId([]string{"(?P.+)"}, d, config); err != nil { + return nil, err +} + +nameParts := strings.Split(d.Get("name").(string), "/") +if len(nameParts) == 4 { + // `organizations/{{"{{"}}org_name{{"}}"}}/securityMonitoringConditions/{{"{{"}}condition_id{{"}}"}}` + orgId := fmt.Sprintf("organizations/%s", nameParts[1]) + if err := d.Set("org_id", orgId); err != nil { + return nil, fmt.Errorf("Error setting org_id: %s", err) + } + if err := d.Set("condition_id", nameParts[3]); err != nil { + return nil, fmt.Errorf("Error setting condition_id: %s", err) + } +} else { + return nil, fmt.Errorf( + "Saw %s when the name is expected to have shape %s", + d.Get("name"), + "organizations/{{"{{"}}org_name{{"}}"}}/securityMonitoringConditions/{{"{{"}}name{{"}}"}}") +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}org_id{{"}}"}}/securityMonitoringConditions/{{"{{"}}condition_id{{"}}"}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl new file mode 100644 index 000000000000..4b45055c0ed1 --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl @@ -0,0 +1,53 @@ +data "google_client_config" 
"current" {} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = data.google_client_config.current.project + authorized_network = google_compute_network.apigee_network.id + depends_on = [google_service_networking_connection.apigee_vpc_connection] +} + +resource "google_apigee_environment" "env" { + name = "{{index $.Vars "environment_name"}}" + description = "Apigee Environment" + display_name = "environment-1" + org_id = google_apigee_organization.apigee_org.id +} + +resource "google_apigee_addons_config" "apigee_org_security_addons_config" { + org = google_apigee_organization.apigee_org.name + addons_config { + api_security_config { + enabled = true + } + } +} + +resource "google_apigee_security_monitoring_condition" "security_monitoring_condition" { + condition_id = "{{index $.Vars "security_monitoring_condition_id"}}" + org_id = google_apigee_organization.apigee_org.id + profile = "google-default" + scope = "{{index $.Vars "environment_name"}}" + include_all_resources {} + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl new file mode 100644 index 000000000000..cf2d2b86f208 --- /dev/null +++ 
b/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl @@ -0,0 +1,89 @@ +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} + +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.apigee] +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.servicenetworking] +} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = [google_project_service.compute] +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = 
google_compute_network.apigee_network.id + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} + +resource "google_apigee_environment" "apigee_environment" { + org_id = google_apigee_organization.apigee_org.id + name = "tf-test-env-%{random_suffix}" + description = "Apigee Environment" + display_name = "environment-1" +} + +resource "google_apigee_addons_config" "apigee_org_security_addons_config" { + org = google_apigee_organization.apigee_org.name + addons_config { + api_security_config { + enabled = true + } + } +} + +resource "google_apigee_security_monitoring_condition" "{{$.PrimaryResourceId}}" { + org_id = google_apigee_organization.apigee_org.id + condition_id = "tf-test-condition" + profile = "google-default" + scope = google_apigee_environment.apigee_environment.name + include_all_resources {} + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} From e53175d55ff3aa651d4a27e4398cb2e1a6e46a9d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 21 May 2025 14:15:42 -0700 Subject: [PATCH 216/884] Suppress the diff for false value of force_attach field (#14057) --- .../services/compute/resource_compute_instance.go.tmpl | 4 ++++ .../compute/resource_compute_instance_test.go.tmpl | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 42364881a9f4..46ea496e1388 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -54,6 +54,10 @@ func IpCidrRangeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { } func DisksForceAttachDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + if (new == "false" && old == "") || (new == "" && old == "false") { + return 
true + } + if new == old { return true } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index e74e165215cd..a47e3c26963a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -126,6 +126,16 @@ func TestDisksForceAttachDiffSuppress(t *testing.T) { New: "false", ExpectDiffSuppress: true, }, + "force_attach changed false from empty": { + Old: "", + New: "false", + ExpectDiffSuppress: true, + }, + "force_attach changed empty from false": { + Old: "false", + New: "", + ExpectDiffSuppress: true, + }, } for tn, tc := range cases { From e1576d0e8caea273ac9e1db677961fc1c8cc2d8c Mon Sep 17 00:00:00 2001 From: Ashwin G Date: Wed, 21 May 2025 14:43:19 -0700 Subject: [PATCH 217/884] Allow updates to the mtu property in the Network resource. (#13995) --- mmv1/products/compute/Network.yaml | 5 +- .../resource_compute_network_test.go.tmpl | 74 ++++++++++++++++++- 2 files changed, 77 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/Network.yaml b/mmv1/products/compute/Network.yaml index 34cb65993e95..d92227edc3b8 100644 --- a/mmv1/products/compute/Network.yaml +++ b/mmv1/products/compute/Network.yaml @@ -204,8 +204,11 @@ properties: Note that packets larger than 1500 bytes (standard Ethernet) can be subject to TCP-MSS clamping or dropped with an ICMP `Fragmentation-Needed` message if the packets are routed to the Internet or other VPCs with varying MTUs. 
- immutable: true default_from_api: true + update_url: 'projects/{{project}}/global/networks/{{name}}' + update_verb: 'PATCH' + update_id: 'mtu' + fingerprint_name: 'fingerprint' - name: 'enableUlaInternalIpv6' type: Boolean description: | diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl index 3632c5a084ce..4d6634292bdc 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl @@ -78,6 +78,41 @@ func TestAccComputeNetwork_customSubnet(t *testing.T) { }) } +func TestAccComputeNetwork_mtuAndUpdate(t *testing.T) { + t.Parallel() + + var network compute.Network + suffixName := acctest.RandString(t, 10) + networkName := fmt.Sprintf("tf-test-network-routing-mode-%s", suffixName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeNetworkDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetwork_mtu(networkName, 1460), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeNetworkExists( + t, "google_compute_network.acc_network_mtu", &network), + testAccCheckComputeNetworkHasMtu( + t, "google_compute_network.acc_network_mtu", &network, 1460), + ), + }, + // Test updating the mtu field from 1460 to 1500. 
+ { + Config: testAccComputeNetwork_mtu(networkName, 1500), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeNetworkExists( + t, "google_compute_network.acc_network_mtu", &network), + testAccCheckComputeNetworkHasMtu( + t, "google_compute_network.acc_network_mtu", &network, 1500), + ), + }, + }, + }) +} + func TestAccComputeNetwork_routingModeAndUpdate(t *testing.T) { t.Parallel() @@ -530,6 +565,35 @@ func testAccCheckComputeNetworkIsCustomSubnet(t *testing.T, n string, network *c } } +func testAccCheckComputeNetworkHasMtu(t *testing.T, n string, network *compute.Network, mtu int32) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.Attributes["mtu"] == "" { + return fmt.Errorf("Mtu not found on resource") + } + + found, err := config.NewComputeClient(config.UserAgent).Networks.Get( + config.Project, network.Name).Do() + if err != nil { + return err + } + + foundMtu := found.Mtu + + if int64(mtu) != foundMtu { + return fmt.Errorf("Expected mtu %d to match actual mtu %d", mtu, foundMtu) + } + + return nil + } +} + func testAccCheckComputeNetworkHasRoutingMode(t *testing.T, n string, network *compute.Network, routingMode string) resource.TestCheckFunc { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -644,6 +708,15 @@ resource "google_compute_network" "baz" { `, networkName) } +func testAccComputeNetwork_mtu(networkName string, mtu int32) string { + return fmt.Sprintf(` +resource "google_compute_network" "acc_network_mtu" { + name = "%s" + mtu = %d +} +`, networkName, mtu) +} + func testAccComputeNetwork_routing_mode(networkName, routingMode string) string { return fmt.Sprintf(` resource "google_compute_network" "acc_network_routing_mode" { @@ -653,7 +726,6 @@ resource "google_compute_network" "acc_network_routing_mode" { `, 
networkName, routingMode) } - func testAccComputeNetwork_best_bgp_path_selection_mode(networkName, bgpBestPathSelection string) string { return fmt.Sprintf(` resource "google_compute_network" "acc_network_bgp_best_path_selection_mode" { From 08e2e42bcad479a984c90f0097e245eac74352be Mon Sep 17 00:00:00 2001 From: Allison Fisher Date: Wed, 21 May 2025 18:09:13 -0400 Subject: [PATCH 218/884] Add Managed Kafka Acl resource and tests. (#14034) Co-authored-by: Sam Levenick --- mmv1/products/managedkafka/Acl.yaml | 152 ++++++++++++++++++ .../examples/managedkafka_acl_basic.tf.tmpl | 37 +++++ .../resource_managed_kafka_acl_test.go | 130 +++++++++++++++ 3 files changed, 319 insertions(+) create mode 100644 mmv1/products/managedkafka/Acl.yaml create mode 100644 mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go diff --git a/mmv1/products/managedkafka/Acl.yaml b/mmv1/products/managedkafka/Acl.yaml new file mode 100644 index 000000000000..3aab9dff1067 --- /dev/null +++ b/mmv1/products/managedkafka/Acl.yaml @@ -0,0 +1,152 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +# API resource name +name: 'Acl' +# Resource description for the provider documentation. +description: | + A Managed Service for Apache Kafka ACL. Apache Kafka is a trademark owned by the Apache Software Foundation. 
+ +docs: +id_format: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls/{{acl_id}}' +base_url: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls' +self_link: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls/{{acl_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls?aclId={{acl_id}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls/{{%acl_id}}' + +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + +custom_code: + post_create: 'templates/terraform/post_create/sleep.go.tmpl' + post_update: 'templates/terraform/post_create/sleep.go.tmpl' + +examples: + - name: 'managedkafka_acl_basic' + primary_resource_id: 'example' + vars: + acl_id: 'topic/mytopic' + cluster_id: 'my-cluster' + test_vars_overrides: + 'acl_id': '"topic/mytopic"' + +parameters: + - name: 'location' + type: String + description: "ID of the location of the Kafka resource. See + https://cloud.google.com/managed-kafka/docs/locations for a list of + supported locations." + url_param_only: true + required: true + immutable: true + - name: 'cluster' + type: String + description: "The cluster name." + url_param_only: true + required: true + immutable: true + - name: 'aclId' + type: String + description: "The ID to use for the acl, which will become the final + component of the acl's name. The structure of `aclId` defines the Resource Pattern (resource_type, + resource_name, pattern_type) of the acl. 
`aclId` is structured like one of the following: + + For acls on the cluster: + `cluster` + + For acls on a single resource within the cluster: + `topic/{resource_name}` + `consumerGroup/{resource_name}` + `transactionalId/{resource_name}` + + For acls on all resources that match a prefix: + `topicPrefixed/{resource_name}` + `consumerGroupPrefixed/{resource_name}` + `transactionalIdPrefixed/{resource_name}` + + For acls on all resources of a given type (i.e. the wildcard literal '*''): + `allTopics` (represents `topic/*`) + `allConsumerGroups` (represents `consumerGroup/*`) + `allTransactionalIds` (represents `transactionalId/*`)." + url_param_only: true + required: true + immutable: true + +properties: + - name: 'name' + type: String + description: "The name of the acl. The `ACL_ID` segment is used when + connecting directly to the cluster. Must be in the format `projects/PROJECT_ID/locations/LOCATION/clusters/CLUSTER_ID/acls/ACL_ID`." + output: true + - name: 'aclEntries' + type: Array + is_set: true + required: true + description: "The acl entries that apply to the resource pattern. The maximum number of allowed + entries is 100." + item_type: + type: NestedObject + properties: + - name: 'principal' + type: String + description: 'The principal. Specified as Google Cloud account, with the Kafka + StandardAuthorizer prefix User:". For example: "User:test-kafka-client@test-project.iam.gserviceaccount.com". + Can be the wildcard "User:*" to refer to all users.' + required: true + - name: 'permissionType' + type: String + default_value: "ALLOW" + description: 'The permission type. Accepted values are (case insensitive): ALLOW, DENY.' + - name: 'operation' + type: String + description: | + The operation type. Allowed values are (case insensitive): ALL, READ, + WRITE, CREATE, DELETE, ALTER, DESCRIBE, CLUSTER_ACTION, DESCRIBE_CONFIGS, + ALTER_CONFIGS, and IDEMPOTENT_WRITE. 
See https://kafka.apache.org/documentation/#operations_resources_and_protocols + for valid combinations of resource_type and operation for different Kafka API requests. + required: true + - name: 'host' + type: String + default_value: "*" + description: 'The host. Must be set to "*" for Managed Service for Apache Kafka.' + - name: 'etag' + type: Fingerprint + output: true + description: | + `etag` is used for concurrency control. An `etag` is returned in the + response to `GetAcl` and `CreateAcl`. Callers are required to put that etag + in the request to `UpdateAcl` to ensure that their change will be applied + to the same version of the acl that exists in the Kafka Cluster. + + A terminal 'T' character in the etag indicates that the AclEntries were + truncated due to repeated field limits. + - name: 'resourceType' + type: String + description: | + The acl resource type derived from the name. One of: CLUSTER, TOPIC, GROUP, TRANSACTIONAL_ID. + output: true + - name: 'resourceName' + type: String + description: | + The acl resource name derived from the name. For cluster resource_type, this is always "kafka-cluster". Can be the wildcard literal "*". + output: true + - name: 'patternType' + type: String + description: "The acl pattern type derived from the name. One of: LITERAL, PREFIXED." 
+ output: true diff --git a/mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl b/mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl new file mode 100644 index 000000000000..2d38dcab8ba4 --- /dev/null +++ b/mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl @@ -0,0 +1,37 @@ +resource "google_managed_kafka_cluster" "cluster" { + cluster_id = "{{index $.Vars "cluster_id"}}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } +} + +resource "google_managed_kafka_acl" "{{$.PrimaryResourceId}}" { + acl_id = "{{index $.Vars "acl_id"}}" + cluster = google_managed_kafka_cluster.cluster.cluster_id + location = "us-central1" + acl_entries { + principal = "User:admin@my-project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "ALL" + host = "*" + } + acl_entries { + principal = "User:producer-client@my-project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "WRITE" + host = "*" + } +} + +data "google_project" "project" { +} + diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go new file mode 100644 index 000000000000..b9dd9fc7a058 --- /dev/null +++ b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go @@ -0,0 +1,130 @@ +package managedkafka_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccManagedKafkaAcl_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckManagedKafkaAclDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccManagedKafkaAcl_full(context), + }, + { + ResourceName: "google_managed_kafka_acl.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "location", "acl_id"}, + }, + { + Config: testAccManagedKafkaAcl_update(context), + }, + { + ResourceName: "google_managed_kafka_acl.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "location", "acl_id"}, + }, + }, + }) +} + +func testAccManagedKafkaAcl_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } +} + +resource "google_managed_kafka_acl" "example" { + cluster = google_managed_kafka_cluster.example.cluster_id + acl_id = "topic/tf-test-my-acl%{random_suffix}" + location = "us-central1" + acl_entries { + principal = "User:admin@my-project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "ALL" + host = "*" + } + acl_entries { + principal = "User:producer-client@my-project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "WRITE" + host = "*" + } +} + +data "google_project" "project" { +} +`, context) +} + +func testAccManagedKafkaAcl_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } 
+ gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } +} + +resource "google_managed_kafka_acl" "example" { + cluster = google_managed_kafka_cluster.example.cluster_id + acl_id = "topic/tf-test-my-acl%{random_suffix}" + location = "us-central1" + acl_entries { + principal = "User:admin@project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "ALL" + host = "*" + } + acl_entries { + principal = "User:producer-client@my-project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "WRITE" + host = "*" + } + acl_entries { + principal = "User:producer-client@my-project.iam.gserviceaccount.com" + permission_type = "ALLOW" + operation = "CREATE" + host = "*" + } +} + +data "google_project" "project" { +} +`, context) +} From 8a83dc981547b33460f8d5028125f980879d3ecf Mon Sep 17 00:00:00 2001 From: mihhalj Date: Thu, 22 May 2025 00:10:04 +0200 Subject: [PATCH 219/884] Promote firewall policy with rules resources to GA (#13546) --- .../compute/FirewallPolicyWithRules.yaml | 62 ---------------- .../NetworkFirewallPolicyWithRules.yaml | 71 ------------------ .../RegionNetworkFirewallPolicyWithRules.yaml | 72 ------------------- ...te_firewall_policy_with_rules_full.tf.tmpl | 45 +----------- ...rk_firewall_policy_with_rules_full.tf.tmpl | 50 ------------- ...rk_firewall_policy_with_rules_full.tf.tmpl | 49 ------------- ...te_firewall_policy_with_rules_test.go.tmpl | 22 +++--- ...rk_firewall_policy_with_rules_test.go.tmpl | 18 +---- ...rk_firewall_policy_with_rules_test.go.tmpl | 14 +--- 9 files changed, 11 insertions(+), 392 deletions(-) diff --git a/mmv1/products/compute/FirewallPolicyWithRules.yaml b/mmv1/products/compute/FirewallPolicyWithRules.yaml index ec241e0ae038..ee84d15c17eb 100644 --- a/mmv1/products/compute/FirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/FirewallPolicyWithRules.yaml @@ -17,7 +17,6 @@ 
api_resource_type_kind: FirewallPolicy description: | The Compute FirewallPolicy with rules resource. It declaratively manges all rules in the firewall policy. -min_version: 'beta' docs: id_format: 'locations/global/firewallPolicies/{{policy_id}}' base_url: 'locations/global/firewallPolicies?parentId={{parent}}' @@ -55,36 +54,30 @@ parameters: description: | The parent of this FirewallPolicy in the Cloud Resource Hierarchy. Format: organizations/{organization_id} or folders/{folder_id} - min_version: 'beta' required: true immutable: true properties: - name: 'creationTimestamp' type: String description: Creation timestamp in RFC3339 text format. - min_version: 'beta' output: true - name: 'shortName' type: String description: A textual name of the security policy. - min_version: 'beta' required: true immutable: true - name: 'policyId' type: String description: The unique identifier for the resource. This identifier is defined by the server. api_name: id - min_version: 'beta' output: true - name: 'description' type: String description: An optional description of this resource. - min_version: 'beta' - name: 'rule' type: Array description: A list of firewall policy rules. api_name: rules - min_version: 'beta' required: true item_type: type: NestedObject @@ -93,27 +86,23 @@ properties: type: String description: | A description of the rule. - min_version: 'beta' - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. - min_version: 'beta' - name: 'priority' type: Integer description: | An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. - min_version: 'beta' required: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. 
If it evaluates to true, the corresponding 'action' is enforced. - min_version: 'beta' required: true properties: - name: 'srcIpRanges' @@ -121,7 +110,6 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. - min_version: 'beta' item_type: type: String - name: 'destIpRanges' @@ -129,7 +117,6 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. - min_version: 'beta' item_type: type: String - name: 'srcAddressGroups' @@ -137,7 +124,6 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. - min_version: 'beta' item_type: type: String - name: 'destAddressGroups' @@ -145,7 +131,6 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. - min_version: 'beta' item_type: type: String - name: 'srcFqdns' @@ -153,7 +138,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. - min_version: 'beta' item_type: type: String - name: 'destFqdns' @@ -161,7 +145,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. - min_version: 'beta' item_type: type: String - name: 'srcNetworkScope' @@ -198,7 +181,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. - min_version: 'beta' item_type: type: String - name: 'destRegionCodes' @@ -208,7 +190,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. 
- min_version: 'beta' item_type: type: String - name: 'srcThreatIntelligences' @@ -216,7 +197,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. - min_version: 'beta' item_type: type: String - name: 'destThreatIntelligences' @@ -224,7 +204,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. - min_version: 'beta' item_type: type: String - name: 'layer4Config' @@ -232,7 +211,6 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs - min_version: 'beta' required: true item_type: type: NestedObject @@ -245,7 +223,6 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. - min_version: 'beta' required: true - name: 'ports' type: Array @@ -256,7 +233,6 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. - min_version: 'beta' item_type: type: String - name: 'action' @@ -264,13 +240,11 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". - min_version: 'beta' required: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. - min_version: 'beta' enum_values: - 'INGRESS' - 'EGRESS' @@ -280,14 +254,12 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. - min_version: 'beta' send_empty_value: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. 
- min_version: 'beta' item_type: type: String - name: 'securityProfileGroup' @@ -297,13 +269,11 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. - min_version: 'beta' - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. - min_version: 'beta' - name: 'targetResources' type: Array description: | @@ -311,7 +281,6 @@ properties: This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule. - min_version: 'beta' item_type: type: String - name: 'disabled' @@ -321,11 +290,9 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. - min_version: 'beta' - name: 'predefinedRules' type: Array description: A list of pre-define firewall policy rules. - min_version: 'beta' output: true item_type: type: NestedObject @@ -334,14 +301,12 @@ properties: type: String description: | A description of the rule. - min_version: 'beta' output: true - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. - min_version: 'beta' output: true - name: 'priority' type: Integer @@ -349,14 +314,12 @@ properties: An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. - min_version: 'beta' output: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. 
If it evaluates to true, the corresponding 'action' is enforced. - min_version: 'beta' output: true properties: - name: 'srcIpRanges' @@ -364,7 +327,6 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. - min_version: 'beta' output: true item_type: type: String @@ -373,7 +335,6 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. - min_version: 'beta' output: true item_type: type: String @@ -382,7 +343,6 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. - min_version: 'beta' output: true item_type: type: String @@ -391,7 +351,6 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. - min_version: 'beta' output: true item_type: type: String @@ -400,7 +359,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. - min_version: 'beta' output: true item_type: type: String @@ -409,7 +367,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. - min_version: 'beta' output: true item_type: type: String @@ -420,7 +377,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. - min_version: 'beta' output: true item_type: type: String @@ -431,7 +387,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. 
- min_version: 'beta' output: true item_type: type: String @@ -440,7 +395,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. - min_version: 'beta' output: true item_type: type: String @@ -449,7 +403,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. - min_version: 'beta' output: true item_type: type: String @@ -458,7 +411,6 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs - min_version: 'beta' output: true item_type: type: NestedObject @@ -471,7 +423,6 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. - min_version: 'beta' output: true - name: 'ports' type: Array @@ -482,7 +433,6 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. - min_version: 'beta' output: true item_type: type: String @@ -491,13 +441,11 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". - min_version: 'beta' output: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. - min_version: 'beta' output: true enum_values: - 'INGRESS' @@ -508,14 +456,12 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. - min_version: 'beta' output: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. 
- min_version: 'beta' output: true item_type: type: String @@ -526,14 +472,12 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. - min_version: 'beta' output: true - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. - min_version: 'beta' output: true - name: 'targetResources' type: Array @@ -542,7 +486,6 @@ properties: This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule. - min_version: 'beta' output: true item_type: type: String @@ -553,25 +496,20 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. - min_version: 'beta' output: true - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. - min_version: 'beta' output: true - name: 'selfLink' type: String description: Server-defined URL for the resource. - min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: Server-defined URL for this resource with the resource id. - min_version: 'beta' output: true - name: 'ruleTupleCount' type: Integer description: Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. 
- min_version: 'beta' output: true diff --git a/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml b/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml index 5e4f5a5b2c50..48f935b4d065 100644 --- a/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml @@ -15,7 +15,6 @@ name: 'NetworkFirewallPolicyWithRules' api_resource_type_kind: FirewallPolicy description: "The Compute NetworkFirewallPolicy with rules resource" -min_version: 'beta' docs: base_url: 'projects/{{project}}/global/firewallPolicies' self_link: 'projects/{{project}}/global/firewallPolicies/{{name}}' @@ -57,7 +56,6 @@ properties: - name: 'creationTimestamp' type: String description: Creation timestamp in RFC3339 text format. - min_version: 'beta' output: true - name: 'name' type: String @@ -68,24 +66,20 @@ properties: the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - min_version: 'beta' required: true immutable: true - name: 'networkFirewallPolicyId' type: String description: The unique identifier for the resource. This identifier is defined by the server. api_name: id - min_version: 'beta' output: true - name: 'description' type: String description: An optional description of this resource. - min_version: 'beta' - name: 'rule' type: Array description: A list of firewall policy rules. api_name: rules - min_version: 'beta' required: true item_type: type: NestedObject @@ -94,27 +88,23 @@ properties: type: String description: | A description of the rule. - min_version: 'beta' - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. 
- min_version: 'beta' - name: 'priority' type: Integer description: | An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. - min_version: 'beta' required: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. - min_version: 'beta' required: true properties: - name: 'srcIpRanges' @@ -122,7 +112,6 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. - min_version: 'beta' item_type: type: String - name: 'destIpRanges' @@ -130,7 +119,6 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. - min_version: 'beta' item_type: type: String - name: 'srcAddressGroups' @@ -138,7 +126,6 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. - min_version: 'beta' item_type: type: String - name: 'destAddressGroups' @@ -146,7 +133,6 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. - min_version: 'beta' item_type: type: String - name: 'srcFqdns' @@ -154,7 +140,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. - min_version: 'beta' item_type: type: String - name: 'destFqdns' @@ -162,7 +147,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. - min_version: 'beta' item_type: type: String - name: 'srcRegionCodes' @@ -172,7 +156,6 @@ properties: of traffic. 
Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. - min_version: 'beta' item_type: type: String - name: 'destRegionCodes' @@ -182,7 +165,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. - min_version: 'beta' item_type: type: String - name: 'srcNetworkScope' @@ -217,7 +199,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. - min_version: 'beta' item_type: type: String - name: 'destThreatIntelligences' @@ -225,7 +206,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. - min_version: 'beta' item_type: type: String - name: 'layer4Config' @@ -233,7 +213,6 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs - min_version: 'beta' required: true item_type: type: NestedObject @@ -246,7 +225,6 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. - min_version: 'beta' required: true - name: 'ports' type: Array @@ -257,7 +235,6 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. - min_version: 'beta' item_type: type: String - name: 'srcSecureTag' @@ -269,7 +246,6 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags - min_version: 'beta' item_type: type: NestedObject properties: @@ -278,14 +254,12 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. 
@pattern tagValues/[0-9]+ - min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -305,7 +279,6 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags - min_version: 'beta' item_type: type: NestedObject properties: @@ -314,14 +287,12 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -331,13 +302,11 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". - min_version: 'beta' required: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. - min_version: 'beta' enum_values: - 'INGRESS' - 'EGRESS' @@ -347,14 +316,12 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. - min_version: 'beta' send_empty_value: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. 
- min_version: 'beta' item_type: type: String - name: 'securityProfileGroup' @@ -364,13 +331,11 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. - min_version: 'beta' - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. - min_version: 'beta' - name: 'disabled' type: Boolean description: | @@ -378,11 +343,9 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. - min_version: 'beta' - name: 'predefinedRules' type: Array description: A list of firewall policy pre-defined rules. - min_version: 'beta' output: true item_type: type: NestedObject @@ -391,14 +354,12 @@ properties: type: String description: | A description of the rule. - min_version: 'beta' output: true - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. - min_version: 'beta' output: true - name: 'priority' type: Integer @@ -406,14 +367,12 @@ properties: An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. - min_version: 'beta' output: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. - min_version: 'beta' output: true properties: - name: 'srcIpRanges' @@ -421,7 +380,6 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. 
- min_version: 'beta' output: true item_type: type: String @@ -430,7 +388,6 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. - min_version: 'beta' output: true item_type: type: String @@ -439,7 +396,6 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. - min_version: 'beta' output: true item_type: type: String @@ -448,7 +404,6 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. - min_version: 'beta' output: true item_type: type: String @@ -457,7 +412,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. - min_version: 'beta' output: true item_type: type: String @@ -466,7 +420,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. - min_version: 'beta' output: true item_type: type: String @@ -477,7 +430,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. - min_version: 'beta' output: true item_type: type: String @@ -488,7 +440,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. - min_version: 'beta' output: true item_type: type: String @@ -497,7 +448,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. - min_version: 'beta' output: true item_type: type: String @@ -506,7 +456,6 @@ properties: description: | Names of Network Threat Intelligence lists. 
The IPs in these lists will be matched against traffic destination. - min_version: 'beta' output: true item_type: type: String @@ -515,7 +464,6 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs - min_version: 'beta' output: true item_type: type: NestedObject @@ -528,7 +476,6 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. - min_version: 'beta' output: true - name: 'ports' type: Array @@ -539,7 +486,6 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. - min_version: 'beta' output: true item_type: type: String @@ -552,7 +498,6 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags - min_version: 'beta' output: true item_type: type: NestedObject @@ -562,7 +507,6 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' output: true - name: 'state' type: Enum @@ -570,7 +514,6 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -590,7 +533,6 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags - min_version: 'beta' output: true item_type: type: NestedObject @@ -600,7 +542,6 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' output: true - name: 'state' type: Enum @@ -608,7 +549,6 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. 
A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -618,13 +558,11 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". - min_version: 'beta' output: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. - min_version: 'beta' output: true enum_values: - 'INGRESS' @@ -635,7 +573,6 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. - min_version: 'beta' output: true send_empty_value: true - name: 'targetServiceAccounts' @@ -643,7 +580,6 @@ properties: description: | A list of service accounts indicating the sets of instances that are applied with this rule. - min_version: 'beta' output: true item_type: type: String @@ -654,14 +590,12 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. - min_version: 'beta' output: true - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. - min_version: 'beta' output: true - name: 'disabled' type: Boolean @@ -670,25 +604,20 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. - min_version: 'beta' output: true - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. 
- min_version: 'beta' output: true - name: 'selfLink' type: String description: Server-defined URL for the resource. - min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: Server-defined URL for this resource with the resource id. - min_version: 'beta' output: true - name: 'ruleTupleCount' type: Integer description: Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. - min_version: 'beta' output: true diff --git a/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml b/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml index 6dbaa6561153..7681f5262886 100644 --- a/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml @@ -15,7 +15,6 @@ name: 'RegionNetworkFirewallPolicyWithRules' api_resource_type_kind: FirewallPolicy description: "The Compute NetworkFirewallPolicy with rules resource" -min_version: 'beta' docs: base_url: 'projects/{{project}}/regions/{{region}}/firewallPolicies' self_link: 'projects/{{project}}/regions/{{region}}/firewallPolicies/{{name}}' @@ -57,7 +56,6 @@ parameters: - name: 'region' type: String description: The region of this resource. - min_version: 'beta' url_param_only: true immutable: true default_from_api: true @@ -65,7 +63,6 @@ properties: - name: 'creationTimestamp' type: String description: Creation timestamp in RFC3339 text format. - min_version: 'beta' output: true - name: 'name' type: String @@ -76,24 +73,20 @@ properties: the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - min_version: 'beta' required: true immutable: true - name: 'networkFirewallPolicyId' type: String description: The unique identifier for the resource. 
This identifier is defined by the server. api_name: id - min_version: 'beta' output: true - name: 'description' type: String description: An optional description of this resource. - min_version: 'beta' - name: 'rule' type: Array description: A list of firewall policy rules. api_name: rules - min_version: 'beta' required: true item_type: type: NestedObject @@ -102,27 +95,23 @@ properties: type: String description: | A description of the rule. - min_version: 'beta' - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. - min_version: 'beta' - name: 'priority' type: Integer description: | An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. - min_version: 'beta' required: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. - min_version: 'beta' required: true properties: - name: 'srcIpRanges' @@ -130,7 +119,6 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. - min_version: 'beta' item_type: type: String - name: 'destIpRanges' @@ -138,7 +126,6 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. - min_version: 'beta' item_type: type: String - name: 'srcAddressGroups' @@ -146,7 +133,6 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. - min_version: 'beta' item_type: type: String - name: 'destAddressGroups' @@ -154,7 +140,6 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. 
- min_version: 'beta' item_type: type: String - name: 'srcFqdns' @@ -162,7 +147,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. - min_version: 'beta' item_type: type: String - name: 'destFqdns' @@ -170,7 +154,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. - min_version: 'beta' item_type: type: String - name: 'srcNetworkScope' @@ -207,7 +190,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. - min_version: 'beta' item_type: type: String - name: 'destRegionCodes' @@ -217,7 +199,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. - min_version: 'beta' item_type: type: String - name: 'srcThreatIntelligences' @@ -225,7 +206,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. - min_version: 'beta' item_type: type: String - name: 'destThreatIntelligences' @@ -233,7 +213,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. - min_version: 'beta' item_type: type: String - name: 'layer4Config' @@ -241,7 +220,6 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs - min_version: 'beta' required: true item_type: type: NestedObject @@ -254,7 +232,6 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. 
- min_version: 'beta' required: true - name: 'ports' type: Array @@ -265,7 +242,6 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. - min_version: 'beta' item_type: type: String - name: 'srcSecureTag' @@ -277,7 +253,6 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags - min_version: 'beta' item_type: type: NestedObject properties: @@ -286,14 +261,12 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -313,7 +286,6 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags - min_version: 'beta' item_type: type: NestedObject properties: @@ -322,14 +294,12 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -339,13 +309,11 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". - min_version: 'beta' required: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. 
- min_version: 'beta' enum_values: - 'INGRESS' - 'EGRESS' @@ -355,14 +323,12 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. - min_version: 'beta' send_empty_value: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. - min_version: 'beta' item_type: type: String - name: 'securityProfileGroup' @@ -372,13 +338,11 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. - min_version: 'beta' - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. - min_version: 'beta' - name: 'disabled' type: Boolean description: | @@ -386,11 +350,9 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. - min_version: 'beta' - name: 'predefinedRules' type: Array description: A list of firewall policy pre-defined rules. - min_version: 'beta' output: true item_type: type: NestedObject @@ -399,14 +361,12 @@ properties: type: String description: | A description of the rule. - min_version: 'beta' output: true - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. - min_version: 'beta' output: true - name: 'priority' type: Integer @@ -414,14 +374,12 @@ properties: An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. 
Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. - min_version: 'beta' output: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. - min_version: 'beta' output: true properties: - name: 'srcIpRanges' @@ -429,7 +387,6 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. - min_version: 'beta' output: true item_type: type: String @@ -438,7 +395,6 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. - min_version: 'beta' output: true item_type: type: String @@ -447,7 +403,6 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. - min_version: 'beta' output: true item_type: type: String @@ -456,7 +411,6 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. - min_version: 'beta' output: true item_type: type: String @@ -465,7 +419,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. - min_version: 'beta' output: true item_type: type: String @@ -474,7 +427,6 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. - min_version: 'beta' output: true item_type: type: String @@ -485,7 +437,6 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. - min_version: 'beta' output: true item_type: type: String @@ -496,7 +447,6 @@ properties: of traffic. 
Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. - min_version: 'beta' output: true item_type: type: String @@ -505,7 +455,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. - min_version: 'beta' output: true item_type: type: String @@ -514,7 +463,6 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. - min_version: 'beta' output: true item_type: type: String @@ -523,7 +471,6 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs - min_version: 'beta' output: true item_type: type: NestedObject @@ -536,7 +483,6 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. - min_version: 'beta' output: true - name: 'ports' type: Array @@ -547,7 +493,6 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. - min_version: 'beta' output: true item_type: type: String @@ -560,7 +505,6 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags - min_version: 'beta' output: true item_type: type: NestedObject @@ -570,7 +514,6 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' output: true - name: 'state' type: Enum @@ -578,7 +521,6 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. 
- min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -598,7 +540,6 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags - min_version: 'beta' output: true item_type: type: NestedObject @@ -608,7 +549,6 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ - min_version: 'beta' output: true - name: 'state' type: Enum @@ -616,7 +556,6 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. - min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -626,13 +565,11 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". - min_version: 'beta' output: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. - min_version: 'beta' output: true enum_values: - 'INGRESS' @@ -643,7 +580,6 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. - min_version: 'beta' output: true send_empty_value: true - name: 'targetServiceAccounts' @@ -651,7 +587,6 @@ properties: description: | A list of service accounts indicating the sets of instances that are applied with this rule. - min_version: 'beta' output: true item_type: type: String @@ -662,14 +597,12 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. 
- min_version: 'beta' output: true - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. - min_version: 'beta' output: true - name: 'disabled' type: Boolean @@ -678,25 +611,20 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. - min_version: 'beta' output: true - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. - min_version: 'beta' output: true - name: 'selfLink' type: String description: Server-defined URL for the resource. - min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: Server-defined URL for this resource with the resource id. - min_version: 'beta' output: true - name: 'ruleTupleCount' type: Integer description: Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. 
- min_version: 'beta' output: true diff --git a/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl index 3564b750be8e..4c83c878312d 100644 --- a/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl @@ -1,9 +1,7 @@ data "google_project" "project" { - provider = google-beta } resource "google_compute_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { - provider = google-beta short_name = "{{index $.Vars "fw_policy"}}" description = "Terraform test" parent = "organizations/{{index $.TestEnvVars "org_id"}}" @@ -14,7 +12,7 @@ resource "google_compute_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { enable_logging = true action = "allow" direction = "EGRESS" - target_resources = ["https://www.googleapis.com/compute/beta/projects/${data.google_project.project.project_id}/global/networks/default"] + target_resources = [google_compute_network.network.self_link] match { dest_ip_ranges = ["11.100.0.1/32"] @@ -70,47 +68,9 @@ resource "google_compute_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { } } } - - rule { - description = "network scope rule 1" - rule_name = "network scope 1" - priority = 4000 - enable_logging = false - action = "allow" - direction = "INGRESS" - match { - src_ip_ranges = ["11.100.0.1/32"] - src_network_scope = "VPC_NETWORKS" - src_networks = [google_compute_network.network.id] - - layer4_config { - ip_protocol = "tcp" - ports = [8080] - } - } - } - - rule { - description = "network scope rule 2" - rule_name = "network scope 2" - priority = 5000 - enable_logging = false - action = "allow" - direction = "EGRESS" - match { - dest_ip_ranges = ["0.0.0.0/0"] - dest_network_scope = "INTERNET" - - layer4_config { - ip_protocol = "tcp" - ports = [8080] - } - } - } } resource "google_network_security_address_group" 
"address_group_1" { - provider = google-beta name = "{{index $.Vars "address_group"}}" parent = "organizations/{{index $.TestEnvVars "org_id"}}" description = "Global address group" @@ -121,7 +81,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_network_security_security_profile_group" "security_profile_group_1" { - provider = google-beta name = "{{index $.Vars "security_profile_group"}}" parent = "organizations/{{index $.TestEnvVars "org_id"}}" description = "my description" @@ -129,7 +88,6 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - provider = google-beta name = "{{index $.Vars "security_profile"}}" type = "THREAT_PREVENTION" parent = "organizations/{{index $.TestEnvVars "org_id"}}" @@ -137,7 +95,6 @@ resource "google_network_security_security_profile" "security_profile_1" { } resource "google_compute_network" "network" { - provider = google-beta name = "{{index $.Vars "network"}}" auto_create_subnetworks = false } diff --git a/mmv1/templates/terraform/examples/compute_network_firewall_policy_with_rules_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_network_firewall_policy_with_rules_full.tf.tmpl index e596eac2edb3..279793498164 100644 --- a/mmv1/templates/terraform/examples/compute_network_firewall_policy_with_rules_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_network_firewall_policy_with_rules_full.tf.tmpl @@ -1,9 +1,7 @@ data "google_project" "project" { - provider = google-beta } resource "google_compute_network_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "fw_policy"}}" description = "Terraform test" @@ -76,48 +74,9 @@ resource "google_compute_network_firewall_policy_with_rules" "{{$.PrimaryResourc } } } - - rule { - description = "network scope rule 1" - rule_name = "network scope 1" - priority = 4000 - 
enable_logging = false - action = "allow" - direction = "INGRESS" - - match { - src_ip_ranges = ["11.100.0.1/32"] - src_network_scope = "VPC_NETWORKS" - src_networks = [google_compute_network.network.id] - - layer4_config { - ip_protocol = "tcp" - ports = [8080] - } - } - } - - rule { - description = "network scope rule 2" - rule_name = "network scope 2" - priority = 5000 - enable_logging = false - action = "allow" - direction = "EGRESS" - match { - dest_ip_ranges = ["0.0.0.0/0"] - dest_network_scope = "INTERNET" - - layer4_config { - ip_protocol = "tcp" - ports = [8080] - } - } - } } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "{{index $.Vars "address_group"}}" parent = data.google_project.project.id description = "Global address group" @@ -128,7 +87,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { - provider = google-beta description = "Tag key" parent = data.google_project.project.id purpose = "GCE_FIREWALL" @@ -140,14 +98,12 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { - provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "{{index $.Vars "tag_value"}}" } resource "google_network_security_security_profile_group" "security_profile_group_1" { - provider = google-beta name = "{{index $.Vars "security_profile_group"}}" parent = "organizations/{{index $.TestEnvVars "org_id"}}" description = "my description" @@ -155,15 +111,9 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - provider = google-beta name = "{{index $.Vars "security_profile"}}" type = "THREAT_PREVENTION" parent = "organizations/{{index $.TestEnvVars "org_id"}}" location = "global" } -resource "google_compute_network" "network" { - 
provider = google-beta - name = "{{index $.Vars "network"}}" - auto_create_subnetworks = false -} diff --git a/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_full.tf.tmpl index d3a7c3d67fd7..011de845e53d 100644 --- a/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_full.tf.tmpl @@ -1,9 +1,7 @@ data "google_project" "project" { - provider = google-beta } resource "google_compute_region_network_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "fw_policy"}}" region = "us-west2" description = "Terraform test" @@ -58,49 +56,9 @@ resource "google_compute_region_network_firewall_policy_with_rules" "{{$.Primary } } } - - rule { - description = "network scope rule 1" - rule_name = "network scope 1" - priority = 4000 - enable_logging = false - action = "allow" - direction = "INGRESS" - - match { - src_ip_ranges = ["11.100.0.1/32"] - src_network_scope = "VPC_NETWORKS" - src_networks = [google_compute_network.network.id] - - layer4_config { - ip_protocol = "tcp" - ports = [8080] - } - } - } - - rule { - description = "network scope rule 2" - rule_name = "network scope 2" - priority = 5000 - enable_logging = false - action = "allow" - direction = "EGRESS" - - match { - dest_ip_ranges = ["0.0.0.0/0"] - dest_network_scope = "NON_INTERNET" - - layer4_config { - ip_protocol = "tcp" - ports = [8080] - } - } - } } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "{{index $.Vars "address_group"}}" parent = data.google_project.project.id description = "Regional address group" @@ -111,7 +69,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" 
"secure_tag_key_1" { - provider = google-beta description = "Tag key" parent = data.google_project.project.id purpose = "GCE_FIREWALL" @@ -122,14 +79,8 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { - provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "{{index $.Vars "tag_value"}}" } -resource "google_compute_network" "network" { - provider = google-beta - name = "{{index $.Vars "network"}}" - auto_create_subnetworks = false -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl index b1bbb6466780..cf2634c22a59 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl @@ -1,5 +1,4 @@ package compute_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +18,7 @@ func TestAccComputeFirewallPolicyWithRules_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeFirewallPolicyWithRulesDestroyProducer(t), Steps: []resource.TestStep{ { @@ -45,14 +44,12 @@ func TestAccComputeFirewallPolicyWithRules_update(t *testing.T) { func testAccComputeFirewallPolicyWithRules_full(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { - provider = google-beta } resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules" { short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" description = "Terraform test" parent = 
"organizations/%{org_id}" - provider = google-beta rule { description = "tcp rule" @@ -71,7 +68,7 @@ resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules dest_threat_intelligences = ["iplist-search-engines-crawlers", "iplist-tor-exit-nodes"] dest_address_groups = [google_network_security_address_group.address_group_1.id] } - target_resources = ["https://www.googleapis.com/compute/beta/projects/${data.google_project.project.project_id}/global/networks/default"] + target_resources = [google_compute_network.network.self_link] } rule { description = "udp rule" @@ -111,7 +108,6 @@ resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "organizations/%{org_id}" description = "Global address group" @@ -122,7 +118,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_network_security_security_profile_group" "security_profile_group_1" { - provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -130,26 +125,29 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - provider = google-beta name = "tf-test-tf-security-profile%{random_suffix}" type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" location = "global" } + +resource "google_compute_network" "network" { + name = "tf-network%{random_suffix}" + auto_create_subnetworks = false +} + `, context) } func testAccComputeFirewallPolicyWithRules_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { - provider = google-beta } resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules" { short_name = 
"tf-test-tf-fw-org-policy-with-rules%{random_suffix}" description = "Terraform test - update" parent = "organizations/%{org_id}" - provider = google-beta rule { description = "tcp rule - update" @@ -190,7 +188,6 @@ resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "organizations/%{org_id}" description = "Global address group" @@ -201,7 +198,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_network_security_security_profile_group" "security_profile_group_1" { - provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -209,7 +205,6 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - provider = google-beta name = "tf-test-tf-security-profile%{random_suffix}" type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" @@ -217,5 +212,4 @@ resource "google_network_security_security_profile" "security_profile_1" { } `, context) } -{{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl index 0fddf58aad55..253a89f8d45c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl @@ -1,5 +1,4 @@ package compute_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +18,7 @@ func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { acctest.VcrTest(t, 
resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeNetworkFirewallPolicyWithRulesDestroyProducer(t), Steps: []resource.TestStep{ { @@ -45,13 +44,11 @@ func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { func testAccComputeNetworkFirewallPolicyWithRules_full(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { - provider = google-beta } resource "google_compute_network_firewall_policy_with_rules" "network-firewall-policy-with-rules" { name = "tf-test-tf-fw-policy-with-rules%{random_suffix}" description = "Terraform test" - provider = google-beta rule { description = "tcp rule" @@ -115,7 +112,6 @@ resource "google_compute_network_firewall_policy_with_rules" "network-firewall-p } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Global address group" @@ -126,7 +122,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { - provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -137,14 +132,12 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { - provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" } resource "google_network_security_security_profile_group" "security_profile_group_1" { - provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -152,7 +145,6 @@ resource 
"google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - provider = google-beta name = "tf-test-tf-security-profile%{random_suffix}" type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" @@ -164,13 +156,11 @@ resource "google_network_security_security_profile" "security_profile_1" { func testAccComputeNetworkFirewallPolicyWithRules_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { - provider = google-beta } resource "google_compute_network_firewall_policy_with_rules" "network-firewall-policy-with-rules" { name = "tf-test-tf-fw-policy-with-rules%{random_suffix}" description = "Terraform test - update" - provider = google-beta rule { description = "tcp rule - changed" @@ -213,7 +203,6 @@ resource "google_compute_network_firewall_policy_with_rules" "network-firewall-p } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Global address group" @@ -224,7 +213,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { - provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -235,14 +223,12 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { - provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" } resource "google_network_security_security_profile_group" "security_profile_group_1" { - provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -250,7 +236,6 @@ resource 
"google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - provider = google-beta name = "tf-test-tf-security-profile%{random_suffix}" type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" @@ -258,6 +243,5 @@ resource "google_network_security_security_profile" "security_profile_1" { } `, context) } -{{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl index 9adc5d300f29..5898bbc2c4b0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl @@ -1,5 +1,4 @@ package compute_test -{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -17,7 +16,7 @@ func TestAccComputeRegionNetworkFirewallPolicyWithRules_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeRegionNetworkFirewallPolicyWithRulesDestroyProducer(t), Steps: []resource.TestStep{ { @@ -45,14 +44,12 @@ func TestAccComputeRegionNetworkFirewallPolicyWithRules_update(t *testing.T) { func testAccComputeRegionNetworkFirewallPolicyWithRules_full(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { - provider = google-beta } resource "google_compute_region_network_firewall_policy_with_rules" "region-network-firewall-policy-with-rules" { name = "tf-test-tf-region-fw-policy-with-rules%{random_suffix}" region = "us-west2" description = "Terraform test" - 
provider = google-beta rule { description = "tcp rule" @@ -100,7 +97,6 @@ resource "google_compute_region_network_firewall_policy_with_rules" "region-netw } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Regional address group" @@ -111,7 +107,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { - provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -122,7 +117,6 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { - provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" @@ -133,14 +127,12 @@ resource "google_tags_tag_value" "secure_tag_value_1" { func testAccComputeRegionNetworkFirewallPolicyWithRules_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { - provider = google-beta } resource "google_compute_region_network_firewall_policy_with_rules" "region-network-firewall-policy-with-rules" { name = "tf-test-tf-fw-policy-with-rules%{random_suffix}" description = "Terraform test - update" region = "us-west2" - provider = google-beta rule { description = "tcp rule - changed" @@ -180,7 +172,6 @@ resource "google_compute_region_network_firewall_policy_with_rules" "region-netw } resource "google_network_security_address_group" "address_group_1" { - provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Regional address group" @@ -191,7 +182,6 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" 
{ - provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -202,12 +192,10 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { - provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" } `, context) } -{{- end }} From 84af2f87727b493cfdb9662895cef75d82c05deb Mon Sep 17 00:00:00 2001 From: Guy Bidkar <5646214+gbidkar@users.noreply.github.com> Date: Thu, 22 May 2025 01:35:41 +0300 Subject: [PATCH 220/884] container: add emphasis of service account attribute reference in docs (#14020) --- .../terraform/website/docs/r/container_cluster.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 5f7087d27c69..a338c933f32a 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -606,7 +606,7 @@ as "Intel Haswell" or "Intel Sandy Bridge". -> `monitoring.write` is always enabled regardless of user input. `monitoring` and `logging.write` may also be enabled depending on the values for `monitoring_service` and `logging_service`. -* `service_account` - (Optional) The Google Cloud Platform Service Account to be used by the node VMs created by GKE Autopilot or NAP. +* `service_account` - (Optional) The `email` of the Google Cloud Platform Service Account to be used by the node VMs created by GKE Autopilot or NAP. * `boot_disk_kms_key` - (Optional) The Customer Managed Encryption Key used to encrypt the boot disk attached to each node in the node pool. This should be of the form projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. 
For more information about protecting resources with Cloud KMS Keys please see: https://cloud.google.com/compute/docs/disks/customer-managed-encryption From 9aa6aacf53f4d5581a315308a9dc4f31668b8633 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 22 May 2025 00:49:38 +0200 Subject: [PATCH 221/884] feat: `google_iam_workforce_pool_provider_key` resource creation (#13842) --- .../WorkforcePoolProviderKey.yaml | 158 ++++++++++++++++++ .../iam_workforce_pool_provider_key.go.tmpl | 18 ++ ...force_pool_provider_saml_key_basic.tf.tmpl | 29 ++++ ...ce_iam_workforce_pool_provider_key_test.go | 136 +++++++++++++++ ...ool_workforce_pool_provider_key_id_test.go | 33 ++++ 5 files changed, 374 insertions(+) create mode 100644 mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml create mode 100644 mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workforce_pool_provider_saml_key_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go create mode 100644 mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go diff --git a/mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml b/mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml new file mode 100644 index 000000000000..1082c3fca870 --- /dev/null +++ b/mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml @@ -0,0 +1,158 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WorkforcePoolProviderKey' +description: | + Represents a public key configuration for a Workforce Pool Provider. The key can be configured in your identity provider to encrypt SAML assertions. + Google holds the corresponding private key, which it uses to decrypt encrypted tokens. +references: + guides: + 'Workforce Identity Federation Overview': 'https://cloud.google.com/iam/docs/workforce-identity-federation' + 'Configure a provider within the workforce pool': 'https://cloud.google.com/iam/docs/manage-workforce-identity-pools-providers#configure_a_provider_within_the_workforce_pool' + api: 'https://cloud.google.com/iam/docs/reference/rest/v1/locations.workforcePools.providers.keys' +base_url: 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys' +self_link: 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys/{{key_id}}' +create_url: 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys?workforcePoolProviderKeyId={{key_id}}' +immutable: true +import_format: + - 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys/{{key_id}}' +timeouts: + insert_minutes: 20 + delete_minutes: 20 +autogen_async: true +async: + actions: ['create', 'delete'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +custom_code: + constants: 'templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl' +examples: + - name: 
'iam_workforce_pool_provider_saml_key_basic' + primary_resource_id: "example" + vars: + workforce_pool_id: 'example-pool' + provider_id: 'example-prvdr' + key_id: 'example-key' + test_env_vars: + org_id: 'ORG_ID' +parameters: + - name: 'location' + type: String + description: | + The location for the resource. + url_param_only: true + required: true + immutable: true + - name: 'workforcePoolId' + type: String + description: | + The ID of the workforce pool. + url_param_only: true + required: true + immutable: true + - name: 'providerId' + type: String + description: | + The ID of the provider. + url_param_only: true + required: true + immutable: true + - name: 'keyId' + type: String + description: | + The ID to use for the key, which becomes the final component of the resource name. This value must be 4-32 characters, and may contain the characters [a-z0-9-]. + url_param_only: true + required: true + immutable: true + validation: + function: 'ValidateWorkforcePoolProviderKeyId' +properties: + - name: 'name' + type: String + description: | + Identifier. The resource name of the key. + Format: `locations/{location}/workforcePools/{workforcePoolId}/providers/{providerId}/keys/{keyId}` + output: true + - name: 'keyData' + type: 'NestedObject' + description: | + Immutable. Public half of the asymmetric key. + required: true + properties: + - name: 'format' + type: Enum + description: | + The format of the key. + output: true + enum_values: + - 'RSA_X509_PEM' + - name: 'notBeforeTime' + type: String + description: | + Earliest timestamp when this key is valid. Attempts to use this key before this time will fail. + Only present if the key data represents a X.509 certificate. + + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. + Offsets other than "Z" are also accepted. + Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". 
+ output: true + - name: 'notAfterTime' + type: String + description: | + Latest timestamp when this key is valid. Attempts to use this key after this time will fail. + Only present if the key data represents a X.509 certificate. + + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. + Offsets other than "Z" are also accepted. + Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". + output: true + - name: 'key' + type: String + description: | + The key data. The format of the key is represented by the format field. + output: true + - name: 'keySpec' + type: Enum + description: | + The specifications for the key. + required: true + enum_values: + - 'RSA_2048' + - 'RSA_3072' + - 'RSA_4096' + - name: 'state' + type: Enum + description: | + The state of the key. + output: true + enum_values: + - 'STATE_UNSPECIFIED' + - 'ACTIVE' + - 'DELETED' + - name: 'use' + type: Enum + description: | + The purpose of the key. + required: true + enum_values: + - 'ENCRYPTION' + - name: 'expireTime' + type: String + description: | + The time after which the key will be permanently deleted and cannot be recovered. + Note that the key may get purged before this time if the total limit of keys per provider is exceeded. + output: true diff --git a/mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl b/mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl new file mode 100644 index 000000000000..82d8e6be96f6 --- /dev/null +++ b/mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl @@ -0,0 +1,18 @@ +const workforcePoolProviderKeyIdRegexp = `^[a-z0-9-]{4,32}$` + +func ValidateWorkforcePoolProviderKeyId(v interface{}, k string) (ws []string, errors []error) { + value := v.(string) + + if strings.HasPrefix(value, "gcp-") { + errors = append(errors, fmt.Errorf( + "%q (%q) can not start with \"gcp-\". 
" + + "The prefix `gcp-` is reserved for use by Google, and may not be specified.", k, value)) + } + + if !regexp.MustCompile(workforcePoolProviderKeyIdRegexp).MatchString(value) { + errors = append(errors, fmt.Errorf( + "%q (%q) must be 4-32 characters, and may contain the characters [a-z0-9-].", k, value)) + } + + return +} diff --git a/mmv1/templates/terraform/examples/iam_workforce_pool_provider_saml_key_basic.tf.tmpl b/mmv1/templates/terraform/examples/iam_workforce_pool_provider_saml_key_basic.tf.tmpl new file mode 100644 index 000000000000..fbd6d892fbfe --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workforce_pool_provider_saml_key_basic.tf.tmpl @@ -0,0 +1,29 @@ +resource "google_iam_workforce_pool" "pool" { + workforce_pool_id = "{{index $.Vars "workforce_pool_id"}}" + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "provider" { + workforce_pool_id = google_iam_workforce_pool.pool.workforce_pool_id + location = google_iam_workforce_pool.pool.location + provider_id = "{{index $.Vars "provider_id"}}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + saml { + idp_metadata_xml = " 
MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } +} + +resource "google_iam_workforce_pool_provider_key" "{{$.PrimaryResourceId}}" { + workforce_pool_id = google_iam_workforce_pool.pool.workforce_pool_id + location = google_iam_workforce_pool.pool.location + provider_id = google_iam_workforce_pool_provider.provider.provider_id + key_id = "{{index $.Vars "key_id"}}" + + key_data { + key_spec = "RSA_2048" + } + use = "ENCRYPTION" +} diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go new file mode 100644 index 000000000000..a9d23a2d1c1f --- /dev/null +++ 
b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go @@ -0,0 +1,136 @@ +package iamworkforcepool_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccIAMWorkforcePoolWorkforcePoolProviderKey_update(t *testing.T) { + t.Parallel() + + random_suffix := acctest.RandString(t, 10) + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": random_suffix, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMWorkforcePoolWorkforcePoolProviderKeyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMWorkforcePoolWorkforcePoolProviderKey_basic(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider_key.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolProviderKey_update(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider_key.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolProviderKey_destroy(context), + }, + }, + }) +} + +func testAccIAMWorkforcePoolWorkforcePoolProviderKey_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "default" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "default" { + workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id + location = google_iam_workforce_pool.default.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = 
"assertion.sub" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } +} + +resource "google_iam_workforce_pool_provider_key" "default" { + workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id + location = google_iam_workforce_pool.default.location + provider_id = google_iam_workforce_pool_provider.default.provider_id + key_id = "my-key-%{random_suffix}" + + key_data { + key_spec = "RSA_2048" + } + use = "ENCRYPTION" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProviderKey_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "default" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource 
"google_iam_workforce_pool_provider" "default" { + workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id + location = google_iam_workforce_pool.default.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } +} + +resource "google_iam_workforce_pool_provider_key" "default" { + workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id + location = google_iam_workforce_pool.default.location + provider_id = google_iam_workforce_pool_provider.default.provider_id + key_id = "my-other-key-%{random_suffix}" + + key_data { + key_spec = "RSA_3072" + } + use = "ENCRYPTION" +} +`, context) +} + +func 
testAccIAMWorkforcePoolWorkforcePoolProviderKey_destroy(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "default" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "default" { + workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id + location = google_iam_workforce_pool.default.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } +} +`, context) +} diff --git 
a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go new file mode 100644 index 000000000000..7f6d0673b7a3 --- /dev/null +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go @@ -0,0 +1,33 @@ +package iamworkforcepool_test + +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iamworkforcepool" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateIAMWorkforcePoolWorkforcePoolProviderKeyId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foo-"}, + {TestName: "long", Value: strings.Repeat("f", 32)}, + {TestName: "has a hyphen", Value: "foo-bar"}, + + // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: "too short", Value: "foo", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, + } + + es := verify.TestStringValidationCases(x, iamworkforcepool.ValidateWorkforcePoolProviderKeyId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkforcePoolProviderKey names: %v", es) + } +} From de77e2d76ba8c0a027e698c7dd2cb2fb2926f961 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Thu, 22 May 2025 01:39:03 +0200 Subject: [PATCH 222/884] Add Mirror Percent to request_mirror_policy in URLMAP (#13974) --- mmv1/products/compute/UrlMap.yaml | 64 +++++++++++++++++++ 
.../url_map_default_mirror_percent.tf.tmpl | 54 ++++++++++++++++ ...ath_matcher_default_mirror_percent.tf.tmpl | 54 ++++++++++++++++ .../url_map_path_rule_mirror_percent.tf.tmpl | 54 ++++++++++++++++ .../url_map_route_rule_mirror_percent.tf.tmpl | 58 +++++++++++++++++ 5 files changed, 284 insertions(+) create mode 100644 mmv1/templates/terraform/examples/url_map_default_mirror_percent.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/url_map_path_matcher_default_mirror_percent.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/url_map_path_rule_mirror_percent.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/url_map_route_rule_mirror_percent.tf.tmpl diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index f137b5431645..164878701009 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -86,6 +86,38 @@ examples: service_a_backend_service_name: 'service-a' service_b_backend_service_name: 'service-b' health_check_name: 'health-check' + - name: 'url_map_default_mirror_percent' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + health_check_name: 'health-check' + - name: 'url_map_path_matcher_default_mirror_percent' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + health_check_name: 'health-check' + - name: 'url_map_path_rule_mirror_percent' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + health_check_name: 'health-check' + - name: 'url_map_route_rule_mirror_percent' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + 
health_check_name: 'health-check' - name: 'external_http_lb_mig_backend' primary_resource_id: 'default' vars: @@ -681,6 +713,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -1433,6 +1473,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -2101,6 +2149,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | @@ -2692,6 +2748,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. 
+ validation: + function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/url_map_default_mirror_percent.tf.tmpl b/mmv1/templates/terraform/examples/url_map_default_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..5c084738bc05 --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_default_mirror_percent.tf.tmpl @@ -0,0 +1,54 @@ +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "Test for default route action mirror percent" + + default_service = google_compute_backend_service.home.id + + default_route_action { + request_mirror_policy { + backend_service = google_compute_backend_service.mirror.id + mirror_percent = 50.0 + } + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.home.id + } +} + +resource "google_compute_backend_service" "home" { + provider = google-beta + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "mirror" { + provider = google-beta + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/url_map_path_matcher_default_mirror_percent.tf.tmpl 
b/mmv1/templates/terraform/examples/url_map_path_matcher_default_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..77f840f3bf36 --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_path_matcher_default_mirror_percent.tf.tmpl @@ -0,0 +1,54 @@ +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "Test for default route action mirror percent" + + default_service = google_compute_backend_service.home.id + + default_route_action { + request_mirror_policy { + backend_service = google_compute_backend_service.mirror.id + mirror_percent = 50.0 + } + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.home.id + } +} + +resource "google_compute_backend_service" "home" { + provider = google-beta + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "mirror" { + provider = google-beta + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + } +} diff --git a/mmv1/templates/terraform/examples/url_map_path_rule_mirror_percent.tf.tmpl b/mmv1/templates/terraform/examples/url_map_path_rule_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..dec8a912ce8e --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_path_rule_mirror_percent.tf.tmpl @@ -0,0 +1,54 @@ +resource "google_compute_url_map" 
"{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "Test for path matcher default route action mirror percent" + + default_service = google_compute_backend_service.home.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.home.id + + default_route_action { + request_mirror_policy { + backend_service = google_compute_backend_service.mirror.id + mirror_percent = 75.0 + } + } + } +} + +resource "google_compute_backend_service" "home" { + provider = google-beta + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "mirror" { + provider = google-beta + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + } +} diff --git a/mmv1/templates/terraform/examples/url_map_route_rule_mirror_percent.tf.tmpl b/mmv1/templates/terraform/examples/url_map_route_rule_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..c5a783fc17ec --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_route_rule_mirror_percent.tf.tmpl @@ -0,0 +1,58 @@ +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "Test for path rule route action mirror percent" + + default_service = google_compute_backend_service.home.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + 
} + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.home.id + + path_rule { + paths = ["/home"] + service = google_compute_backend_service.home.id + route_action { + request_mirror_policy { + backend_service = google_compute_backend_service.mirror.id + mirror_percent = 25.0 + } + } + } + } +} + +resource "google_compute_backend_service" "home" { + provider = google-beta + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "mirror" { + provider = google-beta + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + } +} \ No newline at end of file From 8eaf97e02dce235c657c9ca0bdb21d7abbdcd05b Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 21 May 2025 17:00:36 -0700 Subject: [PATCH 223/884] source service API and service package for all 99% of TGC resources (#14045) --- .../acctest/resource_inventory_reader.go | 212 ++++++++++++++++++ .../acctest/resource_inventory_test.go | 113 ++++++++++ .../terraform/acctest/tgc_utils.go | 38 +--- 3 files changed, 331 insertions(+), 32 deletions(-) create mode 100644 mmv1/third_party/terraform/acctest/resource_inventory_reader.go create mode 100644 mmv1/third_party/terraform/acctest/resource_inventory_test.go diff --git a/mmv1/third_party/terraform/acctest/resource_inventory_reader.go b/mmv1/third_party/terraform/acctest/resource_inventory_reader.go new file mode 100644 index 000000000000..afec311b6ab7 --- /dev/null +++ 
b/mmv1/third_party/terraform/acctest/resource_inventory_reader.go @@ -0,0 +1,212 @@ +package acctest + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "sync" + + "gopkg.in/yaml.v2" +) + +// ResourceYAMLMetadata represents the structure of the metadata files +type ResourceYAMLMetadata struct { + Resource string `yaml:"resource"` + ApiServiceName string `yaml:"api_service_name"` + SourceFile string `yaml:"source_file"` +} + +// Cache structures to avoid repeated file system operations +var ( + // Cache for API service names (resourceName -> apiServiceName) + apiServiceNameCache = make(map[string]string) + // Cache for service packages (resourceType -> servicePackage) + servicePackageCache = make(map[string]string) + // Flag to track if cache has been populated + cachePopulated = false + // Mutex to protect cache access + cacheMutex sync.RWMutex +) + +// PopulateMetadataCache walks through all metadata files once and populates +// both the API service name and service package caches for improved performance +func PopulateMetadataCache() error { + cacheMutex.Lock() + defer cacheMutex.Unlock() + + // If cache is already populated, we can skip + if cachePopulated { + return nil + } + + baseDir, err := getServicesDir() + if err != nil { + return fmt.Errorf("failed to find services directory: %v", err) + } + + // Count for statistics + apiNameCount := 0 + servicePkgCount := 0 + + // Walk through all service directories once + err = filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil // Skip files with errors but continue walking + } + + // Look for metadata files + if !info.IsDir() && strings.HasPrefix(info.Name(), "resource_") && strings.HasSuffix(info.Name(), "_meta.yaml") { + // Read the file + content, err := os.ReadFile(path) + if err != nil { + return nil // Continue to next file + } + + // Parse YAML + var metadata ResourceYAMLMetadata + if err := yaml.Unmarshal(content, &metadata); err != nil { + 
return nil // Continue to next file + } + + // Skip if resource is empty + if metadata.Resource == "" { + return nil + } + + // Store API service name in cache + if metadata.ApiServiceName != "" { + apiServiceNameCache[metadata.Resource] = metadata.ApiServiceName + apiNameCount++ + } + + // Extract and store service package in cache + pathParts := strings.Split(path, string(os.PathSeparator)) + servicesIndex := -1 + for i, part := range pathParts { + if part == "services" { + servicesIndex = i + break + } + } + + if servicesIndex >= 0 && len(pathParts) > servicesIndex+1 { + servicePackage := pathParts[servicesIndex+1] // The part after "services" + servicePackageCache[metadata.Resource] = servicePackage + servicePkgCount++ + } + } + return nil + }) + + if err != nil { + return fmt.Errorf("error walking directory: %v", err) + } + + // Mark cache as populated + cachePopulated = true + + return nil +} + +// GetAPIServiceNameForResource finds the api_service_name for a given resource name +// If projectRoot is empty, it will attempt to find the project root automatically +func GetAPIServiceNameForResource(resourceName string) string { + // Make sure cache is populated + if !cachePopulated { + if err := PopulateMetadataCache(); err != nil { + return "failed_to_populate_metadata_cache" + } + } + + // Check cache + cacheMutex.RLock() + apiServiceName, found := apiServiceNameCache[resourceName] + cacheMutex.RUnlock() + + if !found { + return "unknown" + } + + return apiServiceName +} + +// GetServicePackageForResourceType finds the service package for a given resource type +// If projectRoot is empty, it will attempt to find the project root automatically +func GetServicePackageForResourceType(resourceType string) string { + // Make sure cache is populated + if !cachePopulated { + if err := PopulateMetadataCache(); err != nil { + return "failed_to_populate_metadata_cache" + } + } + + // Check cache + cacheMutex.RLock() + servicePackage, found := 
servicePackageCache[resourceType] + cacheMutex.RUnlock() + + if !found { + return "unknown" + } + + return servicePackage +} + +// getServicesDir returns the path to the services directory +// It will attempt to find the project root relative to cwd +func getServicesDir() (string, error) { + // Try to find project root + root, err := findProjectRoot() + if err == nil { + servicesDir := filepath.Join(root, "google-beta", "services") + if _, err := os.Stat(servicesDir); err == nil { + return servicesDir, nil + } + } + + // Last resort: try relative to current directory + currentDir, err := os.Getwd() + if err != nil { + return "", fmt.Errorf("failed to determine current directory: %v", err) + } + + // Try a few common relative paths + potentialPaths := []string{ + filepath.Join(currentDir, "google-beta", "services"), + filepath.Join(currentDir, "..", "google-beta", "services"), + filepath.Join(currentDir, "..", "..", "google-beta", "services"), + } + + for _, path := range potentialPaths { + if _, err := os.Stat(path); err == nil { + return path, nil + } + } + + return "", fmt.Errorf("unable to locate services directory, please provide explicit project root path") +} + +// findProjectRoot walks up from the current directory to find the project root +// by looking for the go.mod file +func findProjectRoot() (string, error) { + dir, err := os.Getwd() + if err != nil { + return "", err + } + + for { + // Check if go.mod exists in the current directory + if _, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil { + return dir, nil + } + + // Move up to the parent directory + parentDir := filepath.Dir(dir) + if parentDir == dir { + // Reached the filesystem root without finding go.mod + return "", fmt.Errorf("could not find go.mod file in any parent directory") + } + dir = parentDir + } +} diff --git a/mmv1/third_party/terraform/acctest/resource_inventory_test.go b/mmv1/third_party/terraform/acctest/resource_inventory_test.go new file mode 100644 index 
000000000000..d218b8de6808 --- /dev/null +++ b/mmv1/third_party/terraform/acctest/resource_inventory_test.go @@ -0,0 +1,113 @@ +package acctest_test + +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/provider" +) + +func TestResourceInventoryMetadataFound(t *testing.T) { + resources := provider.ResourceMap() + + // Track statistics + var ( + totalResources = 0 + missingServicePkg = 0 + missingServiceName = 0 + ) + + // Create a map to store missing resources for summary report + missingServicePkgResources := make(map[string]bool) + missingServiceNameResources := make(map[string]bool) + + for resourceType := range resources { + if strings.HasSuffix(resourceType, "_iam_member") || + strings.HasSuffix(resourceType, "_iam_policy") || + strings.HasSuffix(resourceType, "_iam_binding") { + continue + } + totalResources++ + + // Log each resource being checked + // t.Logf("Checking metadata for resource: %s", resourceType) + + // Check for service package + servicePackage := acctest.GetServicePackageForResourceType(resourceType) + if servicePackage == "unknown" { + // t.Logf("WARNING: Could not find service package for resource %s: %v", resourceType) + missingServicePkg++ + missingServicePkgResources[resourceType] = true + } + + apiServiceName := acctest.GetAPIServiceNameForResource(resourceType) + // Check for API service name + if apiServiceName == "unknown" { + // t.Logf("WARNING: Could not find API service name for resource %s: %v", resourceType) + missingServiceName++ + missingServiceNameResources[resourceType] = true + } + t.Logf(" %s servicePackage: %s apiServiceName: %s", resourceType, servicePackage, apiServiceName) + + } + + // Generate a summary report + t.Logf("\n--- RESOURCE METADATA TEST SUMMARY ---") + t.Logf("Total resources checked: %d", totalResources) + t.Logf("Resources missing service package: %d (%.1f%%)", + missingServicePkg, + 
float64(missingServicePkg)/float64(totalResources)*100) + t.Logf("Resources missing API service name: %d (%.1f%%)", + missingServiceName, + float64(missingServiceName)/float64(totalResources)*100) + + // List resources missing metadata (limited to first 10 for readability) + if len(missingServicePkgResources) > 0 { + t.Log("\nResources missing service package (first 10):") + count := 0 + for res := range missingServicePkgResources { + t.Logf(" - %s", res) + count++ + if count >= 10 { + remaining := len(missingServicePkgResources) - 10 + if remaining > 0 { + t.Logf(" ... and %d more", remaining) + } + break + } + } + } + + if len(missingServiceNameResources) > 0 { + t.Log("\nResources missing API service name (first 10):") + count := 0 + for res := range missingServiceNameResources { + t.Logf(" - %s", res) + count++ + if count >= 10 { + remaining := len(missingServiceNameResources) - 10 + if remaining > 0 { + t.Logf(" ... and %d more", remaining) + } + break + } + } + } + + // Decide whether to fail the test based on coverage percentage + const requiredCoveragePercent = 90.0 + + servicePkgCoverage := (float64(totalResources-missingServicePkg) / float64(totalResources)) * 100 + serviceNameCoverage := (float64(totalResources-missingServiceName) / float64(totalResources)) * 100 + + if servicePkgCoverage < requiredCoveragePercent { + t.Errorf("Service package metadata coverage (%.1f%%) is below required threshold (%.1f%%)", + servicePkgCoverage, requiredCoveragePercent) + } + + if serviceNameCoverage < requiredCoveragePercent { + t.Errorf("API service name metadata coverage (%.1f%%) is below required threshold (%.1f%%)", + serviceNameCoverage, requiredCoveragePercent) + } +} diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 4b8f4adcf1b5..9a3a10e0a27b 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -67,7 +67,11 @@ func 
CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc } // Resolve the CAI asset name - if apiServiceName, ok := ApiServiceNames[metadata.ResourceType]; ok { + apiServiceName := GetAPIServiceNameForResource(metadata.ResourceType) + if apiServiceName == "unknown" || apiServiceName == "failed_to_populate_metadata_cache" { + log.Printf("[DEBUG]TGC Terraform error: unknown resource type %s", metadata.ResourceType) + metadata.CaiAssetName = apiServiceName + } else { var rName string switch metadata.ResourceType { case "google_project": @@ -76,8 +80,6 @@ func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc rName = rState.Primary.ID } metadata.CaiAssetName = fmt.Sprintf("//%s/%s", apiServiceName, rName) - } else { - metadata.CaiAssetName = "unknown" } // Resolve auto IDs in import metadata @@ -118,34 +120,6 @@ func parseResources(config string) []string { return resources } -// getServicePackage determines the service package for a resource type -func getServicePackage(resourceType string) string { - var ServicePackages = map[string]string{ - "google_compute_": "compute", - "google_storage_": "storage", - "google_sql_": "sql", - "google_container_": "container", - "google_bigquery_": "bigquery", - "google_project": "resourcemanager", - "google_cloud_run_": "cloudrun", - } - - // Check for exact matches first - if service, ok := ServicePackages[resourceType]; ok { - return service - } - - // Check for prefix matches - for prefix, service := range ServicePackages { - if strings.HasPrefix(resourceType, prefix) { - return service - } - } - - // Default to "unknown" if no match found - return "unknown" -} - // determineImportMetadata checks if the next step is an import step and extracts all import metadata func determineImportMetadata(steps []resource.TestStep, currentStepIndex int, resourceName string) ImportMetadata { var metadata ImportMetadata @@ -247,7 +221,7 @@ func extendWithTGCData(t *testing.T, c resource.TestCase) 
resource.TestCase { ResourceType: resourceType, ResourceAddress: res, ImportMetadata: importMeta, - Service: getServicePackage(resourceType), + Service: GetServicePackageForResourceType(resourceType), // CaiAssetName will be populated at runtime in the check function } } From e997de91d3955c3e8ff8c28aad0329dc44bbc48c Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 22 May 2025 09:05:04 -0700 Subject: [PATCH 224/884] Unskip TestAccNetworkSecurityGatewaySecurityPolicyRule_multiple (#14037) --- ...source_network_security_gateway_security_policy_rule_test.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go index a4e083fa8815..22823730a924 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go @@ -49,8 +49,6 @@ func TestAccNetworkSecurityGatewaySecurityPolicyRule_update(t *testing.T) { } func TestAccNetworkSecurityGatewaySecurityPolicyRule_multiple(t *testing.T) { - // Skip for now to avoid leaking resources until the fix for b/400293188 rolls out - t.Skip() t.Parallel() context := map[string]interface{}{ From 064fb2428809265bf0f10d1bbac1f7f3a20a682a Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 22 May 2025 09:27:34 -0700 Subject: [PATCH 225/884] refactor more templates that no longer need to be (#14060) --- ... 
=> resource_compute_firewall_policy_with_rules_test.go} | 2 +- ...urce_compute_network_firewall_policy_with_rules_test.go} | 6 ++---- ...mpute_region_network_firewall_policy_with_rules_test.go} | 2 +- 3 files changed, 4 insertions(+), 6 deletions(-) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_policy_with_rules_test.go.tmpl => resource_compute_firewall_policy_with_rules_test.go} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_firewall_policy_with_rules_test.go.tmpl => resource_compute_network_firewall_policy_with_rules_test.go} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl => resource_compute_region_network_firewall_policy_with_rules_test.go} (100%) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go index cf2634c22a59..6c2ae3a26336 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go @@ -1,4 +1,5 @@ package compute_test + import ( "testing" @@ -212,4 +213,3 @@ resource "google_network_security_security_profile" "security_profile_1" { } `, context) } - diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go similarity index 99% rename from 
mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go index 253a89f8d45c..43488d34c0bc 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go @@ -1,11 +1,11 @@ package compute_test + import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - ) func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { @@ -13,7 +13,7 @@ func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), } acctest.VcrTest(t, resource.TestCase{ @@ -243,5 +243,3 @@ resource "google_network_security_security_profile" "security_profile_1" { } `, context) } - - diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go index 5898bbc2c4b0..e6a8b8702e96 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go @@ -1,4 +1,5 @@ package compute_test + import ( "testing" @@ -198,4 +199,3 @@ resource "google_tags_tag_value" "secure_tag_value_1" { } `, context) } - From 20f874a7dfb7eff06fa45a0b27fd25e8df2f788d Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 22 May 2025 11:20:59 -0700 Subject: [PATCH 226/884] Convert review functions to go-gh (#14031) --- .ci/magician/github/get.go | 44 ++++++++++---------- .ci/magician/github/integration_test.go | 55 ++++++++++++++++++++++--- .ci/magician/github/set.go | 43 ++++++++++++------- 3 files changed, 101 insertions(+), 41 deletions(-) diff --git a/.ci/magician/github/get.go b/.ci/magician/github/get.go index 667c8b498f0d..8df94827a5f9 100644 --- a/.ci/magician/github/get.go +++ b/.ci/magician/github/get.go @@ -17,7 +17,6 @@ package github import ( "fmt" - utils "magician/utility" "strconv" "time" @@ -94,45 +93,48 @@ func (c *Client) GetPullRequests(state, base, sort, direction string) ([]PullReq } // GetPullRequestRequestedReviewers gets requested reviewers for a PR -func (gh *Client) GetPullRequestRequestedReviewers(prNumber string) ([]User, error) { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) - - var requestedReviewers struct { - Users []User `json:"users"` +func (c *Client) GetPullRequestRequestedReviewers(prNumber string) ([]User, error) { + num, err := strconv.Atoi(prNumber) + if err != nil { + return nil, err } - err := utils.RequestCallWithRetry(url, "GET", gh.token, &requestedReviewers, nil) + reviewers, _, err := c.gh.PullRequests.ListReviewers(c.ctx, defaultOwner, defaultRepo, num, nil) if err != nil { return nil, err } - return requestedReviewers.Users, nil + return convertGHUsers(reviewers.Users), nil } // GetPullRequestPreviousReviewers gets previous reviewers for a PR -func (gh *Client) 
GetPullRequestPreviousReviewers(prNumber string) ([]User, error) { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/reviews", prNumber) - - var reviews []struct { - User User `json:"user"` +func (c *Client) GetPullRequestPreviousReviewers(prNumber string) ([]User, error) { + num, err := strconv.Atoi(prNumber) + if err != nil { + return nil, err } - - err := utils.RequestCallWithRetry(url, "GET", gh.token, &reviews, nil) + reviews, _, err := c.gh.PullRequests.ListReviews(c.ctx, defaultOwner, defaultRepo, num, nil) if err != nil { return nil, err } - previousAssignedReviewers := map[string]User{} + // Use a map to deduplicate reviewers + reviewerMap := make(map[string]*gh.User) + for _, review := range reviews { - previousAssignedReviewers[review.User.Login] = review.User + if review.User != nil && review.User.Login != nil { + login := review.User.GetLogin() + reviewerMap[login] = review.User + } } - result := []User{} - for _, user := range previousAssignedReviewers { - result = append(result, user) + // Convert map to slice + reviewers := make([]*gh.User, 0, len(reviewerMap)) + for _, user := range reviewerMap { + reviewers = append(reviewers, user) } - return result, nil + return convertGHUsers(reviewers), nil } // GetCommitMessage gets a commit message diff --git a/.ci/magician/github/integration_test.go b/.ci/magician/github/integration_test.go index bf5a649f27a1..64ed13e0ba64 100644 --- a/.ci/magician/github/integration_test.go +++ b/.ci/magician/github/integration_test.go @@ -37,7 +37,9 @@ const ( testNonMember = "bananaman5000" testRepo = "magic-modules" testOwner = "GoogleCloudPlatform" - testPRNumber = "13969" // replace this with an actual PR Number + testReviewer = "melinath" + testPRNumber = "14031" + testOldPRNumber = "13969" // replace this with an actual PR Number testPRCommitSha = "4a8409686810551655eea2533e939cc5344e83e2" // replace this with an actual SHA testMainCommitSha = 
"fd910977cf24595d2c04e3f0a369a82c79fdb8f8" // replace this with an actual SHA testLabel = "terraform-3.0" @@ -276,27 +278,70 @@ func TestIntegrationCreateWorkflowDispatchEvent(t *testing.T) { t.Logf("Successfully triggered workflow dispatch event") } +func TestIntegrationRequestAndRemovePullRequestReviewers(t *testing.T) { + client := skipIfNoToken(t) + + // Request a reviewer + reviewers := []string{testReviewer} // Request the owner to review their own PR + err := client.RequestPullRequestReviewers(testPRNumber, reviewers) + if err != nil { + t.Fatalf("RequestPullRequestReviewers failed: %v", err) + } + + // Remove the reviewer + err = client.RemovePullRequestReviewers(testPRNumber, reviewers) + if err != nil { + t.Fatalf("RemovePullRequestReviewers failed: %v", err) + } + + t.Logf("Successfully requested and removed reviewers: %v", reviewers) +} + +func TestIntegrationGetPullRequestRequestedReviewers(t *testing.T) { + client := skipIfNoToken(t) + + reviewers, err := client.GetPullRequestRequestedReviewers(testPRNumber) + if err != nil { + t.Fatalf("GetPullRequestRequestedReviewers failed: %v", err) + } + + t.Logf("Found %d requested reviewers", len(reviewers)) + for i, reviewer := range reviewers { + t.Logf("Reviewer %d: %s", i+1, reviewer.Login) + } +} + +func TestIntegrationGetPullRequestPreviousReviewers(t *testing.T) { + client := skipIfNoToken(t) + + reviewers, err := client.GetPullRequestPreviousReviewers(testOldPRNumber) + if err != nil { + t.Fatalf("GetPullRequestPreviousReviewers failed: %v", err) + } + + t.Logf("Found %d previous reviewers", len(reviewers)) + for i, reviewer := range reviewers { + t.Logf("Previous reviewer %d: %s", i+1, reviewer.Login) + } +} + // TestIntegrationMergePullRequest is commented out as it has permanent effects // Uncomment and run only if you're sure you want to merge the PR /* func TestIntegrationMergePullRequest(t *testing.T) { client := skipIfNoToken(t) - // Skip this test by default as it has permanent effects if 
os.Getenv("RUN_MERGE_PR_TEST") != "true" { t.Skip("Skipping merge PR test: set RUN_MERGE_PR_TEST=true to run") } - // You'll need a valid commit SHA for this test if testPRCommitSha == "HEAD" { t.Skip("Skipping MergePullRequest test: need a valid commit SHA") } - err := client.MergePullRequest(testOwner, testRepo, testPRNumber, testPRCommitSha) if err != nil { t.Fatalf("MergePullRequest failed: %v", err) } - t.Logf("Successfully merged pull request") } */ diff --git a/.ci/magician/github/set.go b/.ci/magician/github/set.go index e56b5a03f5b5..c04e7daf936b 100644 --- a/.ci/magician/github/set.go +++ b/.ci/magician/github/set.go @@ -78,39 +78,52 @@ func (c *Client) UpdateComment(prNumber, comment string, id int) error { return nil } -func (gh *Client) RequestPullRequestReviewers(prNumber string, reviewers []string) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) +// RequestPullRequestReviewers adds reviewers to a pull request +func (c *Client) RequestPullRequestReviewers(prNumber string, reviewers []string) error { + if len(reviewers) == 0 { + return nil + } - body := map[string][]string{ - "reviewers": reviewers, - "team_reviewers": {}, + num, err := strconv.Atoi(prNumber) + if err != nil { + return err } - err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, body) + // Create the reviewers request + reviewersRequest := gh.ReviewersRequest{ + Reviewers: reviewers, + } + + _, _, err = c.gh.PullRequests.RequestReviewers(c.ctx, defaultOwner, defaultRepo, num, reviewersRequest) if err != nil { return err } fmt.Printf("Successfully added reviewers %v to pull request %s\n", reviewers, prNumber) - return nil } -func (gh *Client) RemovePullRequestReviewers(prNumber string, reviewers []string) error { - url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) - - body := map[string][]string{ - "reviewers": 
reviewers, - "team_reviewers": {}, +// RemovePullRequestReviewers removes reviewers from a pull request +func (c *Client) RemovePullRequestReviewers(prNumber string, reviewers []string) error { + if len(reviewers) == 0 { + return nil } - err := utils.RequestCall(url, "DELETE", gh.token, nil, body) + num, err := strconv.Atoi(prNumber) if err != nil { return err } - fmt.Printf("Successfully removed reviewers %v to pull request %s\n", reviewers, prNumber) + reviewersRequest := gh.ReviewersRequest{ + Reviewers: reviewers, + } + + _, err = c.gh.PullRequests.RemoveReviewers(c.ctx, defaultOwner, defaultRepo, num, reviewersRequest) + if err != nil { + return err + } + fmt.Printf("Successfully removed reviewers %v from pull request %s\n", reviewers, prNumber) return nil } From 792e7f21526fae334828e3946790bc7d60bd9c60 Mon Sep 17 00:00:00 2001 From: Michael Lopez Date: Thu, 22 May 2025 20:23:33 +0200 Subject: [PATCH 227/884] gkehub: datasource for gke_hub_membership resource (#13915) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_google_gke_hub_membership.go | 39 +++++ ...a_source_google_gke_hub_membership_test.go | 144 ++++++++++++++++++ .../docs/d/gke_hub_feature.html.markdown | 30 ++++ .../docs/d/gke_hub_membership.html.markdown | 35 +++++ 5 files changed, 249 insertions(+) create mode 100644 mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go create mode 100644 mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown create mode 100644 mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 9ab4ebf8a890..b9d632b45f16 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ 
b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -134,6 +134,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_dns_managed_zone": dns.DataSourceDnsManagedZone(), "google_dns_managed_zones": dns.DataSourceDnsManagedZones(), "google_dns_record_set": dns.DataSourceDnsRecordSet(), + "google_gke_hub_membership": gkehub.DataSourceGoogleGkeHubMembership(), "google_gke_hub_membership_binding": gkehub2.DataSourceGoogleGkeHubMembershipBinding(), "google_gke_hub_feature": gkehub2.DataSourceGoogleGkeHubFeature(), "google_filestore_instance": filestore.DataSourceGoogleFilestoreInstance(), diff --git a/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go b/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go new file mode 100644 index 000000000000..94fad0369a13 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go @@ -0,0 +1,39 @@ +package gkehub + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleGkeHubMembership() *schema.Resource { + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceGKEHubMembership().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "membership_id") + tpgresource.AddRequiredFieldsToSchema(dsSchema, "location") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleGkeHubMembershipRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleGkeHubMembershipRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/memberships/{{membership_id}}") + if err != nil { + return fmt.Errorf("Error 
constructing id: %s", err) + } + d.SetId(id) + + err = resourceGKEHubMembershipRead(d, meta) + if err != nil { + return err + } + + // No labels or annotations for Membership datasource + return nil +} diff --git a/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go b/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go new file mode 100644 index 000000000000..9dac0b1556df --- /dev/null +++ b/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go @@ -0,0 +1,144 @@ +package gkehub_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccDataSourceGoogleGkeHubMembership_basic(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + gkeClusterRegion := "us-central1" + gkeClusterZone := "us-central1-a" + membershipLocation := "global" + randomSuffix := acctest.RandString(t, 10) + + // Define unique names for network and subnetwork for this test run + networkName := fmt.Sprintf("tf-test-mem-ds-net-%s", randomSuffix) + subnetworkName := fmt.Sprintf("tf-test-mem-ds-sub-%s", randomSuffix) + + context := map[string]interface{}{ + "project": project, + "gke_cluster_region": gkeClusterRegion, + "gke_cluster_zone": gkeClusterZone, + "membership_location": membershipLocation, + "random_suffix": randomSuffix, + "network_name": networkName, + "subnetwork_name": subnetworkName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + 
CheckDestroy: testAccCheckGoogleGkeHubMembershipDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleGkeHubMembership_basic_config(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_gke_hub_membership.example", "google_gke_hub_membership.example"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleGkeHubMembership_basic_config(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + project = "%{project}" + name = "%{network_name}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + project = "%{project}" + name = "%{subnetwork_name}" + ip_cidr_range = "10.2.0.0/16" // Example CIDR + region = "%{gke_cluster_region}" + network = google_compute_network.default.id +} + +resource "google_container_cluster" "primary" { + project = "%{project}" + name = "tf-test-mem-ds-cl-%{random_suffix}" + location = "%{gke_cluster_zone}" + initial_node_count = 1 + deletion_protection = false + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + + master_auth { + client_certificate_config { + issue_client_certificate = false + } + } +} + +resource "google_gke_hub_membership" "example" { + project = "%{project}" + membership_id = "tf-test-mem-%{random_suffix}" + location = "%{membership_location}" + + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } + + depends_on = [google_container_cluster.primary] +} + +data "google_gke_hub_membership" "example" { + project = google_gke_hub_membership.example.project + location = google_gke_hub_membership.example.location + membership_id = google_gke_hub_membership.example.membership_id +} +`, context) +} + +func testAccCheckGoogleGkeHubMembershipDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s 
*terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gke_hub_membership" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{GKEHub2BasePath}}projects/{{project}}/locations/{{location}}/memberships/{{membership_id}}") + if err != nil { + return fmt.Errorf("Error constructing URL for GKE Hub Membership: %s", err) + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: url, + UserAgent: config.UserAgent, + Project: billingProject, + }) + + if err == nil { + return fmt.Errorf("GKEHubMembership still exists at %s", url) + } + } + return nil + } +} diff --git a/mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown b/mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown new file mode 100644 index 000000000000..8ae265a043c3 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown @@ -0,0 +1,30 @@ +--- +subcategory: "GKEHub" +description: |- + Retrieves the details of a GKE Hub Feature. +--- + +# `google_gke_hub_feature` +Retrieves the details of a specific GKE Hub Feature. Use this data source to retrieve the feature's configuration and state. + +## Example Usage + +```hcl +data "google_gke_hub_feature" "example" { + location = "global" + name = "servicemesh" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) The name of the feature you want to know the status of. +* `location` - (Required) The location for the GKE Hub Feature. +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. 
+ +## Attributes Reference + +See [google_gke_hub_feature](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/gke_hub_feature) resource for details of the available attributes. \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown b/mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown new file mode 100644 index 000000000000..202636fa3bb9 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown @@ -0,0 +1,35 @@ +--- +subcategory: "GKEHub" +description: |- + Retrieves the details of a GKE Hub Membership. +--- + +# `google_gke_hub_membership` + +Retrieves the details of a specific GKE Hub Membership. Use this data source to retrieve the membership's configuration and state. + +## Example Usage + +```hcl +data "google_gke_hub_membership" "example" { + project = "my-project-id" + location = "global" + membership_id = "my-membership-id" # GKE Cluster's name +} +``` + +## Argument Reference + +The following arguments are supported: + +* `membership_id` - (Required) The GKE Hub Membership id or GKE Cluster's name. + +* `location` - (Required) The location for the GKE Hub Membership. + Currently only `global` is supported. + +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + +## Attributes Reference + +See [google_gke_hub_membership](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/gke_hub_membership) resource for details of the available attributes. 
\ No newline at end of file From 56e961ef8a8ad7f734373f2f0e9ca718046c1fb1 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 22 May 2025 11:28:17 -0700 Subject: [PATCH 228/884] Enable mmv1 unit tests to run on main and all paths (#14063) --- .github/workflows/unit-test-mmv1.yml | 52 +++++++++++++++++++--------- 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/.github/workflows/unit-test-mmv1.yml b/.github/workflows/unit-test-mmv1.yml index db75e507fffe..23c459b06865 100644 --- a/.github/workflows/unit-test-mmv1.yml +++ b/.github/workflows/unit-test-mmv1.yml @@ -3,9 +3,18 @@ name: mmv1 permissions: read-all on: + push: + branches: + - main + - 'FEATURE-BRANCH-*' + merge_group: + types: [checks_requested] pull_request: - paths: - - 'mmv1/**' + +concurrency: + group: ${{ github.event_name == 'merge_group' && format('mmv1-merge-group-{0}', github.event.merge_group.head_sha) || github.event_name == 'pull_request' && format('mmv1-pr-{0}', github.event.pull_request.number) || format('mmv1-commit-{0}', github.sha) }} + cancel-in-progress: true + jobs: version-guard-check: @@ -17,9 +26,9 @@ jobs: path: repo fetch-depth: 0 - name: Merge base branch - id: pull_request + if: github.event_name == 'pull_request' run: | - cd repo + cd repo git config user.name "modular-magician" git config user.email "magic-modules@google.com" git fetch origin ${{ github.base_ref }} # Fetch the base branch @@ -36,34 +45,43 @@ jobs: with: path: repo fetch-depth: 0 - - name: Check for mmv1 product file changes - id: pull_request + - name: Merge base branch + if: github.event_name == 'pull_request' run: | cd repo git config user.name "modular-magician" git config user.email "magic-modules@google.com" - git fetch origin ${{ github.base_ref }} # Fetch the base branch - git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch - yamlfiles=$(git diff --name-only origin/${{ github.base_ref }} -- mmv1/products) # Compare with the base branch - if [ ! 
-z "$yamlfiles" ]; then - echo "yamlfiles=repo/${yamlfiles//$'\n'/ repo/}" >> $GITHUB_OUTPUT - fi + git fetch origin ${{ github.base_ref }} + git merge --no-ff origin/${{ github.base_ref }} - name: Install yamllint - if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} run: pip install yamllint==1.32.0 pyyaml==6.0.1 pathspec==0.12.1 --no-deps - - name: Lint YAML files - if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} - run: yamllint -c repo/.yamllint ${{steps.pull_request.outputs.yamlfiles}} + - name: Lint all YAML files + run: | + cd repo + find mmv1/products -name "*.yaml" -o -name "*.yml" | xargs yamllint -c .yamllint unit-tests: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + - name: Checkout Repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + with: + path: repo + fetch-depth: 0 + - name: Merge base branch + if: github.event_name == 'pull_request' + run: | + cd repo + git config user.name "modular-magician" + git config user.email "magic-modules@google.com" + git fetch origin ${{ github.base_ref }} # Fetch the base branch + git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch - name: Set up Go uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: go-version: '^1.23' - name: Run mmv1 unit tests run: | + cd repo cd mmv1 go test ./... 
From e1e92064130d2231141a2f0c0958db7d0e68e5c8 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 22 May 2025 11:58:00 -0700 Subject: [PATCH 229/884] Extend breaking change detector to work on new required and new optional with default (#14059) Co-authored-by: Stephen Lewis (Burrows) --- .../breaking-changes/breaking-changes.md | 7 +- .../breaking_changes/breaking_changes.go | 3 +- .../breaking_changes/field_diff.go | 61 ++- .../breaking_changes/field_diff_test.go | 132 ++++++- .../breaking_changes/mock_schema_diff_test.go | 36 ++ tools/diff-processor/diff/diff.go | 49 ++- tools/diff-processor/diff/diff_test.go | 364 ++++++++++++++++++ 7 files changed, 635 insertions(+), 17 deletions(-) create mode 100644 tools/diff-processor/breaking_changes/mock_schema_diff_test.go diff --git a/docs/content/breaking-changes/breaking-changes.md b/docs/content/breaking-changes/breaking-changes.md index debe0641cfe9..8a34fc167f56 100644 --- a/docs/content/breaking-changes/breaking-changes.md +++ b/docs/content/breaking-changes/breaking-changes.md @@ -60,7 +60,8 @@ For more information, see * Between complex types like changing a List to a Set. * Changing the field type between primitive and complex data types is not possible. For this scenario, field renames are preferred. -* Making an optional field required or adding a new required field +* Making an optional field required +* Adding a required field to a pre-existing resource at any level of nesting, unless it is being added at the same time as an optional ancestor * Adding an "ExactlyOneOf" constraint that causes one or more previously-optional fields to be required or conflict with each other * Making a settable field read-only * For MMv1 resources, adding `output: true` to an existing field. @@ -81,6 +82,9 @@ For more information, see if the change will destroy and recreate the resource due to changing an immutable value. 
Default changes in the provider are comparable in impact to default changes in an API, and modifying examples and modules may achieve the intended effect with a smaller blast radius. +* Adding an optional field with a default value and force new to a pre-existing resource at any level of nesting, unless it is being added at the same time as an optional ancestor + * This can be allowed if there is a confirmed API-level default that matches the schema default + * Please work with your reviewer and ensure this scenario is debugged carefully to avoid a destructive permadiff * Modifying how field data is stored in state * For example, changing the case of a value returned by the API in a flattener or decorder * Removing diff suppression from a field. @@ -88,6 +92,7 @@ For more information, see * For handwritten resources, removing `DiffSuppressFunc` from a field. * Removing update support from a field. + ### Making validation more strict * Increasing the minimum number of items in an array diff --git a/tools/diff-processor/breaking_changes/breaking_changes.go b/tools/diff-processor/breaking_changes/breaking_changes.go index 633a3fd4e652..1131dd4ffb8b 100644 --- a/tools/diff-processor/breaking_changes/breaking_changes.go +++ b/tools/diff-processor/breaking_changes/breaking_changes.go @@ -45,7 +45,8 @@ func ComputeBreakingChanges(schemaDiff diff.SchemaDiff) []BreakingChange { for field, fieldDiff := range resourceDiff.Fields { for _, rule := range FieldDiffRules { - for _, message := range rule.Messages(resource, field, fieldDiff) { + rd := schemaDiff[resource] + for _, message := range rule.Messages(resource, field, fieldDiff, rd) { breakingChanges = append(breakingChanges, NewBreakingChange(message, rule.Identifier)) } } diff --git a/tools/diff-processor/breaking_changes/field_diff.go b/tools/diff-processor/breaking_changes/field_diff.go index 0f4536558809..68857a77b886 100644 --- a/tools/diff-processor/breaking_changes/field_diff.go +++ 
b/tools/diff-processor/breaking_changes/field_diff.go @@ -13,13 +13,15 @@ import ( // regarding field attribute changes type FieldDiffRule struct { Identifier string - Messages func(resource, field string, fieldDiff diff.FieldDiff) []string + Messages func(resource, field string, fieldDiff diff.FieldDiff, resourceDiff diff.ResourceDiffInterface) []string } // FieldDiffRules is a list of FieldDiffRule // guarding against provider breaking changes var FieldDiffRules = []FieldDiffRule{ FieldChangingType, + FieldNewRequired, + FieldNewOptionalFieldWithDefault, FieldBecomingRequired, FieldBecomingComputedOnly, FieldOptionalComputedToOptional, @@ -34,7 +36,7 @@ var FieldChangingType = FieldDiffRule{ Messages: FieldChangingTypeMessages, } -func FieldChangingTypeMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldChangingTypeMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // Type change doesn't matter for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -62,7 +64,7 @@ var FieldBecomingRequired = FieldDiffRule{ Messages: FieldBecomingRequiredMessages, } -func FieldBecomingRequiredMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldBecomingRequiredMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // Ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -80,7 +82,7 @@ var FieldBecomingComputedOnly = FieldDiffRule{ Messages: FieldBecomingComputedOnlyMessages, } -func FieldBecomingComputedOnlyMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldBecomingComputedOnlyMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -103,7 +105,7 @@ var FieldOptionalComputedToOptional 
= FieldDiffRule{ Messages: FieldOptionalComputedToOptionalMessages, } -func FieldOptionalComputedToOptionalMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldOptionalComputedToOptionalMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -120,7 +122,7 @@ var FieldDefaultModification = FieldDiffRule{ Messages: FieldDefaultModificationMessages, } -func FieldDefaultModificationMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldDefaultModificationMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -155,7 +157,7 @@ var FieldGrowingMin = FieldDiffRule{ Messages: FieldGrowingMinMessages, } -func FieldGrowingMinMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldGrowingMinMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -177,7 +179,7 @@ var FieldShrinkingMax = FieldDiffRule{ Messages: FieldShrinkingMaxMessages, } -func FieldShrinkingMaxMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldShrinkingMaxMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -202,7 +204,7 @@ var FieldRemovingDiffSuppress = FieldDiffRule{ Messages: FieldRemovingDiffSuppressMessages, } -func FieldRemovingDiffSuppressMessages(resource, field string, fieldDiff diff.FieldDiff) []string { +func FieldRemovingDiffSuppressMessages(resource, field string, fieldDiff diff.FieldDiff, _ 
diff.ResourceDiffInterface) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -214,3 +216,44 @@ func FieldRemovingDiffSuppressMessages(resource, field string, fieldDiff diff.Fi } return nil } + +var FieldNewRequired = FieldDiffRule{ + Identifier: "no-new-required", + Messages: FieldNewRequiredMessages, +} + +func FieldNewRequiredMessages(resource, field string, fieldDiff diff.FieldDiff, resourceDiff diff.ResourceDiffInterface) []string { + if resourceDiff.IsNewResource() || resourceDiff.IsFieldInNewNestedStructure(field) { + return nil + } + + // This rule applies to newly added fields (Old == nil). + if fieldDiff.Old == nil { + if fieldDiff.New.Required { + tmpl := "Field `%s` added as required on pre-existing resource `%s`" + return []string{fmt.Sprintf(tmpl, field, resource)} + } + } + return nil +} + +var FieldNewOptionalFieldWithDefault = FieldDiffRule{ + Identifier: "no-new-optional-default", + Messages: FieldNewOptionalFieldWithDefaultMessages, +} + +func FieldNewOptionalFieldWithDefaultMessages(resource, field string, fieldDiff diff.FieldDiff, resourceDiff diff.ResourceDiffInterface) []string { + if resourceDiff.IsNewResource() || resourceDiff.IsFieldInNewNestedStructure(field) { + return nil + } + + // This rule applies to newly added fields (Old == nil). + if fieldDiff.Old == nil { + if fieldDiff.New.Optional && fieldDiff.New.Default != nil && fieldDiff.New.ForceNew { + tmpl := "Field `%s` added as optional with a default value and force new on pre-existing resource `%s`. 
" + + "This can be allowed if there is a confirmed API-level default that matches the schema default" + return []string{fmt.Sprintf(tmpl, field, resource)} + } + } + return nil +} diff --git a/tools/diff-processor/breaking_changes/field_diff_test.go b/tools/diff-processor/breaking_changes/field_diff_test.go index 3e355aa9da91..1e7c7b092ac1 100644 --- a/tools/diff-processor/breaking_changes/field_diff_test.go +++ b/tools/diff-processor/breaking_changes/field_diff_test.go @@ -12,6 +12,7 @@ type fieldTestCase struct { name string oldField *schema.Schema newField *schema.Schema + resourceDiff diff.ResourceDiffInterface expectedViolation bool messageRegex string // Optional regex to validate the message content } @@ -94,8 +95,133 @@ var FieldBecomingRequiredTestCases = []fieldTestCase{ }, } -// !! min max ? -// isRuleBreak: FieldOptionalComputedToOptional_func, +func TestFieldNewRequired(t *testing.T) { + for _, tc := range FieldNewRequiredTestCases { + tc.check(FieldNewRequired, t) + } +} + +var FieldNewRequiredTestCases = []fieldTestCase{ + { + name: "existing resource - field added as required", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Required: true, + }, + resourceDiff: existingResourceSchemaDiff, + expectedViolation: true, + }, + { + name: "new resource - field added as required but is new resource", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Required: true, + }, + resourceDiff: newResourceSchemaDiff, + expectedViolation: false, + }, + { + name: "field in new nested structure - field added as required", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Required: true, + }, + resourceDiff: fieldInNewStructureSchemaDiff, + expectedViolation: false, + }, +} + +func TestFieldNewOptionalWithDefault(t *testing.T) { + for _, tc := range FieldNewOptionalWithDefaultTestCases { + tc.check(FieldNewOptionalFieldWithDefault, t) + } +} + +var FieldNewOptionalWithDefaultTestCases = []fieldTestCase{ + { + 
name: "existing resource - new field added as optional with default and forcenew", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: "abc", + ForceNew: true, + }, + resourceDiff: existingResourceSchemaDiff, + expectedViolation: true, + }, + { + name: "existing resource - new field added as optional with falsey default and forcenew", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: false, + ForceNew: true, + }, + resourceDiff: existingResourceSchemaDiff, + expectedViolation: true, + }, + { + name: "existing resource - new field added as optional with default", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: "abc", + }, + resourceDiff: existingResourceSchemaDiff, + expectedViolation: false, + }, + { + name: "existing resource - new field added as optional with falsey default", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: false, + }, + resourceDiff: existingResourceSchemaDiff, + expectedViolation: false, + }, + { + name: "new resource - new field added as optional with default", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: "abc", + }, + resourceDiff: newResourceSchemaDiff, + expectedViolation: false, + }, + { + name: "new resource - new field added as optional with falsey default", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: false, + }, + resourceDiff: newResourceSchemaDiff, + expectedViolation: false, + }, + { + name: "field in new nested structure - new field added as optional with default and forcenew", + oldField: nil, + newField: &schema.Schema{ + Description: "beep", + Optional: true, + Default: "abc", + ForceNew: true, + }, + resourceDiff: fieldInNewStructureSchemaDiff, + expectedViolation: false, + }, +} func TestFieldChangingType(t *testing.T) { for _, tc := range 
FieldChangingTypeTestCases { @@ -594,7 +720,7 @@ var FieldShrinkingMaxTestCases = []fieldTestCase{ // Extended check method that also validates message content when expected func (tc *fieldTestCase) check(rule FieldDiffRule, t *testing.T) { - messages := rule.Messages("resource", "field", diff.FieldDiff{Old: tc.oldField, New: tc.newField}) + messages := rule.Messages("resource", "field", diff.FieldDiff{Old: tc.oldField, New: tc.newField}, tc.resourceDiff) violation := len(messages) > 0 // Check violation expectation diff --git a/tools/diff-processor/breaking_changes/mock_schema_diff_test.go b/tools/diff-processor/breaking_changes/mock_schema_diff_test.go new file mode 100644 index 000000000000..9bc1f187de71 --- /dev/null +++ b/tools/diff-processor/breaking_changes/mock_schema_diff_test.go @@ -0,0 +1,36 @@ +package breaking_changes + +// MockSchemaDiff implements the diff.SchemaDiff interface for testing +type MockSchemaDiff struct { + isNewResource bool + fieldsInNewStructure map[string]bool // Maps field names to whether they're in a new structure +} + +func (sd MockSchemaDiff) IsNewResource() bool { + return sd.isNewResource +} + +func (sd MockSchemaDiff) IsFieldInNewNestedStructure(field string) bool { + return sd.fieldsInNewStructure[field] +} + +// Create mock schema diffs for testing +var ( + // Mock for existing resource (not new, field not in new structure) + existingResourceSchemaDiff = MockSchemaDiff{ + isNewResource: false, + fieldsInNewStructure: make(map[string]bool), + } + + // Mock for new resource + newResourceSchemaDiff = MockSchemaDiff{ + isNewResource: true, + fieldsInNewStructure: make(map[string]bool), + } + + // Mock for field in new nested structure + fieldInNewStructureSchemaDiff = MockSchemaDiff{ + isNewResource: false, + fieldsInNewStructure: map[string]bool{"field": true}, + } +) diff --git a/tools/diff-processor/diff/diff.go b/tools/diff-processor/diff/diff.go index 0f938296b5bb..9b3970839b58 100644 --- 
a/tools/diff-processor/diff/diff.go +++ b/tools/diff-processor/diff/diff.go @@ -11,10 +11,16 @@ import ( // SchemaDiff is a nested map with resource names as top-level keys. type SchemaDiff map[string]ResourceDiff +type ResourceDiffInterface interface { + IsNewResource() bool + IsFieldInNewNestedStructure(fieldPath string) bool +} + type ResourceDiff struct { - ResourceConfig ResourceConfigDiff - Fields map[string]FieldDiff - FieldSets ResourceFieldSetsDiff + ResourceConfig ResourceConfigDiff + FlattenedSchema FlattenedSchemaRaw + Fields map[string]FieldDiff + FieldSets ResourceFieldSetsDiff } type ResourceFieldSetsDiff struct { @@ -41,6 +47,11 @@ type FieldDiff struct { New *schema.Schema } +type FlattenedSchemaRaw struct { + Old map[string]*schema.Schema + New map[string]*schema.Schema +} + func ComputeSchemaDiff(oldResourceMap, newResourceMap map[string]*schema.Resource) SchemaDiff { schemaDiff := make(SchemaDiff) for resource := range union(oldResourceMap, newResourceMap) { @@ -51,12 +62,14 @@ func ComputeSchemaDiff(oldResourceMap, newResourceMap map[string]*schema.Resourc var flattenedOldSchema map[string]*schema.Schema if oldResource, ok := oldResourceMap[resource]; ok { flattenedOldSchema = flattenSchema("", oldResource.Schema) + resourceDiff.FlattenedSchema.Old = flattenedOldSchema resourceDiff.ResourceConfig.Old = &schema.Resource{} } var flattenedNewSchema map[string]*schema.Schema if newResource, ok := newResourceMap[resource]; ok { flattenedNewSchema = flattenSchema("", newResource.Schema) + resourceDiff.FlattenedSchema.New = flattenedNewSchema resourceDiff.ResourceConfig.New = &schema.Resource{} } @@ -313,3 +326,33 @@ func setKey(set FieldSet) string { slice := setToSortedSlice(set) return strings.Join(slice, ",") } + +func (rd ResourceDiff) IsNewResource() bool { + rcd := rd.ResourceConfig + if rcd.Old == nil && rcd.New != nil { + return true + } + return false +} + +// IsFieldInNewNestedStructure determines if a field is part of a completely new 
nested structure +func (rd ResourceDiff) IsFieldInNewNestedStructure(fieldPath string) bool { + if rd.IsNewResource() { + return true + } + + // Get the parent path (everything before the last dot) + lastDotIndex := strings.LastIndex(fieldPath, ".") + if lastDotIndex == -1 { + // No parent path (top-level field) + return false + } + + parentPath := fieldPath[:lastDotIndex] + + // Check if parent exists in new schema but not in old schema + _, parentExistsInOld := rd.FlattenedSchema.Old[parentPath] + _, parentExistsInNew := rd.FlattenedSchema.New[parentPath] + + return !parentExistsInOld && parentExistsInNew +} diff --git a/tools/diff-processor/diff/diff_test.go b/tools/diff-processor/diff/diff_test.go index e3a37a32fe26..7e449e05fe19 100644 --- a/tools/diff-processor/diff/diff_test.go +++ b/tools/diff-processor/diff/diff_test.go @@ -1,6 +1,7 @@ package diff import ( + "strings" "testing" newProvider "google/provider/new/google/provider" @@ -1168,6 +1169,46 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, + FlattenedSchema: FlattenedSchemaRaw{ + Old: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + "field_two": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field_three": {Type: schema.TypeString}, + }, + }, + }, + "field_two.field_three": {Type: schema.TypeString}, + }, + New: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + "field_two": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field_three": { + Type: schema.TypeString, + ConflictsWith: []string{"field_two.0.field_four"}, + }, + "field_four": { + Type: schema.TypeInt, + ConflictsWith: []string{"field_two.0.field_three"}, + }, + }, + }, + }, + "field_two.field_three": { + Type: schema.TypeString, + ConflictsWith: []string{"field_two.0.field_four"}, + }, + "field_two.field_four": { + Type: schema.TypeInt, + ConflictsWith: 
[]string{"field_two.0.field_three"}, + }, + }, + }, Fields: map[string]FieldDiff{ "field_two.field_three": FieldDiff{ Old: &schema.Schema{ @@ -1289,6 +1330,34 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, + FlattenedSchema: FlattenedSchemaRaw{ + Old: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + "field_two": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field_three": {Type: schema.TypeString}, + }, + }, + }, + "field_two.field_three": {Type: schema.TypeString}, + }, + New: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + "field_two": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field_three": {Type: schema.TypeString}, + "field_four": {Type: schema.TypeInt}, + }, + }, + }, + "field_two.field_three": {Type: schema.TypeString}, + "field_two.field_four": {Type: schema.TypeInt}, + }, + }, Fields: map[string]FieldDiff{ "field_two.field_four": FieldDiff{ Old: nil, @@ -1301,6 +1370,34 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, + FlattenedSchema: FlattenedSchemaRaw{ + Old: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + "field_two": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field_three": {Type: schema.TypeString}, + }, + }, + }, + "field_two.field_three": {Type: schema.TypeString}, + }, + New: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + "field_two": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field_three": {Type: schema.TypeString}, + "field_four": {Type: schema.TypeInt}, + }, + }, + }, + "field_two.field_three": {Type: schema.TypeString}, + "field_two.field_four": {Type: schema.TypeInt}, + }, + }, Fields: map[string]FieldDiff{ "field_two.field_four": FieldDiff{ Old: nil, @@ 
-1331,6 +1428,12 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, + FlattenedSchema: FlattenedSchemaRaw{ + Old: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + }, + New: map[string]*schema.Schema{}, + }, Fields: map[string]FieldDiff{ "field_one": FieldDiff{ Old: &schema.Schema{Type: schema.TypeString}, @@ -1356,6 +1459,12 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: nil, }, + FlattenedSchema: FlattenedSchemaRaw{ + Old: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + }, + New: nil, + }, Fields: map[string]FieldDiff{ "field_one": FieldDiff{ Old: &schema.Schema{Type: schema.TypeString}, @@ -1381,6 +1490,12 @@ func TestComputeSchemaDiff(t *testing.T) { Old: nil, New: &schema.Resource{}, }, + FlattenedSchema: FlattenedSchemaRaw{ + Old: nil, + New: map[string]*schema.Schema{ + "field_one": {Type: schema.TypeString}, + }, + }, Fields: map[string]FieldDiff{ "field_one": FieldDiff{ Old: nil, @@ -1402,3 +1517,252 @@ func TestComputeSchemaDiff(t *testing.T) { }) } } + +func TestIsNewResource(t *testing.T) { + cases := map[string]struct { + oldResourceMap map[string]*schema.Resource + newResourceMap map[string]*schema.Resource + resourceName string + expected bool + }{ + "resource exists in both maps": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": {Schema: map[string]*schema.Schema{}}, + }, + newResourceMap: map[string]*schema.Resource{ + "google_resource": {Schema: map[string]*schema.Schema{}}, + }, + resourceName: "google_resource", + expected: false, + }, + "resource only in new map": { + oldResourceMap: map[string]*schema.Resource{}, + newResourceMap: map[string]*schema.Resource{ + "google_resource": {Schema: map[string]*schema.Schema{}}, + }, + resourceName: "google_resource", + expected: true, + }, + "resource only in old map": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": {Schema: 
map[string]*schema.Schema{}}, + }, + newResourceMap: map[string]*schema.Resource{}, + resourceName: "google_resource", + expected: false, // ResourceConfig.New would be nil + }, + "resource not in diff because it has no changes": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": {Schema: map[string]*schema.Schema{}}, + }, + newResourceMap: map[string]*schema.Resource{ + "google_resource": {Schema: map[string]*schema.Schema{}}, + }, + resourceName: "non_existent_resource", + expected: false, // Resource isn't in the diff at all + }, + } + + for name, tc := range cases { + t.Run(name, func(t *testing.T) { + schemaDiff := ComputeSchemaDiff(tc.oldResourceMap, tc.newResourceMap) + resourceConfig, _ := schemaDiff[tc.resourceName] + result := resourceConfig.IsNewResource() + if result != tc.expected { + t.Errorf("IsNewResource(%q) = %v, want %v", tc.resourceName, result, tc.expected) + } + }) + } +} + +func TestIsFieldInNewNestedStructure(t *testing.T) { + cases := map[string]struct { + oldResourceMap map[string]*schema.Resource + newResourceMap map[string]*schema.Resource + resourceName string + fieldPath string + expected bool + }{ + "top-level field in existing resource": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "old_field": {Type: schema.TypeString}, + }, + }, + }, + newResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "old_field": {Type: schema.TypeString}, + "new_field": {Type: schema.TypeString}, + }, + }, + }, + resourceName: "google_resource", + fieldPath: "new_field", + expected: false, // Top-level field, not in a nested structure + }, + "field in existing nested structure": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "existing_field": {Type: 
schema.TypeString}, + }, + }, + }, + }, + }, + }, + newResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "existing_field": {Type: schema.TypeString}, + "new_field": {Type: schema.TypeString}, + }, + }, + }, + }, + }, + }, + resourceName: "google_resource", + fieldPath: "nested.new_field", + expected: false, // Parent "nested" exists in old schema + }, + "field in new nested structure": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "old_field": {Type: schema.TypeString}, + }, + }, + }, + newResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "old_field": {Type: schema.TypeString}, + "new_nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "new_field": {Type: schema.TypeString}, + }, + }, + }, + }, + }, + }, + resourceName: "google_resource", + fieldPath: "new_nested.new_field", + expected: true, // Parent "new_nested" doesn't exist in old schema + }, + "field in new deeply nested structure": { + oldResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "existing_nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "existing_field": {Type: schema.TypeString}, + }, + }, + }, + }, + }, + }, + newResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "existing_nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "existing_field": {Type: schema.TypeString}, + "new_nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "new_field": {Type: schema.TypeString}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + resourceName: 
"google_resource", + fieldPath: "existing_nested.new_nested.new_field", + expected: true, // Parent "existing_nested.new_nested" doesn't exist in old schema + }, + "field in new resource": { + oldResourceMap: map[string]*schema.Resource{}, + newResourceMap: map[string]*schema.Resource{ + "google_resource": { + Schema: map[string]*schema.Schema{ + "nested": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "field": {Type: schema.TypeString}, + }, + }, + }, + }, + }, + }, + resourceName: "google_resource", + fieldPath: "nested.field", + expected: true, // New resource, so all fields are in new structures + }, + } + + for name, tc := range cases { + t.Run(name, func(t *testing.T) { + schemaDiff := ComputeSchemaDiff(tc.oldResourceMap, tc.newResourceMap) + + // Verify that FlattenedSchema was properly populated + if rd, ok := schemaDiff[tc.resourceName]; ok { + // Debug information for test verification + if tc.expected { + // If we expect the field to be in a new nested structure + // The parent path should not exist in the old schema but should exist in the new schema + lastDotIndex := strings.LastIndex(tc.fieldPath, ".") + if lastDotIndex != -1 { + parentPath := tc.fieldPath[:lastDotIndex] + _, parentInOld := rd.FlattenedSchema.Old[parentPath] + _, parentInNew := rd.FlattenedSchema.New[parentPath] + + // Log the verification for debugging + t.Logf("For %s: Parent path '%s' exists in old schema: %v, exists in new schema: %v", + tc.fieldPath, parentPath, parentInOld, parentInNew) + + // This should match our expectation + if parentInOld || !parentInNew { + t.Errorf("For field %s: Expected parent path %s to not exist in old schema and exist in new schema, but got old: %v, new: %v", + tc.fieldPath, parentPath, parentInOld, parentInNew) + } + } + } + } + + // Now test the actual method + resourceConfig := schemaDiff[tc.resourceName] + result := resourceConfig.IsFieldInNewNestedStructure(tc.fieldPath) + if result != tc.expected { + 
t.Errorf("IsFieldInNewNestedStructure(%q, %q) = %v, want %v", + tc.resourceName, tc.fieldPath, result, tc.expected) + } + }) + } +} From d35fc2f7a29476387f57ba69c616e3eedd5df331 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 22 May 2025 21:31:04 +0200 Subject: [PATCH 230/884] feat: added `google_bigquery_table` data source (#14026) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_google_bigquery_table.go | 51 +++++ .../data_source_google_bigquery_table_test.go | 190 ++++++++++++++++++ .../docs/d/bigquery_table.html.markdown | 36 ++++ 4 files changed, 278 insertions(+) create mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go create mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/bigquery_table.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index b9d632b45f16..b5a86ff48b5b 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -44,6 +44,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(), "google_beyondcorp_security_gateway": beyondcorp.DataSourceGoogleBeyondcorpSecurityGateway(), "google_billing_account": billing.DataSourceGoogleBillingAccount(), + "google_bigquery_table": bigquery.DataSourceGoogleBigQueryTable(), "google_bigquery_tables": bigquery.DataSourceGoogleBigQueryTables(), "google_bigquery_dataset": bigquery.DataSourceGoogleBigqueryDataset(), "google_bigquery_default_service_account": bigquery.DataSourceGoogleBigqueryDefaultServiceAccount(), diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go 
b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go new file mode 100644 index 000000000000..c08d51788af8 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go @@ -0,0 +1,51 @@ +package bigquery + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleBigQueryTable() *schema.Resource { + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceBigQueryTable().Schema) + + tpgresource.AddRequiredFieldsToSchema(dsSchema, "dataset_id") + tpgresource.AddRequiredFieldsToSchema(dsSchema, "table_id") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceBigQueryTableRead, + Schema: dsSchema, + } +} + +func dataSourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project: %s", err) + } + + datasetID := d.Get("dataset_id").(string) + tableID := d.Get("table_id").(string) + + id := fmt.Sprintf("projects/%s/datasets/%s/tables/%s", project, datasetID, tableID) + d.SetId(id) + + err = resourceBigQueryTableRead(d, meta) + if err != nil { + return fmt.Errorf("Error retrieving table: %s", err) + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go new file mode 100644 index 000000000000..906978facf6b --- /dev/null +++ 
b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go @@ -0,0 +1,190 @@ +package bigquery_test + +import ( + "encoding/json" + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataSourceGoogleBigqueryTable_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + expectedID := fmt.Sprintf("projects/%s/datasets/%s/tables/%s", envvar.GetTestProjectFromEnv(), fmt.Sprintf("tf_test_ds_%s", context["random_suffix"]), fmt.Sprintf("tf_test_table_%s", context["random_suffix"])) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBigqueryTable_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_bigquery_table.example", "table_id", fmt.Sprintf("tf_test_table_%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_bigquery_table.example", "dataset_id", fmt.Sprintf("tf_test_ds_%s", context["random_suffix"])), + resource.TestCheckResourceAttrSet("data.google_bigquery_table.example", "schema"), + resource.TestCheckResourceAttr("data.google_bigquery_table.example", "id", expectedID), + resource.TestCheckResourceAttrWith("data.google_bigquery_table.example", "schema", func(schema string) error { + var parsedSchema []map[string]interface{} + + if err := json.Unmarshal([]byte(schema), &parsedSchema); err != nil { + return fmt.Errorf("failed to parse schema JSON: %w", err) + } + + if len(parsedSchema) > 0 { + if parsedSchema[0]["name"] != "name" { + return fmt.Errorf("expected 
fields[0].name to be 'name', got '%v'", parsedSchema[0]["name"]) + } + if parsedSchema[0]["type"] != "STRING" { + return fmt.Errorf("expected fields[0].type to be 'STRING', got '%v'", parsedSchema[0]["type"]) + } + if parsedSchema[0]["mode"] != "NULLABLE" { + return fmt.Errorf("expected fields[0].mode to be 'NULLABLE', got '%v'", parsedSchema[0]["mode"]) + } + } + + if len(parsedSchema) > 2 { + if parsedSchema[2]["name"] != "address" { + return fmt.Errorf("expected fields[2].name to be 'address', got '%v'", parsedSchema[2]["name"]) + } + if subFields, ok := parsedSchema[2]["fields"].([]interface{}); ok && len(subFields) > 1 { + subField := subFields[1].(map[string]interface{}) + if subField["name"] != "zip" { + return fmt.Errorf("expected fields[2].fields[1].name to be 'zip', got '%v'", subField["name"]) + } + } + } + + if len(parsedSchema) > 4 { + if parsedSchema[4]["name"] != "policy_tag_test" { + return fmt.Errorf("expected fields[4].name to be 'policy_tag_test', got '%v'", parsedSchema[4]["name"]) + } + if policyTags, ok := parsedSchema[4]["policyTags"].(map[string]interface{}); ok { + if names, ok := policyTags["names"].([]interface{}); ok && len(names) > 0 { + if !regexp.MustCompile("^projects/[^/]+/locations/us-central1/taxonomies/[^/]+/policyTags/[^/]+$").MatchString(names[0].(string)) { + return fmt.Errorf("policy tag does not match expected pattern") + } + } + } + } + + return nil + }), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleBigqueryTable_basic(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_data_catalog_policy_tag" "test" { + taxonomy = google_data_catalog_taxonomy.test.id + display_name = "Low security" + description = "A policy tag normally associated with low security items" + } + + resource "google_data_catalog_taxonomy" "test" { + region = "us-central1" + display_name = "taxonomy_%{random_suffix}" + description = "A collection of policy tags" + activated_policy_types = 
["FINE_GRAINED_ACCESS_CONTROL"] + } + + resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_ds_%{random_suffix}" + friendly_name = "testing" + description = "This is a test description" + location = "us-central1" + default_table_expiration_ms = 3600000 + } + + resource "google_bigquery_table" "test" { + dataset_id = google_bigquery_dataset.test.dataset_id + table_id = "tf_test_table_%{random_suffix}" + deletion_protection = false + depends_on = [google_data_catalog_policy_tag.test] + schema = < Date: Thu, 22 May 2025 13:06:47 -0700 Subject: [PATCH 231/884] Update enrolled_teams.yml (#14065) --- tools/issue-labeler/labeler/enrolled_teams.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index aecdeda85fe0..8966af1d63cf 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -386,6 +386,8 @@ service/firebase: - google_firebase_storage.* - google_firebase_web.* - google_firebaserules_.* +service/firebaseapphosting: + resources: - google_firebase_app_hosting.* service/firebasedataconnect: resources: From 9bf45020364e42be1815c30a216fb7bd636b31e3 Mon Sep 17 00:00:00 2001 From: sahil-mahajan-google Date: Fri, 23 May 2025 02:50:33 +0530 Subject: [PATCH 232/884] Add Onprem Migration fields to Volume and Replication (#14039) --- mmv1/products/netapp/Volume.yaml | 38 +++++++++++++++++ mmv1/products/netapp/VolumeReplication.yaml | 47 +++++++++++++++++++++ 2 files changed, 85 insertions(+) diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 582f12299a0d..e4e4af0d0bd7 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -521,3 +521,41 @@ properties: enum_values: - 'ENABLED' - 'PAUSED' + - name: 'hybridReplicationParameters' + type: NestedObject + description: |- + The Hybrid Replication parameters for the volume. 
+ properties: + - name: 'replication' + type: String + description: | + Required. Desired name for the replication of this volume. + - name: 'peerVolumeName' + type: String + description: | + Required. Name of the user's local source volume to be peered with the destination volume. + - name: 'peerClusterName' + type: String + description: | + Required. Name of the user's local source cluster to be peered with the destination cluster. + - name: 'peerSvmName' + type: String + description: | + Required. Name of the user's local source vserver svm to be peered with the destination vserver svm. + - name: 'peerIpAddresses' + type: String + description: | + Required. List of node ip addresses to be peered with. + - name: 'clusterLocation' + type: String + description: | + Optional. Name of source cluster location associated with the Hybrid replication. This is a free-form field for the display purpose only. + - name: 'description' + type: String + description: | + Optional. Description of the replication. + - name: 'labels' + type: KeyValuePairs + description: | + Optional. Labels to be added to the replication as the key value pairs. + An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. diff --git a/mmv1/products/netapp/VolumeReplication.yaml b/mmv1/products/netapp/VolumeReplication.yaml index 71d2de89f3a4..b636903f99a8 100644 --- a/mmv1/products/netapp/VolumeReplication.yaml +++ b/mmv1/products/netapp/VolumeReplication.yaml @@ -297,3 +297,50 @@ properties: type: String description: | An description of this resource. + - name: 'hybridReplicationType' + type: String + description: | + Hybrid replication type. + output: true + - name: 'hybridPeeringDetails' + type: NestedObject + description: |- + HybridPeeringDetails contains details about the hybrid peering. + output: true + properties: + - name: 'subnetIp' + type: String + description: | + Optional. IP address of the subnet. 
+ output: true + - name: 'command' + type: String + description: | + Optional. Copy-paste-able commands to be used on user's ONTAP to accept peering requests. + output: true + - name: 'commandExpiryTime' + type: String + description: | + Optional. Expiration time for the peering command to be executed on user's ONTAP. + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. Offsets other than "Z" are also accepted. + output: true + - name: 'passphrase' + type: String + description: | + Optional. Temporary passphrase generated to accept cluster peering command. + output: true + - name: 'peerVolumeName' + type: String + description: | + Optional. Name of the user's local source volume to be peered with the destination volume. + output: true + - name: 'peerClusterName' + type: String + description: | + Optional. Name of the user's local source cluster to be peered with the destination cluster. + output: true + - name: 'peerSvmName' + type: String + description: | + Optional. Name of the user's local source vserver svm to be peered with the destination vserver svm. + output: true From 4e6500dfd9fe480ae860109aeb26b6ad4c26a153 Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Thu, 22 May 2025 21:25:03 +0000 Subject: [PATCH 233/884] Adds IP Filter feature support to google_storage_bucket resource. 
(#14002) --- .../storage/resource_storage_bucket.go.tmpl | 192 +++++++++++++++++ .../storage/resource_storage_bucket_test.go | 195 ++++++++++++++++++ .../docs/r/storage_bucket.html.markdown | 19 ++ 3 files changed, 406 insertions(+) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index c05147dc01ab..aa5864bdbbeb 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -576,6 +576,83 @@ func ResourceStorageBucket() *schema.Resource { Computed: true, Description: `The time at which the bucket's metadata or IAM policy was last updated, in RFC 3339 format.`, }, + "ip_filter": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: `The bucket IP filtering configuration.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "mode": { + Type: schema.TypeString, + Required: true, + Description: `The mode of the IP filter. 
Valid values are 'Enabled' and 'Disabled'.`, + ValidateFunc: validation.StringInSlice([]string{"Enabled", "Disabled"}, false), + }, + "public_network_source": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: `The public network IP address ranges that can access the bucket and its data.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "allowed_ip_cidr_ranges": { + Type: schema.TypeList, + Required: true, + Description: "The list of public IPv4, IPv6 cidr ranges that are allowed to access the bucket.", + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.IsCIDR, + }, + }, + }, + }, + }, + "vpc_network_sources": { + Type: schema.TypeList, + Optional: true, + Description: `The list of VPC networks that can access the bucket.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "allowed_ip_cidr_ranges": { + Type: schema.TypeList, + Required: true, + Description: "The list of public or private IPv4 and IPv6 CIDR ranges that can access the bucket.", + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.IsCIDR, + }, + }, + "network": { + Type: schema.TypeString, + Required: true, + Description: "Name of the network. 
Format: projects/{PROJECT_ID}/global/networks/{NETWORK_NAME}", + }, + }, + }, + }, + }, + }, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + if k == "ip_filter.#" { + o, _ := d.GetChange("ip_filter") + l := o.([]interface{}) + if len(l) == 0 { + return false + } + + if contents, ok := l[0].(map[string]interface{}); !ok { + return false + } else if mode, ok := contents["mode"].(string); ok && mode == "Disabled" { + return true + } + return false + } else if k == "ip_filter.0.mode" { + return old == "Disabled" && new == "" + } + return false + }, + }, }, UseJSONNumber: true, } @@ -827,6 +904,10 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error sb.HierarchicalNamespace = expandBucketHierachicalNamespace(v.([]interface{})) } + if v, ok := d.GetOk("ip_filter"); ok { + sb.IpFilter = expandBucketIpFilter(v.([]interface{})) + } + var res *storage.Bucket err = transport_tpg.Retry(transport_tpg.RetryOptions{ @@ -1011,6 +1092,12 @@ func resourceStorageBucketUpdate(d *schema.ResourceData, meta interface{}) error } } + if d.HasChange("ip_filter") { + if v, ok := d.GetOk("ip_filter"); ok { + sb.IpFilter = expandBucketIpFilter(v.([]interface{})) + } + } + res, err := config.NewStorageClient(userAgent).Buckets.Patch(d.Get("name").(string), sb).Do() if err != nil { return err @@ -1945,6 +2032,107 @@ func lockRetentionPolicy(bucketsService *storage.BucketsService, bucketName stri return nil } +func flattenBucketIpFilter(ipFilter *storage.BucketIpFilter) []map[string]interface{} { + ipFilterList := make([]map[string]interface{}, 0, 1) + + if ipFilter == nil { + return ipFilterList + } + + filterItem := map[string]interface{}{ + "mode": ipFilter.Mode, + } + + if publicSrc := flattenBucketIpFilterPublicNetworkSource(ipFilter.PublicNetworkSource); publicSrc != nil { + filterItem["public_network_source"] = publicSrc + } + if vpcSrc := flattenBucketIpFilterVpcNetworkSources(ipFilter.VpcNetworkSources); vpcSrc != nil { + 
filterItem["vpc_network_sources"] = vpcSrc + } + + return append(ipFilterList, filterItem) +} + +func flattenBucketIpFilterPublicNetworkSource(publicNetworkSource *storage.BucketIpFilterPublicNetworkSource) []map[string]interface{} { + if publicNetworkSource == nil || len(publicNetworkSource.AllowedIpCidrRanges) == 0 { + return nil + } + + return []map[string]interface{}{ + { + "allowed_ip_cidr_ranges": publicNetworkSource.AllowedIpCidrRanges, + }, + } +} + +func flattenBucketIpFilterVpcNetworkSources(vpnNetworkSource []*storage.BucketIpFilterVpcNetworkSources) []map[string]interface{} { + if len(vpnNetworkSource) == 0 { + return nil + } + + srcs := make([]map[string]interface{}, 0, len(vpnNetworkSource)) + + for i := range vpnNetworkSource { + srcs = append(srcs, map[string]interface{}{ + "allowed_ip_cidr_ranges": vpnNetworkSource[i].AllowedIpCidrRanges, + "network": vpnNetworkSource[i].Network, + }) + } + + return srcs +} + +func expandBucketIpFilter(v interface{}) (*storage.BucketIpFilter) { + ipFilterList := v.([]interface{}) + if len(ipFilterList) == 0 || ipFilterList[0] == nil { + return nil + } + ipFilter := ipFilterList[0].(map[string]interface{}) + return &storage.BucketIpFilter{ + Mode: ipFilter["mode"].(string), + PublicNetworkSource: expandBucketIpFilterPublicNetworkSource(ipFilter["public_network_source"]), + VpcNetworkSources: expandBucketIpFilterVpcNetworkSources(ipFilter["vpc_network_sources"]), + ForceSendFields: []string{"PublicNetworkSource", "VpcNetworkSources"}, + } +} + +func expandBucketIpFilterPublicNetworkSource(v interface{}) (*storage.BucketIpFilterPublicNetworkSource) { + e := &storage.BucketIpFilterPublicNetworkSource{ + ForceSendFields: []string{"AllowedIpCidrRanges"}, + } + + publicNetworkSources := v.([]interface{}) + if len(publicNetworkSources) == 0 || publicNetworkSources[0] == nil { + return e + } + publicNetworkSource := publicNetworkSources[0].(map[string]interface{}) + cidrs := 
publicNetworkSource["allowed_ip_cidr_ranges"].([]interface{}) + if len(cidrs) == 0 { + return e + } + + e.AllowedIpCidrRanges = tpgresource.ConvertStringArr(cidrs) + return e +} + +func expandBucketIpFilterVpcNetworkSources(v interface{}) ([]*storage.BucketIpFilterVpcNetworkSources) { + vpcNetworkSources := v.([]interface{}) + if len(vpcNetworkSources) == 0 || vpcNetworkSources[0] == nil { + return nil + } + + transformedvpcNetworkSources := make([]*storage.BucketIpFilterVpcNetworkSources, 0, len(vpcNetworkSources)) + for i := range vpcNetworkSources { + transformedvpcNetworkSource := vpcNetworkSources[i].(map[string]interface{}) + transformedvpcNetworkSources = append(transformedvpcNetworkSources, &storage.BucketIpFilterVpcNetworkSources{ + AllowedIpCidrRanges: tpgresource.ConvertStringArr(transformedvpcNetworkSource["allowed_ip_cidr_ranges"].([]interface{})), + Network: transformedvpcNetworkSource["network"].(string), + }) + } + + return transformedvpcNetworkSources +} + // d.HasChange("lifecycle_rule") always returns true, giving false positives. This function detects changes // to the list size or the actions/conditions of rules directly. 
func detectLifecycleChange(d *schema.ResourceData) bool { @@ -2095,6 +2283,10 @@ func setStorageBucket(d *schema.ResourceData, config *transport_tpg.Config, res } } + if err := d.Set("ip_filter", flattenBucketIpFilter(res.IpFilter)); err != nil { + return fmt.Errorf("Error setting ip_filter: %s", err) + } + d.SetId(res.Id) return nil } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go index ec66e8ea875d..92e0a6d209a3 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go @@ -1551,6 +1551,82 @@ func TestAccStorageBucket_hns_force_destroy(t *testing.T) { }) } +func TestAccStorageBucket_IPFilter(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + var disabled storage.Bucket + var noIPfilter storage.Bucket + bucketName := fmt.Sprintf("tf-test-ip-filter-bucket-%d", acctest.RandInt(t)) + nwSuffix := acctest.RandString(t, 8) + project := envvar.GetTestProjectFromEnv() + serviceAccount := envvar.GetTestServiceAccountFromEnv(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_withoutIPFilter(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &noIPfilter), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_IPFilter( + bucketName, nwSuffix, project, serviceAccount, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, 
"google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_IPFilter_disable(bucketName, nwSuffix, project, serviceAccount), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &disabled), + testAccCheckStorageBucketWasUpdated(&disabled, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_withoutIPFilter(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &noIPfilter), + testAccCheckStorageBucketWasUpdated(&noIPfilter, &disabled), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + func testAccCheckStorageBucketPutFolderItem(t *testing.T, bucketName string) resource.TestCheckFunc { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -2702,3 +2778,122 @@ resource "google_storage_bucket" "bucket" { } `, bucketName) } + +func testAccStorageBucket_IPFilter(bucketName string, nwSuffix string, project string, serviceAccount string) string { + return fmt.Sprintf(` +resource "google_compute_network" "vpc_gcs_ipfilter1" { + name = "tf-test-storage-ipfilter1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "ipfilter_1" { + name = "tf-test-storage-ipfilter1-%s" + ip_cidr_range = "10.201.0.0/16" + region = "us-central1" + network = google_compute_network.vpc_gcs_ipfilter1.id +} + +resource "google_project_iam_custom_role" "ipfilter_exempt_role" { + role_id = "_%s" + title = 
"IP Filter Exempt Role" + description = "A custom role to bypass IP Filtering on GCS bucket." + permissions = ["storage.buckets.exemptFromIpFilter"] +} + +resource "google_project_iam_member" "primary" { + project = "%s" + role = "projects/%s/roles/${google_project_iam_custom_role.ipfilter_exempt_role.role_id}" + member = "serviceAccount:%s" +} + +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "us-central1" + uniform_bucket_level_access = true + force_destroy = true + ip_filter { + mode = "Enabled" + public_network_source { + allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] + } + vpc_network_sources { + network = google_compute_network.vpc_gcs_ipfilter1.id + allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] + } + } +} +`, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) +} + +func testAccStorageBucket_IPFilter_disable(bucketName string, nwSuffix string, project string, serviceAccount string) string { + return fmt.Sprintf(` +resource "google_compute_network" "vpc_gcs_ipfilter1" { + name = "tf-test-storage-ipfilter1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "ipfilter_1" { + name = "tf-test-storage-ipfilter1-%s" + ip_cidr_range = "10.201.0.0/16" + region = "us-central1" + network = google_compute_network.vpc_gcs_ipfilter1.id +} + +resource "google_compute_network" "vpc_gcs_ipfilter2" { + name = "tf-test-storage-ipfilter2-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "ipfilter_2" { + name = "tf-test-storage-ipfilter2-%s" + ip_cidr_range = "10.202.0.0/16" + region = "us-central1" + network = google_compute_network.vpc_gcs_ipfilter2.id +} + +resource "google_project_iam_custom_role" "ipfilter_exempt_role" { + role_id = "_%s" + title = "IP Filter Exempt Role" + description = "A custom role to bypass IP Filtering on GCS bucket." 
+ permissions = ["storage.buckets.exemptFromIpFilter"] +} + +resource "google_project_iam_member" "primary" { + project = "%s" + role = "projects/%s/roles/${google_project_iam_custom_role.ipfilter_exempt_role.role_id}" + member = "serviceAccount:%s" +} + +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "us-central1" + uniform_bucket_level_access = true + force_destroy = true + ip_filter { + mode = "Disabled" + public_network_source { + allowed_ip_cidr_ranges = ["192.0.2.0/24", "2001:db8::/32"] + } + vpc_network_sources { + network = google_compute_network.vpc_gcs_ipfilter1.id + allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] + } + vpc_network_sources { + network = google_compute_network.vpc_gcs_ipfilter2.id + allowed_ip_cidr_ranges = ["10.201.0.0/16", "10.202.0.0/16"] + } + } +} +`, nwSuffix, nwSuffix, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) +} + +func testAccStorageBucket_withoutIPFilter(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "us-central1" + uniform_bucket_level_access = true + force_destroy = true +} +`, bucketName) +} diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index e5035cd8d1af..da7cee5efbc2 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -177,6 +177,8 @@ The following arguments are supported: * `updated` - (Computed) The time at which the bucket's metadata or IAM policy was last updated, in RFC 3339 format. +* `ip_filter` - (Optional) The bucket IP filtering configuration. Specifies the network sources that can access the bucket, as well as its underlying objects. Structure is [documented below](#nested_ip_filter). 
+ The `lifecycle_rule` block supports: * `action` - (Required) The Lifecycle Rule's action configuration. A single block of this type is supported. Structure is [documented below](#nested_action). @@ -293,6 +295,23 @@ The following arguments are supported: * `enabled` - (Required) Enables hierarchical namespace for the bucket. +The `ip_filter` block supports: + +* `mode` - (Required) The state of the IP filter configuration. Valid values are `Enabled` and `Disabled`. When set to `Enabled`, IP filtering rules are applied to a bucket and all incoming requests to the bucket are evaluated against these rules. When set to `Disabled`, IP filtering rules are not applied to a bucket. + +* `public_network_source` - (Optional) The public network IP address ranges that can access the bucket and its data. Structure is [documented below](#nested_public_network_source). + +* `vpc_network_sources` - (Optional) The list of VPC networks that can access the bucket. Structure is [documented below](#nested_vpc_network_sources). + +The `public_network_source` block supports: + +* `allowed_ip_cidr_ranges` - The list of public IPv4 and IPv6 CIDR ranges that can access the bucket and its data. + +The `vpc_network_sources` block supports: + +* `network` - Name of the network. Format: `projects/PROJECT_ID/global/networks/NETWORK_NAME` + +* `allowed_ip_cidr_ranges` - The list of public or private IPv4 and IPv6 CIDR ranges that can access the bucket. 
## Attributes Reference From a5c41a18d08d0a9c7e91fc7d9d830232c5589a1b Mon Sep 17 00:00:00 2001 From: harshithpatte-g Date: Fri, 23 May 2025 03:17:11 +0530 Subject: [PATCH 234/884] fix documentation of mtu field in interconnect attachments resource (#14053) --- mmv1/products/compute/InterconnectAttachment.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/InterconnectAttachment.yaml b/mmv1/products/compute/InterconnectAttachment.yaml index 5c0404efeb12..140d2d2a0d4f 100644 --- a/mmv1/products/compute/InterconnectAttachment.yaml +++ b/mmv1/products/compute/InterconnectAttachment.yaml @@ -108,8 +108,8 @@ properties: - name: 'mtu' type: String description: | - Maximum Transmission Unit (MTU), in bytes, of packets passing through - this interconnect attachment. Currently, only 1440 and 1500 are allowed. If not specified, the value will default to 1440. + Maximum Transmission Unit (MTU), in bytes, of packets passing through this interconnect attachment. + Valid values are 1440, 1460, 1500, and 8896. If not specified, the value will default to 1440. default_from_api: true custom_flatten: 'templates/terraform/custom_flatten/float64_to_int_to_string.go.tmpl' - name: 'bandwidth' From cfc198d0a0f800ea5bfe81bfc91276b609eb925b Mon Sep 17 00:00:00 2001 From: stevenyang72 Date: Thu, 22 May 2025 15:46:05 -0700 Subject: [PATCH 235/884] Add workload identity pool managed identity. 
(#14048) --- .../WorkloadIdentityPoolManagedIdentity.yaml | 115 ++++++++++++ ...oad_identity_pool_managed_identity.go.tmpl | 44 +++++ ...oad_identity_pool_managed_identity.go.tmpl | 22 +++ ...entity_pool_managed_identity_basic.tf.tmpl | 21 +++ ...dentity_pool_managed_identity_full.tf.tmpl | 23 +++ ...dentity_pool_managed_identity_test.go.tmpl | 172 ++++++++++++++++++ 6 files changed, 397 insertions(+) create mode 100644 mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml create mode 100644 mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl create mode 100644 mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml new file mode 100644 index 000000000000..2c2b51f085fb --- /dev/null +++ b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml @@ -0,0 +1,115 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: 'WorkloadIdentityPoolManagedIdentity' +description: | + Represents a managed identity for a workload identity pool namespace. +references: + guides: + 'Configure managed workload identity authentication for Compute Engine': 'https://cloud.google.com/iam/docs/create-managed-workload-identities' + 'Configure managed workload identity authentication for GKE': 'https://cloud.google.com/iam/docs/create-managed-workload-identities-gke' + api: 'https://cloud.google.com/iam/docs/reference/rest/v1/projects.locations.workloadIdentityPools.namespaces.managedIdentities' +min_version: beta +base_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities' +self_link: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities/{{workload_identity_pool_managed_identity_id}}' +create_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities?workloadIdentityPoolManagedIdentityId={{workload_identity_pool_managed_identity_id}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities/{{workload_identity_pool_managed_identity_id}}' +autogen_async: true +custom_code: + constants: 'templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl' + decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' + test_check_destroy: 'templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl' +examples: + - name: 'iam_workload_identity_pool_managed_identity_basic' + primary_resource_id: 'example' + vars: + workload_identity_pool_id: 
'example-pool' + workload_identity_pool_namespace_id: 'example-namespace' + workload_identity_pool_managed_identity_id: 'example-managed-identity' + - name: 'iam_workload_identity_pool_managed_identity_full' + primary_resource_id: 'example' + vars: + workload_identity_pool_id: 'example-pool' + workload_identity_pool_namespace_id: 'example-namespace' + workload_identity_pool_managed_identity_id: 'example-managed-identity' +parameters: + - name: 'workload_identity_pool_id' + type: String + required: true + immutable: true + url_param_only: true + description: | + The ID to use for the pool, which becomes the final component of the resource name. This + value should be 4-32 characters, and may contain the characters [a-z0-9-]. The prefix + `gcp-` is reserved for use by Google, and may not be specified. + - name: 'workload_identity_pool_namespace_id' + type: String + required: true + immutable: true + url_param_only: true + description: | + The ID to use for the namespace. This value must: + * contain at most 63 characters + * contain only lowercase alphanumeric characters or `-` + * start with an alphanumeric character + * end with an alphanumeric character + + + The prefix `gcp-` will be reserved for future uses. + - name: 'workload_identity_pool_managed_identity_id' + type: String + required: true + immutable: true + url_param_only: true + description: | + The ID to use for the managed identity. This value must: + * contain at most 63 characters + * contain only lowercase alphanumeric characters or `-` + * start with an alphanumeric character + * end with an alphanumeric character + + + The prefix `gcp-` will be reserved for future uses. +properties: + - name: 'name' + type: String + description: | + The resource name of the managed identity as + `projects/{project_number}/locations/global/workloadIdentityPools/{workload_identity_pool_id}/namespaces/{workload_identity_pool_namespace_id}/managedIdentities/{workload_identity_pool_managed_identity_id}`. 
+ output: true + - name: 'description' + type: String + description: | + A description of the managed identity. Cannot exceed 256 characters. + - name: 'state' + type: Enum + description: | + The current state of the managed identity. + * `ACTIVE`: The managed identity is active. + * `DELETED`: The managed identity is soft-deleted. Soft-deleted managed identities are + permanently deleted after approximately 30 days. You can restore a soft-deleted managed + identity using UndeleteWorkloadIdentityPoolManagedIdentity. You cannot reuse the ID of a + soft-deleted managed identity until it is permanently deleted. + output: true + enum_values: + - 'ACTIVE' + - 'DELETED' + - name: 'disabled' + type: Boolean + description: | + Whether the managed identity is disabled. If disabled, credentials may no longer be issued for + the identity, however existing credentials will still be accepted until they expire. diff --git a/mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl new file mode 100644 index 000000000000..c00e6c226f8b --- /dev/null +++ b/mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl @@ -0,0 +1,44 @@ +const workloadIdentityPoolManagedIdentityIdRegexp = `^[0-9a-z-]+$` + +func ValidateWorkloadIdentityPoolManagedIdentityId(v interface{}, k string) (ws []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile(workloadIdentityPoolManagedIdentityIdRegexp).MatchString(value) { + errors = append(errors, fmt.Errorf( + "%q must contain only lowercase letters (a-z), numbers (0-9), or dashes (-)", k)) + } + + if len(value) < 2 { + errors = append(errors, fmt.Errorf( + "%q cannot be less than 2 characters", k)) + return + } + + if len(value) > 63 { + errors = append(errors, fmt.Errorf( + "%q cannot be greater than 63 characters", k)) + } + + isLowerAlphaNumeric := func(r byte) bool { + return (r >= '0' 
&& r <= '9') || (r >= 'a' && r <= 'z') + } + + firstChar := value[0] + if !isLowerAlphaNumeric(firstChar) { + errors = append(errors, fmt.Errorf( + "%q must start with an alphanumeric character", k)) + } + + lastChar := value[len(value) - 1] + if !isLowerAlphaNumeric(lastChar) { + errors = append(errors, fmt.Errorf( + "%q must end with an alphanumeric character", k)) + } + + if strings.HasPrefix(value, "gcp-") { + errors = append(errors, fmt.Errorf( + "%q (%q) can not start with \"gcp-\"", k, value)) + } + + return +} diff --git a/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl new file mode 100644 index 000000000000..87ad9345666d --- /dev/null +++ b/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl @@ -0,0 +1,22 @@ +config := acctest.GoogleProviderConfig(t) + +url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}IAMBetaBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/global/workloadIdentityPools/{{"{{"}}workload_identity_pool_id{{"}}"}}/namespaces/{{"{{"}}workload_identity_pool_namespace_id{{"}}"}}/managedIdentities/{{"{{"}}workload_identity_pool_managed_identity_id{{"}}"}}") +if err != nil { + return err +} + +res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: url, + UserAgent: config.UserAgent, +}) +if err != nil { + return nil +} + +if v := res["state"]; v == "DELETED" { + return nil +} + +return fmt.Errorf("IAMBetaWorkloadIdentityPoolManagedIdentity still exists at %s", url) diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl new file mode 100644 index 000000000000..42803e035e1b --- /dev/null +++ 
b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl @@ -0,0 +1,21 @@ +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "ns" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" +} + +resource "google_iam_workload_identity_pool_managed_identity" "{{$.PrimaryResourceId}}" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id + workload_identity_pool_managed_identity_id = "{{index $.Vars "workload_identity_pool_managed_identity_id"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl new file mode 100644 index 000000000000..2074a1566556 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl @@ -0,0 +1,23 @@ +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "ns" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" +} + +resource "google_iam_workload_identity_pool_managed_identity" 
"{{$.PrimaryResourceId}}" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id + workload_identity_pool_managed_identity_id = "{{index $.Vars "workload_identity_pool_managed_identity_id"}}" + description = "Example Managed Identity in a Workload Identity Pool Namespace" + disabled = true +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl new file mode 100644 index 000000000000..ccf0d267ac1c --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl @@ -0,0 +1,172 @@ +{{- if ne $.TargetVersionName "ga" -}} +package iambeta_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolManagedIdentityDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_managed_identity.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"workload_identity_pool_id", 
"workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_updated(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_iam_workload_identity_pool_managed_identity.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_iam_workload_identity_pool_managed_identity.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, + }, + }, + }) +} + +func TestAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolManagedIdentityDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_managed_identity.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_updated(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_iam_workload_identity_pool_managed_identity.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_iam_workload_identity_pool_managed_identity.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, + }, + }, + }) +} + +func testAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "ns" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_managed_identity" "example" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id + workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" +} +`, context) +} + +func testAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "ns" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_managed_identity" "example" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = 
google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id + workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" + description = "Example Managed Identity in a Workload Identity Pool Namespace" + disabled = true +} +`, context) +} + +func testAccIAMBetaWorkloadIdentityPoolManagedIdentity_updated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + provider = google-beta + + workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" + mode = "TRUST_DOMAIN" +} + +resource "google_iam_workload_identity_pool_namespace" "ns" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_managed_identity" "example" { + provider = google-beta + + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id + workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" + description = "Updated Managed Identity in a Workload Identity Pool Namespace" + disabled = false +} +`, context) +} +{{- end -}} From 650116127f482560ea8121edcc6cc6a393c16a42 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 22 May 2025 16:03:05 -0700 Subject: [PATCH 236/884] Add vacation for shuyama1 (#14017) --- .ci/magician/github/membership_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index ca596c162298..82a48c0753ed 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -103,8 
+103,8 @@ var ( "shuyama1": { vacations: []Vacation{ { - startDate: newDate(2025, 3, 26), - endDate: newDate(2025, 4, 1), + startDate: newDate(2025, 5, 23), + endDate: newDate(2025, 5, 30), }, }, }, From 289fa4b6c4210cedbac093278f4640cd588ec33e Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 22 May 2025 16:05:41 -0700 Subject: [PATCH 237/884] update netapp network (#14061) --- mmv1/products/netapp/Backup.yaml | 2 +- mmv1/products/netapp/StoragePool.yaml | 2 +- mmv1/products/netapp/Volume.yaml | 2 +- mmv1/products/netapp/VolumeQuotaRule.yaml | 2 +- mmv1/products/netapp/VolumeReplication.yaml | 2 +- mmv1/products/netapp/VolumeSnapshot.yaml | 2 +- .../services/netapp/resource_netapp_backup_test.go | 6 +++--- .../netapp/resource_netapp_storage_pool_test.go | 10 +++++----- .../netapp/resource_netapp_volume_quotaRule_test.go | 2 +- .../netapp/resource_netapp_volume_replication_test.go | 2 +- .../netapp/resource_netapp_volume_snapshot_test.go | 2 +- .../services/netapp/resource_netapp_volume_test.go | 4 ++-- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/mmv1/products/netapp/Backup.yaml b/mmv1/products/netapp/Backup.yaml index 091e3d8d245b..5511bd0332de 100644 --- a/mmv1/products/netapp/Backup.yaml +++ b/mmv1/products/netapp/Backup.yaml @@ -70,7 +70,7 @@ examples: backup_name: 'test-backup' network_name: 'network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index a21c5894e4ae..4c95114f65f9 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ 
b/mmv1/products/netapp/StoragePool.yaml @@ -75,7 +75,7 @@ examples: pool_name: 'test-pool' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' exclude_docs: true - name: 'Storage_pool_create_doc' primary_resource_id: 'test_pool' diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index e4e4af0d0bd7..698689d07995 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -56,7 +56,7 @@ examples: pool_name: 'test-pool' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' ignore_read_extra: - 'deletion_policy' virtual_fields: diff --git a/mmv1/products/netapp/VolumeQuotaRule.yaml b/mmv1/products/netapp/VolumeQuotaRule.yaml index e4f85eb9f9bb..bd80d2b0fe0c 100644 --- a/mmv1/products/netapp/VolumeQuotaRule.yaml +++ b/mmv1/products/netapp/VolumeQuotaRule.yaml @@ -52,7 +52,7 @@ examples: network_name: 'test-network' quota_rule_name: 'test-volume-quota-rule' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 
'location' type: String diff --git a/mmv1/products/netapp/VolumeReplication.yaml b/mmv1/products/netapp/VolumeReplication.yaml index b636903f99a8..d7f4904eedc7 100644 --- a/mmv1/products/netapp/VolumeReplication.yaml +++ b/mmv1/products/netapp/VolumeReplication.yaml @@ -71,7 +71,7 @@ examples: destination_volume: 'destination-volume' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' ignore_read_extra: - 'delete_destination_volume' - 'replication_enabled' diff --git a/mmv1/products/netapp/VolumeSnapshot.yaml b/mmv1/products/netapp/VolumeSnapshot.yaml index 455a3d3794f9..48f0174ae2bd 100644 --- a/mmv1/products/netapp/VolumeSnapshot.yaml +++ b/mmv1/products/netapp/VolumeSnapshot.yaml @@ -56,7 +56,7 @@ examples: network_name: 'test-network' snap_name: 'testvolumesnap' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index c3ae4eae0e02..63e533adff2f 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -12,7 +12,7 @@ import ( func TestAccNetappBackup_NetappBackupFull_update(t *testing.T) { context := 
map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -163,7 +163,7 @@ resource "google_netapp_backup" "test_backup" { func TestAccNetappBackup_NetappFlexBackup(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -248,7 +248,7 @@ resource "google_netapp_backup" "test_backup" { func TestAccNetappBackup_NetappIntegratedBackup(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go index 6b69d1f49a61..8c1c493a2957 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go @@ -13,7 +13,7 @@ func TestAccNetappStoragePool_storagePoolCreateExample_update(t 
*testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -97,7 +97,7 @@ resource "google_netapp_storage_pool" "test_pool" { func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -149,7 +149,7 @@ resource "google_netapp_storage_pool" "test_pool" { func TestAccNetappStoragePool_FlexRegionalStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -272,7 +272,7 @@ data "google_compute_network" "default" { func TestAccNetappStoragePool_FlexRegionalStoragePoolNoZone(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -320,7 +320,7 @@ data "google_compute_network" "default" { func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go index 0dd9f6ffb1cb..30c3374d57f4 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeQuotaRule_netappVolumeQuotaRuleBasicExample_update(t *te t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go index 04bce53188d6..8e17171ff727 100644 --- 
a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeReplication_NetappVolumeReplicationCreateExample_update( t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go index ca277f1a4e39..cef8f5ce594a 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeSnapshot_volumeSnapshotCreateExample_update(t *testing.T t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go index e324b75b4e6c..ea1010fae9b9 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go +++ 
b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go @@ -20,7 +20,7 @@ import ( func TestAccNetappVolume_NetappVolumeBasicExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -661,7 +661,7 @@ func testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, func TestAccNetappVolume_autoTieredNetappVolume_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } From a33ba64d2b9edd2a36f17e9cac821a9354fd9e9a Mon Sep 17 00:00:00 2001 From: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Date: Thu, 22 May 2025 17:05:03 -0700 Subject: [PATCH 238/884] Private CA - Differentiate unset and default values for is_ca/max_issuer_path_length in Certificate Templates (#14070) --- .../privateca/CertificateTemplate.yaml | 112 +++++- ...ca_certificate_template_509_config.go.tmpl | 53 +++ ...ca_certificate_template_509_config.go.tmpl | 20 + ...ero_max_issuer_path_length_null_ca.tf.tmpl | 85 +++++ .../services/privateca/privateca_utils.go | 79 +++- ...rce_privateca_certificate_template_test.go | 341 ++++++++++++++++++ 6 files changed, 686 insertions(+), 4 deletions(-) create mode 100644 
mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl create mode 100644 mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl create mode 100644 mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl diff --git a/mmv1/products/privateca/CertificateTemplate.yaml b/mmv1/products/privateca/CertificateTemplate.yaml index 2f440e386815..311c1bd9d6f1 100644 --- a/mmv1/products/privateca/CertificateTemplate.yaml +++ b/mmv1/products/privateca/CertificateTemplate.yaml @@ -52,6 +52,11 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-template%s", context["random_suffix"])' vars: name: 'my-template' + - name: 'privateca_template_zero_max_issuer_path_length_null_ca' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-my-template%s", context["random_suffix"])' + vars: + name: 'my-template' parameters: properties: - name: 'name' @@ -69,6 +74,8 @@ properties: - name: 'predefinedValues' type: NestedObject description: Optional. A set of X.509 values that will be applied to all issued certificates that use this template. If the certificate request includes conflicting values for the same properties, they will be overwritten by the values defined here. If the issuing CaPool's IssuancePolicy defines conflicting baseline_values for the same properties, the certificate issuance request will fail. + custom_flatten: 'templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl' + custom_expand: 'templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl' properties: - name: 'keyUsage' type: NestedObject @@ -178,11 +185,29 @@ properties: properties: - name: 'isCa' type: Boolean - description: Optional. Refers to the "CA" X.509 extension, which is a boolean value. When this value is missing, the extension will be omitted from the CA certificate. + description: | + Optional. 
Refers to the "CA" X.509 extension, which is a boolean value. When this value is true, the "CA" in Basic Constraints extension will be set to true. + send_empty_value: true + - name: 'nullCa' + type: Boolean + description: | + Optional. When true, the "CA" in Basic Constraints extension will be set to null and omitted from the CA certificate. + If both `is_ca` and `null_ca` are unset, the "CA" in Basic Constraints extension will be set to false. + Note that the behavior when `is_ca = false` for this resource is different from the behavior in the Certificate Authority, Certificate and CaPool resources. + url_param_only: true send_empty_value: true - name: 'maxIssuerPathLength' type: Integer - description: Optional. Refers to the path length restriction X.509 extension. For a CA certificate, this value describes the depth of subordinate CA certificates that are allowed. If this value is less than 0, the request will fail. If this value is missing, the max path length will be omitted from the CA certificate. + description: | + Optional. Refers to the "path length constraint" in Basic Constraints extension. For a CA certificate, this value describes the depth of + subordinate CA certificates that are allowed. If this value is less than 0, the request will fail. + - name: 'zeroMaxIssuerPathLength' + type: Boolean + description: | + Optional. When true, the "path length constraint" in Basic Constraints extension will be set to 0. + if both `max_issuer_path_length` and `zero_max_issuer_path_length` are unset, + the max path length will be omitted from the CA certificate. + url_param_only: true - name: 'policyIds' type: Array description: Optional. Describes the X.509 certificate policy object identifiers, per https://tools.ietf.org/html/rfc5280#section-4.2.1.4. @@ -227,6 +252,89 @@ properties: type: String description: Required. The value of this X.509 extension. 
required: true + - name: 'nameConstraints' + type: NestedObject + description: | + Describes the X.509 name constraints extension. + properties: + - name: 'critical' + type: Boolean + description: + Indicates whether or not the name constraints are marked + critical. + required: true + - name: 'permittedDnsNames' + type: Array + description: | + Contains permitted DNS names. Any DNS name that can be + constructed by simply adding zero or more labels to + the left-hand side of the name satisfies the name constraint. + For example, `example.com`, `www.example.com`, `www.sub.example.com` + would satisfy `example.com` while `example1.com` does not. + item_type: + type: String + - name: 'excludedDnsNames' + type: Array + description: | + Contains excluded DNS names. Any DNS name that can be + constructed by simply adding zero or more labels to + the left-hand side of the name satisfies the name constraint. + For example, `example.com`, `www.example.com`, `www.sub.example.com` + would satisfy `example.com` while `example1.com` does not. + item_type: + type: String + - name: 'permittedIpRanges' + type: Array + description: | + Contains the permitted IP ranges. For IPv4 addresses, the ranges + are expressed using CIDR notation as specified in RFC 4632. + For IPv6 addresses, the ranges are expressed in similar encoding as IPv4 + addresses. + item_type: + type: String + - name: 'excludedIpRanges' + type: Array + description: | + Contains the excluded IP ranges. For IPv4 addresses, the ranges + are expressed using CIDR notation as specified in RFC 4632. + For IPv6 addresses, the ranges are expressed in similar encoding as IPv4 + addresses. + item_type: + type: String + - name: 'permittedEmailAddresses' + type: Array + description: | + Contains the permitted email addresses. The value can be a particular + email address, a hostname to indicate all email addresses on that host or + a domain with a leading period (e.g. 
`.example.com`) to indicate + all email addresses in that domain. + item_type: + type: String + - name: 'excludedEmailAddresses' + type: Array + description: | + Contains the excluded email addresses. The value can be a particular + email address, a hostname to indicate all email addresses on that host or + a domain with a leading period (e.g. `.example.com`) to indicate + all email addresses in that domain. + item_type: + type: String + - name: 'permittedUris' + type: Array + description: | + Contains the permitted URIs that apply to the host part of the name. + The value can be a hostname or a domain with a + leading period (like `.example.com`) + item_type: + type: String + - name: 'excludedUris' + type: Array + description: | + Contains the excluded URIs that apply to the host part of the name. + The value can be a hostname or a domain with a + leading period (like `.example.com`) + item_type: + type: String - name: 'identityConstraints' type: NestedObject description: Optional. Describes constraints on identities that may be appear in Certificates issued using this template. If this is omitted, then this template will not add restrictions on a certificate's identity. 
diff --git a/mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl b/mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl new file mode 100644 index 000000000000..5cd89e071a77 --- /dev/null +++ b/mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl @@ -0,0 +1,53 @@ +{{/* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + if v == nil { + return v, nil + } + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + if len(original) == 0 { + return nil, nil + } + transformed := make(map[string]interface{}) + + caOptions, err := expandPrivatecaCertificateTemplateConfigX509ConfigCaOptions(original["ca_options"], d, config) + if err != nil { + return nil, err + } + transformed["caOptions"] = caOptions + + keyUsage, err := expandPrivatecaCertificateConfigX509ConfigKeyUsage(original["key_usage"], d, config) + if err != nil { + return nil, err + } + transformed["keyUsage"] = keyUsage + + policyIds, err := expandPrivatecaCertificateConfigX509ConfigPolicyIds(original["policy_ids"], d, config) + if err != nil { + return nil, err + } + transformed["policyIds"] = policyIds + + aiaOcspServers, err := expandPrivatecaCertificateConfigX509ConfigAiaOcspServers(original["aia_ocsp_servers"], d, config) + if err != nil { + return nil, err + } + transformed["aiaOcspServers"] = aiaOcspServers + + addExts, err := expandPrivatecaCertificateConfigX509ConfigAdditionalExtensions(original["additional_extensions"], d, config) + if err != nil { + return nil, err + } + transformed["additionalExtensions"] = addExts + + nameConstraints, err := 
expandPrivatecaCertificateConfigX509ConfigNameConstraints(original["name_constraints"], d, config) + if err != nil { + return nil, err + } + transformed["nameConstraints"] = nameConstraints + return transformed, nil +} diff --git a/mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl b/mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl new file mode 100644 index 000000000000..97703b6c7264 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl @@ -0,0 +1,20 @@ +{{/* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + v = make(map[string]interface{}) + } + original := v.(map[string]interface{}) + transformed := make(map[string]interface{}) + transformed["additional_extensions"] = + flattenPrivatecaCertificateConfigX509ConfigAdditionalExtensions(original["additionalExtensions"], d, config) + transformed["policy_ids"] = + flattenPrivatecaCertificateConfigX509ConfigPolicyIds(original["policyIds"], d, config) + transformed["aia_ocsp_servers"] = flattenPrivatecaCertificateConfigX509ConfigAiaOcspServers(original["aiaOcspServers"], d, config) + transformed["ca_options"] = + flattenPrivatecaCertificateTemplateConfigX509ConfigCaOptions(original["caOptions"], d, config) + transformed["key_usage"] = + flattenPrivatecaCertificateConfigX509ConfigKeyUsage(original["keyUsage"], d, config) + transformed["name_constraints"] = + flattenPrivatecaCertificateConfigX509ConfigNameConstraints(original["nameConstraints"], d, config) + return []interface{}{transformed} +} diff --git a/mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl 
b/mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl new file mode 100644 index 000000000000..09bd036806b5 --- /dev/null +++ b/mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl @@ -0,0 +1,85 @@ +resource "google_privateca_certificate_template" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "name"}}" + location = "us-central1" + description = "A sample certificate template" + + identity_constraints { + allow_subject_alt_names_passthrough = true + allow_subject_passthrough = true + + cel_expression { + description = "Always true" + expression = "true" + location = "any.file.anywhere" + title = "Sample expression" + } + } + + maximum_lifetime = "86400s" + + passthrough_extensions { + additional_extensions { + object_id_path = [1, 6] + } + known_extensions = ["EXTENDED_KEY_USAGE"] + } + + predefined_values { + additional_extensions { + object_id { + object_id_path = [1, 6] + } + value = "c3RyaW5nCg==" + critical = true + } + aia_ocsp_servers = ["string"] + ca_options { + is_ca = false + null_ca = true + zero_max_issuer_path_length = true + max_issuer_path_length = 0 + } + key_usage { + base_key_usage { + cert_sign = false + content_commitment = true + crl_sign = false + data_encipherment = true + decipher_only = true + digital_signature = true + encipher_only = true + key_agreement = true + key_encipherment = true + } + extended_key_usage { + client_auth = true + code_signing = true + email_protection = true + ocsp_signing = true + server_auth = true + time_stamping = true + } + unknown_extended_key_usages { + object_id_path = [1, 6] + } + } + policy_ids { + object_id_path = [1, 6] + } + name_constraints { + critical = true + permitted_dns_names = ["*.example1.com", "*.example2.com"] + excluded_dns_names = ["*.deny.example1.com", "*.deny.example2.com"] + permitted_ip_ranges = ["10.0.0.0/8", "11.0.0.0/8"] + excluded_ip_ranges = ["10.1.1.0/24", "11.1.1.0/24"] + 
permitted_email_addresses = [".example1.com", ".example2.com"] + excluded_email_addresses = [".deny.example1.com", ".deny.example2.com"] + permitted_uris = [".example1.com", ".example2.com"] + excluded_uris = [".deny.example1.com", ".deny.example2.com"] + } + } + + labels = { + label-one = "value-one" + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/privateca/privateca_utils.go b/mmv1/third_party/terraform/services/privateca/privateca_utils.go index 51211be49454..10d85abdfa43 100644 --- a/mmv1/third_party/terraform/services/privateca/privateca_utils.go +++ b/mmv1/third_party/terraform/services/privateca/privateca_utils.go @@ -8,8 +8,12 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -// This file contains shared flatteners between PrivateCA Certificate, CaPool and CertificateAuthority. -// These resources share the x509Config (Certificate, CertificateAuthorty)/baselineValues (CaPool) object. +// This file contains shared flatteners between PrivateCA Certificate, CaPool, CertificateTemplate and +// CertificateAuthority. These resources share the x509Config (Certificate, CertificateAuthority)/ +// baselineValues (CaPool) object. CertificateTemplate contains the predefinedValues object, which is slightly +// different from the other two, and so requires its own functions to process. These functions are also contained +// in this file. +// // The API does not return this object if it only contains booleans with the default (false) value. This // causes problems if a user specifies only default values, as Terraform detects that the object has been // deleted on the API-side. 
This flattener creates default objects for sub-objects that match this pattern @@ -64,6 +68,50 @@ func expandPrivatecaCertificateConfigX509ConfigCaOptions(v interface{}, d tpgres return transformed, nil } +func expandPrivatecaCertificateTemplateConfigX509ConfigCaOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + // Similar to expandPrivatecaCertificateConfigX509ConfigCaOptions, but only for use in + // Certificate Templates, which use a null_ca field instead of the non_ca field. + // Fields null_ca, zero_max_issuer_path_length are used to distinguish between + // unset booleans and booleans set with a default value. + // Unset is_ca or unset max_issuer_path_length either allow any values for these fields when + // used in an issuance policy, or allow the API to use default values when used in a + // certificate config. A default value of is_ca=false means that issued certificates cannot + // be CA certificates. A default value of max_issuer_path_length=0 means that the CA cannot + // issue CA certificates. 
+ if v == nil { + return nil, nil + } + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + + nullCa := original["null_ca"].(bool) + isCa := original["is_ca"].(bool) + + zeroPathLength := original["zero_max_issuer_path_length"].(bool) + maxIssuerPathLength := original["max_issuer_path_length"].(int) + + transformed := make(map[string]interface{}) + + if nullCa && isCa { + return nil, fmt.Errorf("null_ca, is_ca can not be set to true at the same time.") + } + if zeroPathLength && maxIssuerPathLength > 0 { + return nil, fmt.Errorf("zero_max_issuer_path_length can not be set to true while max_issuer_path_length being set to a positive integer.") + } + + if !nullCa { + transformed["isCa"] = original["is_ca"] + } + if maxIssuerPathLength > 0 || zeroPathLength { + transformed["maxIssuerPathLength"] = original["max_issuer_path_length"] + } + return transformed, nil +} + func expandPrivatecaCertificateConfigX509ConfigKeyUsage(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { if v == nil { return v, nil @@ -361,6 +409,33 @@ func flattenPrivatecaCertificateConfigX509ConfigCaOptions(v interface{}, d *sche return []interface{}{transformed} } + +func flattenPrivatecaCertificateTemplateConfigX509ConfigCaOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + // Special case here as the CaPool API returns an empty object rather than nil unlike the Certificate + // and CertificateAuthority APIs. 
+ if v == nil || len(v.(map[string]interface{})) == 0 { + v = make(map[string]interface{}) + } + original := v.(map[string]interface{}) + transformed := make(map[string]interface{}) + + val, exists := original["isCa"] + transformed["is_ca"] = + flattenPrivatecaCertificateConfigX509ConfigCaOptionsIsCa(val, d, config) + if !exists { + transformed["null_ca"] = true + } + + val, exists = original["maxIssuerPathLength"] + transformed["max_issuer_path_length"] = + flattenPrivatecaCertificateConfigX509ConfigCaOptionsMaxIssuerPathLength(val, d, config) + if exists && int(val.(float64)) == 0 { + transformed["zero_max_issuer_path_length"] = true + } + + return []interface{}{transformed} +} + func flattenPrivatecaCertificateConfigX509ConfigCaOptionsIsCa(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return v } diff --git a/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go b/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go index 35dccbfb4d1c..ff7e486ee91c 100644 --- a/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go +++ b/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go @@ -81,6 +81,60 @@ func TestAccPrivatecaCertificateTemplate_BasicCertificateTemplateLongForm(t *tes }) } +func TestAccPrivatecaCertificateTemplate_updateCaOption(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_name": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckPrivatecaCertificateTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsTrueAndMaxPathIsPositive(context), + }, + { + ResourceName: "google_privateca_certificate_template.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, + }, + { + Config: testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsFalse(context), + }, + { + ResourceName: "google_privateca_certificate_template.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, + }, + { + Config: testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsNull(context), + }, + { + ResourceName: "google_privateca_certificate_template.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, + }, + { + Config: testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionMaxIssuerPathLenghIsZero(context), + }, + { + ResourceName: "google_privateca_certificate_template.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, + }, + }, + }) +} + func testAccPrivatecaCertificateTemplate_BasicCertificateTemplate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_privateca_certificate_template" "primary" { @@ -430,3 +484,290 @@ resource "google_privateca_certificate_template" "primary" { `, context) } + +func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsTrueAndMaxPathIsPositive(context map[string]interface{}) string { + return 
acctest.Nprintf(` +resource "google_privateca_certificate_template" "primary" { + location = "%{region}" + name = "tf-test-template%{random_suffix}" + maximum_lifetime = "86400s" + description = "A sample certificate template" + identity_constraints { + allow_subject_alt_names_passthrough = true + allow_subject_passthrough = true + cel_expression { + description = "Always true" + expression = "true" + location = "any.file.anywhere" + title = "Sample expression" + } + } + passthrough_extensions { + additional_extensions { + object_id_path = [1, 6] + } + known_extensions = ["EXTENDED_KEY_USAGE"] + } + predefined_values { + additional_extensions { + object_id { + object_id_path = [1, 6] + } + value = "c3RyaW5nCg==" + critical = true + } + aia_ocsp_servers = ["string"] + ca_options { + is_ca = true + max_issuer_path_length = 6 + } + key_usage { + base_key_usage { + cert_sign = false + content_commitment = true + crl_sign = false + data_encipherment = true + decipher_only = true + digital_signature = true + encipher_only = true + key_agreement = true + key_encipherment = true + } + extended_key_usage { + client_auth = true + code_signing = true + email_protection = true + ocsp_signing = true + server_auth = true + time_stamping = true + } + unknown_extended_key_usages { + object_id_path = [1, 6] + } + } + policy_ids { + object_id_path = [1, 6] + } + } + project = "%{project_name}" + labels = { + label-two = "value-two" + } +} +`, context) +} + +func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsFalse(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privateca_certificate_template" "primary" { + location = "%{region}" + name = "tf-test-template%{random_suffix}" + maximum_lifetime = "86400s" + description = "An updated sample certificate template" + identity_constraints { + allow_subject_alt_names_passthrough = true + allow_subject_passthrough = true + cel_expression { + description = "Always true" + expression = 
"true" + location = "any.file.anywhere" + title = "Sample expression" + } + } + passthrough_extensions { + additional_extensions { + object_id_path = [1, 6] + } + known_extensions = ["EXTENDED_KEY_USAGE"] + } + predefined_values { + additional_extensions { + object_id { + object_id_path = [1, 6] + } + value = "c3RyaW5nCg==" + critical = true + } + aia_ocsp_servers = ["string"] + ca_options { + is_ca = false + } + key_usage { + base_key_usage { + cert_sign = false + content_commitment = true + crl_sign = false + data_encipherment = true + decipher_only = true + digital_signature = true + encipher_only = true + key_agreement = true + key_encipherment = true + } + extended_key_usage { + client_auth = true + code_signing = true + email_protection = true + ocsp_signing = true + server_auth = true + time_stamping = true + } + unknown_extended_key_usages { + object_id_path = [1, 6] + } + } + policy_ids { + object_id_path = [1, 6] + } + } + project = "%{project_name}" + labels = { + label-two = "value-two" + } +} +`, context) +} + +func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsNull(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privateca_certificate_template" "primary" { + location = "%{region}" + name = "tf-test-template%{random_suffix}" + maximum_lifetime = "86400s" + description = "An updated sample certificate template" + identity_constraints { + allow_subject_alt_names_passthrough = true + allow_subject_passthrough = true + cel_expression { + description = "Always true" + expression = "true" + location = "any.file.anywhere" + title = "Sample expression" + } + } + passthrough_extensions { + additional_extensions { + object_id_path = [1, 6] + } + known_extensions = ["EXTENDED_KEY_USAGE"] + } + predefined_values { + additional_extensions { + object_id { + object_id_path = [1, 6] + } + value = "c3RyaW5nCg==" + critical = true + } + aia_ocsp_servers = ["string"] + ca_options { + null_ca = true + is_ca = 
false + } + key_usage { + base_key_usage { + cert_sign = false + content_commitment = true + crl_sign = false + data_encipherment = true + decipher_only = true + digital_signature = true + encipher_only = true + key_agreement = true + key_encipherment = true + } + extended_key_usage { + client_auth = true + code_signing = true + email_protection = true + ocsp_signing = true + server_auth = true + time_stamping = true + } + unknown_extended_key_usages { + object_id_path = [1, 6] + } + } + policy_ids { + object_id_path = [1, 6] + } + } + project = "%{project_name}" + labels = { + label-two = "value-two" + } +} +`, context) +} + +func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionMaxIssuerPathLenghIsZero(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privateca_certificate_template" "primary" { + location = "%{region}" + name = "tf-test-template%{random_suffix}" + maximum_lifetime = "86400s" + description = "Another updated sample certificate template" + identity_constraints { + allow_subject_alt_names_passthrough = true + allow_subject_passthrough = true + cel_expression { + description = "Always true" + expression = "true" + location = "any.file.anywhere" + title = "Sample expression" + } + } + passthrough_extensions { + additional_extensions { + object_id_path = [1, 6] + } + known_extensions = ["EXTENDED_KEY_USAGE"] + } + predefined_values { + additional_extensions { + object_id { + object_id_path = [1, 6] + } + value = "c3RyaW5nCg==" + critical = true + } + aia_ocsp_servers = ["string"] + ca_options { + zero_max_issuer_path_length = true + max_issuer_path_length = 0 + } + key_usage { + base_key_usage { + cert_sign = false + content_commitment = true + crl_sign = false + data_encipherment = true + decipher_only = true + digital_signature = true + encipher_only = true + key_agreement = true + key_encipherment = true + } + extended_key_usage { + client_auth = true + code_signing = true + email_protection = true + 
ocsp_signing = true + server_auth = true + time_stamping = true + } + unknown_extended_key_usages { + object_id_path = [1, 6] + } + } + policy_ids { + object_id_path = [1, 6] + } + } + project = "%{project_name}" + labels = { + label-two = "value-two" + } +} +`, context) +} From 6a2d75e05916fe899e5fc9a33a056af836ce0507 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 23 May 2025 09:09:58 -0700 Subject: [PATCH 239/884] link from add-document to resource and field config docs (#14068) --- docs/content/document/add-documentation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/document/add-documentation.md b/docs/content/document/add-documentation.md index f959ad587512..02462c3d14d0 100644 --- a/docs/content/document/add-documentation.md +++ b/docs/content/document/add-documentation.md @@ -7,7 +7,7 @@ aliases: # Add documentation -Documentation is autogenerated based on the resource and field configurations. This page describes how to add documentation to resources and fields. +Documentation is autogenerated based on the [resource]({{< ref "/develop/add-resource" >}}) and [field({{< ref "/develop/add-fields" >}}] configurations. This page describes how to add documentation to resources and fields. For more information about types of resources and the generation process overall, see [How Magic Modules works]({{< ref "/" >}}). 
From 9d911e6eea023a15277321de272fdff1083d73cb Mon Sep 17 00:00:00 2001 From: ML Date: Fri, 23 May 2025 20:36:02 +0200 Subject: [PATCH 240/884] Add a new resource google_dataplex_entry (#13924) Co-authored-by: Marek Lipert --- mmv1/products/dataplex/Entry.yaml | 276 ++++++ .../constants/dataplex_entry.go.tmpl | 193 ++++ .../dataplex_entry_aspects.go.tmpl | 44 + .../custom_import/dataplex_entry.go.tmpl | 20 + .../terraform/decoders/dataplex_entry.go.tmpl | 41 + .../terraform/encoders/dataplex_entry.go.tmpl | 17 + .../examples/dataplex_entry_basic.tf.tmpl | 20 + .../examples/dataplex_entry_full.tf.tmpl | 133 +++ .../terraform/pre_read/dataplex_entry.go.tmpl | 4 + .../pre_update/dataplex_entry.go.tmpl | 39 + .../resource_dataplex_entry_meta.yaml | 36 + .../dataplex/resource_dataplex_entry_test.go | 838 ++++++++++++++++++ 12 files changed, 1661 insertions(+) create mode 100644 mmv1/products/dataplex/Entry.yaml create mode 100644 mmv1/templates/terraform/constants/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl create mode 100644 mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl create mode 100644 mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_meta.yaml create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_test.go diff --git a/mmv1/products/dataplex/Entry.yaml b/mmv1/products/dataplex/Entry.yaml new file mode 100644 index 000000000000..f7684c993b7a --- /dev/null +++ 
b/mmv1/products/dataplex/Entry.yaml @@ -0,0 +1,276 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'Entry' +description: | + An entry represents a data asset for which you capture metadata, such as a BigQuery table. + The primary constituents of an entry are aspects, which provide thematically coherent information. + Examples include a table's schema, sensitive data protection profile, data quality information, or a simple tag. + + **Important Considerations:** + + * There is a limit of 99 aspects per entry. + * The entry resource has to use project numbers and not project IDs. Therefore, if + a dependency was already provisioned using project ID, it needs to be referenced explicitly as a resource name + containing the project number. 
+ +references: + guides: + 'Manage entries and ingest custom sources': 'https://cloud.google.com/dataplex/docs/ingest-custom-sources' + api: 'https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups.entries' + +base_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries?entryId={{entry_id}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' + +custom_code: + constants: templates/terraform/constants/dataplex_entry.go.tmpl + decoder: templates/terraform/decoders/dataplex_entry.go.tmpl + encoder: templates/terraform/encoders/dataplex_entry.go.tmpl + custom_import: templates/terraform/custom_import/dataplex_entry.go.tmpl + pre_read: templates/terraform/pre_read/dataplex_entry.go.tmpl + pre_update: templates/terraform/pre_update/dataplex_entry.go.tmpl + +timeouts: + insert_minutes: 5 + update_minutes: 5 + delete_minutes: 5 + +examples: + - name: 'dataplex_entry_basic' + primary_resource_id: 'test_basic' + primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' + vars: + entry_id: 'entry-basic' + entry_group_name: 'entry-group-basic' + aspect_type_name: "aspect-type-basic" + entry_type_name: "entry-type-basic" + test_env_vars: + project_number: 'PROJECT_NUMBER' + - name: 'dataplex_entry_full' + primary_resource_id: 'test_entry_full' + primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' + ignore_read_extra: + - 'aspects' + vars: + entry_id: 'entry-full/has/slashes' + entry_group_name: 'entry-group-full' + aspect_type_name: "aspect-type-full" + entry_type_name: "entry-type-full" + test_env_vars: + project_number: 'PROJECT_NUMBER' 
+ +parameters: + - name: 'location' + type: String + url_param_only: true + immutable: true + description: | + The location where entry will be created. + + - name: 'entryGroupId' + type: String + url_param_only: true + immutable: true + description: | + The entry group id of the entry group the entry will be created in. + + - name: 'entryId' + type: String + url_param_only: true + immutable: true + description: | + The entry id of the entry. + +properties: + - name: 'name' + type: String + output: true + immutable: true + description: | + The relative resource name of the entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. + + - name: 'entryType' + type: String + required: true + immutable: true + validation: + function: ProjectNumberValidation + description: | + The relative resource name of the entry type that was used to create this entry, in the format projects/{project_number}/locations/{locationId}/entryTypes/{entryTypeId}. + + - name: 'createTime' + type: Time + output: true + description: | + The time when the Entry was created in Dataplex. + + - name: 'updateTime' + type: Time + output: true + description: | + The time when the entry was last updated in Dataplex. + + - name: 'aspects' + type: Array + custom_flatten: 'templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl' + description: | + The aspects that are attached to the entry. 
+ + item_type: + type: NestedObject + properties: + - name: 'aspectKey' + type: String + required: true + validation: + function: AspectProjectNumberValidation + description: | + Depending on how the aspect is attached to the entry, the format of the aspect key can be one of the following: + + If the aspect is attached directly to the entry: {project_number}.{locationId}.{aspectTypeId} + If the aspect is attached to an entry's path: {project_number}.{locationId}.{aspectTypeId}@{path} + + - name: 'aspect' + type: NestedObject + required: true + properties: + - name: 'aspectType' + type: String + output: true + description: | + The resource name of the type used to create this Aspect. + + - name: 'path' + type: String + output: true + description: | + The path in the entry under which the aspect is attached. + + - name: 'createTime' + type: Time + output: true + description: | + The time when the Aspect was created. + + - name: 'updateTime' + type: Time + output: true + description: | + The time when the Aspect was last modified. + + - name: 'data' + type: String + required: true + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + description: | + The content of the aspect in JSON form, according to its aspect type schema. The maximum size of the field is 120KB (encoded as UTF-8). + + - name: 'parentEntry' + type: String + immutable: true + description: | + The resource name of the parent entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. + + - name: 'fullyQualifiedName' + type: String + description: | + A name for the entry that can be referenced by an external system. For more information, see https://cloud.google.com/dataplex/docs/fully-qualified-names. 
+ The maximum size of the field is 4000 characters. + + - name: 'entrySource' + type: NestedObject + default_from_api: true + properties: + - name: 'resource' + type: String + description: | + The name of the resource in the source system. Maximum length is 4,000 characters. + + - name: 'system' + type: String + description: | + The name of the source system. Maximum length is 64 characters. + + - name: 'platform' + type: String + description: | + The platform containing the source system. Maximum length is 64 characters. + + - name: 'displayName' + type: String + description: | + A user-friendly display name. Maximum length is 500 characters. + + - name: 'description' + type: String + description: | + A description of the data resource. Maximum length is 2,000 characters. + + - name: 'labels' + type: KeyValuePairs + description: | + User-defined labels. The maximum size of keys and values is 128 characters each. + An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. + + - name: 'ancestors' + type: Array + immutable: true + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + The name of the ancestor resource. + + - name: 'type' + type: String + description: | + The type of the ancestor resource. + + description: | + The entries representing the ancestors of the data resource in the source system. + + - name: 'createTime' + type: Time + validation: + function: 'validation.IsRFC3339Time' + description: | + The time when the resource was created in the source system. + + - name: 'updateTime' + type: Time + validation: + function: 'validation.IsRFC3339Time' + description: | + The time when the resource was last updated in the source system. + If the entry exists in the system and its EntrySource has updateTime populated, + further updates to the EntrySource of the entry must provide incremental updates to its updateTime. 
+ + - name: 'location' + type: String + output: true + description: |- + Location of the resource in the source system. You can search the entry by this location. + By default, this should match the location of the entry group containing this entry. + A different value allows capturing the source location for data external to Google Cloud. diff --git a/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl b/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl new file mode 100644 index 000000000000..e09c9265a78d --- /dev/null +++ b/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl @@ -0,0 +1,193 @@ +// GetEntry supports up to 100 aspects. Therefore we set a threshold at 99. +const maxAspectNumber = 99 + +// NumberOfAspectsValidation checks if the number of aspects on an entry exceeds certain threshold. +func NumberOfAspectsValidation(i interface{}, k string) (warnings []string, errors []error) { + s, isSlice := i.([]interface{}) + m, isMap := i.(map[string]interface{}) + + if !isSlice && !isMap { + errors = append(errors, fmt.Errorf("expected type of field %q to be array, but got %T", k, i)) + return warnings, errors + } + + if len(s)+len(m) > maxAspectNumber { + errors = append(errors, fmt.Errorf( + "field %q has an invalid content: %q. The maximal number of aspects is 99.", + k, i, + )) + } + + return warnings, errors +} + +// ProjectNumberValidation checks if the input string conforms to the pattern: +// "projects//" +func ProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + + if !ok { + errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) + return warnings, errors + } + + var projectNumberRegex = regexp.MustCompile(`^projects\/[1-9]\d*\/.+$`) + if !projectNumberRegex.MatchString(v) { + errors = append(errors, fmt.Errorf( + "field %q has an invalid format: %q. Expected format: 'projects//'. 
Please note that project IDs are not supported.", + k, v, + )) + } + + return warnings, errors +} + +// AspectProjectNumberValidation checks if the input string conforms to the pattern: +// "{project_number}.{anything}" +func AspectProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) + return warnings, errors + } + + var numberDotAnythingRegex = regexp.MustCompile(`^[1-9]\d*\..+$`) + + if !numberDotAnythingRegex.MatchString(v) { + errors = append(errors, fmt.Errorf( + "field %q has an invalid format: %q. Expected format: '.anything'. Please note that project IDs are not supported.", + k, v, + )) + } + + return warnings, errors +} + +// FilterAspects filters the aspects in res based on aspectKeySet. +// It returns an error if type assertions fail. +func FilterAspects(aspectKeySet map[string]struct{}, res map[string]interface{}) error { + aspectsRaw, ok := res["aspects"] + if !ok || aspectsRaw == nil { + return nil + } + + aspectsMap, ok := aspectsRaw.(map[string]interface{}) + if !ok { + return fmt.Errorf("FilterAspects: 'aspects' field is not a map[string]interface{}, got %T", aspectsRaw) + } + + for key := range aspectsMap { + if _, keep := aspectKeySet[key]; !keep { + delete(aspectsMap, key) + } + } + return nil +} + +// AddAspectsToSet adds aspect keys from the aspects interface to the aspectKeySet. +// It returns an error if type assertions fail or expected keys are missing. 
+func AddAspectsToSet(aspectKeySet map[string]struct{}, aspects interface{}) error { + if aspects == nil { + return nil + } + aspectsSlice, ok := aspects.([]interface{}) + if !ok { + return fmt.Errorf("AddAspectsToSet: input 'aspects' is not a []interface{}, got %T", aspects) + } + + for i, aspectItemRaw := range aspectsSlice { + aspectMap, ok := aspectItemRaw.(map[string]interface{}) + if !ok { + return fmt.Errorf("AddAspectsToSet: item at index %d is not a map[string]interface{}, got %T", i, aspectItemRaw) + } + + keyRaw, keyExists := aspectMap["aspect_key"] + if !keyExists { + return fmt.Errorf("AddAspectsToSet: 'aspect_key' not found in aspect item at index %d", i) + } + + keyString, ok := keyRaw.(string) + if !ok { + return fmt.Errorf("AddAspectsToSet: 'aspect_key' in item at index %d is not a string, got %T", i, keyRaw) + } + aspectKeySet[keyString] = struct{}{} + } + return nil +} + +// InverseTransformAspects converts the "aspects" map back to a slice of maps, +// re-inserting the "aspectKey". Modifies obj in-place. +// It returns an error if type assertions fail. +func InverseTransformAspects(res map[string]interface{}) error { + aspectsRaw, ok := res["aspects"] + if !ok || aspectsRaw == nil { + return nil + } + + originalMap, ok := aspectsRaw.(map[string]interface{}) + if !ok { + return fmt.Errorf("InverseTransformAspects: 'aspects' field is not a map[string]interface{}, got %T", aspectsRaw) + } + + newSlice := make([]interface{}, 0, len(originalMap)) + + for key, value := range originalMap { + innerMap, ok := value.(map[string]interface{}) + if !ok { + return fmt.Errorf("InverseTransformAspects: value for key '%s' is not a map[string]interface{}, got %T", key, value) + } + box := make(map[string]interface{}, 2) + box["aspectKey"] = key + box["aspect"] = innerMap + newSlice = append(newSlice, box) + } + res["aspects"] = newSlice + return nil +} + +// TransformAspects concisely transforms the "aspects" slice within obj into a map. 
+// Modifies obj in-place. +// It returns an error if type assertions fail or expected keys are missing. +func TransformAspects(obj map[string]interface{}) error { + aspectsRaw, ok := obj["aspects"] + if !ok || aspectsRaw == nil { + return nil + } + + originalSlice, ok := aspectsRaw.([]interface{}) + if !ok { + return fmt.Errorf("TransformAspects: 'aspects' field is not a []interface{}, got %T", aspectsRaw) + } + + newMap := make(map[string]interface{}, len(originalSlice)) + for i, item := range originalSlice { + aspectMap, ok := item.(map[string]interface{}) + if !ok { + return fmt.Errorf("TransformAspects: item in 'aspects' slice at index %d is not a map[string]interface{}, got %T", i, item) + } + + keyRaw, keyExists := aspectMap["aspectKey"] + if !keyExists { + return fmt.Errorf("TransformAspects: 'aspectKey' not found in aspect item at index %d", i) + } + key, ok := keyRaw.(string) + if !ok { + return fmt.Errorf("TransformAspects: 'aspectKey' in item at index %d is not a string, got %T", i, keyRaw) + } + + valueRaw, valueExists := aspectMap["aspect"] + if !valueExists { + newMap[key] = map[string]interface{}{"data": map[string]interface{}{}} + continue + } + + value, ok := valueRaw.(map[string]interface{}) + if ok { + newMap[key] = value + } else { + newMap[key] = map[string]interface{}{"data": map[string]interface{}{}} + } + } + obj["aspects"] = newMap + return nil +} diff --git a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl new file mode 100644 index 000000000000..de8dd922f1f9 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl @@ -0,0 +1,44 @@ +// This file is a transposition of mmv1/templates/terraform/flatten_property_method.go.tmpl +// Most of the code is copied from there, with the exception of sorting logic. 
+func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]map[string]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + + {{- range $prop := $.ItemType.UserProperties }} + {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} + "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), + {{- end }} + {{- end }} + }) + } + + configData := []map[string]interface{}{} + + for _, item := range d.Get("aspects").([]interface{}) { + configData = append(configData, item.(map[string]interface{})) + } + + sorted, err := tpgresource.SortMapsByConfigOrder(configData, transformed, "aspect_key") + if err != nil { + log.Printf("[ERROR] Could not sort API response value: %s", err) + return v + } + + return sorted +} + +{{- if $.NestedProperties }} + {{- range $prop := $.NestedProperties }} + {{ template "flattenPropertyMethod" $prop -}} + {{- end }} +{{- end }} diff --git a/mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl b/mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl new file mode 100644 index 000000000000..0b54654ae66a --- /dev/null +++ b/mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl @@ -0,0 +1,20 @@ + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{ + "^projects/(?P[^/]+)/locations/(?P[^/]+)/entryGroups/(?P[^/]+)/entries/(?P.+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P.+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P.+)$", + }, d, config); err != nil { + return nil, err + } + + // Double curly braces are the templating language's special marker. 
+ // We need them literally here, so apply a simple trick to force it. + template := "projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/entryGroups/{{"{{"}}entry_group_id{{"}}"}}/entries/{{"{{"}}entry_id{{"}}"}}" + + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, template) + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl new file mode 100644 index 000000000000..ecaec6094965 --- /dev/null +++ b/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl @@ -0,0 +1,41 @@ +aspects := res["aspects"] +if aspects != nil { + _, errors := NumberOfAspectsValidation(aspects, "aspects") + if len(errors) > 0 { + return nil, errors[0] + } +} + +aspectKeysOfInterest := make(map[string]struct{}) +var err error + +if d.HasChange("aspects") { + currentAspects, futureAspects := d.GetChange("aspects") + err = AddAspectsToSet(aspectKeysOfInterest, currentAspects) + if err != nil { + return nil, err + } + err = AddAspectsToSet(aspectKeysOfInterest, futureAspects) + if err != nil { + return nil, err + } +} else { + err = AddAspectsToSet(aspectKeysOfInterest, d.Get("aspects")) + if err != nil { + return nil, err + } +} + +err = FilterAspects(aspectKeysOfInterest, res) + +if err != nil { + return nil, err +} + +err = InverseTransformAspects(res) + +if err != nil { + return nil, err +} + +return res, nil diff --git a/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl new file mode 100644 index 000000000000..61def5ca7e99 --- /dev/null +++ b/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl @@ -0,0 +1,17 @@ +// The yaml file does not allow validation for Array fields. +// Therefore we add validation as a part of the encoding process. 
+aspects := obj["aspects"] +if aspects != nil { + _, errors := NumberOfAspectsValidation(aspects, "aspects") + if len(errors) > 0 { + return nil, errors[0] + } +} + +err := TransformAspects(obj) + +if err != nil { + return nil, err +} + +return obj, nil diff --git a/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl new file mode 100644 index 000000000000..2b938c94342b --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl @@ -0,0 +1,20 @@ +resource "google_dataplex_entry_group" "entry-group-basic" { + entry_group_id = "{{index $.Vars "entry_group_name"}}" + project = "{{index $.TestEnvVars "project_number"}}" + location = "us-central1" +} + +resource "google_dataplex_entry_type" "entry-type-basic" { + entry_type_id = "{{index $.Vars "entry_type_name"}}" + project = "{{index $.TestEnvVars "project_number"}}" + location = "us-central1" +} + +resource "google_dataplex_entry" "{{$.PrimaryResourceId}}" { + entry_group_id = google_dataplex_entry_group.entry-group-basic.entry_group_id + project = "{{index $.TestEnvVars "project_number"}}" + location = "us-central1" + entry_id = "{{index $.Vars "entry_id"}}" + entry_type = google_dataplex_entry_type.entry-type-basic.name +} + diff --git a/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl new file mode 100644 index 000000000000..195476825256 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl @@ -0,0 +1,133 @@ +resource "google_dataplex_aspect_type" "aspect-type-full-one" { + aspect_type_id = "{{index $.Vars "aspect_type_name"}}-one" + location = "us-central1" + project = "{{index $.TestEnvVars "project_number"}}" + + metadata_template = < 0 + + if hasError != tc.expectError { + t.Fatalf("%s: NumberOfAspectsValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, 
tc.expectError) + } + + if tc.expectError && tc.errorMsg != "" { + found := false + for _, err := range errors { + if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring + found = true + break + } + } + if !found { + t.Errorf("%s: NumberOfAspectsValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) + } + } + }) + } +} + +func TestProjectNumberValidation(t *testing.T) { + fieldName := "some_field" + testCases := []struct { + name string + input interface{} + expectError bool + errorMsg string + }{ + {"valid input", "projects/1234567890/locations/us-central1", false, ""}, + {"valid input with only number", "projects/987/stuff", false, ""}, + {"valid input with trailing slash content", "projects/1/a/b/c", false, ""}, + {"valid input minimal", "projects/1/a", false, ""}, + {"invalid input trailing slash only", "projects/555/", true, "has an invalid format"}, + {"invalid type - int", 123, true, `to be string, but got int`}, + {"invalid type - nil", nil, true, `to be string, but got `}, + {"invalid format - missing 'projects/' prefix", "12345/locations/us", true, "has an invalid format"}, + {"invalid format - project number starts with 0", "projects/0123/data", true, "has an invalid format"}, + {"invalid format - no project number", "projects//data", true, "has an invalid format"}, + {"invalid format - letters instead of number", "projects/abc/data", true, "has an invalid format"}, + {"invalid format - missing content after number/", "projects/123", true, "has an invalid format"}, + {"invalid format - empty string", "", true, "has an invalid format"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, errors := dataplex.ProjectNumberValidation(tc.input, fieldName) + hasError := len(errors) > 0 + + if hasError != tc.expectError { + t.Fatalf("%s: ProjectNumberValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, 
errors, tc.expectError) + } + + if tc.expectError && tc.errorMsg != "" { + found := false + for _, err := range errors { + if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring + found = true + break + } + } + if !found { + t.Errorf("%s: ProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) + } + } + }) + } +} + +func TestAspectProjectNumberValidation(t *testing.T) { + fieldName := "some_field" + testCases := []struct { + name string + input interface{} + expectError bool + errorMsg string + }{ + {"valid input", "1234567890.compute.googleapis.com/Disk", false, ""}, + {"valid input minimal", "1.a", false, ""}, + {"invalid input trailing dot only", "987.", true, "has an invalid format"}, + {"invalid type - int", 456, true, `to be string, but got int`}, + {"invalid type - nil", nil, true, `to be string, but got `}, + {"invalid format - missing number", ".compute.googleapis.com/Disk", true, "has an invalid format"}, + {"invalid format - number starts with 0", "0123.compute.googleapis.com/Disk", true, "has an invalid format"}, + {"invalid format - missing dot", "12345compute", true, "has an invalid format"}, + {"invalid format - letters instead of number", "abc.compute.googleapis.com/Disk", true, "has an invalid format"}, + {"invalid format - missing content after dot", "12345", true, "has an invalid format"}, + {"invalid format - empty string", "", true, "has an invalid format"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, errors := dataplex.AspectProjectNumberValidation(tc.input, fieldName) + hasError := len(errors) > 0 + + if hasError != tc.expectError { + t.Fatalf("%s: AspectProjectNumberValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) + } + + if tc.expectError && tc.errorMsg != "" { + found := false + for _, err := range errors { + if 
strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring + found = true + break + } + } + if !found { + t.Errorf("%s: AspectProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) + } + } + }) + } +} + +func TestFilterAspects(t *testing.T) { + testCases := []struct { + name string + aspectKeySet map[string]struct{} + resInput map[string]interface{} + expectedAspects map[string]interface{} + expectError bool + errorMsg string + }{ + {"aspects key is absent", map[string]struct{}{"keep": {}}, map[string]interface{}{"otherKey": "value"}, nil, false, ""}, + {"aspects value is nil", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": nil}, nil, false, ""}, + {"empty aspectKeySet", map[string]struct{}{}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{}, false, ""}, + {"keep all aspects", map[string]struct{}{"one": {}, "two": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}, false, ""}, + {"keep some aspects", map[string]struct{}{"two": {}, "three_not_present": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"two": map[string]interface{}{"data": 2}}, false, ""}, + {"input aspects map is empty", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": map[string]interface{}{}}, map[string]interface{}{}, false, ""}, + {"aspects is wrong type", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": "not a map"}, nil, true, "FilterAspects: 'aspects' field is not a map[string]interface{}, got string"}, + } + + for 
_, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + resCopy := deepCopyMap(tc.resInput) + originalAspectsBeforeCall := deepCopyValue(resCopy["aspects"]) + + err := dataplex.FilterAspects(tc.aspectKeySet, resCopy) + + if tc.expectError { + if err == nil { + t.Fatalf("%s: Expected an error, but got nil", tc.name) + } + if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { + t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) + } + if !reflect.DeepEqual(resCopy["aspects"], originalAspectsBeforeCall) { + t.Errorf("%s: resCopy['aspects'] was modified during error case.\nBefore: %#v\nAfter: %#v", tc.name, originalAspectsBeforeCall, resCopy["aspects"]) + } + return + } + + if err != nil { + t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) + } + + actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] + + if tc.expectedAspects == nil { + if aspectsKeyExists && actualAspectsRaw != nil { + if tc.name == "aspects key is absent" { + if aspectsKeyExists { + t.Errorf("%s: Expected 'aspects' key to be absent, but it exists with value: %v", tc.name, actualAspectsRaw) + } + } else { + t.Errorf("%s: Expected 'aspects' value to be nil, but got: %v", tc.name, actualAspectsRaw) + } + } + return + } + + if !aspectsKeyExists { + t.Fatalf("%s: Expected 'aspects' key to exist, but it was absent. Expected value: %#v", tc.name, tc.expectedAspects) + } + + actualAspects, ok := actualAspectsRaw.(map[string]interface{}) + if !ok { + t.Fatalf("%s: Expected 'aspects' to be a map[string]interface{}, but got %T. 
Value: %#v", tc.name, actualAspectsRaw, actualAspectsRaw) + } + + if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { + t.Errorf("%s: FilterAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspects, tc.expectedAspects) + } + }) + } +} + +func TestAddAspectsToSet(t *testing.T) { + testCases := []struct { + name string + initialSet map[string]struct{} + aspectsInput interface{} + expectedSet map[string]struct{} + expectError bool + errorMsg string + }{ + {"add to empty set", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false, ""}, + {"add to existing set", map[string]struct{}{"existing": {}}, []interface{}{map[string]interface{}{"aspect_key": "key1"}}, map[string]struct{}{"existing": {}, "key1": {}}, false, ""}, + {"add duplicate keys", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false, ""}, + {"input aspects is empty slice", map[string]struct{}{"existing": {}}, []interface{}{}, map[string]struct{}{"existing": {}}, false, ""}, + {"input aspects is nil", map[string]struct{}{"original": {}}, nil, map[string]struct{}{"original": {}}, false, ""}, + {"input aspects is wrong type", map[string]struct{}{}, "not a slice", map[string]struct{}{}, true, "AddAspectsToSet: input 'aspects' is not a []interface{}, got string"}, + {"item in slice is not a map", map[string]struct{}{}, []interface{}{"not a map"}, map[string]struct{}{}, true, "AddAspectsToSet: item at index 0 is not a map[string]interface{}, got string"}, + {"item map missing aspect_key", map[string]struct{}{}, []interface{}{map[string]interface{}{"wrong_key": "key1"}}, map[string]struct{}{}, true, "AddAspectsToSet: 'aspect_key' not found in aspect item at index 0"}, + {"aspect_key is not a 
string", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": 123}}, map[string]struct{}{}, true, "AddAspectsToSet: 'aspect_key' in item at index 0 is not a string, got int"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + currentSet := make(map[string]struct{}) + for k, v := range tc.initialSet { + currentSet[k] = v + } + + err := dataplex.AddAspectsToSet(currentSet, tc.aspectsInput) + + if tc.expectError { + if err == nil { + t.Fatalf("%s: Expected an error, but got nil", tc.name) + } + if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { + t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) + } + } else { + if err != nil { + t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) + } + if !reflect.DeepEqual(currentSet, tc.expectedSet) { + t.Errorf("%s: AddAspectsToSet() result mismatch:\ngot: %v\nwant: %v", tc.name, currentSet, tc.expectedSet) + } + } + }) + } +} + +func TestInverseTransformAspects(t *testing.T) { + testCases := []struct { + name string + resInput map[string]interface{} + expectedAspects []interface{} + expectNilAspects bool + expectError bool + errorMsg string + }{ + {"aspects key is absent", map[string]interface{}{"otherKey": "value"}, nil, true, false, ""}, + {"aspects value is nil", map[string]interface{}{"aspects": nil}, nil, true, false, ""}, + {"aspects is empty map", map[string]interface{}{"aspects": map[string]interface{}{}}, []interface{}{}, false, false, ""}, + {"aspects with one entry", map[string]interface{}{"aspects": map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}}, false, false, ""}, + {"aspects with multiple entries", map[string]interface{}{"aspects": map[string]interface{}{"key2": map[string]interface{}{"data": "value2"}, "key1": map[string]interface{}{"data": 
"value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspect": map[string]interface{}{"data": "value2"}}}, false, false, ""}, + {"aspects is wrong type (not map)", map[string]interface{}{"aspects": "not a map"}, nil, false, true, "InverseTransformAspects: 'aspects' field is not a map[string]interface{}, got string"}, + {"aspect value is not a map", map[string]interface{}{"aspects": map[string]interface{}{"key1": "not a map value"}}, nil, false, true, "InverseTransformAspects: value for key 'key1' is not a map[string]interface{}, got string"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + resCopy := deepCopyMap(tc.resInput) + originalAspectsBeforeCall := deepCopyValue(resCopy["aspects"]) + + err := dataplex.InverseTransformAspects(resCopy) + + if tc.expectError { + if err == nil { + t.Fatalf("%s: Expected an error, but got nil", tc.name) + } + if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { + t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) + } + if !reflect.DeepEqual(resCopy["aspects"], originalAspectsBeforeCall) { + t.Errorf("%s: resCopy['aspects'] was modified during error case.\nBefore: %#v\nAfter: %#v", tc.name, originalAspectsBeforeCall, resCopy["aspects"]) + } + return + } + + if err != nil { + t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) + } + + actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] + + if tc.expectNilAspects { + if aspectsKeyExists && actualAspectsRaw != nil { + t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %#v", tc.name, actualAspectsRaw) + } + return + } + + if !aspectsKeyExists { + t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing. 
Expected value: %#v", tc.name, tc.expectedAspects) + } + if actualAspectsRaw == nil && tc.expectedAspects != nil { + t.Fatalf("%s: Expected 'aspects' to be non-nil, but got nil. Expected value: %#v", tc.name, tc.expectedAspects) + } + + actualAspectsSlice, ok := actualAspectsRaw.([]interface{}) + if !ok { + if tc.expectedAspects != nil || actualAspectsRaw != nil { + t.Fatalf("%s: Expected 'aspects' to be []interface{}, but got %T. Value: %#v", tc.name, actualAspectsRaw, actualAspectsRaw) + } + } + + if actualAspectsSlice != nil { + sortAspectSlice(actualAspectsSlice) + } + if tc.expectedAspects != nil { + sortAspectSlice(tc.expectedAspects) + } + + if !reflect.DeepEqual(actualAspectsSlice, tc.expectedAspects) { + t.Errorf("%s: InverseTransformAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspectsSlice, tc.expectedAspects) + } + }) + } +} + +func TestTransformAspects(t *testing.T) { + testCases := []struct { + name string + objInput map[string]interface{} + expectedAspects map[string]interface{} + expectNilAspects bool + expectError bool + errorMsg string + }{ + {"aspects key is absent", map[string]interface{}{"otherKey": "value"}, nil, true, false, ""}, + {"aspects value is nil", map[string]interface{}{"aspects": nil}, nil, true, false, ""}, + {"aspects is empty slice", map[string]interface{}{"aspects": []interface{}{}}, map[string]interface{}{}, false, false, ""}, + {"aspects with one item", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}, false, false, ""}, + {"aspects with one item that has no aspect", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1"}}}, map[string]interface{}{"key1": map[string]interface{}{"data": map[string]interface{}{}}}, false, false, ""}, + {"aspects with multiple items", map[string]interface{}{"aspects": 
[]interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspect": map[string]interface{}{"data": "value2"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}, "key2": map[string]interface{}{"data": "value2"}}, false, false, ""}, + {"aspects with duplicate aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value_first"}}, map[string]interface{}{"aspectKey": "key2", "aspect": map[string]interface{}{"data": "value2"}}, map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value_last"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value_last"}, "key2": map[string]interface{}{"data": "value2"}}, false, false, ""}, + {"aspects is wrong type (not slice)", map[string]interface{}{"aspects": "not a slice"}, nil, false, true, "TransformAspects: 'aspects' field is not a []interface{}, got string"}, + {"item in slice is not a map", map[string]interface{}{"aspects": []interface{}{"not a map"}}, nil, false, true, "TransformAspects: item in 'aspects' slice at index 0 is not a map[string]interface{}, got string"}, + {"item map missing aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"wrongKey": "k1", "aspect": map[string]interface{}{}}}}, nil, false, true, "TransformAspects: 'aspectKey' not found in aspect item at index 0"}, + {"aspectKey is not a string", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": 123, "aspect": map[string]interface{}{}}}}, nil, false, true, "TransformAspects: 'aspectKey' in item at index 0 is not a string, got int"}, + {"aspect is present but wrong type", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": "not a map"}}}, map[string]interface{}{"key1": 
map[string]interface{}{"data": map[string]interface{}{}}}, false, false, ""}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + objCopy := deepCopyMap(tc.objInput) + originalAspectsBeforeCall := deepCopyValue(objCopy["aspects"]) + + err := dataplex.TransformAspects(objCopy) + + if tc.expectError { + if err == nil { + t.Fatalf("%s: Expected an error, but got nil", tc.name) + } + if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { + t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) + } + if !reflect.DeepEqual(objCopy["aspects"], originalAspectsBeforeCall) { + t.Errorf("%s: objCopy['aspects'] was modified during error case.\nBefore: %#v\nAfter: %#v", tc.name, originalAspectsBeforeCall, objCopy["aspects"]) + } + return + } + + if err != nil { + t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) + } + + actualAspectsRaw, aspectsKeyExists := objCopy["aspects"] + + if tc.expectNilAspects { + if aspectsKeyExists && actualAspectsRaw != nil { + t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %#v", tc.name, actualAspectsRaw) + } + return + } + + if !aspectsKeyExists { + t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing. Expected value: %#v", tc.name, tc.expectedAspects) + } + if actualAspectsRaw == nil && tc.expectedAspects != nil { + t.Fatalf("%s: Expected 'aspects' to be non-nil, but got nil. Expected value: %#v", tc.name, tc.expectedAspects) + } + + actualAspectsMap, ok := actualAspectsRaw.(map[string]interface{}) + if !ok { + if tc.expectedAspects != nil || actualAspectsRaw != nil { + t.Fatalf("%s: Expected 'aspects' to be map[string]interface{}, but got %T. 
Value: %#v", tc.name, actualAspectsRaw, actualAspectsRaw) + } + } + + if !reflect.DeepEqual(actualAspectsMap, tc.expectedAspects) { + t.Errorf("%s: TransformAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspectsMap, tc.expectedAspects) + } + }) + } +} + +func deepCopyMap(original map[string]interface{}) map[string]interface{} { + if original == nil { + return nil + } + copyMap := make(map[string]interface{}, len(original)) + for key, value := range original { + copyMap[key] = deepCopyValue(value) + } + return copyMap +} + +func deepCopySlice(original []interface{}) []interface{} { + if original == nil { + return nil + } + copySlice := make([]interface{}, len(original)) + for i, value := range original { + copySlice[i] = deepCopyValue(value) + } + return copySlice +} + +func deepCopyValue(value interface{}) interface{} { + if value == nil { + return nil + } + switch v := value.(type) { + case map[string]interface{}: + return deepCopyMap(v) + case []interface{}: + return deepCopySlice(v) + default: + return v + } +} + +func sortAspectSlice(slice []interface{}) { + if slice == nil { + return + } + sort.SliceStable(slice, func(i, j int) bool { + mapI, okI := slice[i].(map[string]interface{}) + mapJ, okJ := slice[j].(map[string]interface{}) + if !okI || !okJ { + return false + } + + keyIRaw, keyIExists := mapI["aspectKey"] + keyJRaw, keyJExists := mapJ["aspectKey"] + + if !keyIExists || !keyJExists { + return false + } + + keyI, okI := keyIRaw.(string) + keyJ, okJ := keyJRaw.(string) + if !okI || !okJ { + return false + } + return keyI < keyJ + }) +} + +func TestAccDataplexEntry_dataplexEntryUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_number": envvar.GetTestProjectNumberFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckDataplexEntryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexEntry_dataplexEntryFullUpdatePrepare(context), + }, + { + ResourceName: "google_dataplex_entry.test_entry_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, + }, + { + Config: testAccDataplexEntry_dataplexEntryUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_dataplex_entry.test_entry_full", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_dataplex_entry.test_entry_full", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, + }, + }, + }) +} + +func testAccDataplexEntry_dataplexEntryFullUpdatePrepare(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataplex_aspect_type" "tf-test-aspect-type-full-one" { + aspect_type_id = "tf-test-aspect-type-full%{random_suffix}-one" + location = "us-central1" + project = "%{project_number}" + + metadata_template = < Date: Fri, 23 May 2025 12:54:45 -0700 Subject: [PATCH 241/884] change yaml lint to only check relevant files for pull_request (#14073) --- .github/workflows/unit-test-mmv1.yml | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unit-test-mmv1.yml b/.github/workflows/unit-test-mmv1.yml index 23c459b06865..4fa97fcccb3f 100644 --- a/.github/workflows/unit-test-mmv1.yml +++ b/.github/workflows/unit-test-mmv1.yml @@ -53,12 +53,31 @@ jobs: git config user.email "magic-modules@google.com" git fetch origin ${{ github.base_ref }} git merge --no-ff origin/${{ github.base_ref }} + - name: Find YAML files to lint + id: yaml_files + run: | + cd repo + if [ "${{ github.event_name }}" == "pull_request" ]; then + # For PRs, get only changed files + 
yamlfiles=$(git diff --name-only origin/${{ github.base_ref }} -- mmv1/products) + if [ ! -z "$yamlfiles" ]; then + echo "yamlfiles=${yamlfiles//$'\n'/ }" >> $GITHUB_OUTPUT + fi + else + # For other events, get all YAML files + yamlfiles=$(find mmv1/products -name "*.yaml" -o -name "*.yml" | tr '\n' ' ') + if [ ! -z "$yamlfiles" ]; then + echo "yamlfiles=$yamlfiles" >> $GITHUB_OUTPUT + fi + fi - name: Install yamllint + if: ${{ !failure() && steps.yaml_files.outputs.yamlfiles != '' }} run: pip install yamllint==1.32.0 pyyaml==6.0.1 pathspec==0.12.1 --no-deps - - name: Lint all YAML files + - name: Lint YAML files + if: ${{ !failure() && steps.yaml_files.outputs.yamlfiles != '' }} run: | cd repo - find mmv1/products -name "*.yaml" -o -name "*.yml" | xargs yamllint -c .yamllint + yamllint -c .yamllint ${{ steps.yaml_files.outputs.yamlfiles }} unit-tests: runs-on: ubuntu-22.04 steps: From be50074796e563394b58c0778531485bcfa3d813 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 23 May 2025 22:31:08 +0200 Subject: [PATCH 242/884] filestore: fixed `initial_replication` config not being send in request body to API (#14023) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/filestore/Instance.yaml | 14 ++++++++-- .../resource_filestore_instance_test.go.tmpl | 27 ++++++++++++++----- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/mmv1/products/filestore/Instance.yaml b/mmv1/products/filestore/Instance.yaml index 8c471e0d35b5..726598df1f53 100644 --- a/mmv1/products/filestore/Instance.yaml +++ b/mmv1/products/filestore/Instance.yaml @@ -370,9 +370,9 @@ properties: type: NestedObject description: | Replication configuration, once set, this cannot be updated. 
- Addtionally this should be specified on the replica instance only, indicating the active as the peer_instance - url_param_only: true + Additionally this should be specified on the replica instance only, indicating the active as the peer_instance immutable: true + ignore_read: true properties: - name: 'role' type: Enum @@ -402,6 +402,11 @@ properties: description: | Output only fields for replication configuration. properties: + - name: 'role' + type: Enum + description: | + The replication role. + output: true - name: 'replicas' type: Array description: | @@ -409,6 +414,11 @@ properties: item_type: type: NestedObject properties: + - name: 'peerInstance' + type: String + description: | + The peer instance. + output: true - name: 'state' type: Enum description: | diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl index d490ec4c2fcb..c776dba31293 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl @@ -465,6 +465,7 @@ func TestAccFilestoreInstance_replication(t *testing.T) { "location_1": "us-east1", "location_2": "us-west1", "tier": "ENTERPRISE", + "project": envvar.GetTestProjectFromEnv(), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -473,9 +474,21 @@ func TestAccFilestoreInstance_replication(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccFilestoreInstance_replication(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_filestore_instance.replica_instance", + "effective_replication.0.replicas.0.peer_instance", + "projects/" + context["project"].(string) + "/locations/us-east1/instances/tf-test-source-instance-" + context["random_suffix"].(string), + ), + resource.TestCheckResourceAttr( + 
"google_filestore_instance.replica_instance", + "effective_replication.0.role", + "STANDBY", + ), + ), }, { - ResourceName: "google_filestore_instance.replica-instance", + ResourceName: "google_filestore_instance.replica_instance", ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: []string{"zone", "initial_replication"}, @@ -486,11 +499,11 @@ func TestAccFilestoreInstance_replication(t *testing.T) { func testAccFilestoreInstance_replication(context map[string]interface{}) string { return acctest.Nprintf(` -resource "google_filestore_instance" "instance" { - name = "tf-test-instance-%{random_suffix}" +resource "google_filestore_instance" "source_instance" { + name = "tf-test-source-instance-%{random_suffix}" location = "%{location_1}" tier = "%{tier}" - description = "An instance created during testing." + description = "An source instance created during testing." file_shares { capacity_gb = 1024 @@ -503,8 +516,8 @@ resource "google_filestore_instance" "instance" { } } -resource "google_filestore_instance" "replica-instance" { - name = "tf-test-instance-%{random_suffix}" +resource "google_filestore_instance" "replica_instance" { + name = "tf-test-replica-instance-%{random_suffix}" location = "%{location_2}" tier = "%{tier}" description = "An replica instance created during testing." 
@@ -521,7 +534,7 @@ resource "google_filestore_instance" "replica-instance" { initial_replication { replicas { - peer_instance = google_filestore_instance.instance.id + peer_instance = google_filestore_instance.source_instance.id } } } From 1c6e4e4005a95b39a52632e58e73e470b97b89e7 Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Fri, 23 May 2025 16:31:46 -0400 Subject: [PATCH 243/884] container: bump pod_autoscaling to GA (#13997) Signed-off-by: drfaust92 --- .../resource_container_cluster.go.tmpl | 18 ++++++------------ .../resource_container_cluster_test.go.tmpl | 3 --- .../docs/r/container_cluster.html.markdown | 2 +- 3 files changed, 7 insertions(+), 16 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 0f8bef478bd6..a51a3f8b2108 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -1597,7 +1597,7 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, - +{{- end }} "pod_autoscaling": { Type: schema.TypeList, Optional: true, @@ -1620,7 +1620,6 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, -{{- end }} "secret_manager_config": { Type: schema.TypeList, Optional: true, @@ -2572,8 +2571,8 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er IpAllocationPolicy: ipAllocationBlock, {{- if ne $.TargetVersionName "ga" }} PodSecurityPolicyConfig: expandPodSecurityPolicyConfig(d.Get("pod_security_policy_config")), - PodAutoscaling: expandPodAutoscaling(d.Get("pod_autoscaling")), {{- end }} + PodAutoscaling: expandPodAutoscaling(d.Get("pod_autoscaling")), SecretManagerConfig: expandSecretManagerConfig(d.Get("secret_manager_config")), Autoscaling: expandClusterAutoscaling(d.Get("cluster_autoscaling"), d), BinaryAuthorization: 
expandBinaryAuthorization(d.Get("binary_authorization")), @@ -3260,11 +3259,10 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("cluster_telemetry", flattenClusterTelemetry(cluster.ClusterTelemetry)); err != nil { return err } - +{{- end }} if err := d.Set("pod_autoscaling", flattenPodAutoscaling(cluster.PodAutoscaling)); err != nil { return err } -{{- end }} if err := d.Set("secret_manager_config", flattenSecretManagerConfig(cluster.SecretManagerConfig)); err != nil { return err @@ -4329,6 +4327,7 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er } log.Printf("[INFO] GKE cluster %s pod security policy config has been updated", d.Id()) } +{{- end }} if d.HasChange("pod_autoscaling") { c := d.Get("pod_autoscaling") @@ -4357,8 +4356,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s horizontal pod autoscaling profile has been updated", d.Id()) } -{{- end }} - if d.HasChange("secret_manager_config") { c := d.Get("secret_manager_config") req := &container.UpdateClusterRequest{ @@ -5946,7 +5943,7 @@ func expandPodSecurityPolicyConfig(configured interface{}) *container.PodSecurit ForceSendFields: []string{"Enabled"}, } } - +{{- end }} func expandPodAutoscaling(configured interface{}) *container.PodAutoscaling { if configured == nil { return nil @@ -5968,7 +5965,6 @@ func expandPodAutoscaling(configured interface{}) *container.PodAutoscaling { return podAutoscaling } -{{- end }} func expandSecretManagerConfig(configured interface{}) *container.SecretManagerConfig { l := configured.([]interface{}) @@ -6940,7 +6936,7 @@ func flattenPodSecurityPolicyConfig(c *container.PodSecurityPolicyConfig) []map[ }, } } - +{{ end }} func flattenPodAutoscaling(c *container.PodAutoscaling) []map[string]interface{} { config := make([]map[string]interface{}, 0, 1) @@ -6954,8 +6950,6 @@ func flattenPodAutoscaling(c *container.PodAutoscaling) 
[]map[string]interface{} return config } -{{ end }} - func flattenSecretManagerConfig(c *container.SecretManagerConfig) []map[string]interface{} { if c == nil { return []map[string]interface{}{ diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 8ae0024dc368..8ed7ffe556ea 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -869,7 +869,6 @@ func TestUnitContainerCluster_Rfc3339TimeDiffSuppress(t *testing.T) { } } -{{ if ne $.TargetVersionName `ga` -}} func TestAccContainerCluster_withPodAutoscaling(t *testing.T) { t.Parallel() @@ -927,8 +926,6 @@ resource "google_container_cluster" "pod_autoscaling_config" { } `, clusterName, networkName, subnetworkName, hpaProfile) } -{{- end }} - func testAccContainerCluster_enableMultiNetworking(clusterName string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index a338c933f32a..6c9267e25630 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -293,7 +293,7 @@ region are guaranteed to support the same version. [PodSecurityPolicy](https://cloud.google.com/kubernetes-engine/docs/how-to/pod-security-policies) feature. Structure is [documented below](#nested_pod_security_policy_config). -* `pod_autoscaling` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Configuration for the +* `pod_autoscaling` - (Optional) Configuration for the Structure is [documented below](#nested_pod_autoscaling). 
* `secret_manager_config` - (Optional) Configuration for the From a9fbd62a12a03956e37a2318184aec51006aae8f Mon Sep 17 00:00:00 2001 From: James Alseth Date: Fri, 23 May 2025 14:34:58 -0700 Subject: [PATCH 244/884] fix(tgc): Correctly handle ancestry for resources with the "parent" field (#14071) Signed-off-by: James Alseth --- .../tgc/ancestrymanager/ancestrymanager.go | 19 +++---- .../ancestrymanager/ancestrymanager_test.go | 51 +++++++++++++++++-- ..._access_context_manager_access_policy.json | 6 +-- 3 files changed, 57 insertions(+), 19 deletions(-) diff --git a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go index 0a77bc35bab4..6f0bb74a2372 100644 --- a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go @@ -162,15 +162,6 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return nil, fmt.Errorf("organization id not found in terraform data") } key = orgKey - case "iam.googleapis.com/Role": - // google_organization_iam_custom_role or google_project_iam_custom_role - if orgOK { - key = orgKey - } else if projectKey != "" { - key = projectKey - } else { - return []string{unknownOrg}, nil - } case "cloudresourcemanager.googleapis.com/Project", "cloudbilling.googleapis.com/ProjectBillingInfo": // for google_project and google_project_iam resources var ancestors []string @@ -205,10 +196,16 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc key = projectKey default: - if projectKey == "" { + switch { + case orgOK: + key = orgKey + case folderOK: + key = folderKey + case projectKey != "": + key = projectKey + default: return []string{unknownOrg}, nil } - key = projectKey } return m.getAncestorsWithCache(key) } diff --git a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go index 
dc88819d6108..fe3074ff6bf5 100644 --- a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go +++ b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go @@ -30,10 +30,12 @@ func TestGetAncestors(t *testing.T) { // Setup a simple test server to mock the response of resource manager. v3Responses := map[string]*crmv3.Project{ - "folders/bar": {Name: "folders/bar", Parent: "organizations/qux"}, - "organizations/qux": {Name: "organizations/qux", Parent: ""}, - "folders/bar2": {Name: "folders/bar2", Parent: "organizations/qux2"}, - "organizations/qux2": {Name: "organizations/qux2", Parent: ""}, + "folders/bar": {Name: "folders/bar", Parent: "organizations/qux"}, + "organizations/qux": {Name: "organizations/qux", Parent: ""}, + "folders/bar2": {Name: "folders/bar2", Parent: "organizations/qux2"}, + "organizations/qux2": {Name: "organizations/qux2", Parent: ""}, + "organizations/12345": {Name: "organizations/12345"}, + "folders/67890": {Name: "folders/67890", Parent: "organizations/12345"}, } v1Responses := map[string][]*crmv1.Ancestor{ ownerProject: { @@ -51,6 +53,13 @@ func TestGetAncestors(t *testing.T) { {ResourceId: &crmv1.ResourceId{Id: "bar2", Type: "folder"}}, {ResourceId: &crmv1.ResourceId{Id: "qux2", Type: "organization"}}, }, + "organizations/12345": { + {ResourceId: &crmv1.ResourceId{Id: "12345", Type: "organization"}}, + }, + "folders/67890": { + {ResourceId: &crmv1.ResourceId{Id: "67890", Type: "folder"}}, + {ResourceId: &crmv1.ResourceId{Id: "12345", Type: "organization"}}, + }, } ts := newTestServer(t, v1Responses, v3Responses) @@ -65,7 +74,9 @@ func TestGetAncestors(t *testing.T) { } entries := map[string]string{ - ownerProject: ownerAncestryPath, + ownerProject: ownerAncestryPath, + "organizations/12345": "organizations/12345", + "folders/67890": "organizations/12345/folders/67890", } p := provider.Provider() @@ -500,6 +511,36 @@ func TestGetAncestors(t *testing.T) { want: []string{"organizations/unknown"}, wantParent: 
"//cloudresourcemanager.googleapis.com/organizations/unknown", }, + { + name: "Org-level CuOP set with parent field", + data: tfdata.NewFakeResourceData( + "google_org_policy_custom_constraint", + p.ResourcesMap["google_org_policy_custom_constraint"].Schema, + map[string]interface{}{ + "parent": "organizations/12345", + }, + ), + asset: &resources.Asset{ + Type: "orgpolicy.googleapis.com/CustomConstraint", + }, + want: []string{"organizations/12345"}, + wantParent: "//cloudresourcemanager.googleapis.com/organizations/12345", + }, + { + name: "Folder-level Firewall Policy", + data: tfdata.NewFakeResourceData( + "google_compute_firewall_policy", + p.ResourcesMap["google_compute_firewall_policy"].Schema, + map[string]interface{}{ + "parent": "folders/67890", + }, + ), + asset: &resources.Asset{ + Type: "compute.googleapis.com/FirewallPolicy", + }, + want: []string{"folders/67890", "organizations/12345"}, + wantParent: "//cloudresourcemanager.googleapis.com/folders/67890", + }, } for _, c := range cases { for _, offline := range []bool{true, false} { diff --git a/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json b/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json index fe3b4aae1145..476366431b5f 100644 --- a/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json +++ b/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json @@ -2,12 +2,12 @@ { "name": "//accesscontextmanager.googleapis.com/accessPolicies/placeholder-BpLnfgDs", "asset_type": "accesscontextmanager.googleapis.com/AccessPolicy", - "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", + "ancestry_path": "organizations/{{.OrgID}}", "resource": { "version": "v1", "discovery_document_uri": "https://www.googleapis.com/discovery/v1/apis/accesscontextmanager/v1/rest", "discovery_name": "AccessPolicy", - "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", + 
"parent": "//cloudresourcemanager.googleapis.com/organizations/{{.OrgID}}", "data": { "parent": "organizations/{{.OrgID}}", "scopes": [ @@ -17,4 +17,4 @@ } } } -] \ No newline at end of file +] From 4dfedbe831718020bff7cc0e3f0d2cfb0e2f0da7 Mon Sep 17 00:00:00 2001 From: Madhura Phadnis Date: Fri, 23 May 2025 19:11:00 -0700 Subject: [PATCH 245/884] Update resource_network_security_client_tls_policy_test.go.tmpl (#14089) --- .../resource_network_security_client_tls_policy_test.go.tmpl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl index 45106b1dbdec..e22f5dbeeb99 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl @@ -84,11 +84,6 @@ func testAccNetworkSecurityClientTlsPolicy_update(clientTlsPolicyName string) st target_uri = "unix:mypath1" } } - server_validation_ca { - grpc_endpoint { - target_uri = "unix:mypath2" - } - } } `, clientTlsPolicyName) } From 3c21c38a9cd54c7700a4ee1e0d77c3b2336553fd Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Tue, 27 May 2025 16:16:46 +0000 Subject: [PATCH 246/884] Add documentation for `md5hexhash `field. 
(#14105) --- .../website/docs/r/storage_bucket_object.html.markdown | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 85b67ab8b973..36e1e4f33259 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -6,11 +6,11 @@ description: |- # google_storage_bucket_object -Creates a new object inside an existing bucket in Google cloud storage service (GCS). +Creates a new object inside an existing bucket in Google cloud storage service (GCS). [ACLs](https://cloud.google.com/storage/docs/access-control/lists) can be applied using the `google_storage_object_acl` resource. - For more information see -[the official documentation](https://cloud.google.com/storage/docs/key-terms#objects) -and + For more information see +[the official documentation](https://cloud.google.com/storage/docs/key-terms#objects) +and [API](https://cloud.google.com/storage/docs/json_api/v1/objects). A datasource can be used to retrieve the data of the stored object: @@ -121,6 +121,8 @@ exported: * `media_link` - (Computed) A url reference to download this object. 
+* `md5hexhash` - (Computed) Hex value of md5hash` + ## Timeouts This resource provides the following From 03c008f7dd4faeab9c59229d2e0e4146246b5997 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 27 May 2025 18:43:35 +0200 Subject: [PATCH 247/884] docs: fix formatting mistake in `google_compute_security_policy` documentation (#14109) --- .../website/docs/r/compute_security_policy.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown index 9b1aaa175604..7debcf485cfd 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown @@ -390,7 +390,7 @@ The following arguments are supported: * `interval_sec` - (Required) Interval over which the threshold is computed. -* The `exceed_redirect_options` block supports: +The `exceed_redirect_options` block supports: * `type` - (Required) Type of the redirect action. 
From e2d2049fe24c148334a1b1baa4b7737b58a3e219 Mon Sep 17 00:00:00 2001 From: YashTayal04 <47032845+YashTayal04@users.noreply.github.com> Date: Tue, 27 May 2025 23:11:41 +0530 Subject: [PATCH 248/884] Add fields for custom ranges for interconnect attachment (#14106) --- .../compute/InterconnectAttachment.yaml | 35 +++++++++++++++++++ ...erconnect_attachment_custom_ranges.tf.tmpl | 29 +++++++++++++++ 2 files changed, 64 insertions(+) create mode 100644 mmv1/templates/terraform/examples/compute_interconnect_attachment_custom_ranges.tf.tmpl diff --git a/mmv1/products/compute/InterconnectAttachment.yaml b/mmv1/products/compute/InterconnectAttachment.yaml index 140d2d2a0d4f..b2f449feca52 100644 --- a/mmv1/products/compute/InterconnectAttachment.yaml +++ b/mmv1/products/compute/InterconnectAttachment.yaml @@ -63,6 +63,13 @@ examples: address_name: 'test-address' router_name: 'test-router' network_name: 'test-network' + - name: 'compute_interconnect_attachment_custom_ranges' + primary_resource_id: 'custom-ranges-interconnect-attachment' + vars: + interconnect_attachment_name: 'test-custom-ranges-interconnect-attachment' + router_name: 'test-router' + network_name: 'test-network' + min_version: beta parameters: - name: 'region' type: ResourceRef @@ -354,3 +361,31 @@ properties: You must always provide an up-to-date fingerprint hash in order to update or change labels, otherwise the request will fail with error 412 conditionNotMet. output: true + - name: 'candidateCloudRouterIpAddress' + type: String + description: | + Single IPv4 address + prefix length to be configured on the cloud router interface for this + interconnect attachment. Example: 203.0.113.1/29 + immutable: true + min_version: beta + - name: 'candidateCustomerRouterIpAddress' + type: String + description: | + Single IPv4 address + prefix length to be configured on the customer router interface for this + interconnect attachment. 
Example: 203.0.113.2/29 + immutable: true + min_version: beta + - name: 'candidateCloudRouterIpv6Address' + type: String + description: | + Single IPv6 address + prefix length to be configured on the cloud router interface for this + interconnect attachment. Example: 2001:db8::1/125 + immutable: true + min_version: beta + - name: 'candidateCustomerRouterIpv6Address' + type: String + description: | + Single IPv6 address + prefix length to be configured on the customer router interface for this + interconnect attachment. Example: 2001:db8::2/125 + immutable: true + min_version: beta diff --git a/mmv1/templates/terraform/examples/compute_interconnect_attachment_custom_ranges.tf.tmpl b/mmv1/templates/terraform/examples/compute_interconnect_attachment_custom_ranges.tf.tmpl new file mode 100644 index 000000000000..135350dde7b0 --- /dev/null +++ b/mmv1/templates/terraform/examples/compute_interconnect_attachment_custom_ranges.tf.tmpl @@ -0,0 +1,29 @@ +resource "google_compute_interconnect_attachment" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "interconnect_attachment_name"}}" + edge_availability_domain = "AVAILABILITY_DOMAIN_1" + type = "PARTNER" + router = google_compute_router.foobar.id + mtu = 1500 + stack_type = "IPV4_IPV6" + labels = { mykey = "myvalue" } + candidate_cloud_router_ip_address = "192.169.0.1/29" + candidate_customer_router_ip_address = "192.169.0.2/29" + candidate_cloud_router_ipv6_address = "748d:2f23:6651:9455:828b:ca81:6fe0:fed1/125" + candidate_customer_router_ipv6_address = "748d:2f23:6651:9455:828b:ca81:6fe0:fed2/125" + provider = google-beta +} + +resource "google_compute_router" "foobar" { + name = "{{index $.Vars "router_name"}}" + network = google_compute_network.foobar.name + bgp { + asn = 16550 + } + provider = google-beta +} + +resource "google_compute_network" "foobar" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false + provider = google-beta +} From 449c9ce6d06391b82e03bb4d4b820800b2bffde6 Mon Sep 17 
00:00:00 2001 From: Misha Efimov Date: Tue, 27 May 2025 16:24:54 -0400 Subject: [PATCH 249/884] compute: Add example for `log_config` `optional_mode` and `optional_fields` in `google_compute_backend_service`. (#14083) Signed-off-by: Misha Efimov --- mmv1/products/compute/BackendService.yaml | 1 + .../examples/backend_service_custom_metrics.tf.tmpl | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 33a8c8e1bb01..20ebcec22508 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -1553,6 +1553,7 @@ properties: This field can only be specified if logging is enabled for this backend service and "logConfig.optionalMode" was set to CUSTOM. Contains a list of optional fields you want to include in the logs. For example: serverInstance, serverGkeDetails.cluster, serverGkeDetails.pod.podNamespace + For example: orca_load_report, tls.protocol item_type: type: String - name: 'serviceLbPolicy' diff --git a/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl index 262892bb9faa..4ebba9fe80d6 100644 --- a/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl @@ -37,6 +37,11 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { dry_run = false } } + log_config { + enable = true + optional_mode = "CUSTOM" + optional_fields = [ "orca_load_report", "tls.protocol" ] + } } resource "google_compute_health_check" "default" { From 5f89ba2abc3c481f983abcbd85b4bb614beb04a9 Mon Sep 17 00:00:00 2001 From: James Alseth Date: Tue, 27 May 2025 15:01:59 -0700 Subject: [PATCH 250/884] Add support for orgpolicy.googleapis.com/CustomConstraint to TGC (#14069) Signed-off-by: James Alseth --- .../tgc/resource_converters.go.tmpl | 1 + 
.../example_org_policy_custom_constraint.json | 20 ++++++++++ .../example_org_policy_custom_constraint.tf | 38 +++++++++++++++++++ 3 files changed, 59 insertions(+) create mode 100644 mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json create mode 100644 mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf diff --git a/mmv1/third_party/tgc/resource_converters.go.tmpl b/mmv1/third_party/tgc/resource_converters.go.tmpl index 6b30091d1e27..e3f240ae5137 100644 --- a/mmv1/third_party/tgc/resource_converters.go.tmpl +++ b/mmv1/third_party/tgc/resource_converters.go.tmpl @@ -108,6 +108,7 @@ func ResourceConverters() map[string][]cai.ResourceConverter { "google_datastream_private_connection": {datastream.ResourceConverterDatastreamPrivateConnection()}, "google_datastream_stream": {datastream.ResourceConverterDatastreamStream()}, "google_firebase_project": {firebase.ResourceConverterFirebaseProject()}, + "google_org_policy_custom_constraint": {orgpolicy.ResourceConverterOrgPolicyCustomConstraint()}, "google_org_policy_policy": {resourcemanager.ResourceConverterOrgPolicyPolicy()}, "google_redis_instance": {redis.ResourceConverterRedisInstance()}, "google_spanner_database": {spanner.ResourceConverterSpannerDatabase()}, diff --git a/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json b/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json new file mode 100644 index 000000000000..75b160720b47 --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json @@ -0,0 +1,20 @@ +[ + { + "name": "//orgpolicy.googleapis.com/organizations/12345/customConstraints/custom.disableGkeAutoUpgrade", + "asset_type": "orgpolicy.googleapis.com/CustomConstraint", + "ancestry_path": "organizations/12345", + "resource": { + "version": "v2", + "discovery_document_uri": "https://www.googleapis.com/discovery/v1/apis/orgpolicy/v2/rest", + "discovery_name": "CustomConstraint", + "parent": 
"//cloudresourcemanager.googleapis.com/organizations/12345", + "data": { + "name": "organizations/12345/customConstraints/custom.disableGkeAutoUpgrade", + "actionType": "ALLOW", + "condition": "resource.management.autoUpgrade == false", + "methodTypes": ["CREATE", "UPDATE"], + "resourceTypes": ["container.googleapis.com/NodePool"] + } + } + } +] diff --git a/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf b/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf new file mode 100644 index 000000000000..b2e37e252a3d --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf @@ -0,0 +1,38 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +terraform { + required_providers { + google = { + source = "hashicorp/google-beta" + version = "~> {{.Provider.version}}" + } + } +} + +provider "google" { + {{if .Provider.credentials }}credentials = "{{.Provider.credentials}}"{{end}} +} + +resource "google_org_policy_custom_constraint" "constraint" { + name = "custom.disableGkeAutoUpgrade" + parent = "organizations/12345" + + action_type = "ALLOW" + condition = "resource.management.autoUpgrade == false" + method_types = ["CREATE", "UPDATE"] + resource_types = ["container.googleapis.com/NodePool"] +} From aaead4a3f42b2064cece90ce4c8ad8e66373c100 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Tue, 27 May 2025 22:33:13 +0000 Subject: [PATCH 251/884] Fix default disk type in docs (#14096) --- .../terraform/website/docs/r/container_cluster.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 6c9267e25630..5bfc0c6939d2 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -857,7 +857,7 @@ The `master_authorized_networks_config.cidr_blocks` block supports: in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard' + (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-balanced' * `enable_confidential_storage` - (Optional) Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. 
From d0ad2efbaed9c4e862697767b4749278ffd7497e Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 27 May 2025 17:04:38 -0700 Subject: [PATCH 252/884] tgc-revival: HclWriteBlocks function modification (#14112) --- mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go index 96b35171ead9..1d3aa8a8ad15 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go @@ -3,7 +3,6 @@ package models import ( "fmt" - "github.com/hashicorp/hcl/hcl/printer" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/zclconf/go-cty/cty" ) @@ -20,12 +19,15 @@ func HclWriteBlocks(blocks []*TerraformResourceBlock) ([]byte, error) { for _, resourceBlock := range blocks { hclBlock := rootBody.AppendNewBlock("resource", resourceBlock.Labels) + resourceBody := hclBlock.Body() + resourceBody.SetAttributeRaw("provider", hclwrite.TokensForIdentifier("google-beta")) + if err := hclWriteBlock(resourceBlock.Value, hclBlock.Body()); err != nil { return nil, err } } - return printer.Format(f.Bytes()) + return hclwrite.Format(f.Bytes()), nil } func hclWriteBlock(val cty.Value, body *hclwrite.Body) error { @@ -49,7 +51,7 @@ func hclWriteBlock(val cty.Value, body *hclwrite.Body) error { return err } case objValType.IsCollectionType(): - if objVal.LengthInt() == 0 { + if objVal.LengthInt() == 0 && !objValType.IsSetType() { continue } // Presumes map should not contain object type. 
From f5c1a672fb556b9dfae7b75ebd2365402254bc74 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 28 May 2025 17:50:52 +0200 Subject: [PATCH 253/884] feat: implemented `google_bigquery_datasets` data source (#14066) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_google_bigquery_datasets.go | 161 ++++++++++++++++++ ...ta_source_google_bigquery_datasets_test.go | 106 ++++++++++++ .../docs/d/bigquery_datasets.html.markdown | 39 +++++ 4 files changed, 307 insertions(+) create mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go create mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index b5a86ff48b5b..1f1a3612f4de 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -47,6 +47,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_bigquery_table": bigquery.DataSourceGoogleBigQueryTable(), "google_bigquery_tables": bigquery.DataSourceGoogleBigQueryTables(), "google_bigquery_dataset": bigquery.DataSourceGoogleBigqueryDataset(), + "google_bigquery_datasets": bigquery.DataSourceGoogleBigqueryDatasets(), "google_bigquery_default_service_account": bigquery.DataSourceGoogleBigqueryDefaultServiceAccount(), "google_certificate_manager_certificates": certificatemanager.DataSourceGoogleCertificateManagerCertificates(), "google_certificate_manager_certificate_map": certificatemanager.DataSourceGoogleCertificateManagerCertificateMap(), diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go 
b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go new file mode 100644 index 000000000000..1bbc9c66d5f0 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go @@ -0,0 +1,161 @@ +package bigquery + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleBigqueryDatasets() *schema.Resource { + dsSchema := map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Description: "The ID of the project in which the datasets are located. If it is not provided, the provider project is used.", + }, + "datasets": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "labels": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: "The labels associated with this dataset. You can use these to organize and group your datasets.", + }, + "friendly_name": { + Type: schema.TypeString, + Computed: true, + Description: "A user-friendly name for the dataset.", + }, + "dataset_id": { + Type: schema.TypeString, + Computed: true, + Description: "A unique ID for this dataset, without the project name. 
The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_).", + }, + "location": { + Type: schema.TypeString, + Computed: true, + Description: "The geographic location where the dataset resides.", + }, + }, + }, + }, + } + + return &schema.Resource{ + Read: DataSourceGoogleBigQueryDatasetsRead, + Schema: dsSchema, + } +} + +func DataSourceGoogleBigQueryDatasetsRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + + if err != nil { + return fmt.Errorf("Error fetching project: %s", err) + } + + params := make(map[string]string) + datasets := make([]map[string]interface{}, 0) + + for { + url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets") + if err != nil { + return err + } + + url, err = transport_tpg.AddQueryParams(url, params) + if err != nil { + return err + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return fmt.Errorf("Error retrieving datasets: %s", err) + } + + pageDatasets := flattenDataSourceGoogleBigQueryDatasetsList(res["datasets"]) + datasets = append(datasets, pageDatasets...) 
+ + pToken, ok := res["nextPageToken"] + if ok && pToken != nil && pToken.(string) != "" { + params["pageToken"] = pToken.(string) + } else { + break + } + } + + if err := d.Set("datasets", datasets); err != nil { + return fmt.Errorf("Error retrieving datasets: %s", err) + } + + id := fmt.Sprintf("projects/%s/datasets", project) + d.SetId(id) + + return nil +} + +func flattenDataSourceGoogleBigQueryDatasetsList(res interface{}) []map[string]interface{} { + if res == nil { + return make([]map[string]interface{}, 0) + } + + ls := res.([]interface{}) + + datasets := make([]map[string]interface{}, 0, len(ls)) + + for _, raw := range ls { + output := raw.(map[string]interface{}) + + var mLabels map[string]interface{} + var mDatasetID string + var mFriendlyName string + var mLocation string + + if oLabels, ok := output["labels"].(map[string]interface{}); ok { + mLabels = oLabels + } else { + mLabels = make(map[string]interface{}) // Initialize as an empty map if labels are missing + } + + if oFriendlyName, ok := output["friendlyName"].(string); ok { + mFriendlyName = oFriendlyName + } + + if oDatasetReference, ok := output["datasetReference"].(map[string]interface{}); ok { + if datasetID, ok := oDatasetReference["datasetId"].(string); ok { + mDatasetID = datasetID + } + } + + if oLocation, ok := output["location"].(string); ok { + mLocation = oLocation + } + + datasets = append(datasets, map[string]interface{}{ + "labels": mLabels, + "friendly_name": mFriendlyName, + "dataset_id": mDatasetID, + "location": mLocation, + }) + } + + return datasets +} diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go new file mode 100644 index 000000000000..74146ee7a310 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go @@ -0,0 +1,106 @@ +package bigquery_test + +import ( + "fmt" + "regexp" 
+ "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataSourceGoogleBigqueryDatasets_basic(t *testing.T) { + t.Parallel() + + randomSuffix := acctest.RandString(t, 10) + projectID := envvar.GetTestProjectFromEnv() + + expectedDatasetFoo := map[string]string{ + "dataset_id": fmt.Sprintf("tf_test_foo_%s", randomSuffix), + "friendly_name": "Foo", + "location": "US", + "labels.%": "1", + "labels.goog-terraform-provisioned": "true", + } + + expectedDatasetBar := map[string]string{ + "dataset_id": fmt.Sprintf("tf_test_bar_%s", randomSuffix), + "friendly_name": "bar", + "location": "EU", + "labels.%": "1", + "labels.goog-terraform-provisioned": "true", + } + + nonExpectedDataset := map[string]string{ + "dataset_id": "non_existent_dataset", + "friendly_name": "I do not exist, and should throw an error", + "location": "NON_EXIST", + "labels.%": "8", + "labels.goog-terraform-provisioned": "Nah", + } + + context := map[string]interface{}{ + "random_suffix": randomSuffix, + "project_id": projectID, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBigqueryDatasets_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckTypeSetElemNestedAttrs("data.google_bigquery_datasets.example", "datasets.*", expectedDatasetFoo), + resource.TestCheckTypeSetElemNestedAttrs("data.google_bigquery_datasets.example", "datasets.*", expectedDatasetBar), + // this check is intended to throw an error, see ExpectError below + resource.TestCheckTypeSetElemNestedAttrs("data.google_bigquery_datasets.example", "datasets.*", nonExpectedDataset), + ), + ExpectError: regexp.MustCompile(".*no TypeSet element \"datasets.*\", with nested 
attrs.*non_existent_dataset.*I do not exist, and should throw an error.*"), + }, + }, + }) +} + +func testAccDataSourceGoogleBigqueryDatasets_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_dataset" "foo" { + dataset_id = "tf_test_foo_%{random_suffix}" + friendly_name = "Foo" + description = "This is a test description" + location = "US" + default_table_expiration_ms = 3600000 + + access { + role = "OWNER" + user_by_email = google_service_account.bqowner.email + } +} + +resource "google_bigquery_dataset" "bar" { + dataset_id = "tf_test_bar_%{random_suffix}" + friendly_name = "bar" + description = "This is a test description" + location = "EU" + default_table_expiration_ms = 3600000 + + access { + role = "OWNER" + user_by_email = google_service_account.bqowner.email + } +} + +resource "google_service_account" "bqowner" { + account_id = "tf-test-%{random_suffix}" +} + +data "google_bigquery_datasets" "example" { + project = "%{project_id}" + depends_on = [ + google_bigquery_dataset.foo, + google_bigquery_dataset.bar, + ] +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown b/mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown new file mode 100644 index 000000000000..0079be5641bc --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown @@ -0,0 +1,39 @@ +--- +subcategory: "BigQuery" +description: |- + A datasource to retrieve a list of datasets in a project. +--- + +# `google_bigquery_datasets` + +Get a list of datasets in a GCP project. For more information see +the [official documentation](https://cloud.google.com/bigquery/docs) +and [API](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list). 
+ +## Example Usage + +```hcl +data "google_bigquery_datasets" "datasets" { + project = "my-project" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + +## Attributes Reference + +The following attributes are exported: + +* `datasets` - A list of all retrieved BigQuery datasets. Structure is [defined below](#nested_datasets). + +The `datasets` block supports: + +* `labels` - User-provided dataset labels, in key/value pairs. +* `friendly_name` - The friendly name of the dataset. +* `dataset_id` - The id of the dataset. +* `location` - The geographic location of the dataset. From 6f8a91f79b2d20fa2f7def8e86b6b8252d5d6a1b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 28 May 2025 09:32:23 -0700 Subject: [PATCH 254/884] tgc-revival: cai2hcl converter for compute instance (#14111) --- .../tgc_next/pkg/cai2hcl/convert.go | 2 - .../services/compute/compute_instance.go | 69 +++++++++++++++---- 2 files changed, 57 insertions(+), 14 deletions(-) diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go index d9a769642a7a..22fae6b11c2d 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go @@ -37,7 +37,5 @@ func Convert(assets []caiasset.Asset, options *Options) ([]byte, error) { t, err := models.HclWriteBlocks(allBlocks) - options.ErrorLogger.Debug(string(t)) - return t, err } diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go index ff6b00ac9260..3edeba398e2e 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go @@ -1,6 +1,7 @@ 
package compute import ( + "encoding/json" "fmt" "strings" @@ -103,7 +104,14 @@ func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*m hclData["confidential_instance_config"] = flattenConfidentialInstanceConfig(instance.ConfidentialInstanceConfig) hclData["advanced_machine_features"] = flattenAdvancedMachineFeatures(instance.AdvancedMachineFeatures) hclData["reservation_affinity"] = flattenReservationAffinityTgc(instance.ReservationAffinity) - hclData["key_revocation_action_type"] = instance.KeyRevocationActionType + hclData["key_revocation_action_type"] = strings.TrimSuffix(instance.KeyRevocationActionType, "_ON_KEY_REVOCATION") + hclData["instance_encryption_key"] = flattenComputeInstanceEncryptionKey(instance.InstanceEncryptionKey) + + partnerMetadata, err := flattenPartnerMetadata(instance.PartnerMetadata) + if err != nil { + return nil, fmt.Errorf("Error parsing partner metadata: %s", err) + } + hclData["partner_metadata"] = partnerMetadata // TODO: convert details from the boot disk assets (separate disk assets) into initialize_params in cai2hcl? // It needs to integrate the disk assets into instance assets with the resolver. @@ -140,6 +148,18 @@ func flattenDisks(disks []*compute.AttachedDisk, instanceName string) ([]map[str // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 di["kms_key_self_link"] = strings.Split(disk.DiskEncryptionKey.KmsKeyName, "/cryptoKeyVersions")[0] } + + if key.RsaEncryptedKey != "" { + di["disk_encryption_key_rsa"] = key.RsaEncryptedKey + } + + if key.RawKey != "" { + di["disk_encryption_key_raw"] = key.RawKey + } + + if key.KmsKeyServiceAccount != "" { + di["disk_encryption_service_account"] = key.KmsKeyServiceAccount + } } attachedDisks = append(attachedDisks, di) } @@ -172,22 +192,34 @@ func flattenBootDisk(disk *compute.AttachedDisk, instanceName string) []map[stri } if disk.DiskEncryptionKey != nil { + // disk_encryption_key_sha256 is computed, so it is not converted. 
+ if disk.DiskEncryptionKey.KmsKeyName != "" { // The response for crypto keys often includes the version of the key which needs to be removed // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 result["kms_key_self_link"] = strings.Split(disk.DiskEncryptionKey.KmsKeyName, "/cryptoKeyVersions")[0] } - } - // Don't convert the field with the default value - if disk.Interface != "SCSI" { - result["interface"] = disk.Interface - } + if disk.DiskEncryptionKey.KmsKeyServiceAccount != "" { + // The response for crypto keys often includes the version of the key which needs to be removed + // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 + result["disk_encryption_service_account"] = disk.DiskEncryptionKey.KmsKeyServiceAccount + } + + if disk.DiskEncryptionKey.RsaEncryptedKey != "" { + result["disk_encryption_key_rsa"] = disk.DiskEncryptionKey.RsaEncryptedKey + } - if !strings.HasSuffix(disk.Source, instanceName) { - result["source"] = tpgresource.ConvertSelfLinkToV1(disk.Source) + if disk.DiskEncryptionKey.RawKey != "" { + result["disk_encryption_key_raw"] = disk.DiskEncryptionKey.RawKey + } } + result["interface"] = disk.Interface + // "source" field is converted and "initialize_params" is not converted as these two fields conflict with each other. 
+ result["source"] = tpgresource.ConvertSelfLinkToV1(disk.Source) + result["guest_os_features"] = flattenComputeInstanceGuestOsFeatures(disk.GuestOsFeatures) + if len(result) == 0 { return nil } @@ -204,10 +236,23 @@ func flattenScratchDisk(disk *compute.AttachedDisk) map[string]interface{} { result["device_name"] = disk.DeviceName } - // Don't convert the field with the default value - if disk.Interface != "SCSI" { - result["interface"] = disk.Interface - } + result["interface"] = disk.Interface return result } + +func flattenPartnerMetadata(partnerMetadata map[string]compute.StructuredEntries) (map[string]string, error) { + partnerMetadataMap := make(map[string]string) + for key, value := range partnerMetadata { + + jsonString, err := json.Marshal(&value) + if err != nil { + return nil, err + } + if value.Entries != nil { + partnerMetadataMap[key] = string(jsonString) + } + + } + return partnerMetadataMap, nil +} From 1653c80943837f76e79e3af1cca9d7526a6ef93e Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 28 May 2025 11:45:09 -0700 Subject: [PATCH 255/884] Removed post-create setting of identity fields that aren't computed or in id_format (#13836) --- mmv1/templates/terraform/resource.go.tmpl | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/mmv1/templates/terraform/resource.go.tmpl b/mmv1/templates/terraform/resource.go.tmpl index e573ad4523a6..bc8420d1633d 100644 --- a/mmv1/templates/terraform/resource.go.tmpl +++ b/mmv1/templates/terraform/resource.go.tmpl @@ -309,7 +309,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{if and $.GetAsync ($.GetAsync.Allow "Create") -}} {{ if ($.GetAsync.IsA "OpAsync") -}} -{{ if and $.GetAsync.Result.ResourceInsideResponse (or $.GetIdentity $.HasComputedIdFormatFields) -}} +{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasComputedIdFormatFields -}} // Use the resource in the operation response to populate // identity fields and 
d.Id() before read var opRes map[string]interface{} @@ -378,17 +378,6 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{- end }} {{- end }}{{/* prop is potentially computed */}} {{- end }}{{/* range */}} - {{- else}} -{{- /* - Temporarily keeping these resources the same - but setting properties here should be unnecessary because the impacted fields aren't expected to change as a result of the API request. Will remove in a separate step for clarity. - */}} -{{- range $prop := $.GettableProperties }} -{{- if $.IsInIdentity $prop }} - if err := d.Set("{{ underscore $prop.Name -}}", flatten{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(opRes["{{ $prop.ApiName -}}"], d, config)); err != nil { - return err - } -{{- end}} -{{- end}} {{- end}} // This may have caused the ID to update - update it if so. From 6ae2602d81e7ebf4d5dae309ab82a76a1cf0451a Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Wed, 28 May 2025 13:03:28 -0700 Subject: [PATCH 256/884] Initial commit for service account impersonation in different universe (#14064) --- .../universe/universe_domain_storage_test.go | 9 +++--- .../terraform/transport/config.go.tmpl | 32 +++++++++++++++++-- 2 files changed, 34 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go b/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go index 259fe944968c..c359f0a7b1af 100644 --- a/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go +++ b/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go @@ -18,6 +18,7 @@ func TestAccUniverseDomainStorage(t *testing.T) { universeDomain := envvar.GetTestUniverseDomainFromEnv(t) bucketName := acctest.TestBucketName(t) + region := envvar.GetTestRegionFromEnv() acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { 
acctest.AccTestPreCheck(t) }, @@ -25,13 +26,13 @@ func TestAccUniverseDomainStorage(t *testing.T) { CheckDestroy: testAccStorageBucketDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccUniverseDomain_bucket(universeDomain, bucketName), + Config: testAccUniverseDomain_bucket(universeDomain, bucketName, region), }, }, }) } -func testAccUniverseDomain_bucket(universeDomain string, bucketName string) string { +func testAccUniverseDomain_bucket(universeDomain string, bucketName string, region string) string { return fmt.Sprintf(` provider "google" { universe_domain = "%s" @@ -39,7 +40,7 @@ provider "google" { resource "google_storage_bucket" "foo" { name = "%s" - location = "US" + location = "%s" } data "google_storage_bucket" "bar" { @@ -48,7 +49,7 @@ data "google_storage_bucket" "bar" { google_storage_bucket.foo, ] } -`, universeDomain, bucketName) +`, universeDomain, bucketName, region) } func testAccStorageBucketDestroyProducer(t *testing.T) func(s *terraform.State) error { diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index aeeeeb21c51d..fdc75f177038 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -14,6 +14,9 @@ import ( "time" "os" + "cloud.google.com/go/auth/credentials" + "cloud.google.com/go/auth/credentials/impersonate" + "cloud.google.com/go/auth/oauth2adapt" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/schema/validator" @@ -1301,10 +1304,33 @@ func (c *Config) GetCredentials(clientScopes []string, initialCredentialsOnly bo } if c.ImpersonateServiceAccount != "" && !initialCredentialsOnly { - opts := []option.ClientOption{option.WithCredentialsJSON([]byte(contents)), option.ImpersonateCredentials(c.ImpersonateServiceAccount, c.ImpersonateServiceAccountDelegates...), option.WithScopes(clientScopes...)} - creds, err 
:= transport.Creds(context.TODO(), opts...) + jsonCreds, err := credentials.DetectDefault(&credentials.DetectOptions{ + Scopes: clientScopes, + CredentialsJSON: []byte(contents), + }) if err != nil { - return googleoauth.Credentials{}, err + return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) + } + + impersonateOpts := &impersonate.CredentialsOptions{ + TargetPrincipal: c.ImpersonateServiceAccount, + Scopes: clientScopes, + Delegates: c.ImpersonateServiceAccountDelegates, + Credentials: jsonCreds, + } + + if c.UniverseDomain != "" && c.UniverseDomain != "googleapis.com" { + impersonateOpts.UniverseDomain = c.UniverseDomain + } + + authCred, err := impersonate.NewCredentials(impersonateOpts) + if err != nil { + return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) + } + + creds := oauth2adapt.Oauth2CredentialsFromAuthCredentials(authCred) + if err != nil { + return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) } return *creds, nil } From 114c0545e8f22f8a54ffcbdcfad60de237de8dc4 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 28 May 2025 14:07:37 -0700 Subject: [PATCH 257/884] tgc-revival: fetch tests data from GCS bucket (#14123) --- mmv1/third_party/tgc_next/test/setup.go | 86 +++++++++++++++++++++++++ mmv1/third_party/tgc_next/test/utils.go | 21 ++++++ 2 files changed, 107 insertions(+) create mode 100644 mmv1/third_party/tgc_next/test/setup.go create mode 100644 mmv1/third_party/tgc_next/test/utils.go diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go new file mode 100644 index 000000000000..0b1b3c13ef1f --- /dev/null +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -0,0 +1,86 @@ +package test + +import ( + "context" + "encoding/json" + "fmt" + "io" + "log" + "time" + + "cloud.google.com/go/storage" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" +) + +type ResourceMetadata struct { + 
CaiAssetName string `json:"cai_asset_name"` + CaiAssetData caiasset.Asset `json:"cai_asset_data"` + ResourceType string `json:"resource_type"` + ResourceAddress string `json:"resource_address"` + ImportMetadata ImportMetadata `json:"import_metadata,omitempty"` + Service string `json:"service"` +} + +type ImportMetadata struct { + Id string `json:"id,omitempty"` + IgnoredFields []string `json:"ignored_fields,omitempty"` +} + +type TgcMetadataPayload struct { + TestName string `json:"test_name"` + RawConfig string `json:"raw_config"` + ResourceMetadata map[string]*ResourceMetadata `json:"resource_metadata"` + PrimaryResource string `json:"primary_resource"` +} + +var ( + TestsMetadata = make(map[string]TgcMetadataPayload) + setupDone = false +) + +func ReadTestsDataFromGcs() (map[string]TgcMetadataPayload, error) { + if !setupDone { + bucketName := "cai_assets_metadata" + currentDate := time.Now() + + for len(TestsMetadata) == 0 { + objectName := fmt.Sprintf("nightly_tests/%s/nightly_tests_meta.json", currentDate.Format("2006-01-02")) + log.Printf("Read object %s from the bucket %s", objectName, bucketName) + + ctx := context.Background() + client, err := storage.NewClient(ctx) + if err != nil { + return nil, fmt.Errorf("storage.NewClient: %v", err) + } + defer client.Close() + + currentDate = currentDate.AddDate(0, 0, -1) + + rc, err := client.Bucket(bucketName).Object(objectName).NewReader(ctx) + if err != nil { + if err == storage.ErrObjectNotExist { + log.Printf("Object '%s' in bucket '%s' does NOT exist.\n", objectName, bucketName) + continue + } else { + return nil, fmt.Errorf("Object(%q).NewReader: %v", objectName, err) + } + } + defer rc.Close() + + data, err := io.ReadAll(rc) + if err != nil { + return nil, fmt.Errorf("io.ReadAll: %v", err) + } + + err = json.Unmarshal(data, &TestsMetadata) + if err != nil { + return nil, fmt.Errorf("json.Unmarshal: %v", err) + } + } + + // Uncomment this line to debug issues locally + // 
writeJSONFile("../../tests_metadata.json", TestsMetadata) + setupDone = true + } + return TestsMetadata, nil +} diff --git a/mmv1/third_party/tgc_next/test/utils.go b/mmv1/third_party/tgc_next/test/utils.go new file mode 100644 index 000000000000..dbf7c528c361 --- /dev/null +++ b/mmv1/third_party/tgc_next/test/utils.go @@ -0,0 +1,21 @@ +package test + +import ( + "encoding/json" + "fmt" + "os" +) + +// Writes the data into a JSON file +func writeJSONFile(filename string, data interface{}) error { + jsonData, err := json.MarshalIndent(data, "", " ") + if err != nil { + return fmt.Errorf("Error marshaling data for %s: %v\n", filename, err) + } + + err = os.WriteFile(filename, jsonData, 0644) + if err != nil { + return fmt.Errorf("Error writing to file %s: %v\n", filename, err) + } + return nil +} From 52572fefa5c6513c5c891db931990f39753fd84f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wiktor=20Niesiob=C4=99dzki?= Date: Thu, 29 May 2025 00:42:12 +0200 Subject: [PATCH 258/884] Add numeric_id to compute_instance_region_template (#14108) --- .../resource_compute_region_instance_template.go.tmpl | 11 +++++++++++ .../r/compute_region_instance_template.html.markdown | 2 ++ 2 files changed, 13 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl index 36e3eb3351e8..a8b5bd424eb7 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl @@ -650,6 +650,13 @@ Google Cloud KMS. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key m }, }, + "numeric_id": { + Type: schema.TypeString, + ForceNew: true, + Computed: true, + Description: `The ID of the template in numeric format.`, + }, + "project": { Type: schema.TypeString, Optional: true, @@ -1421,6 +1428,10 @@ func resourceComputeRegionInstanceTemplateRead(d *schema.ResourceData, meta inte } } + if err = d.Set("numeric_id", instanceTemplate["id"]); err != nil { + return fmt.Errorf("Error setting numeric_id: %s", err) + } + {{ if ne $.TargetVersionName `ga` -}} if instanceProperties.PartnerMetadata != nil { partnerMetadata, err := flattenPartnerMetadata(instanceProperties.PartnerMetadata) diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index c5fa24b0ba64..1e94cd62e829 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -757,6 +757,8 @@ exported: * `metadata_fingerprint` - The unique fingerprint of the metadata. +* `numeric_id` - numeric identifier of the resource. + * `self_link` - The URI of the created resource. * `tags_fingerprint` - The unique fingerprint of the tags. From 96fba3fe71a281a2efe862a9772b6e4089b561cc Mon Sep 17 00:00:00 2001 From: MatthewVu-dev Date: Thu, 29 May 2025 09:17:18 -0700 Subject: [PATCH 259/884] (feature): Add IPV6 enum for PolicyBasedRoute.yaml and fix incorrect description for default src and dest ranges. 
(#13070) --- mmv1/products/networkconnectivity/PolicyBasedRoute.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml b/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml index 2d80d2bd4e53..c5d2e90ffa48 100644 --- a/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml +++ b/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml @@ -87,6 +87,7 @@ properties: required: true enum_values: - 'IPV4' + - 'IPV6' # probably could have been an enum, but it's a string in the API - name: 'ipProtocol' type: String @@ -96,12 +97,12 @@ properties: - name: 'srcRange' type: String description: | - The source IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0" if protocol version is IPv4. + The source IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0". default_value: "0.0.0.0/0" - name: 'destRange' type: String description: | - The destination IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0" if protocol version is IPv4. + The destination IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0". 
default_value: "0.0.0.0/0" - name: 'nextHopOtherRoutes' type: Enum From 418b491546c651d901d02caf2fd6403b0a281624 Mon Sep 17 00:00:00 2001 From: James Alseth Date: Thu, 29 May 2025 09:28:51 -0700 Subject: [PATCH 260/884] Check if the http Client is nil (#14132) Signed-off-by: James Alseth --- mmv1/third_party/terraform/transport/transport.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mmv1/third_party/terraform/transport/transport.go b/mmv1/third_party/terraform/transport/transport.go index 351ce30f8a88..54de21865aee 100644 --- a/mmv1/third_party/terraform/transport/transport.go +++ b/mmv1/third_party/terraform/transport/transport.go @@ -30,6 +30,10 @@ type SendRequestOptions struct { } func SendRequest(opt SendRequestOptions) (map[string]interface{}, error) { + if opt.Config == nil || opt.Config.Client == nil { + return nil, fmt.Errorf("client is nil for request to %s", opt.RawURL) + } + reqHeaders := opt.Headers if reqHeaders == nil { reqHeaders = make(http.Header) From 2d905b8a3beb5a70f81b52dacfe7af3f0c7a3105 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Thu, 29 May 2025 19:12:57 +0200 Subject: [PATCH 261/884] Add httpFilterConfigs and httpFilterMetadata fields to route rules in URL maps. 
(#14097) --- mmv1/products/compute/UrlMap.yaml | 71 +++++++++++ .../url_map_http_filter_configs.tf.tmpl | 78 ++++++++++++ .../url_map_http_filter_metadata.tf.tmpl | 111 ++++++++++++++++++ 3 files changed, 260 insertions(+) create mode 100644 mmv1/templates/terraform/examples/url_map_http_filter_configs.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/url_map_http_filter_metadata.tf.tmpl diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index 164878701009..74c57fef9542 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -154,6 +154,23 @@ examples: http_health_check_name: 'health-check' storage_bucket_name: 'static-asset-bucket' error_backend_bucket_name: 'error-backend-bucket' + - name: 'url_map_http_filter_configs' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + default_backend_service_name: 'default-backend' + service_a_backend_service_name: 'service-a-backend' + health_check_name: 'health-check' + - name: 'url_map_http_filter_metadata' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + default_backend_service_name: 'default-backend' + service_a_backend_service_name: 'service-a-backend' + service_b_backend_service_name: 'service-b-backend' + health_check_name: 'health-check' parameters: properties: - name: 'creationTimestamp' @@ -1831,6 +1848,60 @@ properties: If load balancer cannot reach the backendBucket, a simple Not Found Error will be returned, with the original response code (or overrideResponseCode if configured). resource: 'BackendBucket' imports: 'selfLink' + - name: 'httpFilterConfigs' + type: Array + min_version: 'beta' + description: | + Outbound route specific configuration for networkservices.HttpFilter resources enabled by Traffic Director. + httpFilterConfigs only applies for load balancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. + See ForwardingRule for more details. 
+ + Not supported when the URL map is bound to a target gRPC proxy that has validateForProxyless field set to true. + item_type: + type: NestedObject + properties: + - name: 'filterName' + type: String + description: | + Name of the networkservices.HttpFilter resource this configuration belongs to. + This name must be known to the xDS client. Example: envoy.wasm + - name: 'configTypeUrl' + type: String + description: | + The fully qualified versioned proto3 type url of the protobuf that the filter expects for its contextual settings, + for example: type.googleapis.com/google.protobuf.Struct + - name: 'config' + type: String + description: | + The configuration needed to enable the networkservices.HttpFilter resource. + The configuration must be YAML formatted and only contain fields defined in the protobuf identified in configTypeUrl + - name: 'httpFilterMetadata' + type: Array + min_version: 'beta' + description: | + Outbound route specific metadata supplied to networkservices.HttpFilter resources enabled by Traffic Director. + httpFilterMetadata only applies for load balancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. + See ForwardingRule for more details. + + Not supported when the URL map is bound to a target gRPC proxy that has validateForProxyless field set to true. + item_type: + type: NestedObject + properties: + - name: 'filterName' + type: String + description: | + Name of the networkservices.HttpFilter resource this configuration belongs to. + This name must be known to the xDS client. Example: envoy.wasm + - name: 'configTypeUrl' + type: String + description: | + The fully qualified versioned proto3 type url of the protobuf that the filter expects for its contextual settings, + for example: type.googleapis.com/google.protobuf.Struct + - name: 'config' + type: String + description: | + The configuration needed to enable the networkservices.HttpFilter resource. 
+ The configuration must be YAML formatted and only contain fields defined in the protobuf identified in configTypeUrl - name: 'defaultUrlRedirect' type: NestedObject # TODO: (mbang) won't work for array path matchers yet, uncomment here once they are supported. diff --git a/mmv1/templates/terraform/examples/url_map_http_filter_configs.tf.tmpl b/mmv1/templates/terraform/examples/url_map_http_filter_configs.tf.tmpl new file mode 100644 index 000000000000..1841187c2a77 --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_http_filter_configs.tf.tmpl @@ -0,0 +1,78 @@ +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "Test for httpFilterConfigs in route rules" + default_service = google_compute_backend_service.default.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.default.id + + route_rules { + priority = 1 + service = google_compute_backend_service.service-a.id + match_rules { + prefix_match = "/" + ignore_case = true + } + http_filter_configs { + filter_name = "envoy.wasm" + config_type_url = "type.googleapis.com/google.protobuf.Struct" + config = jsonencode({ + name = "my-filter" + root_id = "my_root_id" + vm_config = { + vm_id = "my_vm_id" + runtime = "envoy.wasm.runtime.v8" + code = { + local = { + inline_string = "const WASM_BINARY = '...'" + } + } + } + }) + } + } + } + + test { + service = google_compute_backend_service.default.id + host = "mysite.com" + path = "/" + } +} + +resource "google_compute_backend_service" "default" { + provider = google-beta + name = "{{index $.Vars "default_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_SELF_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "service-a" { + 
provider = google-beta + name = "{{index $.Vars "service_a_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_SELF_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/url_map_http_filter_metadata.tf.tmpl b/mmv1/templates/terraform/examples/url_map_http_filter_metadata.tf.tmpl new file mode 100644 index 000000000000..54e2986aeb82 --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_http_filter_metadata.tf.tmpl @@ -0,0 +1,111 @@ +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "Test for httpFilterMetadata in route rules" + default_service = google_compute_backend_service.default.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.default.id + + route_rules { + priority = 1 + service = google_compute_backend_service.service-a.id + match_rules { + prefix_match = "/" + ignore_case = true + } + http_filter_metadata { + filter_name = "envoy.wasm" + config_type_url = "type.googleapis.com/google.protobuf.Struct" + config = jsonencode({ + fields = { + timeout = { + string_value = "30s" + } + retries = { + number_value = 3 + } + debug = { + bool_value = true + } + } + }) + } + } + route_rules { + priority = 2 + service = google_compute_backend_service.service-b.id + match_rules { + prefix_match = "/api" + ignore_case = true + } + http_filter_metadata { + filter_name = "envoy.rate_limit" + config_type_url = "type.googleapis.com/google.protobuf.Struct" + config = jsonencode({ + fields = { + requests_per_unit = { + number_value = 100 
+ } + unit = { + string_value = "MINUTE" + } + } + }) + } + } + } + + test { + service = google_compute_backend_service.default.id + host = "mysite.com" + path = "/" + } +} + +resource "google_compute_backend_service" "default" { + provider = google-beta + name = "{{index $.Vars "default_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_SELF_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "service-a" { + provider = google-beta + name = "{{index $.Vars "service_a_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_SELF_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_backend_service" "service-b" { + provider = google-beta + name = "{{index $.Vars "service_b_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_SELF_MANAGED" + + health_checks = [google_compute_health_check.default.id] +} + +resource "google_compute_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + } +} \ No newline at end of file From 25e7362021e6f116feb2ae18212632e392c47bbb Mon Sep 17 00:00:00 2001 From: James Alseth Date: Thu, 29 May 2025 10:56:44 -0700 Subject: [PATCH 262/884] Move enabling SecretManager versions out of the expand function (#14130) Signed-off-by: James Alseth --- .../products/secretmanager/SecretVersion.yaml | 1 + .../RegionalSecretVersion.yaml | 1 + .../constants/regional_secret_version.go.tmpl | 35 +++++++++++++++++ .../constants/secret_version.go.tmpl | 35 +++++++++++++++++ .../regional_secret_version_enable.go.tmpl | 37 +----------------- .../secret_version_enable.go.tmpl | 39 +------------------ .../regional_secret_version.go.tmpl | 2 +- 
.../custom_update/secret_version.go.tmpl | 4 +- .../regional_secret_version.go.tmpl | 2 +- .../post_create/secret_version.go.tmpl | 4 +- 10 files changed, 81 insertions(+), 79 deletions(-) create mode 100644 mmv1/templates/terraform/constants/regional_secret_version.go.tmpl create mode 100644 mmv1/templates/terraform/constants/secret_version.go.tmpl diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index a3f0691ae0c1..ced84c3c1fda 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -50,6 +50,7 @@ custom_code: pre_delete: 'templates/terraform/pre_delete/secret_version_deletion_policy.go.tmpl' custom_import: 'templates/terraform/custom_import/secret_version.go.tmpl' raw_resource_config_validation: 'templates/terraform/validation/secret_version.go.tmpl' + constants: 'templates/terraform/constants/secret_version.go.tmpl' # Sweeper skipped as this resource has customized deletion. exclude_sweeper: true examples: diff --git a/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml b/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml index 6521262d6dc3..813119eacbf5 100644 --- a/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml +++ b/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml @@ -49,6 +49,7 @@ custom_code: custom_update: 'templates/terraform/custom_update/regional_secret_version.go.tmpl' pre_delete: 'templates/terraform/pre_delete/regional_secret_version_deletion_policy.go.tmpl' custom_import: 'templates/terraform/custom_import/regional_secret_version.go.tmpl' + constants: 'templates/terraform/constants/regional_secret_version.go.tmpl' # Sweeper skipped as this resource has customized deletion. 
exclude_sweeper: true examples: diff --git a/mmv1/templates/terraform/constants/regional_secret_version.go.tmpl b/mmv1/templates/terraform/constants/regional_secret_version.go.tmpl new file mode 100644 index 000000000000..9bbc39f51dc9 --- /dev/null +++ b/mmv1/templates/terraform/constants/regional_secret_version.go.tmpl @@ -0,0 +1,35 @@ +{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} +func setEnabled(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) error { + name := d.Get("name").(string) + if name == "" { + return nil + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerRegionalBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + if v == true { + url = fmt.Sprintf("%s:enable", url) + } else { + url = fmt.Sprintf("%s:disable", url) + } + + parts := strings.Split(name, "/") + project := parts[1] + + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + return err +} +{{- end }} diff --git a/mmv1/templates/terraform/constants/secret_version.go.tmpl b/mmv1/templates/terraform/constants/secret_version.go.tmpl new file mode 100644 index 000000000000..a82f573aac02 --- /dev/null +++ b/mmv1/templates/terraform/constants/secret_version.go.tmpl @@ -0,0 +1,35 @@ +{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} +func setEnabled(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) error { + name := d.Get("name").(string) + if name == "" { + return nil + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + if v == true { + url = fmt.Sprintf("%s:enable", url) + } else { + url = fmt.Sprintf("%s:disable", url) + } + + parts := 
strings.Split(name, "/") + project := parts[1] + + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + return err +} +{{- end }} diff --git a/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl b/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl index a3a2c9effc47..8c4712454ff5 100644 --- a/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl @@ -10,41 +10,6 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - name := d.Get("name").(string) - if name == "" { - return "", nil - } - - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerRegionalBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") - if err != nil { - return nil, err - } - - if v == true { - url = fmt.Sprintf("%s:enable", url) - } else { - url = fmt.Sprintf("%s:disable", url) - } - - parts := strings.Split(name, "/") - project := parts[1] - - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return nil, err - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return nil, err - } - +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(_ interface{}, _ tpgresource.TerraformResourceData, _ *transport_tpg.Config) (interface{}, error) { return nil, nil } diff --git 
a/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl b/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl index f1bd48c68a16..8c4712454ff5 100644 --- a/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl @@ -10,41 +10,6 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - name := d.Get("name").(string) - if name == "" { - return "", nil - } - - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") - if err != nil { - return nil, err - } - - if v == true { - url = fmt.Sprintf("%s:enable", url) - } else { - url = fmt.Sprintf("%s:disable", url) - } - - parts := strings.Split(name, "/") - project := parts[1] - - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return nil, err - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return nil, err - } - +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(_ interface{}, _ tpgresource.TerraformResourceData, _ *transport_tpg.Config) (interface{}, error) { return nil, nil -} \ No newline at end of file +} diff --git a/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl b/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl index 7cb6e95cdcff..c26d065d4b11 100644 --- a/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl +++ b/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under 
the License. */ -}} -_, err := expandSecretManagerRegionalRegionalSecretVersionEnabled(d.Get("enabled"), d, config) +err := setEnabled(d.Get("enabled"), d, config) if err != nil { return err } diff --git a/mmv1/templates/terraform/custom_update/secret_version.go.tmpl b/mmv1/templates/terraform/custom_update/secret_version.go.tmpl index 4f5dce983088..0f0a05981061 100644 --- a/mmv1/templates/terraform/custom_update/secret_version.go.tmpl +++ b/mmv1/templates/terraform/custom_update/secret_version.go.tmpl @@ -10,9 +10,9 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -_, err := expandSecretManagerSecretVersionEnabled(d.Get("enabled"), d, config) +err := setEnabled(d.Get("enabled"), d, config) if err != nil { return err } -return resourceSecretManagerSecretVersionRead(d, meta) \ No newline at end of file +return resourceSecretManagerSecretVersionRead(d, meta) diff --git a/mmv1/templates/terraform/post_create/regional_secret_version.go.tmpl b/mmv1/templates/terraform/post_create/regional_secret_version.go.tmpl index 8c97a8ae614a..38a1a3c8279a 100644 --- a/mmv1/templates/terraform/post_create/regional_secret_version.go.tmpl +++ b/mmv1/templates/terraform/post_create/regional_secret_version.go.tmpl @@ -20,7 +20,7 @@ if err := d.Set("name", name.(string)); err != nil { } d.SetId(name.(string)) -_, err = expandSecretManagerRegionalRegionalSecretVersionEnabled(d.Get("enabled"), d, config) +err = setEnabled(d.Get("enabled"), d, config) if err != nil { return err } diff --git a/mmv1/templates/terraform/post_create/secret_version.go.tmpl b/mmv1/templates/terraform/post_create/secret_version.go.tmpl index a718b67bee3a..2ab9396b0895 100644 --- a/mmv1/templates/terraform/post_create/secret_version.go.tmpl +++ b/mmv1/templates/terraform/post_create/secret_version.go.tmpl @@ -8,7 +8,7 @@ if err := d.Set("name", name.(string)); err != nil { } d.SetId(name.(string)) -_, err = 
expandSecretManagerSecretVersionEnabled(d.Get("enabled"), d, config) +err = setEnabled(d.Get("enabled"), d, config) if err != nil { return err -} \ No newline at end of file +} From 3b0be549b3c5c8f82275006c60010e534df781d4 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 29 May 2025 11:33:40 -0700 Subject: [PATCH 263/884] Documented validation best practices (#14124) Co-authored-by: Riley Karson --- docs/content/best-practices/validation.md | 34 +++++++++++++++++++++++ docs/content/reference/field.md | 13 ++++++--- 2 files changed, 43 insertions(+), 4 deletions(-) create mode 100644 docs/content/best-practices/validation.md diff --git a/docs/content/best-practices/validation.md b/docs/content/best-practices/validation.md new file mode 100644 index 000000000000..6005577098fe --- /dev/null +++ b/docs/content/best-practices/validation.md @@ -0,0 +1,34 @@ +--- +title: "Validation" +weight: 50 +--- + +# Validation + +There are a number of ways to add client-side validation to resources. The benefit of client-side validation is that errors can be surfaced at plan time, instead of partway through a (potentially very long) apply process, allowing for faster iteration. However, the tradeoff is that client-side validation can get out of sync with server-side validation, creating additional maintenance burden for the provider and preventing users from accessing the latest features without upgrading. + +Client-side validation is generally discouraged due to the low positive impact of an individual validation rule and outsized negative impact when client-side validation and API capabilities drift, requiring both provider changes and users to update. Client-side validation may be added in cases where it is extremely unlikely to change, covered below. + +The following sections cover best practices for specific types of client-side validation. 
+ +## URL segments + +If a resource URL looks like: + +``` +projects/{project}/folders/{folder}/resource/{resource_id} +``` + +Adding validation for the last part of the path (`resource_id`) may be safe if there are specific restrictions that aren't going to change, such as following an external RFC or other spec/standard. However, if the API was ever less restrictive (or becomes less restrictive later), resources created with other tools and then imported into Terraform may be impossible to actually manage with Terraform (without deleting & recreating them) because the ID which was valid in the API violates the more restrictive validation in the provider. + +## Enum + +Enums are generally okay if they are exhaustive of all possible values for a clearly defined domain where new values are extremely unlikely. Otherwise, it is better to use a string field and add a link to the API documentation as a reference for the possible values. + +## Inter-field relationships + +[`conflicts`]({{< ref "/reference/field#conflicts" >}}), [`required_with`]({{< ref "/reference/field#required_with" >}}), [`exactly_one_of`]({{< ref "/reference/field#exactly_one_of" >}}), and [`at_least_one_of`]({{< ref "/reference/field#at_least_one_of" >}}) are often safe to add. However, if there is a chance that the API validation will relax in the future (such as two fields no longer being required together, or two fields no longer conflicting) it's better to not add the restriction in the first place. + +## Immutable facts + +It is safe to validate things that will definitely always be true about an API. For example, a `node_count` field will most likely always need to be non-negative. That is safe to validate. However, validating a max value for `node_count` may not be safe, because the API might increase the allowed values in the future. 
diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index 2cbf9b832b37..a35abe372240 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -285,6 +285,8 @@ Example: ``` ### `validation` +In many cases, it is better to avoid client-side validation. See [Best practices: Validation]({{< ref "/best-practices/validation" >}}) for more information. + Controls the value set for the field's [`ValidateFunc`](https://developer.hashicorp.com/terraform/plugin/sdkv2/schemas/schema-behaviors#validatefunc). For Enum fields, this will override the default validation (that the provided value is one of the enum [`values`](#values)). @@ -375,10 +377,11 @@ url_param_only: true ## `Enum` properties ### `enum_values` -Enum only. If the allowed values change frequently, use a String field instead -to allow better forwards-compatibility, and link to API documentation -stating the current allowed values in the String field's description. Do not -include UNSPECIFIED values in this list. +Enum only. If the allowed values may change in the future, use a String field instead and link to API documentation +stating the current allowed values in the String field's description. +See [Best practices: Validation]({{< ref "/best-practices/validation" >}}) for more information. + +Do not include UNSPECIFIED values in this list. Enums will validate that the provided field is in the allowed list unless a custom [`validation`]({{}}) is provided. @@ -432,6 +435,8 @@ item_type: Array only. Controls the [`ValidateFunc`](https://developer.hashicorp.com/terraform/plugin/sdkv2/schemas/schema-behaviors#validatefunc) used to validate individual items in the array. Behaves like [`validation`]({{}}). +In many cases, it is better to avoid client-side validation. See [Best practices: Validation]({{< ref "/best-practices/validation" >}}) for more information. 
+ For arrays of enums, this will override the default validation (that the provided value is one of the enum [`values`](#values)). If you need additional validation on top of an enum, ensure that the supplied validation func also verifies the enum values are correct. From 0430f3a3cc091b80dfb45b5c4ebc1138fbff9c90 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 29 May 2025 11:44:56 -0700 Subject: [PATCH 264/884] Allow multiple cai asset names for one resource in tgc metadata model (#14090) --- mmv1/third_party/terraform/acctest/tgc_utils.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 9a3a10e0a27b..46fdfa762923 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -14,7 +14,7 @@ import ( ) type ResourceMetadata struct { - CaiAssetName string `json:"cai_asset_name"` + CaiAssetNames []string `json:"cai_asset_names"` ResourceType string `json:"resource_type"` ResourceAddress string `json:"resource_address"` ImportMetadata ImportMetadata `json:"import_metadata,omitempty"` @@ -70,7 +70,7 @@ func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc apiServiceName := GetAPIServiceNameForResource(metadata.ResourceType) if apiServiceName == "unknown" || apiServiceName == "failed_to_populate_metadata_cache" { log.Printf("[DEBUG]TGC Terraform error: unknown resource type %s", metadata.ResourceType) - metadata.CaiAssetName = apiServiceName + metadata.CaiAssetNames = []string{apiServiceName} } else { var rName string switch metadata.ResourceType { @@ -79,7 +79,7 @@ func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc default: rName = rState.Primary.ID } - metadata.CaiAssetName = fmt.Sprintf("//%s/%s", apiServiceName, rName) + metadata.CaiAssetNames = []string{fmt.Sprintf("//%s/%s", apiServiceName, rName)} } // Resolve auto IDs in 
import metadata @@ -222,7 +222,7 @@ func extendWithTGCData(t *testing.T, c resource.TestCase) resource.TestCase { ResourceAddress: res, ImportMetadata: importMeta, Service: GetServicePackageForResourceType(resourceType), - // CaiAssetName will be populated at runtime in the check function + // CaiAssetNames will be populated at runtime in the check function } } } From c09c91d5cc638a7f2f8ee74252ed237cc6846eb5 Mon Sep 17 00:00:00 2001 From: Madhu Suraj Date: Thu, 29 May 2025 12:34:53 -0700 Subject: [PATCH 265/884] Deprecate PSS feature. (#14134) Co-authored-by: Zhenhua Li --- mmv1/products/colab/RuntimeTemplate.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index 31b575a60e59..92d504453b50 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -205,6 +205,7 @@ properties: The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not.' - name: 'postStartupScriptConfig' + deprecation_message: '`post_startup_script_config` is deprecated and will be removed in a future major release. New resource creation with this field is unavailable at this time.' type: NestedObject description: 'Post startup script config.' 
properties: From 76449584a413f65f2ab12cba787e07939dc4ce3e Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 29 May 2025 21:38:07 +0200 Subject: [PATCH 266/884] fix: string based ordering for `google_compute_region_security_policy` causes recreate after apply (#14093) --- .../compute/RegionSecurityPolicy.yaml | 2 + .../constants/region_security_policy.go.tmpl | 31 +++++ ...ompute_region_security_policy_test.go.tmpl | 115 ++++++++++++++++++ 3 files changed, 148 insertions(+) create mode 100644 mmv1/templates/terraform/constants/region_security_policy.go.tmpl diff --git a/mmv1/products/compute/RegionSecurityPolicy.yaml b/mmv1/products/compute/RegionSecurityPolicy.yaml index 8cb057322d99..26ea54d916d6 100644 --- a/mmv1/products/compute/RegionSecurityPolicy.yaml +++ b/mmv1/products/compute/RegionSecurityPolicy.yaml @@ -39,6 +39,7 @@ async: result: resource_inside_response: false custom_code: + constants: 'templates/terraform/constants/region_security_policy.go.tmpl' sweeper: url_substitutions: - region: "us-south1" @@ -188,6 +189,7 @@ properties: description: | The set of rules that belong to this policy. There must always be a default rule (rule with priority 2147483647 and match "*"). If no rules are provided when creating a security policy, a default rule with action "allow" will be added. 
default_from_api: true + diff_suppress_func: 'resourceComputeRegionSecurityPolicySpecRulesDiffSuppress' item_type: type: NestedObject properties: diff --git a/mmv1/templates/terraform/constants/region_security_policy.go.tmpl b/mmv1/templates/terraform/constants/region_security_policy.go.tmpl new file mode 100644 index 000000000000..8c988835677e --- /dev/null +++ b/mmv1/templates/terraform/constants/region_security_policy.go.tmpl @@ -0,0 +1,31 @@ +{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} +func resourceComputeRegionSecurityPolicySpecRulesDiffSuppress(k, o, n string, d *schema.ResourceData) bool { + oldCount, newCount := d.GetChange("rules.#") + var count int + // There could be duplicates - worth continuing even if the counts are unequal. + if oldCount.(int) < newCount.(int) { + count = newCount.(int) + } else { + count = oldCount.(int) + } + + old := make([]interface{}, 0, count) + new := make([]interface{}, 0, count) + for i := 0; i < count; i++ { + o, n := d.GetChange(fmt.Sprintf("rules.%d", i)) + + if o != nil { + old = append(old, o) + } + if n != nil { + new = append(new, n) + } + } + + oldSet := schema.NewSet(schema.HashResource(ResourceComputeRegionSecurityPolicy().Schema["rules"].Elem.(*schema.Resource)), old) + newSet := schema.NewSet(schema.HashResource(ResourceComputeRegionSecurityPolicy().Schema["rules"].Elem.(*schema.Resource)), new) + + return oldSet.Equal(newSet) +} + +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl index 5c8e00f02234..790f174c5de4 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl @@ -722,6 +722,121 @@ func 
testAccComputeRegionSecurityPolicy_withMultipleEnforceOnKeyConfigs_update(c `, context) } +func TestAccComputeRegionSecurityPolicy_regionSecurityPolicyRuleOrderingWithMultipleRules(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionSecurityPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_create(context), + }, + { + ResourceName: "google_compute_region_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_update(context), + }, + { + ResourceName: "google_compute_region_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + + +func testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_create(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_compute_region_security_policy" "policy" { + name = "tf-test-ordering%{random_suffix}" + description = "basic region security policy with multiple rules" + type = "CLOUD_ARMOR" + region = "us-central1" + + rules { + action = "deny" + priority = "3000" + match { + expr { + expression = "request.path.matches(\"/login.html\") && token.recaptcha_session.score < 0.2" + } + } + } + + rules { + action = "deny" + priority = "2147483647" + match { + versioned_expr = "SRC_IPS_V1" + config { + src_ip_ranges = ["*"] + } + } + description = "default rule" + } +} + + `, context) +} + + +func testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_update(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_compute_region_security_policy" "policy" { + name = 
"tf-test-ordering%{random_suffix}" + description = "basic region security policy with multiple rules, updated" + type = "CLOUD_ARMOR" + region = "us-central1" + + rules { + action = "allow" + priority = "4000" + match { + expr { + expression = "request.path.matches(\"/login.html\") && token.recaptcha_session.score < 0.2" + } + } + } + + rules { + action = "allow" + priority = "5000" + match { + expr { + expression = "request.path.matches(\"/404.html\") && token.recaptcha_session.score > 0.4" + } + } + description = "new rule" + } + + rules { + action = "deny" + priority = "2147483647" + match { + versioned_expr = "SRC_IPS_V1" + config { + src_ip_ranges = ["*"] + } + } + description = "default rule" + } +} + `, context) +} + + {{- if ne $.TargetVersionName "ga" }} func TestAccComputeRegionSecurityPolicy_regionSecurityPolicyWithRulesNetworkMatch(t *testing.T) { t.Parallel() From ad1fb3d70174c78908176897de606dc77f559551 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 29 May 2025 12:40:31 -0700 Subject: [PATCH 267/884] Documented how to create new test projects (#14140) --- docs/content/test/test.md | 68 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/docs/content/test/test.md b/docs/content/test/test.md index 4a51917c3a5d..6723ae39432b 100644 --- a/docs/content/test/test.md +++ b/docs/content/test/test.md @@ -277,7 +277,7 @@ An update test is an **acceptance test** that creates the target resource and th {{< /tab >}} {{< /tabs >}} -## Bootstrapping API resources {#bootstrapping} +## Bootstrap API resources {#bootstrapping} Most acceptance tests run in a the default org and default test project, which means that they can conflict for quota, resource namespaces, and control over shared resources. You can work around these limitations with "bootstrapped" resources. 
@@ -445,6 +445,72 @@ func TestAccProductResource_update(t *testing.T) { {{< /tab >}} {{< /tabs >}} +## Create test projects +If [bootstrapping]({{< ref "#bootstrapping" >}}) doesn't work or isn't an option for some reason, you can also work around project quota issues or test project-global resources by creating a new test project. You will also need to enable any necessary APIs and wait for their enablement to propagate. + +```go +import ( + "testing" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/envvar" +) +func TestAccProductResourceName_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), + } + acctest.VcrTest(t, resource.TestCase{ + // ... + Steps: []resource.TestStep{ + { + testAccProductResourceName_update1(context), + }, + // ... 
+ }, + }) +} + +func testAccProductResourceName_update1(context map[string]interface{}) string { + return accest.Nprintf(` +// Set up a test project +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +// Enable APIs in a deterministic order to avoid inconsistent VCR recordings +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.servicenetworking] +} + +// wait for API enablement +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.compute] +} + +resource "google_product_resource" "example" { + // ... + depends_on = [time_sleep.wait_120_seconds] +} + +`, context) +} +``` + ## Skip tests in VCR replaying mode {#skip-vcr} Acceptance tests are run in VCR replaying mode on PRs (using pre-recorded HTTP requests and responses) to reduce the time it takes to present results to contributors. However, not all resources or tests are possible to run in replaying mode. Incompatible tests should be skipped during VCR replaying mode. They will still run in our nightly test suite. 
From 40254546f9ed036c54c6b4fe299b87ee62c7c6a1 Mon Sep 17 00:00:00 2001 From: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Date: Thu, 29 May 2025 17:10:14 -0300 Subject: [PATCH 268/884] Added support for NAT64 when configuring Router NAT (#13522) --- mmv1/products/compute/RouterNat.yaml | 30 ++++ mmv1/products/dns/Policy.yaml | 14 ++ .../resource_compute_router_nat_test.go | 165 ++++++++++++++++++ .../services/dns/resource_dns_policy_test.go | 101 ++++++++++- 4 files changed, 309 insertions(+), 1 deletion(-) diff --git a/mmv1/products/compute/RouterNat.yaml b/mmv1/products/compute/RouterNat.yaml index 6c9a904460c8..48920f446c6f 100644 --- a/mmv1/products/compute/RouterNat.yaml +++ b/mmv1/products/compute/RouterNat.yaml @@ -257,6 +257,36 @@ properties: is_set: true item_type: type: String + - name: 'sourceSubnetworkIpRangesToNat64' + type: Enum + description: | + Specify the Nat option for NAT64, which can take one of the following values: + ALL_IPV6_SUBNETWORKS: All of the IP ranges in every Subnetwork are allowed to Nat. + LIST_OF_IPV6_SUBNETWORKS: A list of Subnetworks are allowed to Nat (specified in the field nat64Subnetwork below). + Note that if this field contains NAT64_ALL_V6_SUBNETWORKS no other Router.Nat section in this region can also enable NAT64 for any Subnetworks in this network. + Other Router.Nat sections can still be present to enable NAT44 only. + enum_values: + - 'ALL_IPV6_SUBNETWORKS' + - 'LIST_OF_IPV6_SUBNETWORKS' + - name: 'nat64Subnetwork' + type: Array + description: | + One or more subnetwork NAT configurations whose traffic should be translated by NAT64 Gateway. 
+ Only used if `source_subnetwork_ip_ranges_to_nat64` is set to `LIST_OF_IPV6_SUBNETWORKS` + api_name: nat64Subnetworks + is_set: true + send_empty_value: true + set_hash_func: computeRouterNatSubnetworkHash + item_type: + type: NestedObject + properties: + - name: 'name' + type: ResourceRef + description: 'Self-link of the subnetwork resource that will use NAT64' + required: true + custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + resource: 'Subnetwork' + imports: 'selfLink' - name: 'minPortsPerVm' type: Integer description: | diff --git a/mmv1/products/dns/Policy.yaml b/mmv1/products/dns/Policy.yaml index 545fcf938f12..7f982a4bc9b9 100644 --- a/mmv1/products/dns/Policy.yaml +++ b/mmv1/products/dns/Policy.yaml @@ -103,6 +103,20 @@ properties: update_url: 'projects/{{project}}/policies/{{name}}' update_verb: 'PATCH' default_value: "Managed by Terraform" + - name: 'dns64Config' + type: NestedObject + default_from_api: true + description: Configurations related to DNS64 for this Policy. + properties: + - name: 'scope' + type: NestedObject + description: The scope to which DNS64 config will be applied to. + required: true + properties: + - name: 'allQueries' + type: Boolean + description: Controls whether DNS64 is enabled globally at the network level. 
+ send_empty_value: true - name: 'enableInboundForwarding' type: Boolean description: | diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go index 160d65f0b505..5dd18d65d99a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go @@ -797,6 +797,38 @@ func testAccCheckComputeRouterNatDelete(t *testing.T, n string) resource.TestChe } } +func TestAccComputeRouterNat_withNat64Configuration(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRouterNatWithNat64Configuration(context), + }, + { + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeRouterNatWithNat64ConfigurationUpdate(context), + }, + { + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccComputeRouterNatBasic(routerName string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -2047,3 +2079,136 @@ resource "google_compute_router_nat" "foobar" { } `, testAccComputeRouterNatBaseResourcesWithPrivateNatSubnetworks(routerName, hubName), routerName) } + +func testAccComputeRouterNatWithNat64Configuration(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dns_policy" "foobar" { + name = "tf-test-example-policy%{random_suffix}" + enable_inbound_forwarding = false + enable_logging = false + + dns64_config { + scope 
{ + all_queries = true + } + } + networks { + network_url = google_compute_network.foobar.id + } +} + +resource "google_compute_network" "foobar" { + name = "tf-test-network%{random_suffix}" + enable_ula_internal_ipv6 = true + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "foobar" { + name = "tf-test-subnetwork-%{random_suffix}" + network = google_compute_network.foobar.self_link + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" +} + +resource "google_compute_subnetwork" "foobar2" { + name = "tf-test-subnetwork-2-%{random_suffix}" + network = google_compute_network.foobar.self_link + ip_cidr_range = "10.182.0.0/20" + ipv6_access_type = "EXTERNAL" + stack_type = "IPV4_IPV6" + region = "us-central1" +} + +resource "google_compute_router" "foobar" { + name = "tf-test-router%{random_suffix}" + region = google_compute_subnetwork.foobar.region + network = google_compute_network.foobar.self_link + bgp { + asn = 64514 + } +} + +resource "google_compute_router_nat" "foobar" { + name = "tf-test-router-nat%{random_suffix}" + router = google_compute_router.foobar.name + region = google_compute_router.foobar.region + nat_ip_allocate_option = "AUTO_ONLY" + + source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS" + subnetwork { + name = google_compute_subnetwork.foobar.name + source_ip_ranges_to_nat = ["ALL_IP_RANGES"] + } + + source_subnetwork_ip_ranges_to_nat64 = "ALL_IPV6_SUBNETWORKS" +} +`, context) +} + +func testAccComputeRouterNatWithNat64ConfigurationUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dns_policy" "foobar" { + name = "tf-test-example-policy%{random_suffix}" + enable_inbound_forwarding = false + enable_logging = false + + dns64_config { + scope { + all_queries = true + } + } + networks { + network_url = google_compute_network.foobar.id + } +} + +resource "google_compute_network" "foobar" { + name = "tf-test-network%{random_suffix}" + enable_ula_internal_ipv6 = true + 
auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "foobar" { + name = "tf-test-subnetwork-%{random_suffix}" + network = google_compute_network.foobar.self_link + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" +} + +resource "google_compute_subnetwork" "foobar2" { + name = "tf-test-subnetwork-2-%{random_suffix}" + network = google_compute_network.foobar.self_link + ip_cidr_range = "10.182.0.0/20" + ipv6_access_type = "EXTERNAL" + stack_type = "IPV4_IPV6" + region = "us-central1" +} + +resource "google_compute_router" "foobar" { + name = "tf-test-router%{random_suffix}" + region = google_compute_subnetwork.foobar.region + network = google_compute_network.foobar.self_link + bgp { + asn = 64514 + } +} + +resource "google_compute_router_nat" "foobar" { + name = "tf-test-router-nat%{random_suffix}" + router = google_compute_router.foobar.name + region = google_compute_router.foobar.region + nat_ip_allocate_option = "AUTO_ONLY" + + source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS" + subnetwork { + name = google_compute_subnetwork.foobar.name + source_ip_ranges_to_nat = ["ALL_IP_RANGES"] + } + + source_subnetwork_ip_ranges_to_nat64 = "LIST_OF_IPV6_SUBNETWORKS" + nat64_subnetwork { + name = google_compute_subnetwork.foobar2.name + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go b/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go index 5e4ea9ad6249..aa2811070392 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go +++ b/mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go @@ -2,9 +2,10 @@ package dns_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -70,3 +71,101 @@ resource "google_compute_network" "network-2" { } `, suffix, forwarding, 
first_nameserver, second_nameserver, network, suffix, suffix) } + +func TestAccDNSPolicy_dnsPolicyDns64(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDNSPolicy_dnsPolicyDns64(context), + }, + { + ResourceName: "google_dns_policy.example-policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDNSPolicy_dnsPolicyDns64Update(context), + }, + { + ResourceName: "google_dns_policy.example-policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDNSPolicy_dnsPolicyDns64(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dns_policy" "example-policy" { + name = "tf-test-example-policy%{random_suffix}" + enable_inbound_forwarding = false + + enable_logging = true + + dns64_config { + scope { + all_queries = true + } + } + + networks { + network_url = google_compute_network.network-1.id + } + networks { + network_url = google_compute_network.network-2.id + } +} + +resource "google_compute_network" "network-1" { + name = "tf-test-network-1%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + name = "tf-test-network-2%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} + +func testAccDNSPolicy_dnsPolicyDns64Update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dns_policy" "example-policy" { + name = "tf-test-example-policy%{random_suffix}" + enable_inbound_forwarding = false + + enable_logging = true + + dns64_config { + scope {} + } + + networks { + network_url = google_compute_network.network-1.id + } + networks { + network_url = 
google_compute_network.network-2.id + } +} + +resource "google_compute_network" "network-1" { + name = "tf-test-network-1%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + name = "tf-test-network-2%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} From 2473e2dd12cfbf0bcfcb4a7790c7e1e45c3fe1d1 Mon Sep 17 00:00:00 2001 From: Jun Luo Date: Thu, 29 May 2025 13:56:53 -0700 Subject: [PATCH 269/884] Update description for import source arguments (#14114) Co-authored-by: Riley Karson --- mmv1/products/memorystore/Instance.yaml | 4 ++-- mmv1/products/redis/Cluster.yaml | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 42c802d91f16..2661ef91fd4c 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -777,7 +777,7 @@ properties: is_set: true description: | URIs of the GCS objects to import. - Example: gs://bucket1/object1, gs//bucket2/folder2/object2 + Example: gs://bucket1/object1, gs://bucket2/folder2/object2 immutable: true required: true item_type: @@ -793,7 +793,7 @@ properties: - name: 'backup' type: String description: | - Example: //memorystore.googleapis.com/projects/{project}/locations/{location}/backups/{backupId}. In this case, it assumes the backup is under memorystore.googleapis.com. + Example: `projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup}`. 
immutable: true required: true - name: 'backupCollection' diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 4f4b858093e9..2d0a32b65905 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -283,8 +283,7 @@ properties: - name: 'backup' type: String description: | - Example: //redis.googleapis.com/projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup} A shorter version (without the prefix) of the backup name is also supported, - like projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backupId}. In this case, it assumes the backup is under redis.googleapis.com. + Example: `projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup}`. required: true immutable: true - name: 'backupCollection' From a6832c5aeb6bc24bd24223d88aaa41e817c94180 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Thu, 29 May 2025 17:04:28 -0500 Subject: [PATCH 270/884] Move instance settings fingerprint custom code to pre_* (#14143) --- mmv1/products/compute/InstanceSettings.yaml | 3 +- ...compute_instance_settings_fingerprint.tmpl | 30 ------------------- ...pute_instance_settings_fingerprint.go.tmpl | 21 +++++++++++++ 3 files changed, 23 insertions(+), 31 deletions(-) delete mode 100644 mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl create mode 100644 mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl diff --git a/mmv1/products/compute/InstanceSettings.yaml b/mmv1/products/compute/InstanceSettings.yaml index 1ce2c6746646..1818f9aca839 100644 --- a/mmv1/products/compute/InstanceSettings.yaml +++ b/mmv1/products/compute/InstanceSettings.yaml @@ -41,6 +41,8 @@ async: result: resource_inside_response: false custom_code: + pre_create: 'templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl' + pre_update: 
'templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl' custom_delete: 'templates/terraform/custom_delete/clear_instance_settings.go.tmpl' test_check_destroy: 'templates/terraform/custom_check_destroy/skip_delete_during_test.go.tmpl' examples: @@ -61,7 +63,6 @@ properties: The fingerprint used for optimistic locking of this resource. Used internally during updates. output: true - custom_expand: 'templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl' - name: 'metadata' type: NestedObject description: | diff --git a/mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl b/mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl deleted file mode 100644 index f98f54248e63..000000000000 --- a/mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl +++ /dev/null @@ -1,30 +0,0 @@ -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - project, err := tpgresource.GetProject(d, config) - if err != nil { - return nil, err - } - - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return nil, err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/zones/{{"{{"}}zone{{"}}"}}/instanceSettings/{{"{{"}}name{{"}}"}}") - if err != nil { - return nil, err - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - - if err != nil { - return nil, err - } - - return res["fingerprint"], nil -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl b/mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl new file mode 100644 index 
000000000000..f260b3cb2573 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl @@ -0,0 +1,21 @@ + +fingerprintUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/zones/{{"{{"}}zone{{"}}"}}/instanceSettings/{{"{{"}}name{{"}}"}}") +if err != nil { + return err +} + +fingerPrintRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: fingerprintUrl, + UserAgent: userAgent, +}) +if err != nil { + return err +} + +fingerprintProp = fingerPrintRes["fingerprint"] +if v, ok := d.GetOkExists("fingerprint"); !tpgresource.IsEmptyValue(reflect.ValueOf(fingerprintProp)) && (ok || !reflect.DeepEqual(v, fingerprintProp)) { + obj["fingerprint"] = fingerprintProp +} \ No newline at end of file From d9275486eafd4abd976638e4fe4e98ddaa82910d Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 29 May 2025 15:25:22 -0700 Subject: [PATCH 271/884] Check for fields that are needed for the read URL explicitly (#14146) --- mmv1/api/resource.go | 24 ++++++++++++++++------- mmv1/api/resource_test.go | 20 ++++++++++++++++--- mmv1/templates/terraform/resource.go.tmpl | 12 ++++++------ 3 files changed, 40 insertions(+), 16 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 725ec1abb931..7df33e7aa829 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1217,20 +1217,30 @@ func (r Resource) GetIdFormat() string { } // Returns true if the Type is in the ID format and false otherwise. 
-func (r Resource) InIdFormat(prop Type) bool { - fields := r.ExtractIdentifiers(r.GetIdFormat()) - return slices.Contains(fields, google.Underscore(prop.Name)) +func (r Resource) InPostCreateComputed(prop Type) bool { + fields := map[string]struct{}{} + for _, f := range r.ExtractIdentifiers(r.GetIdFormat()) { + fields[f] = struct{}{} + } + for _, f := range r.ExtractIdentifiers(r.SelfLinkUri()) { + fields[f] = struct{}{} + } + _, ok := fields[google.Underscore(prop.Name)] + return ok } // Returns true if at least one of the fields in the ID format is computed -func (r Resource) HasComputedIdFormatFields() bool { - idFormatFields := map[string]struct{}{} +func (r Resource) HasPostCreateComputedFields() bool { + fields := map[string]struct{}{} for _, f := range r.ExtractIdentifiers(r.GetIdFormat()) { - idFormatFields[f] = struct{}{} + fields[f] = struct{}{} + } + for _, f := range r.ExtractIdentifiers(r.SelfLinkUri()) { + fields[f] = struct{}{} } for _, p := range r.GettableProperties() { // Skip fields not in the id format - if _, ok := idFormatFields[google.Underscore(p.Name)]; !ok { + if _, ok := fields[google.Underscore(p.Name)]; !ok { continue } if (p.Output || p.DefaultFromApi) && !p.IgnoreRead { diff --git a/mmv1/api/resource_test.go b/mmv1/api/resource_test.go index 6f88fc0ac35b..ad7dd327b288 100644 --- a/mmv1/api/resource_test.go +++ b/mmv1/api/resource_test.go @@ -358,7 +358,7 @@ func TestMagicianLocation(t *testing.T) { } } -func TestHasComputedIdFormatFields(t *testing.T) { +func TestHasPostCreateComputedFields(t *testing.T) { cases := []struct { name, description string resource Resource @@ -476,15 +476,29 @@ func TestHasComputedIdFormatFields(t *testing.T) { }, want: true, }, + { + name: "includes fields in self link that aren't in id format", + resource: Resource{ + IdFormat: "projects/{{project}}/resource/{{resource_id}}", + SelfLink: "{{name}}", + Properties: []*Type{ + { + Name: "name", + Output: true, + }, + }, + }, + want: true, + }, } for _, tc 
:= range cases { t.Run(tc.name, func(t *testing.T) { t.Parallel() - got := tc.resource.HasComputedIdFormatFields() + got := tc.resource.HasPostCreateComputedFields() if got != tc.want { - t.Errorf("HasComputedIdFormatFields(%q) returned unexpected value. got %t; want %t.", tc.name, got, tc.want) + t.Errorf("HasPostCreateComputedFields(%q) returned unexpected value. got %t; want %t.", tc.name, got, tc.want) } }) } diff --git a/mmv1/templates/terraform/resource.go.tmpl b/mmv1/templates/terraform/resource.go.tmpl index bc8420d1633d..f209f8c9cbfe 100644 --- a/mmv1/templates/terraform/resource.go.tmpl +++ b/mmv1/templates/terraform/resource.go.tmpl @@ -291,7 +291,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{- /* Set computed resource properties required for building the ID from create API response (as long as Create doesn't use an async operation) */}} {{- /* This is necessary so that the ID is set correctly (and so that the following Read can succeed.) */}} {{- /* Technically this should possibly use the read URL explicitly, since id_format could differ - but that might need to be in addition to id_format anyway. */}} -{{- if and $.HasComputedIdFormatFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} +{{- if and $.HasPostCreateComputedFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} // Set computed resource properties from create API response so that they're available on the subsequent Read // call. 
err = resource{{ $.ResourceName }}PostCreateSetComputedFields(d, meta, res) @@ -309,7 +309,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{if and $.GetAsync ($.GetAsync.Allow "Create") -}} {{ if ($.GetAsync.IsA "OpAsync") -}} -{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasComputedIdFormatFields -}} +{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasPostCreateComputedFields -}} // Use the resource in the operation response to populate // identity fields and d.Id() before read var opRes map[string]interface{} @@ -352,11 +352,11 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ } {{- end}} {{- end}} - {{- if $.HasComputedIdFormatFields}} + {{- if $.HasPostCreateComputedFields}} {{- $renderedIdFromName := "false" }} {{- range $prop := $.GettableProperties }} {{- /* Check if prop is potentially computed */}} - {{- if and ($.InIdFormat $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} + {{- if and ($.InPostCreateComputed $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} {{- if and (eq $prop.CustomFlatten "templates/terraform/custom_flatten/id_from_name.tmpl") (eq $renderedIdFromName "false") }} // Setting `name` field so that `id_from_name` flattener will work properly. 
if err := d.Set("name", flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}Name(opRes["name"], d, config)); err != nil { @@ -1222,7 +1222,7 @@ func resource{{ $.ResourceName -}}PostCreateFailure(d *schema.ResourceData, meta {{ $.CustomTemplate $.StateMigrationFile false -}} {{- end }} -{{- if and $.HasComputedIdFormatFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} +{{- if and $.HasPostCreateComputedFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.ResourceData, meta interface{}, res map[string]interface{}) error { config := meta.(*transport_tpg.Config) {{- /* Don't render decoder for PollAsync resources - their decoders are expected to return `nil` until the resource completion completes, but we need to set their computed fields in order to call PollRead - so there can never be a dependency on the decoder. */}} @@ -1252,7 +1252,7 @@ func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.Resource {{- $renderedIdFromName := "false" }} {{- range $prop := $.GettableProperties }} {{- /* Check if prop is potentially computed */}} - {{- if and ($.InIdFormat $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} + {{- if and ($.InPostCreateComputed $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} {{- if and (eq $prop.CustomFlatten "templates/terraform/custom_flatten/id_from_name.tmpl") (eq $renderedIdFromName "false") }} // Setting `name` field so that `id_from_name` flattener will work properly. 
if err := d.Set("name", flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}Name(res["name"], d, config)); err != nil { From d84065724079ab91f525225120c2512486482a2e Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Fri, 30 May 2025 05:24:01 +0530 Subject: [PATCH 272/884] Immutable backups (#14080) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/netapp/BackupVault.yaml | 26 +++ .../netapp/resource_netapp_backup_test.go | 170 ++++++++++++++++++ 2 files changed, 196 insertions(+) diff --git a/mmv1/products/netapp/BackupVault.yaml b/mmv1/products/netapp/BackupVault.yaml index 0bfb511d6aa8..a40e43d2a02a 100644 --- a/mmv1/products/netapp/BackupVault.yaml +++ b/mmv1/products/netapp/BackupVault.yaml @@ -116,3 +116,29 @@ properties: description: | Name of the Backup vault created in backup region. output: true + - name: 'backupRetentionPolicy' + type: NestedObject + description: | + Backup retention policy defining the retention of the backups. + properties: + - name: 'backupMinimumEnforcedRetentionDays' + type: Integer + description: | + Minimum retention duration in days for backups in the backup vault. + required: true + - name: 'dailyBackupImmutable' + type: Boolean + description: | + Indicates if the daily backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. + - name: 'weeklyBackupImmutable' + type: Boolean + description: | + Indicates if the weekly backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. + - name: 'monthlyBackupImmutable' + type: Boolean + description: | + Indicates if the monthly backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. 
+ - name: 'manualBackupImmutable' + type: Boolean + description: | + Indicates if the manual backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index 63e533adff2f..388f10fef1ac 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -325,3 +325,173 @@ resource "google_netapp_backup" "test_backup" { } `, context) } + +func TestAccNetappBackup_NetappImmutableBackup(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccNetappBackup_ImmutableBackup(context), + }, + { + ResourceName: "google_netapp_backup.test_backup", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "name", "terraform_labels", "vault_name"}, + }, + { + Config: testAccNetappBackup_ImmutableBackupUpdate(context), + }, + { + ResourceName: "google_netapp_backup.test_backup", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "name", "terraform_labels", "vault_name"}, + }, + }, + }) +} + +func testAccNetappBackup_ImmutableBackup(context map[string]interface{}) string 
{ + return acctest.Nprintf(` +data "google_compute_network" "default" { + name = "%{network_name}" +} +resource "google_netapp_storage_pool" "default" { + name = "tf-test-backup-pool%{random_suffix}" + location = "us-central1" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + zone = "us-central1-a" + replica_zone = "us-central1-b" +} +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} +resource "google_netapp_volume" "default" { + name = "tf-test-backup-volume%{random_suffix}" + location = "us-central1" + capacity_gib = "100" + share_name = "tf-test-backup-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + deletion_policy = "FORCE" + backup_config { + backup_vault = google_netapp_backup_vault.default.id + } +} +resource "google_netapp_backup_vault" "default" { + name = "tf-test-backup-vault%{random_suffix}" + location = "us-central1" + backup_retention_policy { + backup_minimum_enforced_retention_days = 2 + daily_backup_immutable = true + weekly_backup_immutable = false + monthly_backup_immutable = false + manual_backup_immutable = false + } +} +resource "google_netapp_volume_snapshot" "default" { + depends_on = [google_netapp_volume.default] + location = "us-central1" + volume_name = google_netapp_volume.default.name + description = "This is a test description" + name = "testvolumesnap%{random_suffix}" + labels = { + key= "test" + value= "snapshot" + } +} +resource "google_netapp_backup" "test_backup" { + name = "tf-test-test-backup%{random_suffix}" + description = "This is a test immutable backup" + source_volume = google_netapp_volume.default.id + location = "us-central1" + vault_name = google_netapp_backup_vault.default.name + source_snapshot = google_netapp_volume_snapshot.default.id + labels = { + key= "test" + value= "backup" + } +} +`, context) +} + +func 
testAccNetappBackup_ImmutableBackupUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_network" "default" { + name = "%{network_name}" +} +resource "google_netapp_storage_pool" "default" { + name = "tf-test-backup-pool%{random_suffix}" + location = "us-central1" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + zone = "us-central1-a" + replica_zone = "us-central1-b" +} +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} +resource "google_netapp_volume" "default" { + name = "tf-test-backup-volume%{random_suffix}" + location = "us-central1" + capacity_gib = "100" + share_name = "tf-test-backup-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + deletion_policy = "FORCE" + backup_config { + backup_vault = google_netapp_backup_vault.default.id + } +} +resource "google_netapp_backup_vault" "default" { + name = "tf-test-backup-vault%{random_suffix}" + location = "us-central1" + backup_retention_policy { + backup_minimum_enforced_retention_days = 12 + daily_backup_immutable = true + weekly_backup_immutable = true + monthly_backup_immutable = true + manual_backup_immutable = true + } +} +resource "google_netapp_volume_snapshot" "default" { + depends_on = [google_netapp_volume.default] + location = "us-central1" + volume_name = google_netapp_volume.default.name + description = "This is a test description" + name = "testvolumesnap%{random_suffix}" + labels = { + key= "test" + value= "snapshot" + } +} +resource "google_netapp_backup" "test_backup" { + name = "tf-test-test-backup%{random_suffix}" + description = "This is a test immutable backup" + source_volume = google_netapp_volume.default.id + location = "us-central1" + vault_name = google_netapp_backup_vault.default.name + source_snapshot = google_netapp_volume_snapshot.default.id + labels = { + key= 
"test" + value= "backup" + } +} +`, context) +} From e953fc05196a4fb54eaf8ced67e21db71f4b2436 Mon Sep 17 00:00:00 2001 From: Tommy Reddad Date: Thu, 29 May 2025 19:15:59 -0600 Subject: [PATCH 273/884] eventarc: use tf-bootstrap prefix for bootstrapped resource names (#14139) --- mmv1/products/eventarc/Pipeline.yaml | 12 ++++++------ mmv1/products/eventarc/Trigger.yaml | 2 +- .../eventarc/resource_eventarc_message_bus_test.go | 6 +++--- .../eventarc/resource_eventarc_pipeline_test.go | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/mmv1/products/eventarc/Pipeline.yaml b/mmv1/products/eventarc/Pipeline.yaml index ed4a14eb74e9..4215498bbe1b 100644 --- a/mmv1/products/eventarc/Pipeline.yaml +++ b/mmv1/products/eventarc/Pipeline.yaml @@ -45,7 +45,7 @@ examples: test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_http_destination primary_resource_id: primary vars: @@ -54,7 +54,7 @@ examples: test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, 
"tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_workflow_destination primary_resource_id: primary vars: @@ -64,7 +64,7 @@ examples: test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_oidc_and_json_format primary_resource_id: primary vars: @@ -74,7 +74,7 @@ examples: project_id: 'PROJECT_NAME' service_account: 'SERVICE_ACCT' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_oauth_and_protobuf_format primary_resource_id: primary vars: @@ -84,7 +84,7 @@ examples: project_id: 'PROJECT_NAME' service_account: 'SERVICE_ACCT' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, 
"tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_cmek_and_avro_format primary_resource_id: primary bootstrap_iam: @@ -97,7 +97,7 @@ examples: test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' 'key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name' parameters: - name: location diff --git a/mmv1/products/eventarc/Trigger.yaml b/mmv1/products/eventarc/Trigger.yaml index 35c1d03dd84e..2bd878675905 100644 --- a/mmv1/products/eventarc/Trigger.yaml +++ b/mmv1/products/eventarc/Trigger.yaml @@ -50,7 +50,7 @@ examples: trigger_name: some-trigger network_attachment_name: network-attachment test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-trigger-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-trigger-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-trigger-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-trigger-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-trigger-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-trigger-network")))' test_env_vars: project_id: 'PROJECT_NAME' service_account: 'SERVICE_ACCT' diff --git a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go 
b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go index 7038f03b33cf..d5f747428849 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go @@ -416,7 +416,7 @@ func testAccEventarcMessageBus_pipeline(t *testing.T) { "project_id": envvar.GetTestProjectFromEnv(), "region": envvar.GetTestRegionFromEnv(), "random_suffix": acctest.RandString(t, 10), - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-messagebus-network"))), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-messagebus-network"))), } acctest.VcrTest(t, resource.TestCase{ @@ -464,7 +464,7 @@ func testAccEventarcMessageBus_enrollment(t *testing.T) { "project_id": envvar.GetTestProjectFromEnv(), "region": envvar.GetTestRegionFromEnv(), "random_suffix": acctest.RandString(t, 10), - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-messagebus-network"))), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-messagebus-network"))), } acctest.VcrTest(t, resource.TestCase{ @@ -531,7 +531,7 @@ func testAccEventarcMessageBus_updateEnrollment(t *testing.T) { "project_id": envvar.GetTestProjectFromEnv(), "region": envvar.GetTestRegionFromEnv(), 
"random_suffix": acctest.RandString(t, 10), - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-messagebus-network"))), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-messagebus-network"))), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go index b7a188398ca2..76e623cb56fd 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go @@ -18,7 +18,7 @@ func TestAccEventarcPipeline_update(t *testing.T) { "service_account": envvar.GetTestServiceAccountFromEnv(t), "key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name, "key2_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key2").CryptoKey.Name, - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network"))), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network"))), "random_suffix": acctest.RandString(t, 10), } acctest.BootstrapIamMembers(t, 
[]acctest.IamMember{ From b9d0e18f9288fed9b17972262cf39e8d5f66ac77 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 30 May 2025 04:46:32 +0200 Subject: [PATCH 274/884] feat: `google_datastream_private_connection` resource `network_attachement` support via `psc_interface_config` (#13947) --- .../datastream/PrivateConnection.yaml | 42 +++++++++++++++++-- ...m_private_connection_psc_interface.tf.tmpl | 37 ++++++++++++++++ 2 files changed, 75 insertions(+), 4 deletions(-) create mode 100644 mmv1/templates/terraform/examples/datastream_private_connection_psc_interface.tf.tmpl diff --git a/mmv1/products/datastream/PrivateConnection.yaml b/mmv1/products/datastream/PrivateConnection.yaml index 9c91ead1d608..37aea53e9381 100644 --- a/mmv1/products/datastream/PrivateConnection.yaml +++ b/mmv1/products/datastream/PrivateConnection.yaml @@ -28,9 +28,9 @@ immutable: true import_format: - 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 + insert_minutes: 30 + update_minutes: 30 + delete_minutes: 30 async: actions: ['create', 'delete', 'update'] type: 'OpAsync' @@ -51,6 +51,18 @@ examples: vars: private_connection_id: 'my-connection' network_name: 'my-network' + - name: 'datastream_private_connection_psc_interface' + primary_resource_id: 'default' + test_env_vars: + project: 'PROJECT_NAME' + project_number: 'PROJECT_NUMBER' + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + vars: + private_connection_id: 'my-connection' + network_attachment_name: 'my-network-attachment' + network_name: 'my-network' + subnetwork_name: 'my-subnetwork' parameters: - name: 'privateConnectionId' type: String @@ -116,7 +128,9 @@ properties: description: | The VPC Peering configuration is used to create VPC peering between Datastream and the consumer's VPC. 
- required: true + exactly_one_of: + - 'vpc_peering_config' + - 'psc_interface_config' properties: - name: 'vpc' type: String @@ -129,3 +143,23 @@ properties: description: | A free subnet for peering. (CIDR of /29) required: true + - name: 'pscInterfaceConfig' + type: NestedObject + description: | + The PSC Interface configuration is used to create PSC Interface + between Datastream and the consumer's PSC. + exactly_one_of: + - 'vpc_peering_config' + - 'psc_interface_config' + properties: + - name: 'networkAttachment' + type: String + description: | + Fully qualified name of the network attachment that Datastream will connect to. + Format: projects/{project}/regions/{region}/networkAttachments/{name} + + To get Datastream project for the accepted list: + `gcloud datastream private-connections create [PC ID] --location=[LOCATION] --network-attachment=[NA URI] --validate-only --display-name=[ANY STRING]` + Add Datastream project to the attachment accepted list: + `gcloud compute network-attachments update [NA URI] --region=[NA region] --producer-accept-list=[TP from prev command]` + required: true diff --git a/mmv1/templates/terraform/examples/datastream_private_connection_psc_interface.tf.tmpl b/mmv1/templates/terraform/examples/datastream_private_connection_psc_interface.tf.tmpl new file mode 100644 index 000000000000..c4ee92cc100b --- /dev/null +++ b/mmv1/templates/terraform/examples/datastream_private_connection_psc_interface.tf.tmpl @@ -0,0 +1,37 @@ +resource "google_datastream_private_connection" "{{$.PrimaryResourceId}}" { + display_name = "Connection profile" + location = "us-central1" + private_connection_id = "{{index $.Vars "private_connection_id"}}" + + labels = { + key = "value" + } + + psc_interface_config { + network_attachment = google_compute_network_attachment.default.id + } +} + +resource "google_compute_network_attachment" "default" { + name = "{{index $.Vars "network_attachment_name"}}" + region = "us-central1" + description = "basic network 
attachment description" + connection_preference = "ACCEPT_AUTOMATIC" + + subnetworks = [ + google_compute_subnetwork.default.self_link + ] +} + +resource "google_compute_network" "default" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "{{index $.Vars "subnetwork_name"}}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.0.0.0/16" +} From d903383d2d97ee948732da2229fb0d5ba27cdfa6 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 30 May 2025 08:51:19 -0700 Subject: [PATCH 275/884] tgc-revival: add testing utils (#14142) --- mmv1/third_party/tgc_next/go.mod | 1 + mmv1/third_party/tgc_next/test/utils.go | 83 +++++++++++++++++++++++++ 2 files changed, 84 insertions(+) diff --git a/mmv1/third_party/tgc_next/go.mod b/mmv1/third_party/tgc_next/go.mod index 6ea26054c30c..e9144f378cbb 100644 --- a/mmv1/third_party/tgc_next/go.mod +++ b/mmv1/third_party/tgc_next/go.mod @@ -5,6 +5,7 @@ go 1.23.0 toolchain go1.23.5 require ( + cloud.google.com/go/storage v1.50.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/google/go-cmp v0.7.0 github.com/hashicorp/errwrap v1.1.0 diff --git a/mmv1/third_party/tgc_next/test/utils.go b/mmv1/third_party/tgc_next/test/utils.go index dbf7c528c361..d11248f0ae7c 100644 --- a/mmv1/third_party/tgc_next/test/utils.go +++ b/mmv1/third_party/tgc_next/test/utils.go @@ -1,9 +1,14 @@ package test import ( + "bytes" "encoding/json" "fmt" "os" + "os/exec" + "path/filepath" + "strings" + "testing" ) // Writes the data into a JSON file @@ -19,3 +24,81 @@ func writeJSONFile(filename string, data interface{}) error { } return nil } + +const ( + defaultOrganization = "529579013760" + defaultProject = "ci-test-project-nightly-beta" +) + +func terraformWorkflow(t *testing.T, dir, name string) { + terraformInit(t, "terraform", dir) + terraformPlan(t, "terraform", dir, name+".tfplan") + payload := terraformShow(t, 
"terraform", dir, name+".tfplan") + saveFile(t, dir, name+".tfplan.json", payload) +} + +func terraformInit(t *testing.T, executable, dir string) { + terraformExec(t, executable, dir, "init", "-input=false") +} + +func terraformPlan(t *testing.T, executable, dir, tfplan string) { + terraformExec(t, executable, dir, "plan", "-input=false", "-refresh=false", "-out", tfplan) +} + +func terraformShow(t *testing.T, executable, dir, tfplan string) []byte { + return terraformExec(t, executable, dir, "show", "--json", tfplan) +} + +func terraformExec(t *testing.T, executable, dir string, args ...string) []byte { + cmd := exec.Command(executable, args...) + cmd.Env = []string{ + "HOME=" + filepath.Join(dir, "fakehome"), + "GOOGLE_PROJECT=" + defaultProject, + "GOOGLE_FOLDER=" + "", + "GOOGLE_ORG=" + defaultOrganization, + "GOOGLE_OAUTH_ACCESS_TOKEN=fake-token", // GOOGLE_OAUTH_ACCESS_TOKEN is required so terraform plan does not require the google authentication cert + } + if os.Getenv("TF_CLI_CONFIG_FILE") != "" { + cmd.Env = append(cmd.Env, "TF_CLI_CONFIG_FILE="+os.Getenv("TF_CLI_CONFIG_FILE")) + } + cmd.Dir = dir + wantError := false + payload, _ := run(t, cmd, wantError) + return payload +} + +func saveFile(t *testing.T, dir, filename string, payload []byte) { + fullpath := filepath.Join(dir, filename) + f, err := os.Create(fullpath) + if err != nil { + t.Fatalf("error while creating file %s, error %v", fullpath, err) + } + _, err = f.Write(payload) + if err != nil { + t.Fatalf("error while writing to file %s, error %v", fullpath, err) + } +} + +// run a command and call t.Fatal on non-zero exit. 
+func run(t *testing.T, cmd *exec.Cmd, wantError bool) ([]byte, []byte) { + var stderr, stdout bytes.Buffer + cmd.Stderr, cmd.Stdout = &stderr, &stdout + err := cmd.Run() + if gotError := (err != nil); gotError != wantError { + t.Fatalf("running %s: \nerror=%v \nstderr=%s \nstdout=%s", cmd.String(), err, stderr.String(), stdout.String()) + } + // Print env, stdout and stderr if verbose flag is used. + if len(cmd.Env) != 0 { + t.Logf("=== Environment Variable of %s ===", cmd.String()) + t.Log(strings.Join(cmd.Env, "\n")) + } + if stdout.String() != "" { + t.Logf("=== STDOUT of %s ===", cmd.String()) + t.Log(stdout.String()) + } + if stderr.String() != "" { + t.Logf("=== STDERR of %s ===", cmd.String()) + t.Log(stderr.String()) + } + return stdout.Bytes(), stderr.Bytes() +} From c5be0a7895a8c4395fd206f6d5a243cfa11c50cb Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Fri, 30 May 2025 14:56:38 -0400 Subject: [PATCH 276/884] container: bump flex_start to GA (#14094) Signed-off-by: drfaust92 --- .../terraform/services/container/node_config.go.tmpl | 6 ------ .../container/resource_container_cluster_test.go.tmpl | 4 ---- .../container/resource_container_node_pool_test.go.tmpl | 2 -- .../website/docs/r/container_cluster.html.markdown | 2 +- 4 files changed, 1 insertion(+), 13 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index f1e92234c700..6ccd2d65bcac 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -873,14 +873,12 @@ func schemaNodeConfig() *schema.Schema { ForceNew: true, Description: `The runtime of each node in the node pool in seconds, terminated by 's'. 
Example: "3600s".`, }, -{{ if ne $.TargetVersionName `ga` -}} "flex_start" : { Type: schema.TypeBool, Optional: true, ForceNew: true, Description: `Enables Flex Start provisioning model for the node pool`, }, -{{- end }} }, }, } @@ -1276,11 +1274,9 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.MaxRunDuration = v.(string) } - {{ if ne $.TargetVersionName `ga` -}} if v,ok := nodeConfig["flex_start"]; ok { nc.FlexStart = v.(bool) } - {{- end }} {{ if ne $.TargetVersionName `ga` -}} if v, ok := nodeConfig["host_maintenance_policy"]; ok { @@ -1713,9 +1709,7 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "node_group": c.NodeGroup, "advanced_machine_features": flattenAdvancedMachineFeaturesConfig(c.AdvancedMachineFeatures), "max_run_duration": c.MaxRunDuration, -{{- if ne $.TargetVersionName "ga" }} "flex_start": c.FlexStart, -{{- end }} "sole_tenant_config": flattenSoleTenantConfig(c.SoleTenantConfig), "fast_socket": flattenFastSocket(c.FastSocket), "resource_manager_tags": flattenResourceManagerTags(c.ResourceManagerTags), diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 8ed7ffe556ea..7e18c681cf13 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -503,7 +503,6 @@ func TestAccContainerCluster_withMaxRunDuration(t *testing.T) { }) } -{{ if ne $.TargetVersionName `ga` -}} func TestAccContainerCluster_withFlexStart(t *testing.T) { t.Parallel() @@ -535,7 +534,6 @@ func TestAccContainerCluster_withFlexStart(t *testing.T) { }, }) } -{{- end }} func TestAccContainerCluster_withILBSubsetting(t *testing.T) { t.Parallel() @@ -7203,7 +7201,6 @@ resource "google_container_cluster" "max_run_duration" { `, clusterName, npName, duration, 
networkName, subnetworkName) } -{{ if ne $.TargetVersionName `ga` -}} func testAccContainerCluster_withFlexStart(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` data "google_container_engine_versions" "uscentral1a" { @@ -7253,7 +7250,6 @@ resource "google_container_cluster" "flex_start" { } `, clusterName, npName, networkName, subnetworkName) } -{{- end }} func testAccContainerCluster_withILBSubSetting(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 112febeeb259..7d5b453ebc7c 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -4668,7 +4668,6 @@ resource "google_container_node_pool" "np" { `, clusterName, networkName, subnetworkName, np) } -{{ if ne $.TargetVersionName `ga` -}} func TestAccContainerNodePool_withFlexStart(t *testing.T) { t.Parallel() @@ -4749,7 +4748,6 @@ resource "google_container_node_pool" "np" { } `, clusterName, networkName, subnetworkName, np) } -{{- end }} func TestAccContainerNodePool_tpuTopology(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 5bfc0c6939d2..beba9a30082c 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -936,7 +936,7 @@ gvnic { * `max_run_duration` - (Optional) The runtime of each node in the node pool in seconds, terminated by 's'. Example: "3600s". 
-* `flex_start` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html))) Enables Flex Start provisioning model for the node pool. +* `flex_start` - (Optional) Enables Flex Start provisioning model for the node pool. * `local_ssd_count` - (Optional) The amount of local SSD disks that will be attached to each cluster node. Defaults to 0. From 8b77348511ead66a07d5f91d619eb2bdfb8d8825 Mon Sep 17 00:00:00 2001 From: palramanathan <117597159+palramanathan@users.noreply.github.com> Date: Fri, 30 May 2025 12:32:21 -0700 Subject: [PATCH 277/884] Adding delegatingServiceAccount to output (#14067) --- mmv1/products/beyondcorp/SecurityGateway.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmv1/products/beyondcorp/SecurityGateway.yaml b/mmv1/products/beyondcorp/SecurityGateway.yaml index 487b4608bf32..6eb4e755e96c 100644 --- a/mmv1/products/beyondcorp/SecurityGateway.yaml +++ b/mmv1/products/beyondcorp/SecurityGateway.yaml @@ -135,3 +135,8 @@ properties: type: String description: Identifier. Name of the resource. output: true + - name: delegatingServiceAccount + type: String + description: |- + Service account used for operations that involve resources in consumer projects. 
+ output: true From e7df850b7b1bb9ac399789924b52c6b729085b12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wiatrowski?= Date: Fri, 30 May 2025 22:05:35 +0200 Subject: [PATCH 278/884] Update Gemini GeminiGcpEnablementSetting resource -- add the `disable_web_grounding` field (#14120) --- mmv1/products/gemini/GeminiGcpEnablementSetting.yaml | 3 +++ .../gemini_gemini_gcp_enablement_setting_basic.tf.tmpl | 1 + .../gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl | 1 + ...source_gemini_gemini_gcp_enablement_setting_binding_test.go | 2 ++ .../resource_gemini_gemini_gcp_enablement_setting_test.go | 2 ++ 5 files changed, 9 insertions(+) diff --git a/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml b/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml index 5d1eec4e8c51..49a01ab3e820 100644 --- a/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml +++ b/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml @@ -68,3 +68,6 @@ properties: - name: enableCustomerDataSharing type: Boolean description: Whether customer data sharing should be enabled. + - name: disableWebGrounding + type: Boolean + description: Whether web grounding should be disabled. 
diff --git a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl index 27cae70d0907..a614e3fa2e38 100644 --- a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl @@ -3,4 +3,5 @@ resource "google_gemini_gemini_gcp_enablement_setting" "{{$.PrimaryResourceId}}" location = "global" labels = {"my_key": "my_value"} enable_customer_data_sharing = true + disable_web_grounding = true } diff --git a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl index a22431296516..ed6fe1601b99 100644 --- a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl @@ -3,6 +3,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key": "my_value"} enable_customer_data_sharing = true + disable_web_grounding = true } resource "google_gemini_gemini_gcp_enablement_setting_binding" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go index ba034eeccc40..94da6898dc65 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go @@ -59,6 +59,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key": "my_value"} 
enable_customer_data_sharing = true + disable_web_grounding = true } resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { @@ -80,6 +81,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = false + disable_web_grounding = false } resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go index 0d1294cbc08c..7461e47c1525 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go @@ -51,6 +51,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = true + disable_web_grounding = true } `, context) } @@ -61,6 +62,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = false + disable_web_grounding = false } `, context) } From fd38b5bc0589cfed4b938234bcf5804a65004aff Mon Sep 17 00:00:00 2001 From: Ilia Lazebnik Date: Fri, 30 May 2025 16:18:09 -0400 Subject: [PATCH 279/884] container: add support for network_performance_config (#14095) Signed-off-by: drfaust92 --- .../resource_container_cluster.go.tmpl | 60 ++++++++++++++++++ .../resource_container_cluster_test.go.tmpl | 63 +++++++++++++++++++ .../docs/r/container_cluster.html.markdown | 6 ++ 3 files changed, 129 insertions(+) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 
a51a3f8b2108..b5e37af2d212 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2434,6 +2434,21 @@ func ResourceContainerCluster() *schema.Resource { Description: `Defines the config of in-transit encryption`, ValidateFunc: validation.StringInSlice([]string{"IN_TRANSIT_ENCRYPTION_CONFIG_UNSPECIFIED", "IN_TRANSIT_ENCRYPTION_DISABLED", "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"}, false), }, + "network_performance_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Network bandwidth tier configuration.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "total_egress_bandwidth_tier": { + Type: schema.TypeString, + Required: true, + Description: `Specifies the total network bandwidth tier for NodePools in the cluster.`, + }, + }, + }, + }, }, } } @@ -2600,6 +2615,7 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er EnableMultiNetworking: d.Get("enable_multi_networking").(bool), DefaultEnablePrivateNodes: expandDefaultEnablePrivateNodes(d), EnableFqdnNetworkPolicy: d.Get("enable_fqdn_network_policy").(bool), + NetworkPerformanceConfig: expandNetworkPerformanceConfig(d.Get("network_performance_config")), }, MasterAuth: expandMasterAuth(d.Get("master_auth")), NotificationConfig: expandNotificationConfig(d.Get("notification_config")), @@ -3290,6 +3306,9 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("gateway_api_config", flattenGatewayApiConfig(cluster.NetworkConfig.GatewayApiConfig)); err != nil { return err } + if err := d.Set("network_performance_config", flattenNetworkPerformanceConfig(cluster.NetworkConfig.NetworkPerformanceConfig)); err != nil { + return err + } if err := d.Set("fleet", flattenFleet(cluster.Fleet)); err != nil { return err } @@ -4538,6 +4557,24 @@ func resourceContainerClusterUpdate(d 
*schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s resource usage export config has been updated", d.Id()) } + if d.HasChange("network_performance_config") { + if npc, ok := d.GetOk("network_performance_config"); ok { + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredNetworkPerformanceConfig: expandNetworkPerformanceConfig(npc), + }, + } + + updateF := updateFunc(req, "updating GKE Network Performance Config") + // Call update serially. + if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s Network Performance Config has been updated", d.Id()) + } + } + if d.HasChange("gateway_api_config") { if gac, ok := d.GetOk("gateway_api_config"); ok { req := &container.UpdateClusterRequest{ @@ -6047,6 +6084,18 @@ func expandDnsConfig(configured interface{}) *container.DNSConfig { } } +func expandNetworkPerformanceConfig(configured interface{}) *container.ClusterNetworkPerformanceConfig { + l := configured.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil + } + + config := l[0].(map[string]interface{}) + return &container.ClusterNetworkPerformanceConfig{ + TotalEgressBandwidthTier: config["total_egress_bandwidth_tier"].(string), + } +} + func expandGatewayApiConfig(configured interface{}) *container.GatewayAPIConfig { l := configured.([]interface{}) if len(l) == 0 || l[0] == nil { @@ -7046,6 +7095,17 @@ func flattenDnsConfig(c *container.DNSConfig) []map[string]interface{} { } } +func flattenNetworkPerformanceConfig(c *container.ClusterNetworkPerformanceConfig) []map[string]interface{} { + if c == nil { + return nil + } + return []map[string]interface{}{ + { + "total_egress_bandwidth_tier": c.TotalEgressBandwidthTier, + }, + } +} + func flattenGatewayApiConfig(c *container.GatewayAPIConfig) []map[string]interface{} { if c == nil { return nil diff --git 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 7e18c681cf13..7a5ae0c9ef1a 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -685,6 +685,45 @@ func TestAccContainerCluster_inTransitEncryptionConfig(t *testing.T) { }) } +func TestAccContainerCluster_networkPerformanceConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_networkPerformanceConfig(clusterName, networkName, subnetworkName, "TIER_1"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "network_performance_config.0.total_egress_bandwidth_tier", "TIER_1"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_networkPerformanceConfig(clusterName, networkName, subnetworkName, "TIER_UNSPECIFIED"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "network_performance_config.0.total_egress_bandwidth_tier", "TIER_UNSPECIFIED"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccContainerCluster_withFQDNNetworkPolicy(t *testing.T) { t.Parallel() @@ -13833,4 +13872,28 @@ resource "google_container_cluster" "primary" { in_transit_encryption_config = "%s" } `, name, networkName, subnetworkName, config) +} + +func testAccContainerCluster_networkPerformanceConfig(name, networkName, subnetworkName, config string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + network = "%s" + subnetwork = "%s" + deletion_protection = false + + node_config { + machine_type = "n2-standard-32" + gvnic { + enabled = true + } + } + + network_performance_config { + total_egress_bandwidth_tier = "%s" + } +} +`, name, networkName, subnetworkName, config) } \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index beba9a30082c..57ee6cf26e19 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -941,6 +941,8 @@ gvnic { * `local_ssd_count` - (Optional) The amount of local SSD disks that will be attached to each cluster node. Defaults to 0. +* `network_performance_config` - (Optional) Network bandwidth tier configuration. Structure is [documented below](#network_performance_config). + * `machine_type` - (Optional) The name of a Google Compute Engine machine type. Defaults to `e2-medium`. To create a custom machine type, value should be set as specified [here](https://cloud.google.com/compute/docs/reference/latest/instances#machineType). @@ -1127,6 +1129,10 @@ sole_tenant_config { * `max_shared_clients_per_gpu` (Required) - The maximum number of containers that can share a GPU. 
+The `network_performance_config` block supports: + +* `total_egress_bandwidth_tier` (Required) - Specifies the total network bandwidth tier for NodePools in the cluster. + The `workload_identity_config` block supports: * `workload_pool` (Optional) - The workload pool to attach all Kubernetes service accounts to. From d238fedba9f5d28840f3dc63ccc2a7862b95d020 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Fri, 30 May 2025 13:42:28 -0700 Subject: [PATCH 280/884] Update bootstrapped network names in AlloyDB tests (#14157) --- mmv1/products/alloydb/Backup.yaml | 4 +-- mmv1/products/alloydb/Cluster.yaml | 2 +- mmv1/products/alloydb/Instance.yaml | 4 +-- mmv1/products/alloydb/User.yaml | 4 +-- .../data_source_alloydb_cluster_test.go | 2 +- ...a_source_alloydb_database_instance_test.go | 2 +- .../alloydb/resource_alloydb_backup_test.go | 6 ++-- .../resource_alloydb_cluster_restore_test.go | 2 +- .../alloydb/resource_alloydb_cluster_test.go | 2 +- .../alloydb/resource_alloydb_instance_test.go | 18 +++++----- ...resource_alloydb_secondary_cluster_test.go | 36 +++++++++---------- ...esource_alloydb_secondary_instance_test.go | 14 ++++---- .../alloydb/resource_alloydb_user_test.go | 6 ++-- 13 files changed, 51 insertions(+), 51 deletions(-) diff --git a/mmv1/products/alloydb/Backup.yaml b/mmv1/products/alloydb/Backup.yaml index 621ea1b436fa..8d1185c5a649 100644 --- a/mmv1/products/alloydb/Backup.yaml +++ b/mmv1/products/alloydb/Backup.yaml @@ -60,7 +60,7 @@ examples: alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' ignore_read_extra: - 'reconciling' - 'update_time' @@ -83,7 +83,7 @@ examples: alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' test_vars_overrides: - 
'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' ignore_read_extra: - 'reconciling' - 'update_time' diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index cce1fdb55d35..834a8aa8573b 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -112,7 +112,7 @@ examples: alloydb_secondary_cluster_name: 'alloydb-secondary-cluster' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' exclude_docs: true virtual_fields: - name: 'deletion_policy' diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index 5457b33c4df1..f93d3f089b4e 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -82,7 +82,7 @@ examples: alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' ignore_read_extra: - 'reconciling' - 'update_time' @@ -96,7 +96,7 @@ examples: alloydb_secondary_instance_name: 'alloydb-secondary-instance' network_name: 'alloydb-secondary-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' ignore_read_extra: - 'reconciling' - 'update_time' diff --git a/mmv1/products/alloydb/User.yaml b/mmv1/products/alloydb/User.yaml index df3ba9a3dcd3..8efb6d14a6fb 100644 --- a/mmv1/products/alloydb/User.yaml +++ 
b/mmv1/products/alloydb/User.yaml @@ -58,7 +58,7 @@ examples: alloydb_user_pass: 'user_secret' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' ignore_read_extra: - 'password' exclude_docs: true @@ -80,7 +80,7 @@ examples: alloydb_user_name: 'user2@foo.com' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' exclude_docs: true parameters: - name: 'cluster' diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go index 69541de1a6b7..e520602a99f2 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbDatabaseClusterDatasourceConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-cluster-ds"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go index cb1a5c799452..5220263365cd 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go @@ -12,7 +12,7 @@ func 
TestAccAlloydbDatabaseInstanceDatasourceConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-mandatory-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go index 6b92415691b9..271676fdc51e 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbBackup_update(t *testing.T) { random_suffix := acctest.RandString(t, 10) context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-backup-update-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": random_suffix, } @@ -119,7 +119,7 @@ func TestAccAlloydbBackup_createBackupWithMandatoryFields(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-backup-mandatory-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -169,7 +169,7 @@ func TestAccAlloydbBackup_usingCMEK(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-backup-cmek-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", 
"tf-bootstrap-alloydb-backup-key1").CryptoKey.Name, } diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go index 4685bd07752a..9d12fba70b1b 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go @@ -19,7 +19,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-restore-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go index 1babba6c1cd5..3c7d9f433cae 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go @@ -71,7 +71,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-upgrade-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index d138851f7d33..f34ff12f7d78 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbInstance_update(t 
*testing.T) { random_suffix := acctest.RandString(t, 10) context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-update-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": random_suffix, } @@ -113,7 +113,7 @@ func TestAccAlloydbInstance_createInstanceWithMandatoryFields(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-mandatory-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -133,7 +133,7 @@ func TestAccAlloydbInstance_stopstart(t *testing.T) { t.Parallel() suffix := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-clientconnectionconfig") + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1") context := map[string]interface{}{ "random_suffix": suffix, @@ -251,7 +251,7 @@ data "google_compute_network" "default" { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-maximum-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -323,7 +323,7 @@ func TestAccAlloydbInstance_createPrimaryAndReadPoolInstance(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-readpool-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -378,7 +378,7 @@ data "google_compute_network" "default" { context := map[string]interface{}{ "random_suffix": 
acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-updatedb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -459,7 +459,7 @@ data "google_compute_network" "default" { func TestAccAlloydbInstance_createInstanceWithNetworkConfigAndAllocatedIPRange(t *testing.T) { t.Parallel() - testId := "alloydbinstance-network-config-1" + testId := "alloydb-1" addressName := acctest.BootstrapSharedTestGlobalAddress(t, testId) networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, testId) @@ -517,7 +517,7 @@ func TestAccAlloydbInstance_clientConnectionConfig(t *testing.T) { t.Parallel() suffix := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-clientconnectionconfig") + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1") context := map[string]interface{}{ "random_suffix": suffix, @@ -682,7 +682,7 @@ func TestAccAlloydbInstance_networkConfig(t *testing.T) { t.Parallel() suffix := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-networkconfig") + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1") context1 := map[string]interface{}{ "random_suffix": suffix, diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go index 9855f928c54d..23c9340b6dc0 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go @@ -12,7 +12,7 @@ import ( func TestAccAlloydbCluster_secondaryClusterMandatoryFields(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -86,7 +86,7 @@ func TestAccAlloydbCluster_secondaryClusterMissingSecondaryConfig(t *testing.T) t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -151,7 +151,7 @@ func TestAccAlloydbCluster_secondaryClusterDefinedSecondaryConfigButMissingClust t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -219,7 +219,7 @@ func TestAccAlloydbCluster_secondaryClusterDefinedSecondaryConfigButClusterTypeI t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -288,7 +288,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -374,7 +374,7 @@ func TestAccAlloydbCluster_secondaryClusterUsingCMEK(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, 
"alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-bootstrap-alloydb-secondary-key1").CryptoKey.Name, } @@ -462,7 +462,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfig(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -536,8 +536,8 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfigAndAllocatedIPRange( t.Parallel() context := map[string]interface{}{ - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydbinstance-network-config-1"), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -619,7 +619,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -774,7 +774,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -870,7 +870,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -953,7 +953,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -1058,8 +1058,8 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -1226,7 +1226,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-south1", - "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "hour": 23, } @@ -1367,7 +1367,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-south1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -1589,7 +1589,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-south1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go index eddf0b2c77fe..9f57d3cfb9fd 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go @@ -13,7 +13,7 @@ func TestAccAlloydbInstance_secondaryInstanceUpdateMachineConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -167,7 +167,7 @@ func 
TestAccAlloydbInstance_secondaryInstanceWithReadPoolInstance(t *testing.T) context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -263,8 +263,8 @@ func TestAccAlloydbCluster_secondaryInstanceWithNetworkConfigAndAllocatedIPRange context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -356,7 +356,7 @@ func TestAccAlloydbInstance_secondaryInstanceUpdateDatabaseFlag(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -455,7 +455,7 @@ func TestAccAlloydbInstance_secondaryInstanceUpdateQueryInsightConfig(t *testing context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -557,7 +557,7 @@ func TestAccAlloydbInstance_secondaryInstanceMaximumFields(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go index ef4d7c99d674..846a0bc53f02 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go @@ -11,7 +11,7 @@ func TestAccAlloydbUser_updateRoles_BuiltIn(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -83,7 +83,7 @@ func TestAccAlloydbUser_updatePassword_BuiltIn(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } @@ -155,7 +155,7 @@ func TestAccAlloydbUser_updateRoles_IAM(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), "random_suffix": acctest.RandString(t, 10), } From 055d8c6b8013e775d5592a8c730a649a52f00683 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Fri, 30 May 2025 13:43:11 -0700 Subject: [PATCH 281/884] Update AlloyDB tests to use bootstrapped networks (#14160) --- mmv1/products/alloydb/Backup.yaml | 1 + 
mmv1/products/alloydb/Cluster.yaml | 4 ++++ .../alloydb_cluster_after_upgrade.tf.tmpl | 23 +++---------------- .../alloydb_cluster_before_upgrade.tf.tmpl | 23 +++---------------- 4 files changed, 11 insertions(+), 40 deletions(-) diff --git a/mmv1/products/alloydb/Backup.yaml b/mmv1/products/alloydb/Backup.yaml index 8d1185c5a649..de788f9de62c 100644 --- a/mmv1/products/alloydb/Backup.yaml +++ b/mmv1/products/alloydb/Backup.yaml @@ -75,6 +75,7 @@ examples: ignore_read_extra: - 'reconciling' - 'update_time' + exclude_test: true - name: 'alloydb_backup_full_test' primary_resource_id: 'default' vars: diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index 834a8aa8573b..6af6de90285c 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -72,12 +72,16 @@ examples: alloydb_cluster_name: 'alloydb-cluster' alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' + test_vars_overrides: + 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' - name: 'alloydb_cluster_after_upgrade' primary_resource_id: 'default' vars: alloydb_cluster_name: 'alloydb-cluster' alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' + test_vars_overrides: + 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' - name: 'alloydb_cluster_full' primary_resource_id: 'full' vars: diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl index 941f111db17e..672301dfc794 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl @@ -7,14 +7,13 @@ resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { cpu_count = 2 } - depends_on = [google_service_networking_connection.vpc_connection] } resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { cluster_id = 
"{{index $.Vars "alloydb_cluster_name"}}" location = "us-central1" network_config { - network = google_compute_network.default.id + network = data.google_compute_network.default.id } database_version = "POSTGRES_15" @@ -23,22 +22,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { } } -data "google_project" "project" {} - -resource "google_compute_network" "default" { +data "google_compute_network" "default" { name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_global_address" "private_ip_alloc" { - name = "{{index $.Vars "alloydb_cluster_name"}}" - address_type = "INTERNAL" - purpose = "VPC_PEERING" - prefix_length = 16 - network = google_compute_network.default.id -} - -resource "google_service_networking_connection" "vpc_connection" { - network = google_compute_network.default.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] -} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl index c9339314e80f..9cc7adf3c6aa 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl @@ -7,14 +7,13 @@ resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { cpu_count = 2 } - depends_on = [google_service_networking_connection.vpc_connection] } resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { cluster_id = "{{index $.Vars "alloydb_cluster_name"}}" location = "us-central1" network_config { - network = google_compute_network.default.id + network = data.google_compute_network.default.id } database_version = "POSTGRES_14" @@ -23,22 +22,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { } } -data "google_project" "project" {} - -resource "google_compute_network" "default" { +data 
"google_compute_network" "default" { name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_global_address" "private_ip_alloc" { - name = "{{index $.Vars "alloydb_cluster_name"}}" - address_type = "INTERNAL" - purpose = "VPC_PEERING" - prefix_length = 16 - network = google_compute_network.default.id -} - -resource "google_service_networking_connection" "vpc_connection" { - network = google_compute_network.default.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] -} +} \ No newline at end of file From 0abbd68137a417bf4dfd97390512cd7f856ec508 Mon Sep 17 00:00:00 2001 From: rlapin-pl <114071972+rlapin-pl@users.noreply.github.com> Date: Sat, 31 May 2025 00:18:37 +0200 Subject: [PATCH 282/884] User workloads service account fix (#14155) Co-authored-by: rlapin-pl --- ...composer_user_workloads_config_map_test.go | 54 +++++++++++++- ...rce_composer_user_workloads_secret_test.go | 73 ++++++++++++++++--- 2 files changed, 114 insertions(+), 13 deletions(-) diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go index aaa97b101c05..fc646087ad52 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go @@ -15,7 +15,8 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), + "random_suffix": acctest.RandString(t, 10), + "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -51,7 +52,8 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx t.Parallel() context := 
map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), + "random_suffix": acctest.RandString(t, 10), + "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -79,14 +81,30 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx func testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_basic(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%{service_account}" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "environment" { name = "tf-test-test-environment%{random_suffix}" region = "us-central1" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "config_map" { @@ -102,14 +120,30 @@ resource "google_composer_user_workloads_config_map" "config_map" { func testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_update(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%{service_account}" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "environment" { name = 
"tf-test-test-environment%{random_suffix}" region = "us-central1" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "config_map" { @@ -125,14 +159,30 @@ resource "google_composer_user_workloads_config_map" "config_map" { func testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_delete(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%{service_account}" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "environment" { name = "tf-test-test-environment%{random_suffix}" region = "us-central1" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } `, context) } diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go index aa600b16a0f6..367bf3609477 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go @@ -20,6 +20,7 @@ func TestAccComposerUserWorkloadsSecret_basic(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) secretName := fmt.Sprintf("%s-%d", 
testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -27,7 +28,7 @@ func TestAccComposerUserWorkloadsSecret_basic(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), serviceAccount), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.username"), resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), @@ -46,6 +47,7 @@ func TestAccComposerUserWorkloadsSecret_update(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -53,10 +55,10 @@ func TestAccComposerUserWorkloadsSecret_update(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), serviceAccount), }, { - Config: testAccComposerUserWorkloadsSecret_update(envName, secretName), + Config: testAccComposerUserWorkloadsSecret_update(envName, secretName, serviceAccount), Check: resource.ComposeTestCheckFunc( 
resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.email"), resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), @@ -72,6 +74,7 @@ func TestAccComposerUserWorkloadsSecret_delete(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -79,10 +82,10 @@ func TestAccComposerUserWorkloadsSecret_delete(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), serviceAccount), }, { - Config: testAccComposerUserWorkloadsSecret_delete(envName), + Config: testAccComposerUserWorkloadsSecret_delete(envName, serviceAccount), Check: resource.ComposeTestCheckFunc( testAccComposerUserWorkloadsSecretDestroyed(t), ), @@ -91,15 +94,31 @@ func TestAccComposerUserWorkloadsSecret_delete(t *testing.T) { }) } -func testAccComposerUserWorkloadsSecret_basic(envName, secretName, project, region string) string { +func testAccComposerUserWorkloadsSecret_basic(envName, secretName, project, region, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = 
"serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_secret" "test" { environment = google_composer_environment.test.name @@ -111,18 +130,34 @@ resource "google_composer_user_workloads_secret" "test" { password: base64encode("password"), } } -`, envName, secretName, project, region) +`, serviceAccount, envName, secretName, project, region) } -func testAccComposerUserWorkloadsSecret_update(envName, secretName string) string { +func testAccComposerUserWorkloadsSecret_update(envName, secretName, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_secret" "test" { environment = google_composer_environment.test.name @@ -132,20 +167,36 @@ resource "google_composer_user_workloads_secret" "test" { password: base64encode("password"), } } -`, envName, secretName) +`, serviceAccount, envName, secretName) } -func testAccComposerUserWorkloadsSecret_delete(envName string) string { +func testAccComposerUserWorkloadsSecret_delete(envName, serviceAccount string) string { return fmt.Sprintf(` +data 
"google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } -`, envName) +`, serviceAccount, envName) } func testAccComposerUserWorkloadsSecretDestroyed(t *testing.T) func(s *terraform.State) error { From 9065d358aeceba45231c81892fb6a3db69a573b2 Mon Sep 17 00:00:00 2001 From: tonybayvas Date: Fri, 30 May 2025 15:29:18 -0700 Subject: [PATCH 283/884] Create a terraform resource for the Fleet RBACRolebindingActuation Feature for supporting custom roles in scope RBACRoleBindings (#14046) Co-authored-by: Scott Suarez --- mmv1/products/gkehub2/Feature.yaml | 15 ++++ ..._feature_rbacrolebinding_actuation.tf.tmpl | 9 +++ .../gkehub_existing_feature.go.tmpl | 25 +++++++ .../gkehub_existing_feature.go.tmpl | 55 +++++++++++++++ .../resource_gke_hub_feature_test.go.tmpl | 70 +++++++++++++++++++ 5 files changed, 174 insertions(+) create mode 100644 mmv1/templates/terraform/examples/gkehub_feature_rbacrolebinding_actuation.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl diff --git a/mmv1/products/gkehub2/Feature.yaml b/mmv1/products/gkehub2/Feature.yaml index 8833d725b200..c2f1884a9f4e 100644 --- a/mmv1/products/gkehub2/Feature.yaml +++ b/mmv1/products/gkehub2/Feature.yaml @@ -51,6 +51,8 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/features/{{name}}' - 
'{{name}}' custom_code: + pre_create: templates/terraform/pre_create/gkehub_existing_feature.go.tmpl + pre_delete: templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl # Skip sweeper gen since this is a child resource. exclude_sweeper: true legacy_long_form_project: true @@ -103,6 +105,10 @@ examples: primary_resource_id: 'feature' primary_resource_name: 'fmt.Sprint("clusterupgrade")' exclude_test: true + - name: 'gkehub_feature_rbacrolebinding_actuation' + primary_resource_id: 'feature' + primary_resource_name: 'fmt.Sprint("rbacrolebindingactuation")' + exclude_test: true parameters: - name: 'location' type: String @@ -241,6 +247,15 @@ properties: description: | Amount of time to "soak" after a rollout has been finished before marking it COMPLETE. Cannot exceed 30 days. required: true + - name: 'rbacrolebindingactuation' + type: NestedObject + description: RBACRolebinding Actuation feature spec. + properties: + - name: 'allowedCustomRoles' + type: Array + description: 'The list of allowed custom roles (ClusterRoles). If a custom role is not part of this list, it cannot be used in a fleet scope RBACRoleBinding. If a custom role in this list is in use, it cannot be removed from the list until the scope RBACRolebindings using it are deleted.' + item_type: + type: String - name: 'fleetDefaultMemberConfig' type: NestedObject description: Optional. Fleet Default Membership Configuration. 
diff --git a/mmv1/templates/terraform/examples/gkehub_feature_rbacrolebinding_actuation.tf.tmpl b/mmv1/templates/terraform/examples/gkehub_feature_rbacrolebinding_actuation.tf.tmpl new file mode 100644 index 000000000000..0b953c7f4d6b --- /dev/null +++ b/mmv1/templates/terraform/examples/gkehub_feature_rbacrolebinding_actuation.tf.tmpl @@ -0,0 +1,9 @@ +resource "google_gke_hub_feature" "feature" { + name = "rbacrolebindingactuation" + location = "global" + spec { + rbacrolebindingactuation { + allowed_custom_roles = ["custom-role1","custom-role2","custom-role3"] + } + } +} diff --git a/mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl b/mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl new file mode 100644 index 000000000000..ccc95ea82c46 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl @@ -0,0 +1,25 @@ +// Check if the fleet feature already exists. Do an update if so. + +getUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/features/{{"{{"}}name{{"}}"}}") +if err != nil { + return err +} +_, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: getUrl, + UserAgent: userAgent, + Headers: headers, +}) + +if err == nil { + // Fleet feature already exists + log.Printf("[DEBUG] Fleet feature already exists %s", d.Get("name")) + id, err := tpgresource.ReplaceVars(d, config, "{{$.GetIdFormat}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + return resourceGKEHub2FeatureUpdate(d, meta) +} diff --git a/mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl b/mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl new file mode 100644 index 000000000000..be59a11edc2b --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl 
@@ -0,0 +1,55 @@ +// Special handling for the mandatory 'rbacrolebindingactuation' feature. +// Instead of deleting it, we reset it to a default state by sending a PATCH request. +if d.Get("name").(string) == "rbacrolebindingactuation" { + log.Printf("[DEBUG] Mandatory feature 'rbacrolebindingactuation' detected. Resetting instead of deleting.") + + patchUrl, err := tpgresource.ReplaceVarsForId(d, config, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/features/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + // Construct the request body to clear the desired field. + obj := map[string]interface{}{ + "spec": map[string]interface{}{ + "rbacrolebindingactuation": map[string]interface{}{ + "allowedCustomRoles": []string{}, + }, + }, + } + + // A specific updateMask is required for a PATCH request. + updateMask := "spec.rbacrolebindingactuation.allowedCustomRoles" + url, err := transport_tpg.AddQueryParams(patchUrl, map[string]string{"updateMask": updateMask}) + if err != nil { + return err + } + + log.Printf("[DEBUG] Sending PATCH to reset Feature %q: %#v", d.Id(), obj) + + // Send the raw PATCH request. + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "PATCH", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutDelete), // Use the delete timeout for this reset operation. + Headers: headers, + }) + if err != nil { + return fmt.Errorf("error resetting Feature %q: %s", d.Id(), err) + } + + // Wait for the long-running operation to complete. 
+ err = GKEHub2OperationWaitTime( + config, res, tpgresource.GetResourceNameFromSelfLink(project), "Resetting Feature", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return fmt.Errorf("error waiting to reset Feature %q: %s", d.Id(), err) + } + + log.Printf("[DEBUG] Finished resetting Feature %q", d.Id()) + return nil +} diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 88b1330b8c9b..5bbf840d3edf 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -978,6 +978,76 @@ resource "google_gke_hub_feature" "feature" { `, context) } +func TestAccGKEHubFeature_Rbacrolebindingactuation(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_Rbacrolebindingactuation(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project", "labels", "terraform_labels"}, + }, + { + Config: testAccGKEHubFeature_RbacrolebindingactuationUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func 
testAccGKEHubFeature_Rbacrolebindingactuation(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "rbacrolebindingactuation" + location = "global" + spec { + rbacrolebindingactuation { + allowed_custom_roles = ["custom-role1","custom-role2","custom-role3"] + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_RbacrolebindingactuationUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "rbacrolebindingactuation" + location = "global" + spec { + rbacrolebindingactuation { + allowed_custom_roles = ["custom-role1","custom-role2","custom-role3","custom-role4"] + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub] + project = google_project.project.project_id +} +`, context) +} + func gkeHubFeatureProjectSetupForGA(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_project" "project" { From 3295915c3a9ac0de9de3b3d3eed1b4786d87c284 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 2 Jun 2025 08:17:26 -0700 Subject: [PATCH 284/884] Clarified best practices for testing singletons (#14137) --- docs/content/best-practices/common-resource-patterns.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/content/best-practices/common-resource-patterns.md b/docs/content/best-practices/common-resource-patterns.md index 3956932a2a18..26da45202b16 100644 --- a/docs/content/best-practices/common-resource-patterns.md +++ b/docs/content/best-practices/common-resource-patterns.md @@ -14,4 +14,6 @@ Implementing resources like this may require some or all of the following: 1. 
If there _isn't_ a create endpoint, set the [create_url]({{< ref "/reference/resource/#create_url" >}}) to point to the update endpoint. 1. If there _is_ a create endpoint, add [pre-create custom code]({{< ref "/develop/custom-code/#pre_post_injection" >}}) that implements "acquire-on-create" logic. The custom code should check whether the resource already exists with a read request, and if it does, run the update logic and return early. For example, see [mmv1/templates/terraform/pre_create/firebasehosting_site.go.tmpl](https://github.com/GoogleCloudPlatform/magic-modules/blob/dc4d9755cb9288177e0996c1c3b3fa9738ebdf89/mmv1/templates/terraform/pre_create/firebasehosting_site.go.tmpl). * Note: The main disadvantage of "acquire-on-create" logic is that users will not be presented with a diff between the resource's old and new states – because from the terraform perspective, the resource is only being created. Please upvote https://github.com/hashicorp/terraform/issues/19017 to request better support for this workflow. -1. If there is no delete endpoint, set [`exclude_delete: true`]({{< ref "/reference/resource/#create_url" >}}) at the top level of the resource. \ No newline at end of file +1. If there is no delete endpoint, set [`exclude_delete: true`]({{< ref "/reference/resource/#create_url" >}}) at the top level of the resource. + +Tests for singletons can run into issues because they are modifying a shared state. To avoid the problems this can cause, ensure that the tests [create dedicated parent resources]({{< ref "/test/test#create-test-projects" >}}) instead of modifying the default test environment. 
If there need to be multiple test cases, make sure they either have individual parent resources, or that they run serially, like [TestAccAccessContextManager](https://github.com/hashicorp/terraform-provider-google-beta/blob/88fa0756f2ce116765edd4c1551680d9029621f6/google-beta/services/accesscontextmanager/resource_access_context_manager_access_policy_test.go#L31-L33). From f566efc8a124c08d8a8948d09c169e96c19ae7cb Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 2 Jun 2025 15:04:51 -0400 Subject: [PATCH 285/884] New changelog templates (#14087) --- .ci/changelog2.tmpl | 50 ++++++++++++++++++++++++++++++++++++++++++ .ci/release-note2.tmpl | 3 +++ 2 files changed, 53 insertions(+) create mode 100644 .ci/changelog2.tmpl create mode 100644 .ci/release-note2.tmpl diff --git a/.ci/changelog2.tmpl b/.ci/changelog2.tmpl new file mode 100644 index 000000000000..cea067f2a8d9 --- /dev/null +++ b/.ci/changelog2.tmpl @@ -0,0 +1,50 @@ +{{- if .NotesByType.unknown -}} +UNKNOWN CHANGELOG TYPE: +{{range .NotesByType.unknown -}} +* {{ template "note" .}} +{{ end -}} +{{- end -}} + +{{- if .NotesByType.note -}} +NOTES: +{{range .NotesByType.note -}} +* {{ template "note" .}} +{{ end -}} +{{- end -}} + +{{- if .NotesByType.deprecation -}} +DEPRECATIONS: +{{range .NotesByType.deprecation -}} +* {{ template "note" .}} +{{ end -}} +{{- end -}} + +{{- if index .NotesByType "breaking-change" -}} +BREAKING CHANGES: +{{range index .NotesByType "breaking-change" -}} +* {{ template "note" .}} +{{ end -}} +{{- end -}} + +{{- $features := combineTypes .NotesByType.feature (index .NotesByType "new-resource" ) (index .NotesByType "new-datasource") (index .NotesByType "new-data-source") (index .NotesByType "new-function" ) (index .NotesByType "new-ephemeral" ) -}} +{{- if $features }} +FEATURES: +{{range $features | sort -}} +* {{ template "note" . 
}} +{{ end -}} +{{- end -}} + +{{- $improvements := combineTypes .NotesByType.improvement .NotesByType.enhancement -}} +{{- if $improvements }} +IMPROVEMENTS: +{{range $improvements | sort -}} +* {{ template "note" . }} +{{ end -}} +{{- end -}} + +{{- if .NotesByType.bug }} +BUG FIXES: +{{range .NotesByType.bug -}} +* {{ template "note" . }} +{{ end -}} +{{- end -}} \ No newline at end of file diff --git a/.ci/release-note2.tmpl b/.ci/release-note2.tmpl new file mode 100644 index 000000000000..e1506f545b3d --- /dev/null +++ b/.ci/release-note2.tmpl @@ -0,0 +1,3 @@ +{{- define "note" -}} +{{if eq "new-resource" .Type}}**New Resource:** {{else if eq "new-datasource" .Type}}**New Data Source:** {{else if eq "new-function" .Type}}**New Function:** {{else if eq "new-ephemeral" .Type}}**New Ephemeral Resource:** {{ end }}{{.Body}} ([#{{- .Issue -}}]) +{{- end -}} \ No newline at end of file From 2245e57282e7f70ed0d8a871e014cd7a41088182 Mon Sep 17 00:00:00 2001 From: David Xia Date: Mon, 2 Jun 2025 15:24:25 -0400 Subject: [PATCH 286/884] doc: state possible `total_egress_bandwidth_tier` values (#14173) Signed-off-by: David Xia Co-authored-by: Scott Suarez --- .../services/container/resource_container_node_pool.go.tmpl | 2 +- .../terraform/website/docs/r/container_node_pool.html.markdown | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index ccda8717798e..5f2a397de504 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -470,7 +470,7 @@ var schemaNodePool = map[string]*schema.Schema{ "total_egress_bandwidth_tier": { Type: schema.TypeString, Required: true, - Description: `Specifies the total network bandwidth tier for the NodePool.`, + Description: 
`Specifies the total network bandwidth tier for the NodePool. [Valid values](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools#NodePool.Tier) include: "TIER_1" and "TIER_UNSPECIFIED".`, }, }, }, diff --git a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown index 9a2cc5411523..ea7b24126a27 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown @@ -240,7 +240,8 @@ cluster. The `network_performance_config` block supports: -* `total_egress_bandwidth_tier` (Required) - Specifies the total network bandwidth tier for the NodePool. +* `total_egress_bandwidth_tier` (Required) - Specifies the total network bandwidth tier for the NodePool. [Valid values](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools#NodePool.Tier) include: "TIER_1" and "TIER_UNSPECIFIED". 
+* ``` The `pod_cidr_overprovision_config` block supports: From 94eb252f527ddd75abe1369359596be55e350dee Mon Sep 17 00:00:00 2001 From: Tommy Reddad Date: Mon, 2 Jun 2025 14:06:31 -0600 Subject: [PATCH 287/884] NetworkAttachment is now optional in google_eventarc_pipeline (#14136) --- mmv1/products/eventarc/Pipeline.yaml | 11 +++---- ...rc_pipeline_with_topic_destination.tf.tmpl | 3 -- ...pipeline_with_workflow_destination.tf.tmpl | 3 -- .../resource_eventarc_message_bus_test.go | 33 ++++--------------- .../resource_eventarc_pipeline_test.go | 16 +++------ 5 files changed, 14 insertions(+), 52 deletions(-) diff --git a/mmv1/products/eventarc/Pipeline.yaml b/mmv1/products/eventarc/Pipeline.yaml index 4215498bbe1b..fb2d8c34646c 100644 --- a/mmv1/products/eventarc/Pipeline.yaml +++ b/mmv1/products/eventarc/Pipeline.yaml @@ -41,11 +41,8 @@ examples: vars: pipeline_name: some-pipeline topic_name: some-topic - network_attachment_name: some-network-attachment test_env_vars: project_id: 'PROJECT_NAME' - test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_http_destination primary_resource_id: primary vars: @@ -60,11 +57,8 @@ examples: vars: pipeline_name: some-pipeline workflow_name: some-workflow - network_attachment_name: some-network-attachment test_env_vars: project_id: 'PROJECT_NAME' - test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_oidc_and_json_format primary_resource_id: primary vars: @@ -316,9 +310,12 @@ properties: type: String description: |- Name of the 
NetworkAttachment that allows access to the consumer VPC. + Format: `projects/{PROJECT_ID}/regions/{REGION}/networkAttachments/{NETWORK_ATTACHMENT_NAME}` - required: true + + Required for HTTP endpoint destinations. Must not be specified for + Workflows, MessageBus, or Topic destinations. - name: httpEndpoint type: NestedObject description: Represents a HTTP endpoint destination. diff --git a/mmv1/templates/terraform/examples/eventarc_pipeline_with_topic_destination.tf.tmpl b/mmv1/templates/terraform/examples/eventarc_pipeline_with_topic_destination.tf.tmpl index d67d5bf7e1ed..d81767e8d365 100644 --- a/mmv1/templates/terraform/examples/eventarc_pipeline_with_topic_destination.tf.tmpl +++ b/mmv1/templates/terraform/examples/eventarc_pipeline_with_topic_destination.tf.tmpl @@ -7,9 +7,6 @@ resource "google_eventarc_pipeline" "{{$.PrimaryResourceId}}" { pipeline_id = "{{index $.Vars "pipeline_name"}}" destinations { topic = google_pubsub_topic.topic.id - network_config { - network_attachment = "projects/{{index $.TestEnvVars "project_id"}}/regions/us-central1/networkAttachments/{{index $.Vars "network_attachment_name"}}" - } } labels = { test_label = "test-eventarc-label" diff --git a/mmv1/templates/terraform/examples/eventarc_pipeline_with_workflow_destination.tf.tmpl b/mmv1/templates/terraform/examples/eventarc_pipeline_with_workflow_destination.tf.tmpl index 574b6cea16fb..62f776559d45 100644 --- a/mmv1/templates/terraform/examples/eventarc_pipeline_with_workflow_destination.tf.tmpl +++ b/mmv1/templates/terraform/examples/eventarc_pipeline_with_workflow_destination.tf.tmpl @@ -36,8 +36,5 @@ resource "google_eventarc_pipeline" "{{$.PrimaryResourceId}}" { pipeline_id = "{{index $.Vars "pipeline_name"}}" destinations { workflow = google_workflows_workflow.workflow.id - network_config { - network_attachment = "projects/{{index $.TestEnvVars "project_id"}}/regions/us-central1/networkAttachments/{{index $.Vars "network_attachment_name"}}" - } } } diff --git 
a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go index d5f747428849..0b0eb8bcde18 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go @@ -413,10 +413,8 @@ resource "google_eventarc_message_bus" "message_bus" { // concerned with testing the Pipeline resource, which depends on a singleton MessageBus. func testAccEventarcMessageBus_pipeline(t *testing.T) { context := map[string]interface{}{ - "project_id": envvar.GetTestProjectFromEnv(), - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-messagebus-network"))), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -444,9 +442,6 @@ resource "google_eventarc_pipeline" "primary" { pipeline_id = "tf-test-some-pipeline%{random_suffix}" destinations { message_bus = google_eventarc_message_bus.primary.id - network_config { - network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" - } } } @@ -461,10 +456,8 @@ resource "google_eventarc_message_bus" "primary" { // concerned with testing the Enrollment resource, which depends on a singleton MessageBus. 
func testAccEventarcMessageBus_enrollment(t *testing.T) { context := map[string]interface{}{ - "project_id": envvar.GetTestProjectFromEnv(), - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-messagebus-network"))), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -511,9 +504,6 @@ resource "google_eventarc_pipeline" "pipeline" { pipeline_id = "tf-test-pipeline%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_topic.id - network_config { - network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" - } } } @@ -528,10 +518,8 @@ resource "google_eventarc_message_bus" "message_bus" { // concerned with testing the Enrollment resource, which depends on a singleton MessageBus. 
func testAccEventarcMessageBus_updateEnrollment(t *testing.T) { context := map[string]interface{}{ - "project_id": envvar.GetTestProjectFromEnv(), - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-messagebus-network"))), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -610,9 +598,6 @@ resource "google_eventarc_pipeline" "pipeline_update" { pipeline_id = "tf-test-pipeline2%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_update_topic.id - network_config { - network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" - } } } @@ -625,9 +610,6 @@ resource "google_eventarc_pipeline" "pipeline" { pipeline_id = "tf-test-pipeline%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_topic.id - network_config { - network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" - } } } @@ -657,9 +639,6 @@ resource "google_eventarc_pipeline" "pipeline_update" { pipeline_id = "tf-test-pipeline2%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_update_topic.id - network_config { - network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" - } } } diff --git a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go index 76e623cb56fd..31031d833559 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go @@ 
-14,12 +14,10 @@ func TestAccEventarcPipeline_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "project_id": envvar.GetTestProjectFromEnv(), - "service_account": envvar.GetTestServiceAccountFromEnv(t), - "key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name, - "key2_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key2").CryptoKey.Name, - "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network"))), - "random_suffix": acctest.RandString(t, 10), + "service_account": envvar.GetTestServiceAccountFromEnv(t), + "key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name, + "key2_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key2").CryptoKey.Name, + "random_suffix": acctest.RandString(t, 10), } acctest.BootstrapIamMembers(t, []acctest.IamMember{ { @@ -90,9 +88,6 @@ resource "google_eventarc_pipeline" "primary" { } destinations { topic = google_pubsub_topic.topic_update.id - network_config { - network_attachment = "projects/%{project_id}/regions/us-central1/networkAttachments/%{network_attachment_name}" - } authentication_config { google_oidc { service_account = "%{service_account}" @@ -153,9 +148,6 @@ resource "google_eventarc_pipeline" "primary" { pipeline_id = "tf-test-some-pipeline%{random_suffix}" destinations { topic = google_pubsub_topic.topic_update.id - network_config { - network_attachment = "projects/%{project_id}/regions/us-central1/networkAttachments/%{network_attachment_name}" - } } } `, context) 
From 59b199ee089c783ddd10f04a5490a2592ecf889e Mon Sep 17 00:00:00 2001 From: sachin purohit Date: Mon, 2 Jun 2025 14:41:38 -0700 Subject: [PATCH 288/884] feat: added 'data_source_id' for update requests through bigquery_data_transfer_config (#14164) --- .../pre_update/bigquerydatatransfer_config.tmpl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl b/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl index e51ba35af4c3..7c4fb0f8a0e9 100644 --- a/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl +++ b/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl @@ -50,3 +50,12 @@ url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": str if err != nil { return err } + +// Primarily added to fix b/421406404 +// This field is immutable, so it should be safe to set it. +dataSourceIdProp, err := expandBigqueryDataTransferConfigDataSourceId(d.Get("data_source_id"), d, config) +if err != nil { + return err +} else if v, ok := d.GetOkExists("data_source_id"); !tpgresource.IsEmptyValue(reflect.ValueOf(dataSourceIdProp)) && (ok || !reflect.DeepEqual(v, dataSourceIdProp)) { + obj["dataSourceId"] = dataSourceIdProp +} From 772f0638763cead1861521d3c308ee37acc3864e Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Mon, 2 Jun 2025 17:48:18 -0700 Subject: [PATCH 289/884] colab: fixed perma-diff in `google_colab_runtime_template` caused by the API returning a non-null default value. 
(#14179) --- mmv1/products/colab/RuntimeTemplate.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index 92d504453b50..cd7c69f1cd7a 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -186,6 +186,7 @@ properties: description: 'The Cloud KMS encryption key (customer-managed encryption key) used to protect the runtime.' - name: softwareConfig type: NestedObject + default_from_api: true description: 'The notebook software configuration of the notebook runtime.' properties: - name: 'env' From 7d78c4f8e42b990c942be7d692bb2b5e80ff6fb4 Mon Sep 17 00:00:00 2001 From: Jack Weinbender Date: Tue, 3 Jun 2025 09:58:35 -0500 Subject: [PATCH 290/884] container: Allow `pod_ipv4_cidr_block` to accept netmask for `container_node_pool` (#13993) --- .../services/container/resource_container_node_pool.go.tmpl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index 5f2a397de504..2beb4211130e 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -15,7 +15,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-provider-google/google/verify" {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/container/v1" @@ -390,7 +389,7 @@ var schemaNodePool = map[string]*schema.Schema{ Optional: true, ForceNew: true, Computed: true, - ValidateFunc: verify.ValidateIpCidrRange, + DiffSuppressFunc: tpgresource.CidrOrSizeDiffSuppress, Description: `The IP address range for pod IPs in this node pool. 
Only applicable if create_pod_range is true. Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14) to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14) to pick a specific range to use.`, }, "additional_node_network_configs": { From 9b9fcb49b0f0449774fb6a375974d0fe253bdaa0 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Tue, 3 Jun 2025 09:21:15 -0700 Subject: [PATCH 291/884] Add regex to handle image link in universes (#14154) --- mmv1/templates/terraform/constants/disk.tmpl | 25 +++++++++++++------ .../terraform/services/compute/image.go | 12 +++++---- 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/mmv1/templates/terraform/constants/disk.tmpl b/mmv1/templates/terraform/constants/disk.tmpl index e73ff28c98bb..36a4041ce642 100644 --- a/mmv1/templates/terraform/constants/disk.tmpl +++ b/mmv1/templates/terraform/constants/disk.tmpl @@ -43,6 +43,20 @@ func IsDiskShrinkage(_ context.Context, old, new, _ interface{}) bool { return new.(int) < old.(int) } +func matchImageLink(old string) (string, string, bool) { + // 'old' is read from the API. + // In GCP It has the format 'https://www.googleapis.com/compute/v1/projects/(%s)/global/images/(%s)' + matches := resolveImageLink.FindStringSubmatch(old) + if matches == nil { + // In alternate universes, it has the format https://compute.%s/compute/[a-z0-9]+/projects/(%s)/global/images/(%s) + matches = resolveImageUniverseLink.FindStringSubmatch(old) + if matches == nil { + return "", "", false + } + } + return matches[1], matches[2], true +} + // We cannot suppress the diff for the case when family name is not part of the image name since we can't // make a network call in a DiffSuppressFunc. 
func DiskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { @@ -52,16 +66,11 @@ func DiskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { // you are probably looking for the diskImageFamilyEquals function and its subfunctions. // In order to keep this maintainable, we need to ensure that the positive and negative examples // in resource_compute_disk_test.go are as complete as possible. - - // 'old' is read from the API. - // It always has the format 'https://www.googleapis.com/compute/v1/projects/(%s)/global/images/(%s)' - matches := resolveImageLink.FindStringSubmatch(old) - if matches == nil { - // Image read from the API doesn't have the expected format. In practice, it should never happen + + oldProject, oldName, matched := matchImageLink(old) + if matched == false { return false } - oldProject := matches[1] - oldName := matches[2] // Partial or full self link family if resolveImageProjectFamily.MatchString(new) { diff --git a/mmv1/third_party/terraform/services/compute/image.go b/mmv1/third_party/terraform/services/compute/image.go index bc72330267f6..c51547f83f35 100644 --- a/mmv1/third_party/terraform/services/compute/image.go +++ b/mmv1/third_party/terraform/services/compute/image.go @@ -13,8 +13,9 @@ import ( ) const ( - resolveImageFamilyRegex = "[-_a-zA-Z0-9]*" - resolveImageImageRegex = "[-_a-zA-Z0-9]*" + resolveImageFamilyRegex = "[-_a-zA-Z0-9]*" + resolveImageImageRegex = "[-_a-zA-Z0-9]*" + resolveImageUniverseRegex = "[-_a-zA-Z0-9.]*" ) var ( @@ -28,6 +29,7 @@ var ( resolveImageFamily = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageFamilyRegex)) resolveImageImage = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageImageRegex)) resolveImageLink = regexp.MustCompile(fmt.Sprintf("^https://www.googleapis.com/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", verify.ProjectRegex, resolveImageImageRegex)) + resolveImageUniverseLink = 
regexp.MustCompile(fmt.Sprintf("^https://compute.%s/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", resolveImageUniverseRegex, verify.ProjectRegex, resolveImageImageRegex)) windowsSqlImage = regexp.MustCompile("^sql-(?:server-)?([0-9]{4})-([a-z]+)-windows-(?:server-)?([0-9]{4})(?:-r([0-9]+))?-dc-v[0-9]+$") canonicalUbuntuLtsImage = regexp.MustCompile("^ubuntu-(minimal-)?([0-9]+)(?:.*(arm64|amd64))?.*$") @@ -106,12 +108,12 @@ func ResolveImage(c *transport_tpg.Config, project, name, userAgent string) (str break } } - if c.UniverseDomain != "" && c.UniverseDomain != "googleapis.com" { - resolveImageLink = regexp.MustCompile(fmt.Sprintf("^https://compute.%s/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", c.UniverseDomain, verify.ProjectRegex, resolveImageImageRegex)) - } + switch { case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz return name, nil + case resolveImageUniverseLink.MatchString(name): // https://compute.xyz/compute/[a-z0-9]+/projects/xyz/global/images/xyz + return name, nil case resolveImageProjectImage.MatchString(name): // projects/xyz/global/images/xyz res := resolveImageProjectImage.FindStringSubmatch(name) if err := sanityTestRegexMatches(2, res, "project image", name); err != nil { From 8436646da36e2ea1ca37df84395651059edc65f2 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Tue, 3 Jun 2025 09:32:49 -0700 Subject: [PATCH 292/884] clean up GNUmake to remove unnecessary output (#14163) --- GNUmakefile | 2 -- 1 file changed, 2 deletions(-) diff --git a/GNUmakefile b/GNUmakefile index c29074d563bf..76ea7e731f17 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -67,8 +67,6 @@ terraform build provider: validate_environment clean-provider mmv1 tpgtools mmv1: @echo "Executing mmv1 build for $(OUTPUT_PATH)"; - # Chaining these with "&&" is critical so this will exit non-0 if the first - # command fails, since we're not forcing bash and errexit / pipefail here. 
@cd mmv1;\ if [ "$(VERSION)" = "ga" ]; then \ go run . --output $(OUTPUT_PATH) --version ga --no-docs $(mmv1_compile) \ From 6d7ab827e1611f696886806fb79b694d319833c8 Mon Sep 17 00:00:00 2001 From: Balanagu Harsha Vardhan Date: Tue, 3 Jun 2025 22:13:56 +0530 Subject: [PATCH 293/884] Add Curation resource of API hub (#14174) --- mmv1/products/apihub/Curation.yaml | 165 ++++++++++++++++++ .../examples/apihub_curation_basic.tf.tmpl | 17 ++ .../apihub/resource_apihub_curation_test.go | 87 +++++++++ 3 files changed, 269 insertions(+) create mode 100644 mmv1/products/apihub/Curation.yaml create mode 100644 mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go diff --git a/mmv1/products/apihub/Curation.yaml b/mmv1/products/apihub/Curation.yaml new file mode 100644 index 000000000000..be9e5c5ca9e2 --- /dev/null +++ b/mmv1/products/apihub/Curation.yaml @@ -0,0 +1,165 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: Curation +description: Description +base_url: projects/{{project}}/locations/{{location}}/curations +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/curations/{{curation_id}} +create_url: projects/{{project}}/locations/{{location}}/curations?curationId={{curation_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/curations/{{curation_id}} +import_format: + - projects/{{project}}/locations/{{location}}/curations/{{curation_id}} +autogen_async: true +autogen_status: Q3VyYXRpb24= +examples: + - name: apihub_curation_basic + primary_resource_id: apihub_curation_basic + vars: + curation_id: 'test' + # API hub instance needs to be created before this, and end to end creation of that resource using Terraform is not yet supported. + exclude_test: true + external_providers: ["time"] +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: curationId + type: String + description: |- + The ID to use for the curation resource, which will become the final + component of the curations's resource name. This field is optional. + + * If provided, the same will be used. The service will throw an error if + the specified ID is already used by another curation resource in the API + hub. + * If not provided, a system generated ID will be used. + + This value should be 4-500 characters, and valid characters + are /a-z[0-9]-_/. + immutable: true + url_param_only: true + required: true +properties: + - name: name + type: String + description: |- + Identifier. The name of the curation. + + Format: + `projects/{project}/locations/{location}/curations/{curation}` + output: true + - name: displayName + type: String + description: The display name of the curation. 
+ required: true + - name: endpoint + type: NestedObject + description: |- + The endpoint to be triggered for curation. + The endpoint will be invoked with a request payload containing + ApiMetadata. + Response should contain curated data in the form of + ApiMetadata. + required: true + immutable: true + properties: + - name: applicationIntegrationEndpointDetails + type: NestedObject + description: |- + The details of the Application Integration endpoint to be triggered for + curation. + required: true + immutable: true + properties: + - name: triggerId + type: String + description: The API trigger ID of the Application Integration workflow. + required: true + immutable: true + - name: uri + type: String + description: |- + The endpoint URI should be a valid REST URI for triggering an Application + Integration. + Format: + `https://integrations.googleapis.com/v1/{name=projects/*/locations/*/integrations/*}:execute` + or + `https://{location}-integrations.googleapis.com/v1/{name=projects/*/locations/*/integrations/*}:execute` + required: true + immutable: true + - name: lastExecutionState + type: String + description: |- + The last execution state of the curation. + Possible values: + LAST_EXECUTION_STATE_UNSPECIFIED + SUCCEEDED + FAILED + output: true + - name: lastExecutionErrorMessage + type: String + description: |- + Error message describing the failure, if any, during the last execution of + the curation. + output: true + - name: description + type: String + description: The description of the curation. + - name: pluginInstanceActions + type: Array + description: |- + The plugin instances and associated actions that are using the curation. + Note: A particular curation could be used by multiple plugin instances or + multiple actions in a plugin instance. + output: true + item_type: + type: NestedObject + properties: + - name: pluginInstance + type: String + description: |- + Plugin instance that is using the curation. 
+ Format is + `projects/{project}/locations/{locati on}/plugins/{plugin}/instances/{instance}` + output: true + - name: actionId + type: String + description: |- + The action ID that is using the curation. + This should map to one of the action IDs specified + in action configs in the plugin. + output: true + - name: lastExecutionErrorCode + type: String + description: |- + The error code of the last execution of the curation. The error code is + populated only when the last execution state is failed. + Possible values: + ERROR_CODE_UNSPECIFIED + INTERNAL_ERROR + UNAUTHORIZED + output: true + - name: createTime + type: String + description: The time at which the curation was created. + output: true + - name: updateTime + type: String + description: The time at which the curation was last updated. + output: true diff --git a/mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl b/mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl new file mode 100644 index 000000000000..00ce0ec9d696 --- /dev/null +++ b/mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_apihub_curation" "{{$.PrimaryResourceId}}" { + location = "us-central1" + curation_id = "{{index $.Vars "curation_id"}}" + project = "apihub-terraform" + display_name = "Test Curation" + description = "This is a sample curation resource managed by Terraform." 
+ endpoint { + application_integration_endpoint_details { + trigger_id = "api_trigger/curation_API_1" + uri = "https://integrations.googleapis.com/v1/projects/1082615593856/locations/us-central1/integrations/curation:execute" + } + } + +} + + + diff --git a/mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go b/mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go new file mode 100644 index 000000000000..3c8590b3bcae --- /dev/null +++ b/mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go @@ -0,0 +1,87 @@ +package apihub_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccApihubCuration_apihubCurationBasic_Update(t *testing.T) { + // This is added for reference, but the test needs to be skipped as it needs API hub instance as a prerequisite + // But the support for that resources is not yet complete. 
+ t.Skip() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccApihubCuration_apihubCuration_basic(context), + }, + { + ResourceName: "google_apihub_curation.apihub_curation_basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"curation_id", "location"}, + }, + { + Config: testAccApihubCuration_apihubCuration_update(context), + }, + { + ResourceName: "google_apihub_curation.apihub_curation_basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"curation_id", "location"}, + }, + }, + }) +} + +func testAccApihubCuration_apihubCuration_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_apihub_curation" "apihub_curation_basic" { + location = "us-central1" + curation_id = "test%{random_suffix}" + display_name = "Test Curation" + description = "This is a sample curation resource managed by Terraform." + endpoint { + application_integration_endpoint_details { + trigger_id = "api_trigger/curation_API_1" + uri = "https://integrations.googleapis.com/v1/projects/1082615593856/locations/us-central1/integrations/curation:execute" + } + } + +} + + +`, context) +} + +func testAccApihubCuration_apihubCuration_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_apihub_curation" "apihub_curation_basic" { + location = "us-central1" + curation_id = "test%{random_suffix}" + display_name = "Test Curation Updated" + description = "This is a sample updated curation resource managed by Terraform." 
+ endpoint { + application_integration_endpoint_details { + trigger_id = "api_trigger/curation_API_1" + uri = "https://integrations.googleapis.com/v1/projects/1082615593856/locations/us-central1/integrations/curation:execute" + } + } + +} + + +`, context) +} From b90a3b8d15bb37421bb619315fcaa5631dfc1b8b Mon Sep 17 00:00:00 2001 From: David Xia Date: Tue, 3 Jun 2025 15:04:58 -0400 Subject: [PATCH 294/884] doc: update TPU topology link (#14171) Signed-off-by: David Xia --- .../services/container/resource_container_node_pool.go.tmpl | 2 +- .../terraform/website/docs/r/container_node_pool.html.markdown | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index 2beb4211130e..dcbfb2945708 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -200,7 +200,7 @@ var schemaNodePool = map[string]*schema.Schema{ "tpu_topology": { Type: schema.TypeString, Optional: true, - Description: `TPU placement topology for pod slice node pool. https://cloud.google.com/tpu/docs/types-topologies#tpu_topologies`, + Description: `The TPU topology like "2x4" or "2x2x2". https://cloud.google.com/kubernetes-engine/docs/concepts/plan-tpus#topology`, }, }, }, diff --git a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown index ea7b24126a27..e4048200a472 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown @@ -289,7 +289,7 @@ cluster. The resource policy must be in the same project and region as the node pool. If not found, InvalidArgument error is returned. 
-* `tpu_topology` - (Optional) The [TPU placement topology](https://cloud.google.com/tpu/docs/types-topologies#tpu_topologies) for pod slice node pool. +* `tpu_topology` - (Optional) The [TPU topology](https://cloud.google.com/kubernetes-engine/docs/concepts/plan-tpus#topology) like `"2x4"` or `"2x2x2"`. The `queued_provisioning` block supports: From 09739e55b2ec48288088b875b199fcde0525f93a Mon Sep 17 00:00:00 2001 From: stevenyang72 Date: Tue, 3 Jun 2025 13:22:10 -0700 Subject: [PATCH 295/884] Enforce id validation for WorkloadIdentityPool Managed Identity (#14126) --- .../WorkloadIdentityPoolManagedIdentity.yaml | 2 + .../WorkloadIdentityPoolNamespace.yaml | 4 +- ...tity_pool_managed_identity_id_test.go.tmpl | 38 +++++++++++++++++++ ...kload_identity_pool_namespace_test.go.tmpl | 6 +-- 4 files changed, 45 insertions(+), 5 deletions(-) create mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml index 2c2b51f085fb..e3299cb08da4 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml @@ -84,6 +84,8 @@ parameters: The prefix `gcp-` will be reserved for future uses. 
+ validation: + function: 'ValidateWorkloadIdentityPoolManagedIdentityId' properties: - name: 'name' type: String diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml index fc89c4f2b9c8..45bf3ac09eb2 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml @@ -39,12 +39,12 @@ examples: primary_resource_id: 'example' vars: workload_identity_pool_id: 'example-pool' - workload_identity_pool_namespace_id: 'example-nmspc' + workload_identity_pool_namespace_id: 'example-namespace' - name: 'iam_workload_identity_pool_namespace_full' primary_resource_id: 'example' vars: workload_identity_pool_id: 'example-pool' - workload_identity_pool_namespace_id: 'example-nmspc' + workload_identity_pool_namespace_id: 'example-namespace' parameters: - name: 'workload_identity_pool_id' type: String diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl new file mode 100644 index 000000000000..c7f8beb10f52 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl @@ -0,0 +1,38 @@ +{{- if ne $.TargetVersionName "ga" -}} +package iambeta_test + +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iambeta" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateWorkloadIdentityPoolManagedIdentityId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "basic", Value: "foobar"}, + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foos"}, + {TestName: "long", Value: "12345678901234567890123456789012"}, + {TestName: "has a hyphen", Value: "foo-bar"}, + 
+ // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: "too short", Value: "f", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 64), ExpectError: true}, + {TestName: "starts with non-alphanumeric", Value: "-foobar", ExpectError: true}, + {TestName: "ends with non-alphanumeric", Value: "foobar-", ExpectError: true}, + } + + es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolManagedIdentityId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkloadIdentityPoolManagedIdentity names: %v", es) + } +} +{{- end -}} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl index c81bda7329c1..9c5b5e4d069a 100644 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl @@ -101,7 +101,7 @@ resource "google_iam_workload_identity_pool_namespace" "example" { provider = google-beta workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-nmspc%{random_suffix}" + workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" } `, context) } @@ -119,7 +119,7 @@ resource "google_iam_workload_identity_pool_namespace" "example" { provider = google-beta workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = 
"tf-test-example-nmspc%{random_suffix}" + workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" description = "Example Namespace in a Workload Identity Pool" disabled = true } @@ -139,7 +139,7 @@ resource "google_iam_workload_identity_pool_namespace" "example" { provider = google-beta workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-nmspc%{random_suffix}" + workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" description = "Updated Namespace in a Workload Identity Pool" disabled = false } From ed442e27493a4a75f4dc712ac4c30efe7033c876 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 4 Jun 2025 00:26:00 +0200 Subject: [PATCH 296/884] feat: added `google_compute_snapshot_settings` resource (#14086) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/compute/SnapshotSettings.yaml | 92 ++++++++++++++ .../compute_snapshot_settings.go.tmpl | 7 ++ ...apshot_settings_specific_locations.tf.tmpl | 9 ++ ...resource_compute_snapshot_settings_test.go | 116 ++++++++++++++++++ 4 files changed, 224 insertions(+) create mode 100644 mmv1/products/compute/SnapshotSettings.yaml create mode 100644 mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl create mode 100644 mmv1/templates/terraform/examples/snapshot_settings_specific_locations.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go diff --git a/mmv1/products/compute/SnapshotSettings.yaml b/mmv1/products/compute/SnapshotSettings.yaml new file mode 100644 index 000000000000..93c194dd8e98 --- /dev/null +++ b/mmv1/products/compute/SnapshotSettings.yaml @@ -0,0 +1,92 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'SnapshotSettings' +kind: 'compute#snapshotSettings' +description: | + Updates your project's snapshot settings and sets a new default storage location for snapshots. +references: + guides: + 'Official Documentation': 'https://cloud.google.com/compute/docs/disks/snapshot-settings#snapshot_settings_how_to_update' + api: 'https://cloud.google.com/compute/docs/reference/rest/v1/snapshotSettings' +base_url: 'projects/{{project}}/global/snapshotSettings' +update_url: 'projects/{{project}}/global/snapshotSettings' +import_format: + - 'projects/{{project}}/global/snapshotSettings/' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +create_verb: 'PATCH' +update_verb: 'PATCH' +custom_code: + encoder: 'templates/terraform/encoders/compute_snapshot_settings.go.tmpl' +autogen_async: true +async: + actions: ['create', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + result: + resource_inside_response: false +# there is only a GET and PATCH endpoint +exclude_delete: true +examples: + - name: 'snapshot_settings_specific_locations' + exclude_test: true + primary_resource_id: 'tf-test-snapshot-settings' + vars: + policy: 'SPECIFIC_LOCATIONS' +parameters: +properties: + - name: 'storageLocation' + type: NestedObject + description: | + Policy of which storage location is going to be resolved, and additional data + that particularizes how the policy is going to be carried out + required: true + properties: + - name: 'policy' + type: Enum + 
description: | + The chosen location policy + required: true + enum_values: + - 'NEAREST_MULTI_REGION' + - 'LOCAL_REGION' + - 'SPECIFIC_LOCATIONS' + - name: 'locations' + type: Map + description: | + When the policy is SPECIFIC_LOCATIONS, snapshots will be stored in the + locations listed in this field. Keys are Cloud Storage bucket locations. + Only one location can be specified. + key_name: 'location' + key_description: | + Name of the location. It should be one of the Cloud Storage buckets. + Only one location can be specified. + value_type: + name: location + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the location. It should be one of the Cloud Storage buckets. + Only one location can be specified. (should match location) + required: true diff --git a/mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl b/mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl new file mode 100644 index 000000000000..66c120878064 --- /dev/null +++ b/mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl @@ -0,0 +1,7 @@ +if storageLocation, ok := obj["storageLocation"].(map[string]interface{}); ok { + if _, ok := storageLocation["locations"]; !ok { + storageLocation["locations"] = nil + } +} + +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/snapshot_settings_specific_locations.tf.tmpl b/mmv1/templates/terraform/examples/snapshot_settings_specific_locations.tf.tmpl new file mode 100644 index 000000000000..308050c66e74 --- /dev/null +++ b/mmv1/templates/terraform/examples/snapshot_settings_specific_locations.tf.tmpl @@ -0,0 +1,9 @@ +resource "google_compute_snapshot_settings" "{{$.PrimaryResourceId}}" { + storage_location { + policy = "SPECIFIC_LOCATIONS" + locations { + name = "us-central1" + location = "us-central1" + } + } +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go 
b/mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go new file mode 100644 index 000000000000..93c9c5ca4e15 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go @@ -0,0 +1,116 @@ +package compute_test + +import ( + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccComputeSnapshotSettings_snapshotSettings_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccComputeSnapshotSettings_snapshotSettings_basic(context), + }, + { + ResourceName: "google_compute_snapshot_settings.tf_test_snapshot_settings", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeSnapshotSettings_snapshotSettings_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_snapshot_settings.tf_test_snapshot_settings", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_snapshot_settings.tf_test_snapshot_settings", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeSnapshotSettings_snapshotSettings_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = 
"tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" +} + +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.compute] +} + + +resource "google_compute_snapshot_settings" "tf_test_snapshot_settings" { + project = google_project.project.project_id + storage_location { + policy = "SPECIFIC_LOCATIONS" + locations { + name = "us-central1" + location = "us-central1" + } + } + depends_on = [time_sleep.wait_120_seconds] +} +`, context) +} + +func testAccComputeSnapshotSettings_snapshotSettings_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" +} +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.compute] +} + +resource "google_compute_snapshot_settings" "tf_test_snapshot_settings" { + project = google_project.project.project_id + storage_location { + policy = "NEAREST_MULTI_REGION" + } + depends_on = [time_sleep.wait_120_seconds] +} +`, context) +} From 57e9e1bead9cda80e9fb287e84aa21bcf3e8504c Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 3 Jun 2025 15:37:15 -0700 Subject: [PATCH 297/884] tgc-revival: implement testing logic (#14156) --- .../tgc_next/test/assert_test_files.go | 281 ++++++++++++++++++ mmv1/third_party/tgc_next/test/setup.go | 153 ++++++++++ 2 files changed, 434 insertions(+) create mode 100644 
mmv1/third_party/tgc_next/test/assert_test_files.go diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go new file mode 100644 index 000000000000..7c4d67a83aaa --- /dev/null +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -0,0 +1,281 @@ +package test + +import ( + "context" + "fmt" + "log" + "os" + "path/filepath" + "strings" + "sync" + "testing" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl" + cai2hclconverters "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai" + tfplan2caiconverters "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters" + + "go.uber.org/zap" + "go.uber.org/zap/zaptest" + + "github.com/google/go-cmp/cmp" +) + +var ( + cacheMutex = sync.Mutex{} + tmpDir = os.TempDir() +) + +func BidirectionalConversion(t *testing.T, ignoredFields []string) { + resourceTestData, primaryResource, err := prepareTestData(t.Name()) + if err != nil { + t.Fatalf("Error preparing the input data: %#v", err) + } + + if resourceTestData == nil { + t.Skipf("The test data is unavailable.") + } + + // Create a temporary directory for running terraform. + tfDir, err := os.MkdirTemp(tmpDir, "terraform") + if err != nil { + log.Fatal(err) + } + defer os.RemoveAll(tfDir) + + logger := zaptest.NewLogger(t) + + // If the primary resource is available, only test the primary resource. + // Otherwise, test all of the resources in the test. 
+ if primaryResource != "" { + err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, logger) + if err != nil { + t.Fatalf("Test fails: %#v", err) + } + } else { + for _, testData := range resourceTestData { + err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, logger) + if err != nil { + t.Fatalf("Test fails: %#v", err) + } + } + } +} + +// Tests a single resource +func testSingleResource(t *testing.T, testName string, testData ResourceTestData, tfDir string, ignoredFields []string, logger *zap.Logger) error { + resourceType := testData.ResourceType + if _, ok := tfplan2caiconverters.ConverterMap[resourceType]; !ok { + log.Printf("Test for %s is skipped as it is not supported in tfplan2cai conversion.", resourceType) + return nil + } + + assetType := testData.CaiAssetData.Type + if assetType == "" { + return fmt.Errorf("cai asset is unavailable for %s", testData.CaiAssetName) + } + if _, ok := cai2hclconverters.ConverterMap[assetType]; !ok { + log.Printf("Test for %s is skipped as it is not supported in cai2hcl conversion.", assetType) + return nil + } + + assets := []caiasset.Asset{testData.CaiAssetData} + + // Uncomment these lines when debugging issues locally + // assetFile := fmt.Sprintf("%s.json", t.Name()) + // writeJSONFile(assetFile, assets) + + // Step 1: Use cai2hcl to convert export assets into a Terraform configuration (export config). + // Compare all of the fields in raw config are in export config. 
+ + exportConfigData, err := cai2hcl.Convert(assets, &cai2hcl.Options{ + ErrorLogger: logger, + }) + if err != nil { + return fmt.Errorf("error when converting the export assets into export config: %#v", err) + } + + // Uncomment these lines when debugging issues locally + // exportTfFile := fmt.Sprintf("%s_export.tf", t.Name()) + // err = os.WriteFile(exportTfFile, exportConfigData, 0644) + // if err != nil { + // log.Fatalf("error writing file %s", exportTfFile) + // } + // defer os.Remove(exportTfFile) + + exportTfFilePath := fmt.Sprintf("%s/%s_export.tf", tfDir, t.Name()) + err = os.WriteFile(exportTfFilePath, exportConfigData, 0644) + if err != nil { + return fmt.Errorf("error when writing the file %s", exportTfFilePath) + } + + exportResources, err := parseResourceConfigs(exportTfFilePath) + if err != nil { + return err + } + + if len(exportResources) == 0 { + return fmt.Errorf("missing hcl after cai2hcl conversion for CAI asset %s.", testData.CaiAssetName) + } + + ignoredFieldMap := make(map[string]bool, 0) + for _, f := range ignoredFields { + ignoredFieldMap[f] = true + } + + parsedExportConfig := exportResources[0].Attributes + missingKeys := compareHCLFields(testData.ParsedRawConfig, parsedExportConfig, "", ignoredFieldMap) + if len(missingKeys) > 0 { + return fmt.Errorf("missing fields in address %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) + } + + // Step 2 + // Run a terraform plan using export_config. + // Use tfplan2cai to convert the generated plan into CAI assets (roundtrip_assets). + // Convert roundtrip_assets back into a Terraform configuration (roundtrip_config) using cai2hcl. + // Compare roundtrip_config with export_config to ensure they are identical. 
+ + // Convert the export config to roundtrip assets and then convert the roundtrip assets back to roundtrip config + ancestryCache := getAncestryCache(assets) + roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger) + if err != nil { + return fmt.Errorf("error when converting the round-trip config: %#v", err) + } + + roundtripTfFilePath := fmt.Sprintf("%s_roundtrip.tf", testName) + err = os.WriteFile(roundtripTfFilePath, roundtripConfigData, 0644) + if err != nil { + return fmt.Errorf("error when writing the file %s", roundtripTfFilePath) + } + defer os.Remove(roundtripTfFilePath) + + if diff := cmp.Diff(string(roundtripConfigData), string(exportConfigData)); diff != "" { + log.Printf("Roundtrip config is different from the export config.\nroundtrip config:\n%s\nexport config:\n%s", string(roundtripConfigData), string(exportConfigData)) + return fmt.Errorf("Test %s got diff (-want +got): %s", testName, diff) + } + + return nil +} + +// Gets the ancestry cache for tfplan2cai conversion +func getAncestryCache(assets []caiasset.Asset) map[string]string { + ancestryCache := make(map[string]string, 0) + + for _, asset := range assets { + ancestors := asset.Ancestors + if len(ancestors) != 0 { + var path string + for i := len(ancestors) - 1; i >= 0; i-- { + curr := ancestors[i] + if path == "" { + path = curr + } else { + path = fmt.Sprintf("%s/%s", path, curr) + } + } + + if _, ok := ancestryCache[ancestors[0]]; !ok { + ancestryCache[ancestors[0]] = path + } + + project := utils.ParseFieldValue(asset.Name, "projects") + projectKey := fmt.Sprintf("projects/%s", project) + if strings.HasPrefix(ancestors[0], "projects") && ancestors[0] != projectKey { + if _, ok := ancestryCache[projectKey]; !ok { + ancestryCache[projectKey] = path + } + } + } + } + return ancestryCache +} + +// Compares HCL and finds all of the keys in map1 are in map2 +func compareHCLFields(map1, map2 map[string]interface{}, path string, ignoredFields map[string]bool) 
[]string { + var missingKeys []string + for key, value1 := range map1 { + if value1 == nil { + continue + } + + currentPath := path + "." + key + if path == "" { + currentPath = key + } + + if ignoredFields[currentPath] { + continue + } + + value2, ok := map2[key] + if !ok || value2 == nil { + missingKeys = append(missingKeys, currentPath) + continue + } + + switch v1 := value1.(type) { + case map[string]interface{}: + v2, _ := value2.(map[string]interface{}) + missingKeys = append(missingKeys, compareHCLFields(v1, v2, currentPath, ignoredFields)...) + case []interface{}: + v2, _ := value2.([]interface{}) + + for i := 0; i < len(v1); i++ { + nestedMap1, ok1 := v1[i].(map[string]interface{}) + nestedMap2, ok2 := v2[i].(map[string]interface{}) + if ok1 && ok2 { + keys := compareHCLFields(nestedMap1, nestedMap2, fmt.Sprintf("%s[%d]", currentPath, i), ignoredFields) + missingKeys = append(missingKeys, keys...) + } + } + default: + } + } + + return missingKeys +} + +// Converts a tfplan to CAI asset, and then converts the CAI asset into HCL +func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger) ([]byte, error) { + fileName := fmt.Sprintf("%s_export", testName) + + // Run terraform init and terraform apply to generate tfplan.json files + terraformWorkflow(t, tfDir, fileName) + + planFile := fmt.Sprintf("%s.tfplan.json", fileName) + planfilePath := filepath.Join(tfDir, planFile) + jsonPlan, err := os.ReadFile(planfilePath) + if err != nil { + return nil, err + } + + ctx := context.Background() + roundtripAssets, err := tfplan2cai.Convert(ctx, jsonPlan, &tfplan2cai.Options{ + ErrorLogger: logger, + Offline: true, + DefaultProject: "ci-test-project-nightly-beta", + DefaultRegion: "", + DefaultZone: "", + UserAgent: "", + AncestryCache: ancestryCache, + }) + + if err != nil { + return nil, err + } + + // Uncomment these lines when debugging issues locally + // roundtripAssetFile := 
fmt.Sprintf("%s_roundtrip.json", t.Name()) + // writeJSONFile(roundtripAssetFile, roundtripAssets) + + data, err := cai2hcl.Convert(roundtripAssets, &cai2hcl.Options{ + ErrorLogger: logger, + }) + if err != nil { + return nil, err + } + + return data, nil +} diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index 0b1b3c13ef1f..a06b3766d1bb 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -6,10 +6,14 @@ import ( "fmt" "io" "log" + "os" "time" "cloud.google.com/go/storage" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/hashicorp/hcl/v2/hclsyntax" ) type ResourceMetadata struct { @@ -33,6 +37,11 @@ type TgcMetadataPayload struct { PrimaryResource string `json:"primary_resource"` } +type ResourceTestData struct { + ParsedRawConfig map[string]interface{} `json:"parsed_raw_config"` + ResourceMetadata `json:"resource_metadata"` +} + var ( TestsMetadata = make(map[string]TgcMetadataPayload) setupDone = false @@ -84,3 +93,147 @@ func ReadTestsDataFromGcs() (map[string]TgcMetadataPayload, error) { } return TestsMetadata, nil } + +func prepareTestData(testName string) (map[string]ResourceTestData, string, error) { + var err error + cacheMutex.Lock() + TestsMetadata, err = ReadTestsDataFromGcs() + if err != nil { + return nil, "", err + } + cacheMutex.Unlock() + + testMetadata := TestsMetadata[testName] + resourceMetadata := testMetadata.ResourceMetadata + if len(resourceMetadata) == 0 { + log.Printf("Data of test is unavailable: %s", testName) + return nil, "", nil + } + + rawTfFile := fmt.Sprintf("%s.tf", testName) + err = os.WriteFile(rawTfFile, []byte(testMetadata.RawConfig), 0644) + if err != nil { + return nil, "", fmt.Errorf("error writing to file %s: %#v", rawTfFile, err) + } + defer os.Remove(rawTfFile) + + rawResourceConfigs, err := 
parseResourceConfigs(rawTfFile) + if err != nil { + return nil, "", fmt.Errorf("error parsing resource configs: %#v", err) + } + + if len(rawResourceConfigs) == 0 { + return nil, "", fmt.Errorf("Test %s fails: raw config is unavailable", testName) + } + + rawConfigMap := convertToConfigMap(rawResourceConfigs) + + resourceTestData := make(map[string]ResourceTestData, 0) + for address, metadata := range resourceMetadata { + resourceTestData[address] = ResourceTestData{ + ParsedRawConfig: rawConfigMap[address], + ResourceMetadata: *metadata, + } + } + + return resourceTestData, testMetadata.PrimaryResource, nil +} + +type Resource struct { + Type string `json:"type"` + Name string `json:"name"` + Attributes map[string]interface{} `json:"attributes"` +} + +// parseHCLBody recursively parses attributes and nested blocks from an HCL body. +func parseHCLBody(body hcl.Body, filePath string) ( + attributes map[string]interface{}, + diags hcl.Diagnostics, +) { + attributes = make(map[string]interface{}) + var allDiags hcl.Diagnostics + + if syntaxBody, ok := body.(*hclsyntax.Body); ok { + for _, attr := range syntaxBody.Attributes { + attributes[attr.Name] = true + } + + for _, block := range syntaxBody.Blocks { + nestedAttr, diags := parseHCLBody(block.Body, filePath) + if diags.HasErrors() { + allDiags = append(allDiags, diags...) + } + + attributes[block.Type] = nestedAttr + } + } else { + allDiags = append(allDiags, &hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: "Body type assertion to *hclsyntax.Body failed", + Detail: fmt.Sprintf("Cannot directly parse attributes for body of type %T. 
Attribute parsing may be incomplete.", body), + }) + } + + return attributes, allDiags +} + +// Parses a Terraform configuation file written with HCL +func parseResourceConfigs(filePath string) ([]Resource, error) { + src, err := os.ReadFile(filePath) + if err != nil { + return nil, fmt.Errorf("failed to read file %s: %s", filePath, err) + } + + parser := hclparse.NewParser() + hclFile, diags := parser.ParseHCL(src, filePath) + if diags.HasErrors() { + return nil, fmt.Errorf("parse HCL: %w", diags) + } + + if hclFile == nil { + return nil, fmt.Errorf("parsed HCL file %s is nil cannot proceed", filePath) + } + + var allParsedResources []Resource + + for _, block := range hclFile.Body.(*hclsyntax.Body).Blocks { + if block.Type == "resource" { + if len(block.Labels) != 2 { + log.Printf("Skipping address block with unexpected number of labels: %v", block.Labels) + continue + } + + resType := block.Labels[0] + resName := block.Labels[1] + attrs, procDiags := parseHCLBody(block.Body, filePath) + + if procDiags.HasErrors() { + log.Printf("Diagnostics while processing address %s.%s body in %s:", resType, resName, filePath) + for _, diag := range procDiags { + log.Printf(" - %s (Severity)", diag.Error()) + } + } + + gr := Resource{ + Type: resType, + Name: resName, + Attributes: attrs, + } + allParsedResources = append(allParsedResources, gr) + } + } + + return allParsedResources, nil +} + +// Converts the slice to map with resource address as the key +func convertToConfigMap(resources []Resource) map[string]map[string]interface{} { + configMap := make(map[string]map[string]interface{}, 0) + + for _, r := range resources { + addr := fmt.Sprintf("%s.%s", r.Type, r.Name) + configMap[addr] = r.Attributes + } + + return configMap +} From 59a3f31cef25d8d70bd87ce11a81541c9a3ccec1 Mon Sep 17 00:00:00 2001 From: porky256 <61063240+porky256@users.noreply.github.com> Date: Wed, 4 Jun 2025 18:04:29 +0200 Subject: [PATCH 298/884] Add example to 
network_security.backend_authentication_config (#14192) --- .../networksecurity/BackendAuthenticationConfig.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml b/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml index ef0243f2b11a..c398f039c558 100644 --- a/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml +++ b/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml @@ -44,6 +44,13 @@ examples: resource_name: 'my-backend-authentication-config' certificate_name: 'my-certificate' trust_config_name: 'my-trust-config' + - name: 'backend_service_tls_settings' + primary_resource_id: 'default' + min_version: 'beta' + vars: + backend_service_name: 'backend-service' + health_check_name: 'health-check' + authentication_name: 'authentication' parameters: - name: 'name' From ec605304301c93eb494c8b5e2f54144545986753 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 4 Jun 2025 09:25:06 -0700 Subject: [PATCH 299/884] tgc-revival: compare assets in testing framework (#14189) --- .../tgc_next/test/assert_test_files.go | 62 +++++++++++++++---- mmv1/third_party/tgc_next/test/setup.go | 11 ++++ 2 files changed, 61 insertions(+), 12 deletions(-) diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 7c4d67a83aaa..7f61e0645a34 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -31,7 +31,7 @@ var ( func BidirectionalConversion(t *testing.T, ignoredFields []string) { resourceTestData, primaryResource, err := prepareTestData(t.Name()) if err != nil { - t.Fatalf("Error preparing the input data: %#v", err) + t.Fatal("Error preparing the input data:", err) } if resourceTestData == nil { @@ -52,13 +52,13 @@ func BidirectionalConversion(t *testing.T, ignoredFields []string) { if primaryResource != "" { err = testSingleResource(t, t.Name(), 
resourceTestData[primaryResource], tfDir, ignoredFields, logger) if err != nil { - t.Fatalf("Test fails: %#v", err) + t.Fatal("Test fails:", err) } } else { for _, testData := range resourceTestData { err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, logger) if err != nil { - t.Fatalf("Test fails: %#v", err) + t.Fatal("Test fails: ", err) } } } @@ -101,7 +101,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData // exportTfFile := fmt.Sprintf("%s_export.tf", t.Name()) // err = os.WriteFile(exportTfFile, exportConfigData, 0644) // if err != nil { - // log.Fatalf("error writing file %s", exportTfFile) + // return fmt.Errorf("error writing file", exportTfFile) // } // defer os.Remove(exportTfFile) @@ -139,7 +139,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData // Convert the export config to roundtrip assets and then convert the roundtrip assets back to roundtrip config ancestryCache := getAncestryCache(assets) - roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger) + roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger) if err != nil { return fmt.Errorf("error when converting the round-trip config: %#v", err) } @@ -153,7 +153,24 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if diff := cmp.Diff(string(roundtripConfigData), string(exportConfigData)); diff != "" { log.Printf("Roundtrip config is different from the export config.\nroundtrip config:\n%s\nexport config:\n%s", string(roundtripConfigData), string(exportConfigData)) - return fmt.Errorf("Test %s got diff (-want +got): %s", testName, diff) + return fmt.Errorf("test %s got diff (-want +got): %s", testName, diff) + } + + // Step 3 + // Compare most fields between the exported asset and roundtrip asset, except for "data" field for resource + assetMap := convertToAssetMap(assets) + roundtripAssetMap := 
convertToAssetMap(roundtripAssets) + for assetType, asset := range assetMap { + if roundtripAsset, ok := roundtripAssetMap[assetType]; !ok { + return fmt.Errorf("roundtrip asset for type %s is missing", assetType) + } else { + if err := compareAssetName(asset.Name, roundtripAsset.Name); err != nil { + return err + } + if diff := cmp.Diff(asset.Resource, roundtripAsset.Resource); diff != "" { + return fmt.Errorf("differences found between exported asset and roundtrip asset (-want +got):\n%s", diff) + } + } } return nil @@ -238,7 +255,7 @@ func compareHCLFields(map1, map2 map[string]interface{}, path string, ignoredFie } // Converts a tfplan to CAI asset, and then converts the CAI asset into HCL -func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger) ([]byte, error) { +func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger) ([]caiasset.Asset, []byte, error) { fileName := fmt.Sprintf("%s_export", testName) // Run terraform init and terraform apply to generate tfplan.json files @@ -248,7 +265,7 @@ func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac planfilePath := filepath.Join(tfDir, planFile) jsonPlan, err := os.ReadFile(planfilePath) if err != nil { - return nil, err + return nil, nil, err } ctx := context.Background() @@ -263,19 +280,40 @@ func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac }) if err != nil { - return nil, err + return nil, nil, err } // Uncomment these lines when debugging issues locally // roundtripAssetFile := fmt.Sprintf("%s_roundtrip.json", t.Name()) // writeJSONFile(roundtripAssetFile, roundtripAssets) - data, err := cai2hcl.Convert(roundtripAssets, &cai2hcl.Options{ + roundtripConfig, err := cai2hcl.Convert(roundtripAssets, &cai2hcl.Options{ ErrorLogger: logger, }) if err != nil { - return nil, err + return nil, nil, err } - return data, nil + return 
roundtripAssets, roundtripConfig, nil +} + +// Compares the asset name in export asset and roundtrip asset and ignores "null" in the name +// Example: //cloudresourcemanager.googleapis.com/projects/123456 +func compareAssetName(want, got string) error { + parts1 := strings.Split(want, "/") + parts2 := strings.Split(got, "/") + if len(parts1) != len(parts2) { + return fmt.Errorf("differences found between two asset names: want %s, got %s", want, got) + } + + for i, part := range parts1 { + if parts2[i] == "null" { + continue + } + + if part != parts2[i] { + return fmt.Errorf("differences found between two asset names: want %s, got %s", want, got) + } + } + return nil } diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index a06b3766d1bb..315cd7be93d8 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -237,3 +237,14 @@ func convertToConfigMap(resources []Resource) map[string]map[string]interface{} return configMap } + +// Converts the slice of assets to map with the asset name as the key +func convertToAssetMap(assets []caiasset.Asset) map[string]caiasset.Asset { + assetMap := make(map[string]caiasset.Asset) + + for _, asset := range assets { + asset.Resource.Data = nil + assetMap[asset.Type] = asset + } + return assetMap +} From 5ac3b652e3016aa08cac8411e6d465c4a75024b1 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 4 Jun 2025 13:09:24 -0700 Subject: [PATCH 300/884] tgc-revival: modify models for testing framework (#14188) --- .../tgc_next/test/assert_test_files.go | 22 ++++++++++--------- mmv1/third_party/tgc_next/test/setup.go | 17 +++++++------- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 7f61e0645a34..f81ee8811e81 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ 
b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -72,17 +72,19 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData return nil } - assetType := testData.CaiAssetData.Type - if assetType == "" { - return fmt.Errorf("cai asset is unavailable for %s", testData.CaiAssetName) - } - if _, ok := cai2hclconverters.ConverterMap[assetType]; !ok { - log.Printf("Test for %s is skipped as it is not supported in cai2hcl conversion.", assetType) - return nil + assets := make([]caiasset.Asset, 0) + for assetName, assetData := range testData.Cai { + assets = append(assets, assetData.CaiAsset) + assetType := assetData.CaiAsset.Type + if assetType == "" { + return fmt.Errorf("cai asset is unavailable for %s", assetName) + } + if _, ok := cai2hclconverters.ConverterMap[assetType]; !ok { + log.Printf("Test for %s is skipped as it is not supported in cai2hcl conversion.", assetType) + return nil + } } - assets := []caiasset.Asset{testData.CaiAssetData} - // Uncomment these lines when debugging issues locally // assetFile := fmt.Sprintf("%s.json", t.Name()) // writeJSONFile(assetFile, assets) @@ -117,7 +119,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData } if len(exportResources) == 0 { - return fmt.Errorf("missing hcl after cai2hcl conversion for CAI asset %s.", testData.CaiAssetName) + return fmt.Errorf("missing hcl after cai2hcl conversion for resource %s", testData.ResourceType) } ignoredFieldMap := make(map[string]bool, 0) diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index 315cd7be93d8..fee91324eed9 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -17,17 +17,16 @@ import ( ) type ResourceMetadata struct { - CaiAssetName string `json:"cai_asset_name"` - CaiAssetData caiasset.Asset `json:"cai_asset_data"` - ResourceType string `json:"resource_type"` - ResourceAddress string `json:"resource_address"` - 
ImportMetadata ImportMetadata `json:"import_metadata,omitempty"` - Service string `json:"service"` + CaiAssetNames []string `json:"cai_asset_names"` + ResourceType string `json:"resource_type"` + ResourceAddress string `json:"resource_address"` + Service string `json:"service"` + Cai map[string]*CaiData `json:"cai_data,omitempty"` // Holds the fetched CAI assets data } -type ImportMetadata struct { - Id string `json:"id,omitempty"` - IgnoredFields []string `json:"ignored_fields,omitempty"` +// CaiData holds the fetched CAI asset and related error information. +type CaiData struct { + CaiAsset caiasset.Asset `json:"cai_asset,omitempty"` } type TgcMetadataPayload struct { From 750385f6a3d36dae635e9c9b25eea9df20961921 Mon Sep 17 00:00:00 2001 From: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Date: Thu, 5 Jun 2025 02:22:56 +0530 Subject: [PATCH 301/884] Add terraform support for InterconnectGroup and InterconnectAttachmentGroup resources (#13870) --- mmv1/products/compute/Interconnect.yaml | 9 + .../compute/InterconnectAttachment.yaml | 5 + .../compute/InterconnectAttachmentGroup.yaml | 280 ++++++++++++++++++ mmv1/products/compute/InterconnectGroup.yaml | 265 +++++++++++++++++ ...nterconnect_attachment_group_basic.tf.tmpl | 6 + .../examples/interconnect_group_basic.tf.tmpl | 6 + ...pute_interconnect_attachment_group_test.go | 95 ++++++ ...esource_compute_interconnect_group_test.go | 95 ++++++ 8 files changed, 761 insertions(+) create mode 100644 mmv1/products/compute/InterconnectAttachmentGroup.yaml create mode 100644 mmv1/products/compute/InterconnectGroup.yaml create mode 100644 mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go create mode 100644 
mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index f3d1b9ae06b9..87038eb0bcd3 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -412,3 +412,12 @@ properties: output: true item_type: type: String + - name: 'interconnectGroups' + type: Array + description: | + URLs of InterconnectGroups that include this Interconnect. + Order is arbitrary and items are unique. + output: true + is_set: true + item_type: + type: String diff --git a/mmv1/products/compute/InterconnectAttachment.yaml b/mmv1/products/compute/InterconnectAttachment.yaml index b2f449feca52..6de7abf63d20 100644 --- a/mmv1/products/compute/InterconnectAttachment.yaml +++ b/mmv1/products/compute/InterconnectAttachment.yaml @@ -389,3 +389,8 @@ properties: interconnect attachment. Example: 2001:db8::2/125 immutable: true min_version: beta + - name: 'attachmentGroup' + type: String + description: | + URL of the AttachmentGroup that includes this Attachment. + output: true diff --git a/mmv1/products/compute/InterconnectAttachmentGroup.yaml b/mmv1/products/compute/InterconnectAttachmentGroup.yaml new file mode 100644 index 000000000000..a059be45471b --- /dev/null +++ b/mmv1/products/compute/InterconnectAttachmentGroup.yaml @@ -0,0 +1,280 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: 'InterconnectAttachmentGroup' +kind: 'compute#InterconnectAttachmentGroup' +description: | + An interconnect attachment group resource allows customers to create, + analyze, and expand highly available deployments. +references: + guides: + 'Create a Dedicated Interconnect': 'https://cloud.google.com/network-connectivity/docs/interconnect/concepts/dedicated-overview' + api: 'https://cloud.google.com/compute/docs/reference/rest/v1/interconnects' +docs: +base_url: 'projects/{{project}}/global/interconnectAttachmentGroups' +self_link: 'projects/{{project}}/global/interconnectAttachmentGroups/{{name}}' +update_verb: 'PATCH' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +examples: + - name: 'interconnect_attachment_group_basic' + primary_resource_id: 'example-interconnect-attachment-group' + vars: + interconnect_attachment_group_name: 'example-interconnect-attachment-group' + deletion_protection: 'true' + test_vars_overrides: + 'deletion_protection': 'false' +parameters: +properties: + - name: 'description' + type: String + description: | + An optional description of this resource. Provide this property when you create the resource. + - name: 'creationTimestamp' + type: Time + description: | + Creation timestamp in RFC3339 text format. + output: true + - name: 'name' + type: String + description: | + Name of the resource. Provided by the client when the resource is created. The name must be + 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters + long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + character must be a lowercase letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, which cannot be a dash. 
+ required: true + immutable: true + validation: + regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' + - name: 'attachments' + type: Map + description: | + Attachments in the AttachmentGroup. Keys are arbitrary user-specified + strings. Users are encouraged, but not required, to use their preferred + format for resource links as keys. + Note that there are add-members and remove-members methods in gcloud. + The size of this map is limited by an "Attachments per group" quota. + key_name: 'name' + key_description: | + Resource name + value_type: + name: mapObjectName + type: NestedObject + properties: + - name: 'attachment' + type: String + - name: 'interconnectGroup' + type: String + description: | + The URL of an InterconnectGroup that groups these Attachments' + Interconnects. Customers do not need to set this unless directed by + Google Support. + - name: 'intent' + type: NestedObject + description: | + The user's intent for this group. This is the only required field besides + the name that must be specified on group creation. + properties: + - name: 'availabilitySla' + type: Enum + description: | + Which SLA the user intends this group to support. + enum_values: + - 'PRODUCTION_NON_CRITICAL' + - 'PRODUCTION_CRITICAL' + - 'NO_SLA' + - 'AVAILABILITY_SLA_UNSPECIFIED' + required: true + - name: 'logicalStructure' + type: NestedObject + description: | + An analysis of the logical layout of Attachments in this + group. Every Attachment in the group is shown once in this structure. + output: true + properties: + - name: 'regions' + type: Array + description: | + The regions Attachments in this group are in. + output: true + item_type: + type: NestedObject + properties: + - name: 'region' + type: String + description: | + The name of a region, like "us-central1". + output: true + - name: 'metros' + type: Array + description: | + The metros of Attachments in this group in this region. 
+ output: true + item_type: + type: NestedObject + properties: + - name: 'metro' + type: String + description: | + The name of the metro, as a three-letter lowercase + string like "iad". This is the first component of the location of an + Interconnect. + output: true + - name: 'facilities' + type: Array + description: | + The facilities used for this group's Attachments' + Interconnects. + output: true + item_type: + type: NestedObject + properties: + - name: 'facility' + type: String + description: | + The name of a facility, like "iad-1234". + output: true + - name: 'zones' + type: Array + description: | + The zones that Interconnects in this facility are + present in. + output: true + item_type: + type: NestedObject + properties: + - name: 'zone' + type: String + description: | + The zones that Attachments in this group are present + in, in the given facilities. This is inherited from their + Interconnects. + output: true + - name: 'attachment' + type: Array + description: | + URLs of Attachments in the given zone, to the given + region, on Interconnects in the given facility and metro. Every + Attachment in the AG has such an entry. + output: true + item_type: + type: String + - name: 'configured' + type: NestedObject + description: | + The redundancy this group is configured to support. The way a + user queries what SLA their Attachment gets is by looking at this field of + the Attachment's AttachmentGroup. + output: true + properties: + - name: 'availabilitySla' + type: NestedObject + description: | + Which SLA this group is configured to support, and why this + group does or does not meet that SLA's requirements. + output: true + properties: + - name: 'effectiveSla' + type: Enum + description: | + Which SLA this group supports. Options are the same as the + intent. 
+ enum_values: + - 'PRODUCTION_CRITICAL' + - 'PRODUCTION_NON_CRITICAL' + - 'NO_SLA' + - 'EFFECTIVE_SLA_UNSPECIFIED' + output: true + - name: 'intendedSlaBlockers' + type: Array + description: | + Reasons why configuration.availabilitySLA.sla differs from + intent.availabilitySLA. This list is empty if and only if those are the + same. + output: true + item_type: + type: NestedObject + properties: + - name: 'blockerType' + type: Enum + description: | + The category of an unmet SLA requirement. + enum_values: + - 'BLOCKER_TYPE_UNSPECIFIED' + - 'NO_ATTACHMENTS' + - 'INCOMPATIBLE_REGIONS' + - 'INCOMPATIBLE_METROS' + - 'NO_ATTACHMENTS_IN_METRO_AND_ZONE' + - 'MISSING_GLOBAL_ROUTING' + - 'OTHER' + output: true + - name: 'explanation' + type: String + description: | + A human-readable explanation of this requirement and + why it's not met. This is set for every type of requirement. + output: true + - name: 'documentationLink' + type: String + description: | + The url of Google Cloud public documentation explaining + this requirement. This is set for every type of requirement. + output: true + - name: 'regions' + type: Array + description: | + Regions used to explain this blocker in more + detail. These are region names formatted like "us-central1". This + will be set for some blockers (like INCOMPATIBLE_REGIONS) but does + not apply to others. + item_type: + type: String + output: true + - name: 'metros' + type: Array + description: | + Metros used to explain this blocker in more detail. + These are three-letter lowercase strings like "iad". This will be set + for some blockers (like NO_ATTACHMENTS_IN_METRO_AND_ZONE) but does + not apply to others. + item_type: + type: String + output: true + - name: 'zones' + type: Array + description: | + Zones used to explain this blocker in more detail. + Format is "zone1" and/or "zone2". This will be set for some blockers + (like MISSING_ZONE) but does not apply to others. 
+ item_type: + type: String + output: true + - name: 'attachments' + type: Array + description: | + URLs of any particular Attachments to explain this + blocker in more detail. + item_type: + type: String + output: true diff --git a/mmv1/products/compute/InterconnectGroup.yaml b/mmv1/products/compute/InterconnectGroup.yaml new file mode 100644 index 000000000000..233fc98a49c4 --- /dev/null +++ b/mmv1/products/compute/InterconnectGroup.yaml @@ -0,0 +1,265 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'InterconnectGroup' +kind: 'compute#InterconnectGroup' +description: | + An interconnect group resource allows customers to create, analyze, and + expand their redundant connections. 
+references: + guides: + 'Create a Dedicated Interconnect': 'https://cloud.google.com/network-connectivity/docs/interconnect/concepts/dedicated-overview' + api: 'https://cloud.google.com/compute/docs/reference/rest/v1/interconnects' +docs: +base_url: 'projects/{{project}}/global/interconnectGroups' +self_link: 'projects/{{project}}/global/interconnectGroups/{{name}}' +update_verb: 'PATCH' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +examples: + - name: 'interconnect_group_basic' + primary_resource_id: 'example-interconnect-group' + vars: + interconnect_group_name: 'example-interconnect-group' + test_vars_overrides: + 'deletion_protection': 'false' +parameters: +properties: + - name: 'description' + type: String + description: | + An optional description of this resource. Provide this property when you create the resource. + - name: 'creationTimestamp' + type: Time + description: | + Creation timestamp in RFC3339 text format. + output: true + - name: 'name' + type: String + description: | + Name of the resource. Provided by the client when the resource is created. The name must be + 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters + long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + character must be a lowercase letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, which cannot be a dash. + required: true + immutable: true + validation: + regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' + - name: 'interconnects' + type: Map + description: | + Interconnects in the InterconnectGroup. Keys are arbitrary user-specified + strings. Users are encouraged, but not required, to use their preferred + format for resource links as keys. 
+ Note that there are add-members and remove-members methods in gcloud. + The size of this map is limited by an "Interconnects per group" quota. + key_name: 'name' + key_description: | + Resource name + value_type: + name: mapObjectName + type: NestedObject + properties: + - name: 'interconnect' + type: String + description: | + The URL of an Interconnect in this group. All Interconnects in the group are unique. + - name: 'intent' + type: NestedObject + description: | + The user's intent for this group. This is the only required field besides + the name that must be specified on group creation. + properties: + - name: 'topologyCapability' + type: Enum + description: | + The reliability the user intends this group to be capable of, in terms + of the Interconnect product SLAs. + enum_values: + - 'PRODUCTION_NON_CRITICAL' + - 'PRODUCTION_CRITICAL' + - 'NO_SLA' + - 'AVAILABILITY_SLA_UNSPECIFIED' + required: true + - name: 'physicalStructure' + type: NestedObject + description: | + An analysis of the physical layout of Interconnects in this + group. Every Interconnect in the group is shown once in this structure. + output: true + properties: + - name: 'metros' + type: Array + description: | + The metros Interconnects in this group are in. + output: true + item_type: + type: NestedObject + properties: + - name: 'metro' + type: String + description: | + The name of the metro, as a three-letter lowercase string + like "iad". This is the first component of the location of + Interconnects underneath this. + output: true + - name: 'facilities' + type: Array + description: | + The facilities Interconnects in this metro are present + in. + output: true + item_type: + type: NestedObject + properties: + - name: 'facility' + type: String + description: | + The ID of this facility, as a numeric string like + "5467". This is the third component of the location of Interconnects + in this facility. 
+ output: true + - name: 'zones' + type: Array + description: | + The zones that Interconnects in this facility are + present in. + output: true + item_type: + type: NestedObject + properties: + - name: 'zone' + type: String + description: | + The name of the zone, either "zone1" or "zone2". + This is the second component of the location of Interconnects in + this facility. + output: true + - name: 'interconnects' + type: Array + description: | + URLs of Interconnects in this redundancy group in the + given metro, facility, and zone. + item_type: + type: String + output: true + - name: 'configured' + type: NestedObject + description: | + The status of the group as configured. This has the same + structure as the operational field reported by the OperationalStatus + method, but does not take into account the operational status of each + resource. + output: true + properties: + - name: 'topologyCapability' + type: NestedObject + description: | + How reliable this topology is configured to be, and why + this group does or does not meet the requirements for the intended + capability. + output: true + properties: + - name: 'supportedSla' + type: Enum + description: | + Which level of reliability this group is configured to + support. + enum_values: + - 'PRODUCTION_CRITICAL' + - 'PRODUCTION_NON_CRITICAL' + - 'NO_SLA' + - 'UNSPECIFIED' + output: true + - name: 'intendedCapabilityBlockers' + type: Array + description: | + Reasons why configuration.topologyCapability.sla differs + from intent.topologyCapability. This list is empty if and only if those + are the same. + output: true + item_type: + type: NestedObject + properties: + - name: 'blockerType' + type: Enum + description: | + The category of an unmet SLA requirement. The Intended + SLA Blockers section below explains this field and how it relates to + other fields in intendedCapabilityBlockers. 
+ enum_values: + - 'UNSPECIFIED' + - 'NO_INTERCONNECTS' + - 'INCOMPATIBLE_METROS' + - 'NO_INTERCONNECTS_IN_METRO_AND_ZONE' + - 'NOT_AVAILABLE' + - 'OTHER' + output: true + - name: 'explanation' + type: String + description: | + A human-readable explanation of this requirement and + why it's not met. This is set for every type of requirement. + output: true + - name: 'documentationLink' + type: String + description: | + The url of Google Cloud public documentation explaining + this requirement. This is set for every type of requirement. + output: true + - name: 'metros' + type: Array + description: | + Metros used to explain this blocker in more detail. + These are three-letter lowercase strings like "iad". A blocker like + INCOMPATIBLE_METROS will specify the problematic metros in this + field. + item_type: + type: String + output: true + - name: 'facilities' + type: Array + description: | + Facilities used to explain this blocker in more detail. + Like physicalStructure.metros.facilities.facility, this is a numeric + string like "5467". + item_type: + type: String + output: true + - name: 'zones' + type: Array + description: | + Zones used to explain this blocker in more detail. + Zone names are "zone1" and/or "zone2". + item_type: + type: String + output: true + - name: 'interconnects' + type: Array + description: | + Interconnects used to explain this blocker in more + detail. 
+ item_type: + type: String + output: true diff --git a/mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl new file mode 100644 index 000000000000..0a1b83d2f043 --- /dev/null +++ b/mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl @@ -0,0 +1,6 @@ +resource "google_compute_interconnect_attachment_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "interconnect_attachment_group_name"}}" + intent { + availability_sla = "NO_SLA" + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl new file mode 100644 index 000000000000..3ca679114a46 --- /dev/null +++ b/mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl @@ -0,0 +1,6 @@ +resource "google_compute_interconnect_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "interconnect_group_name"}}" + intent { + topology_capability = "NO_SLA" + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go new file mode 100644 index 000000000000..0b6c23691860 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go @@ -0,0 +1,95 @@ +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccComputeInterconnectAttachmentGroup_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "deletion_protection": false, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInterconnectAttachmentGroupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInterconnectAttachmentGroup_basic(context), + }, + { + ResourceName: "google_compute_interconnect_attachment_group.example-interconnect-attachment-group", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeInterconnectAttachmentGroup_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_interconnect_attachment_group" "example-interconnect-attachment-group" { + name = "tf-test-example-interconnect-attachment-group%{random_suffix}" + intent { + availability_sla = "NO_SLA" + } +} +`, context) +} + +func TestAccComputeInterconnectAttachmentGroup_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "deletion_protection": false, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInterconnectAttachmentGroup_basic(context), + }, + { + ResourceName: "google_compute_interconnect_attachment_group.example-interconnect-attachment-group", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeInterconnectAttachmentGroup_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_interconnect_attachment_group.example-interconnect-attachment-group", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_interconnect_attachment_group.example-interconnect-attachment-group", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccComputeInterconnectAttachmentGroup_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_interconnect_attachment_group" "example-interconnect-attachment-group" { + name = "tf-test-example-interconnect-attachment-group%{random_suffix}" + intent { + availability_sla = "NO_SLA" + } + description = "New description" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go new file mode 100644 index 000000000000..89b7d56fcd60 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go @@ -0,0 +1,95 @@ +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccComputeInterconnectGroup_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "deletion_protection": false, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInterconnectGroup_basic(context), + }, + { + ResourceName: "google_compute_interconnect_group.example-interconnect-group", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeInterconnectGroup_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_interconnect_group" "example-interconnect-group" { + name = "tf-test-example-interconnect-group%{random_suffix}" + intent { + topology_capability = "NO_SLA" + } +} +`, context) +} + +func TestAccComputeInterconnectGroup_update(t *testing.T) { + 
t.Parallel() + + context := map[string]interface{}{ + "deletion_protection": false, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInterconnectGroupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInterconnectGroup_basic(context), + }, + { + ResourceName: "google_compute_interconnect_group.example-interconnect-group", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeInterconnectGroup_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_interconnect_group.example-interconnect-group", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_interconnect_group.example-interconnect-group", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeInterconnectGroup_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_interconnect_group" "example-interconnect-group" { + name = "tf-test-example-interconnect-group%{random_suffix}" + intent { + topology_capability = "NO_SLA" + } + description = "New description" +} +`, context) +} From 29f87047a30c00fe7349ab484cd1f9a0c3e15d28 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Wed, 4 Jun 2025 17:00:55 -0400 Subject: [PATCH 302/884] Add MessageTransform to 'google_pubsub_topic' (#13588) Co-authored-by: Scott Suarez --- mmv1/products/pubsub/Topic.yaml | 57 +++++++++++++++++++ .../pubsub/resource_pubsub_topic_test.go | 48 ++++++++++++++++ 2 files changed, 105 insertions(+) diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index 439fdcdc387e..8524e5e1b0da 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ 
b/mmv1/products/pubsub/Topic.yaml @@ -438,3 +438,60 @@ properties: The GCP service account to be used for Federated Identity authentication with Confluent Cloud. required: true + - name: messageTransforms + type: Array + description: | + Transforms to be applied to messages published to the topic. Transforms are applied in the + order specified. + item_type: + type: NestedObject + properties: + - name: javascriptUdf + type: NestedObject + description: | + Javascript User Defined Function. If multiple Javascript UDFs are specified on a resource, + each one must have a unique `function_name`. + properties: + - name: functionName + type: String + required: true + description: | + Name of the JavaScript function that should be applied to Pub/Sub messages. + - name: code + type: String + required: true + description: | + JavaScript code that contains a function `function_name` with the + following signature: + ``` + /** + * Transforms a Pub/Sub message. + * + * @return {(Object)>|null)} - To + * filter a message, return `null`. To transform a message return a map + * with the following keys: + * - (required) 'data' : {string} + * - (optional) 'attributes' : {Object} + * Returning empty `attributes` will remove all attributes from the + * message. + * + * @param {(Object)>} Pub/Sub + * message. Keys: + * - (required) 'data' : {string} + * - (required) 'attributes' : {Object} + * + * @param {Object} metadata - Pub/Sub message metadata. + * Keys: + * - (required) 'message_id' : {string} + * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format + * - (optional) 'ordering_key': {string} + */ + function (message, metadata) { + } + ``` + - name: disabled + type: Boolean + default_value: false + description: | + Controls whether or not to use this transform. If not set or `false`, + the transform will be applied to messages. Default: `true`. 
diff --git a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go index a4efa7568eda..007a2384b471 100644 --- a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go +++ b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go @@ -583,3 +583,51 @@ resource "google_pubsub_topic" "foo" { } `, topic) } +func TestAccPubsubTopic_javascriptUdfUpdate(t *testing.T) { + t.Parallel() + + topic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) + + functionName := "my_func" + code := "function my_func(message, metadata) {return null;}" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckPubsubTopicDestroyProducer(t), + Steps: []resource.TestStep{ + // Initial transform + { + Config: testAccPubsubTopic_javascriptUdfSettings(topic, functionName, code), + }, + { + ResourceName: "google_pubsub_topic.foo", + ImportStateId: topic, + ImportState: true, + ImportStateVerify: true, + }, + // Destroy transform + { + ResourceName: "google_pubsub_topic.foo", + ImportStateId: topic, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccPubsubTopic_javascriptUdfSettings(topic, functionName, code string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "foo" { + name = "%s" + + message_transforms { + javascript_udf { + function_name = "%s" + code = "%s" + } + } +} + `, topic, functionName, code) +} From a1de096d40f56b1c4e39b69ae992fd3a267246a6 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Wed, 4 Jun 2025 17:22:51 -0400 Subject: [PATCH 303/884] Add MessageTransform field to 'google_pubsub_subscription' (#13589) --- mmv1/products/pubsub/Subscription.yaml | 57 +++++++++++++ .../resource_pubsub_subscription_test.go | 83 
+++++++++++++++++++ 2 files changed, 140 insertions(+) diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index 9b3e7064839b..eff35eff17bb 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -506,3 +506,60 @@ properties: Note that subscribers may still receive multiple copies of a message when `enable_exactly_once_delivery` is true if the message was published multiple times by a publisher client. These copies are considered distinct by Pub/Sub and have distinct messageId values + - name: messageTransforms + type: Array + description: | + Transforms to be applied to messages published to the topic. Transforms are applied in the + order specified. + item_type: + type: NestedObject + properties: + - name: javascriptUdf + type: NestedObject + description: | + Javascript User Defined Function. If multiple Javascript UDFs are specified on a resource, + each one must have a unique `function_name`. + properties: + - name: functionName + type: String + required: true + description: | + Name of the JavaScript function that should be applied to Pub/Sub messages. + - name: code + type: String + required: true + description: | + JavaScript code that contains a function `function_name` with the + following signature: + ``` + /** + * Transforms a Pub/Sub message. + * + * @return {(Object)>|null)} - To + * filter a message, return `null`. To transform a message return a map + * with the following keys: + * - (required) 'data' : {string} + * - (optional) 'attributes' : {Object} + * Returning empty `attributes` will remove all attributes from the + * message. + * + * @param {(Object)>} Pub/Sub + * message. Keys: + * - (required) 'data' : {string} + * - (required) 'attributes' : {Object} + * + * @param {Object} metadata - Pub/Sub message metadata. 
+ * Keys: + * - (required) 'message_id' : {string} + * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format + * - (optional) 'ordering_key': {string} + */ + function (message, metadata) { + } + ``` + - name: disabled + type: Boolean + default_value: false + description: | + Controls whether or not to use this transform. If not set or `false`, + the transform will be applied to messages. Default: `true`. diff --git a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go index 932cad28804a..51d3184dda3d 100644 --- a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go +++ b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go @@ -539,6 +539,89 @@ func TestAccPubsubSubscription_filter(t *testing.T) { }) } +func TestAccPubsubSubscription_javascriptUdfUpdate(t *testing.T) { + t.Parallel() + + topic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) + subscriptionShort := fmt.Sprintf("tf-test-sub-%s", acctest.RandString(t, 10)) + functionName := "my_func" + code := "function my_func(message, metadata) {return null;}" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckPubsubSubscriptionDestroyProducer(t), + Steps: []resource.TestStep{ + // Initial transform + { + Config: testAccPubsubSubscription_javascriptUdfSettings(topic, subscriptionShort, functionName, code), + }, + { + ResourceName: "google_pubsub_subscription.foo", + ImportStateId: subscriptionShort, + ImportState: true, + ImportStateVerify: true, + }, + { + // Remove non-required field + Config: testAccPubsubSubscription_javascriptUdfSettings_noEnabled(topic, subscriptionShort, functionName, code), + }, + { + ResourceName: "google_pubsub_subscription.foo", + ImportStateId: subscriptionShort, + 
ImportState: true, + ImportStateVerify: true, + }, + // Destroy transform + { + ResourceName: "google_pubsub_topic.foo", + ImportStateId: topic, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccPubsubSubscription_javascriptUdfSettings(topic, subscription, functionName, code string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "foo" { + name = "%s" +} + +resource "google_pubsub_subscription" "foo" { + name = "%s" + topic = google_pubsub_topic.foo.id + message_transforms { + disabled = true + javascript_udf { + function_name = "%s" + code = "%s" + } + } +} +`, topic, subscription, functionName, code) +} + +func testAccPubsubSubscription_javascriptUdfSettings_noEnabled(topic, subscription, functionName, code string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "foo" { + name = "%s" +} + +resource "google_pubsub_subscription" "foo" { + name = "%s" + topic = google_pubsub_topic.foo.id + message_transforms { + javascript_udf { + function_name = "%s" + code = "%s" + } + } +} +`, topic, subscription, functionName, code) +} + func testAccPubsubSubscription_emptyTTL(topic, subscription string) string { return fmt.Sprintf(` resource "google_pubsub_topic" "foo" { From 9bcac56781baeb9622e4da703720f517810c6c95 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Wed, 4 Jun 2025 14:25:50 -0700 Subject: [PATCH 304/884] colab: fixed perma-diff in google_colab_runtime_template caused by empty blocks. 
(#14197) --- mmv1/products/colab/RuntimeTemplate.yaml | 4 ++++ .../colab_runtime_template_full.tf.tmpl | 17 ++--------------- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index cd7c69f1cd7a..02ee8dadfa26 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -158,20 +158,24 @@ properties: type: NestedObject default_from_api: true description: 'EUC configuration of the NotebookRuntimeTemplate.' + diff_suppress_func: 'tpgresource.EmptyOrUnsetBlockDiffSuppress' properties: - name: 'eucDisabled' type: Boolean default_from_api: true description: 'Disable end user credential access for the runtime.' + diff_suppress_func: 'tpgresource.EmptyOrFalseSuppressBoolean' - name: shieldedVmConfig type: NestedObject default_from_api: true description: 'Runtime Shielded VM spec.' + diff_suppress_func: 'tpgresource.EmptyOrUnsetBlockDiffSuppress' properties: - name: 'enableSecureBoot' type: Boolean default_from_api: true description: 'Enables secure boot for the runtime.' 
+ diff_suppress_func: 'tpgresource.EmptyOrFalseSuppressBoolean' - name: 'networkTags' type: Array item_type: diff --git a/mmv1/templates/terraform/examples/colab_runtime_template_full.tf.tmpl b/mmv1/templates/terraform/examples/colab_runtime_template_full.tf.tmpl index d5a9b79539ff..ffe8e7289448 100644 --- a/mmv1/templates/terraform/examples/colab_runtime_template_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/colab_runtime_template_full.tf.tmpl @@ -41,11 +41,11 @@ resource "google_colab_runtime_template" "{{$.PrimaryResourceId}}" { } euc_config { - euc_disabled = true + euc_disabled = false } shielded_vm_config { - enable_secure_boot = true + enable_secure_boot = false } network_tags = ["abc", "def"] @@ -53,17 +53,4 @@ resource "google_colab_runtime_template" "{{$.PrimaryResourceId}}" { encryption_spec { kms_key_name = "{{index $.Vars "key_name"}}" } - - software_config { - env { - name = "TEST" - value = 1 - } - - post_startup_script_config { - post_startup_script = "echo 'hello world'" - post_startup_script_url = "gs://colab-enterprise-pss-secure/secure_pss.sh" - post_startup_script_behavior = "RUN_ONCE" - } - } } From 8c9c52dda8dc6216fb8fbf2abad6fc8a2641833c Mon Sep 17 00:00:00 2001 From: Ashwin G Date: Wed, 4 Jun 2025 16:22:29 -0700 Subject: [PATCH 305/884] Handle diffs correctly for the profile field in the Network resource. 
(#14198) --- mmv1/products/compute/Network.yaml | 1 + .../resource_compute_network_test.go.tmpl | 43 +++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/mmv1/products/compute/Network.yaml b/mmv1/products/compute/Network.yaml index d92227edc3b8..e94567ee10da 100644 --- a/mmv1/products/compute/Network.yaml +++ b/mmv1/products/compute/Network.yaml @@ -244,3 +244,4 @@ properties: following are valid URLs: * https://www.googleapis.com/compute/v1/projects/{projectId}/global/networkProfiles/{network_profile_name} * projects/{projectId}/global/networkProfiles/{network_profile_name} + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl index 4d6634292bdc..736e11bcfc30 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl @@ -281,6 +281,49 @@ func TestAccComputeNetwork_networkProfile(t *testing.T) { }) } +func TestComputeNetworkProfileDiffSuppress(t *testing.T) { + cases := map[string]struct { + Old, New string + ExpectDiffSuppress bool + }{ + "old: no previous profile, new: partial profile URL": { + Old: "", + New: "projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + ExpectDiffSuppress: false, + }, + "old: no previous profile, new: full profile URL": { + Old: "", + New: "https://www.googleapis.com/compute/v1/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + ExpectDiffSuppress: false, + }, + "old: beta profile URL, new: partial profile URL": { + Old: "https://www.googleapis.com/compute/beta/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + New: "projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + ExpectDiffSuppress: true, + }, + "old: v1 profile URL, new: 
partial profile URL": { + Old: "https://www.googleapis.com/compute/v1/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + New: "projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + ExpectDiffSuppress: true, + }, + "old: beta profile URL, new: v1 profile URL": { + Old: "https://www.googleapis.com/compute/beta/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + New: "https://www.googleapis.com/compute/v1/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", + ExpectDiffSuppress: true, + }, + } + + for tn, tc := range cases { + tc := tc + t.Run(tn, func(t *testing.T) { + t.Parallel() + if tpgresource.CompareSelfLinkRelativePaths("", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { + t.Errorf("%q => %q expected DiffSuppress to return %t", tc.Old, tc.New, tc.ExpectDiffSuppress) + } + }) + } +} + func TestAccComputeNetwork_numericId(t *testing.T) { t.Parallel() suffixName := acctest.RandString(t, 10) From 7bf76d42ca8dab3879cf9e69f74bc4db2d3938f6 Mon Sep 17 00:00:00 2001 From: Xian-Ji Chen <68801742+XianJiChen@users.noreply.github.com> Date: Wed, 4 Jun 2025 23:46:52 +0000 Subject: [PATCH 306/884] Terraform support for Dataplex publishing (#14113) Co-authored-by: Thomas Rodgers --- mmv1/products/dataplex/Datascan.yaml | 12 + .../dataplex_datascan_full_quality.tf.tmpl | 1 + ...ataplex_datascan_full_quality_test.tf.tmpl | 182 ++++++++++++++ .../resource_dataplex_datascan_test.go | 231 ++++++++++++++++++ 4 files changed, 426 insertions(+) create mode 100644 mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go diff --git a/mmv1/products/dataplex/Datascan.yaml b/mmv1/products/dataplex/Datascan.yaml index d595e1fa2e5a..52f0d4a84c15 100644 --- a/mmv1/products/dataplex/Datascan.yaml +++ b/mmv1/products/dataplex/Datascan.yaml @@ -84,6 +84,14 @@ examples: datascan_name: 
'dataquality-full' test_env_vars: project_name: 'PROJECT_NAME' + exclude_test: true + - name: 'dataplex_datascan_full_quality_test' + primary_resource_id: 'full_quality_test' + vars: + datascan_name: 'dataquality-full-test' + test_env_vars: + project_name: 'PROJECT_NAME' + exclude_docs: true parameters: - name: 'location' type: String @@ -477,6 +485,10 @@ properties: The SQL statement. required: true min_size: 1 + - name: 'catalogPublishingEnabled' + type: Boolean + description: | + If set, the latest DataScan job result will be published to Dataplex Catalog. - name: 'dataProfileSpec' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.tmpl index ace148502f67..1451b94274a0 100644 --- a/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.tmpl +++ b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.tmpl @@ -23,6 +23,7 @@ resource "google_dataplex_datascan" "{{$.PrimaryResourceId}}" { data_quality_spec { sampling_percent = 5 row_filter = "station_id > 1000" + catalog_publishing_enabled = true post_scan_actions { notification_report { recipients { diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl new file mode 100644 index 000000000000..8ab23feeeb23 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl @@ -0,0 +1,182 @@ +resource "google_bigquery_dataset" "tf_test_dataset" { + dataset_id = "tf_test_dataset_id" + default_table_expiration_ms = 3600000 +} + +resource "google_bigquery_table" "tf_test_table" { + dataset_id = google_bigquery_dataset.tf_test_dataset.dataset_id + table_id = "tf_test_table_id" + deletion_protection = false + schema = < Date: Thu, 5 Jun 2025 09:04:49 -0700 Subject: [PATCH 307/884] Convert 
google_compute_firewall_policy_with_rules target_resources fields to RRs (#14186) --- mmv1/products/compute/FirewallPolicyWithRules.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/FirewallPolicyWithRules.yaml b/mmv1/products/compute/FirewallPolicyWithRules.yaml index ee84d15c17eb..01a7e12baf82 100644 --- a/mmv1/products/compute/FirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/FirewallPolicyWithRules.yaml @@ -282,7 +282,7 @@ properties: this rule. If this field is left blank, all VMs within the organization will receive the rule. item_type: - type: String + type: ResourceRef - name: 'disabled' type: Boolean description: | @@ -488,7 +488,7 @@ properties: within the organization will receive the rule. output: true item_type: - type: String + type: ResourceRef - name: 'disabled' type: Boolean description: | From 0046fe144ae1a3fa83c49bfcf12e7e1e2911cafb Mon Sep 17 00:00:00 2001 From: Nithin Daniel <55326622+nithindaniel@users.noreply.github.com> Date: Thu, 5 Jun 2025 09:58:12 -0700 Subject: [PATCH 308/884] Add support for Apigee environment client ip resolution config (#14184) Co-authored-by: Nithin Daniel --- mmv1/products/apigee/Environment.yaml | 32 ++++++++ .../examples/apigee_environment_basic.tf.tmpl | 6 ++ ...t_client_ip_resolution_config_test.tf.tmpl | 81 +++++++++++++++++++ ...resource_apigee_environment_update_test.go | 6 ++ 4 files changed, 125 insertions(+) create mode 100644 mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl diff --git a/mmv1/products/apigee/Environment.yaml b/mmv1/products/apigee/Environment.yaml index e9d6564627e5..37e8de50d76f 100644 --- a/mmv1/products/apigee/Environment.yaml +++ b/mmv1/products/apigee/Environment.yaml @@ -96,6 +96,16 @@ examples: # Resource creation race skip_vcr: true external_providers: ["time"] + - name: 'apigee_environment_client_ip_resolution_config_test' + primary_resource_id: 'apigee_environment' + 
primary_resource_name: 'fmt.Sprintf("organizations/tf-test%s", context["random_suffix"]), fmt.Sprintf("tf-test%s", context["random_suffix"])' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + exclude_docs: true + # Resource creation race + skip_vcr: true + external_providers: ["time"] parameters: - name: 'orgId' type: String @@ -213,3 +223,25 @@ properties: type: String description: | The property value. + - name: 'clientIpResolutionConfig' + type: NestedObject + description: | + The algorithm to resolve IP. This will affect Analytics, API Security, and other features that use the client ip. To remove a client ip resolution config, update the field to an empty value. Example: '{ "clientIpResolutionConfig" = {} }' For more information, see: https://cloud.google.com/apigee/docs/api-platform/system-administration/client-ip-resolution + properties: + - name: 'headerIndexAlgorithm' + type: NestedObject + description: | + Resolves the client ip based on a custom header. + exactly_one_of: + - client_ip_resolution_config.0.header_index_algorithm + properties: + - name: ipHeaderName + type: String + description: | + The name of the header to extract the client ip from. We are currently only supporting the X-Forwarded-For header. + required: true + - name: ipHeaderIndex + type: Integer + description: | + The index of the ip in the header. Positive indices 0, 1, 2, 3 chooses indices from the left (first ips). Negative indices -1, -2, -3 chooses indices from the right (last ips). 
+ required: true diff --git a/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl index df9c2ea7b320..b202417439b0 100644 --- a/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl @@ -30,5 +30,11 @@ resource "google_apigee_environment" "env" { description = "Apigee Environment" display_name = "environment-1" org_id = google_apigee_organization.apigee_org.id + client_ip_resolution_config { + header_index_algorithm { + ip_header_name = "X-Forwarded-For" + ip_header_index = 1 + } + } } diff --git a/mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl new file mode 100644 index 000000000000..49e3cbf526bf --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl @@ -0,0 +1,81 @@ +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} + +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.apigee] +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.servicenetworking] +} + 
+resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.compute] +} + + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = [time_sleep.wait_120_seconds] +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} + +resource "google_apigee_environment" "{{$.PrimaryResourceId}}" { + org_id = google_apigee_organization.apigee_org.id + name = "tf-test%{random_suffix}" + description = "Apigee Environment" + display_name = "environment-1" + client_ip_resolution_config { + header_index_algorithm { + ip_header_name = "X-Forwarded-For" + ip_header_index = 1 + } + } +} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go index b61b4e453f80..4a1069bc1450 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go @@ -129,6 +129,12 @@ resource 
"google_apigee_environment" "apigee_environment" { value = "property-1-value" } } + client_ip_resolution_config { + header_index_algorithm { + ip_header_name = "X-Forwarded-For" + ip_header_index = 1 + } + } } `, context) } From 204661f6ce9dd885616cb6c999fc005cea7f1bb9 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 5 Jun 2025 10:19:42 -0700 Subject: [PATCH 309/884] tgc-revival: add Location to asset resource (#14200) --- mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl | 2 ++ .../converters/services/compute/compute_instance.go | 4 ++++ mmv1/third_party/tgc_next/test/setup.go | 2 +- 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 2a6941700d25..3270d0f7c0e1 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -97,6 +97,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf return []caiasset.Asset{}, err } if obj, err := Get{{ $.ResourceName -}}ApiObject(d, config); err == nil { + location, _ := tpgresource.GetLocation(d, config) return []caiasset.Asset{{"{{"}} Name: name, Type: {{ $.ResourceName -}}AssetType, @@ -105,6 +106,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", DiscoveryName: "{{ $.Name }}", Data: obj, + Location: location, }, {{"}}"}}, nil } else { diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go index cdc5a13051ec..3d1e4f46f70c 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go +++ 
b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go @@ -1457,6 +1457,7 @@ func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *tr return caiasset.Asset{}, err } if data, err := GetComputeInstanceData(d, config); err == nil { + location, _ := tpgresource.GetLocation(d, config) return caiasset.Asset{ Name: name, Type: ComputeInstanceAssetType, @@ -1465,6 +1466,7 @@ func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *tr DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Instance", Data: data, + Location: location, }, }, nil } else { @@ -1833,6 +1835,7 @@ func GetComputeDiskCaiObject(d tpgresource.TerraformResourceData, config *transp return caiasset.Asset{}, err } if data, err := GetComputeDiskData(d, config); err == nil { + location, _ := tpgresource.GetLocation(d, config) return caiasset.Asset{ Name: name, Type: ComputeDiskAssetType, @@ -1841,6 +1844,7 @@ func GetComputeDiskCaiObject(d tpgresource.TerraformResourceData, config *transp DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Disk", Data: data, + Location: location, }, }, nil } else { diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index fee91324eed9..4a6ec086e7ec 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -96,11 +96,11 @@ func ReadTestsDataFromGcs() (map[string]TgcMetadataPayload, error) { func prepareTestData(testName string) (map[string]ResourceTestData, string, error) { var err error cacheMutex.Lock() + defer cacheMutex.Unlock() TestsMetadata, err = ReadTestsDataFromGcs() if err != nil { return nil, "", err } - cacheMutex.Unlock() testMetadata := TestsMetadata[testName] resourceMetadata := testMetadata.ResourceMetadata From 474f8d2b3f52e1c19c03ad02a1aca49400e8f27c Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Micha=C5=82=20Wiatrowski?= Date: Thu, 5 Jun 2025 19:31:34 +0200 Subject: [PATCH 310/884] Update the Gemini DataSharingWithGoogleSetting resource -- add the `enable_data_sharing` field (#14205) Co-authored-by: Riley Karson --- mmv1/products/gemini/DataSharingWithGoogleSetting.yaml | 5 ++++- .../gemini_data_sharing_with_google_setting_basic.tf.tmpl | 1 + ...ni_data_sharing_with_google_setting_binding_basic.tf.tmpl | 1 + ...e_gemini_data_sharing_with_google_setting_binding_test.go | 2 ++ .../resource_gemini_data_sharing_with_google_setting_test.go | 2 ++ 5 files changed, 10 insertions(+), 1 deletion(-) diff --git a/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml b/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml index acfac1f91529..cb11c67cd586 100644 --- a/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml +++ b/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml @@ -67,4 +67,7 @@ properties: description: Labels as key value pairs. - name: enablePreviewDataSharing type: Boolean - description: Whether preview data sharing should be enabled. + description: Whether data sharing should be enabled in Preview products. + - name: enableDataSharing + type: Boolean + description: Whether data sharing should be enabled in GA products. 
diff --git a/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_basic.tf.tmpl b/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_basic.tf.tmpl index 191bc633f111..b3685e4d98a9 100644 --- a/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_basic.tf.tmpl @@ -3,4 +3,5 @@ resource "google_gemini_data_sharing_with_google_setting" "{{$.PrimaryResourceId location = "global" labels = {"my_key": "my_value"} enable_preview_data_sharing = true + enable_data_sharing = true } diff --git a/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_binding_basic.tf.tmpl b/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_binding_basic.tf.tmpl index e2920e1ed91d..1405ee0d06ab 100644 --- a/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_binding_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gemini_data_sharing_with_google_setting_binding_basic.tf.tmpl @@ -3,6 +3,7 @@ resource "google_gemini_data_sharing_with_google_setting" "basic" { location = "global" labels = {"my_key": "my_value"} enable_preview_data_sharing = true + enable_data_sharing = true } resource "google_gemini_data_sharing_with_google_setting_binding" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go index c5481e98360f..ad2de07295e2 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go @@ -59,6 +59,7 @@ resource "google_gemini_data_sharing_with_google_setting" "basic" { location = "global" labels = 
{"my_key" = "my_value"} enable_preview_data_sharing = true + enable_data_sharing = true } resource "google_gemini_data_sharing_with_google_setting_binding" "basic_binding" { @@ -80,6 +81,7 @@ resource "google_gemini_data_sharing_with_google_setting" "basic" { location = "global" labels = {"my_key" = "my_value"} enable_preview_data_sharing = true + enable_data_sharing = true } resource "google_gemini_data_sharing_with_google_setting_binding" "basic_binding" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go index ddf22216594a..6c13c4f1b57d 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go @@ -50,6 +50,7 @@ resource "google_gemini_data_sharing_with_google_setting" "example" { data_sharing_with_google_setting_id = "%{setting_id}" location = "global" enable_preview_data_sharing = true + enable_data_sharing = true } `, context) } @@ -60,6 +61,7 @@ resource "google_gemini_data_sharing_with_google_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_preview_data_sharing = false + enable_data_sharing = false } `, context) } From fb0e8a1f360a896048d4f1544ef4e8827f954d58 Mon Sep 17 00:00:00 2001 From: sachin purohit Date: Thu, 5 Jun 2025 13:09:33 -0700 Subject: [PATCH 311/884] fix(bigquery_dataset): Added logic for merging views and routines for google_bigquery_dataset_iam_member (#14208) --- .../bigquery/iam_bigquery_member_dataset.go | 7 +- ...source_bigquery_dataset_iam_member_test.go | 78 +++++++++++++++++-- 2 files changed, 76 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/iam_bigquery_member_dataset.go 
b/mmv1/third_party/terraform/services/bigquery/iam_bigquery_member_dataset.go index a42a701912ea..ed3bd848a20a 100644 --- a/mmv1/third_party/terraform/services/bigquery/iam_bigquery_member_dataset.go +++ b/mmv1/third_party/terraform/services/bigquery/iam_bigquery_member_dataset.go @@ -128,8 +128,11 @@ func mergeAccess(newAccess []map[string]interface{}, currAccess []interface{}) [ for _, item := range currAccess { if itemMap, ok := item.(map[string]interface{}); ok { - // Check if the item has a "dataset" key - if _, ok := itemMap["dataset"]; ok { + if _, hasDataset := itemMap["dataset"]; hasDataset { + mergedAccess = append(mergedAccess, itemMap) + } else if _, hasView := itemMap["view"]; hasView { + mergedAccess = append(mergedAccess, itemMap) + } else if _, hasRoutine := itemMap["routine"]; hasRoutine { mergedAccess = append(mergedAccess, itemMap) } } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_iam_member_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_iam_member_test.go index 794581f1b960..5becc009894e 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_iam_member_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_iam_member_test.go @@ -18,9 +18,12 @@ func TestAccBigqueryDatasetIamMember_afterDatasetCreation(t *testing.T) { t.Parallel() projectID := envvar.GetTestProjectFromEnv() - datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) - authDatasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) - saID := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + random_suffix := acctest.RandString(t, 10) + datasetID := fmt.Sprintf("tf_test_dataset_%s", random_suffix) + authDatasetID := fmt.Sprintf("tf_test_auth_dataset_%s", random_suffix) + routineID := fmt.Sprintf("tf_test_routine_%s", random_suffix) + tableID := fmt.Sprintf("tf_test_table_%s", random_suffix) + saID := fmt.Sprintf("tf-test-sa-%s", 
random_suffix) expected_auth := map[string]interface{}{ "dataset": map[string]interface{}{ @@ -31,6 +34,21 @@ func TestAccBigqueryDatasetIamMember_afterDatasetCreation(t *testing.T) { "targetTypes": []interface{}{"VIEWS"}, }, } + expected_routine := map[string]interface{}{ + "routine": map[string]interface{}{ + "projectId": projectID, + "datasetId": authDatasetID, + "routineId": routineID, + }, + } + + expected_view := map[string]interface{}{ + "view": map[string]interface{}{ + "projectId": projectID, + "datasetId": authDatasetID, + "tableId": tableID, + }, + } expected_sa := map[string]interface{}{ "role": "roles/viewer", @@ -42,8 +60,12 @@ func TestAccBigqueryDatasetIamMember_afterDatasetCreation(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccBigqueryDatasetIamMember_afterDatasetAccessCreation(projectID, datasetID, authDatasetID, saID), - Check: testAccCheckBigQueryDatasetIamMemberPresent(t, "google_bigquery_dataset.dataset", expected_auth), + Config: testAccBigqueryDatasetIamMember_afterDatasetAccessCreation(projectID, datasetID, authDatasetID, routineID, tableID, saID), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckBigQueryDatasetIamMemberPresent(t, "google_bigquery_dataset.dataset", expected_auth), + testAccCheckBigQueryDatasetIamMemberPresent(t, "google_bigquery_dataset.dataset", expected_routine), + testAccCheckBigQueryDatasetIamMemberPresent(t, "google_bigquery_dataset.dataset", expected_view), + ), }, { // For iam_member to be non-authoritative, we want authorized datasets to be present after destroy, @@ -51,6 +73,8 @@ func TestAccBigqueryDatasetIamMember_afterDatasetCreation(t *testing.T) { Config: testAccBigqueryDatasetIamMember_destroy(datasetID), Check: resource.ComposeAggregateTestCheckFunc( testAccCheckBigQueryDatasetIamMemberPresent(t, "google_bigquery_dataset.dataset", expected_auth), + testAccCheckBigQueryDatasetIamMemberPresent(t, 
"google_bigquery_dataset.dataset", expected_routine), + testAccCheckBigQueryDatasetIamMemberPresent(t, "google_bigquery_dataset.dataset", expected_view), testAccCheckBigQueryDatasetIamMemberAbsent(t, "google_bigquery_dataset.dataset", expected_sa), ), }, @@ -228,13 +252,39 @@ resource "google_service_account" "bqviewer" { `, datasetID, saID) } -func testAccBigqueryDatasetIamMember_afterDatasetAccessCreation(projectID, datasetID, authDatasetID, saID string) string { +func testAccBigqueryDatasetIamMember_afterDatasetAccessCreation(projectID, datasetID, authDatasetID, routineID, tableID, saID string) string { return fmt.Sprintf(` resource "google_bigquery_dataset" "auth_dataset" { dataset_id = "%s" } +resource "google_bigquery_routine" "sproc" { + dataset_id = google_bigquery_dataset.auth_dataset.dataset_id + routine_id = "%s" + routine_type = "SCALAR_FUNCTION" + language = "SQL" + security_mode = "INVOKER" + definition_body = "1" +} + +resource "google_bigquery_table" "test" { + deletion_protection = false + table_id = "%s" + dataset_id = google_bigquery_dataset.auth_dataset.dataset_id + + schema = < Date: Thu, 5 Jun 2025 13:55:49 -0700 Subject: [PATCH 312/884] Update TestAccGeminiGeminiGcpEnablementSettingBinding_update value (#14207) --- .../gemini/DataSharingWithGoogleSettingBinding.yaml | 6 ++---- mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml | 6 ++---- ...rce_gemini_gemini_gcp_enablement_setting_binding_test.go | 2 +- 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml b/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml index 8331e3b38226..aef767df4267 100644 --- a/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml +++ b/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml @@ -75,11 +75,9 @@ parameters: required: true properties: - name: product - type: Enum + type: String description: |- - Product type of the setting binding. 
- enum_values: - - 'GEMINI_CLOUD_ASSIST' + Product type of the setting binding. Values include GEMINI_IN_BIGQUERY, GEMINI_CLOUD_ASSIST, etc. See [product reference](https://cloud.google.com/gemini/docs/api/reference/rest/v1/projects.locations.dataSharingWithGoogleSettings.settingBindings) for a complete list. default_from_api: true - name: name type: String diff --git a/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml b/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml index c756c36f740c..344a41c87e71 100644 --- a/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml +++ b/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml @@ -82,11 +82,9 @@ properties: description: Target of the binding. required: true - name: product - type: Enum + type: String description: |- - Product type of the setting binding. - enum_values: - - 'GEMINI_IN_BIGQUERY' + Product type of the setting binding. Values include GEMINI_IN_BIGQUERY, GEMINI_CLOUD_ASSIST, etc. See [product reference](https://cloud.google.com/gemini/docs/api/reference/rest/v1/projects.locations.dataSharingWithGoogleSettings.settingBindings) for a complete list. 
default_from_api: true - name: name type: String diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go index 94da6898dc65..af5ade59619a 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go @@ -90,7 +90,7 @@ resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { location = "global" target = "projects/${data.google_project.project.number}" labels = {"my_key" = "my_value"} - product = "GEMINI_IN_BIGQUERY" + product = "GEMINI_CLOUD_ASSIST" } `, context) } From 83607365276f2c193cb22d6fd2fbb1e1404a3646 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 5 Jun 2025 23:12:41 +0200 Subject: [PATCH 313/884] feat: `google_cloud_run_v2_job` support for `depends_on` and `startup_probe` (#13838) --- mmv1/products/cloudrunv2/Job.yaml | 108 +++++ .../resource_cloud_run_v2_job_test.go.tmpl | 411 +++++++++++++++++- 2 files changed, 518 insertions(+), 1 deletion(-) diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index d0457dccefa3..1aa2bcaa3a7a 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -437,6 +437,114 @@ properties: type: String description: |- Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. + - name: 'dependsOn' + type: Array + description: |- + Names of the containers that must start before this container. + item_type: + type: String + - name: 'startupProbe' + type: NestedObject + description: |- + Startup probe of application within the container. + All other probes are disabled if a startup probe is provided, until it + succeeds. 
Container will not be added to service endpoints if the probe fails. + default_from_api: true + properties: + - name: 'initialDelaySeconds' + type: Integer + description: |- + Number of seconds after the container has started before the probe is + initiated. + Defaults to 0 seconds. Minimum value is 0. Maximum value is 240. + default_value: 0 + - name: 'timeoutSeconds' + type: Integer + description: |- + Number of seconds after which the probe times out. + Defaults to 1 second. Minimum value is 1. Maximum value is 3600. + Must be smaller than periodSeconds. + default_value: 1 + - name: 'periodSeconds' + type: Integer + description: |- + How often (in seconds) to perform the probe. + Default to 10 seconds. Minimum value is 1. Maximum value is 240. + default_value: 10 + - name: 'failureThreshold' + type: Integer + description: |- + Minimum consecutive failures for the probe to be considered failed after + having succeeded. Defaults to 3. Minimum value is 1. + default_value: 3 + - name: 'tcpSocket' + type: NestedObject + description: |- + TcpSocket specifies an action involving a TCP port. + send_empty_value: true + allow_empty_object: true + properties: + - name: 'port' + type: Integer + description: |- + Port number to access on the container. Number must be in the range 1 to 65535. + If not specified, defaults to the same value as container.ports[0].containerPort. + default_from_api: true + - name: 'httpGet' + type: NestedObject + description: |- + HttpGet specifies the http request to perform. + send_empty_value: true + allow_empty_object: true + properties: + - name: 'path' + type: String + description: |- + Path to access on the HTTP server. If set, it should not be empty string. + default_value: "/" + - name: 'port' + type: Integer + description: |- + Port number to access on the container. Number must be in the range 1 to 65535. + If not specified, defaults to the same value as container.ports[0].containerPort. 
+ default_from_api: true + - name: 'httpHeaders' + type: Array + description: |- + Custom headers to set in the request. HTTP allows repeated headers. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: |- + The header field name. + required: true + - name: 'value' + type: String + description: |- + The header field value. + send_empty_value: true + default_value: "" + - name: 'grpc' + type: NestedObject + description: |- + GRPC specifies an action involving a GRPC port. + send_empty_value: true + allow_empty_object: true + properties: + - name: 'port' + type: Integer + description: |- + Port number to access on the container. Number must be in the range 1 to 65535. + If not specified, defaults to the same value as container.ports[0].containerPort. + default_from_api: true + - name: 'service' + type: String + description: |- + The name of the service to place in the gRPC HealthCheckRequest + (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). + If this is not specified, the default behavior is defined by gRPC. 
- name: 'volumes' type: Array description: |- diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index 4f9b46d483a1..7fcf28a25b9e 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -6,7 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccCloudRunV2Job_cloudrunv2JobFullUpdate(t *testing.T) { @@ -473,6 +473,415 @@ func testAccCloudRunV2Job_cloudrunv2JobWithNfsVolume(context map[string]interfac `, context) } + +func TestAccCloudRunV2Job_cloudrunv2JobTCPProbesUpdate(t *testing.T) { + t.Parallel() + + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + context := map[string]interface{}{ + "job_name": jobName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Job_cloudrunv2JobWithEmptyTCPStartupProbe(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Job_cloudrunv2JobUpdateWithTCPStartupProbe(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + }, + }) 
+} + +func TestAccCloudRunV2Job_cloudrunv2JobHTTPProbesUpdate(t *testing.T) { + t.Parallel() + + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + context := map[string]interface{}{ + "job_name": jobName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Job_cloudrunv2JobUpdateWithEmptyHTTPStartupProbe(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Job_cloudrunv2JobUpdateWithHTTPStartupProbe(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + }, + }) +} + +func TestAccCloudRunV2Job_cloudrunv2JobGRPCProbesUpdate(t *testing.T) { + t.Parallel() + + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + context := map[string]interface{}{ + "job_name": jobName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Job_cloudRunJobUpdateWithEmptyGRPCStartupProbe(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Job_cloudRunJobUpdateWithGRPCStartupProbe(context), + }, + { + ResourceName: 
"google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2Job_cloudrunv2JobWithEmptyTCPStartupProbe(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/job" + ports { + container_port = 8080 + } + startup_probe { + tcp_socket {} + } + } + } + } +} +`, context) +} + +func testAccCloudRunV2Job_cloudrunv2JobUpdateWithTCPStartupProbe(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + startup_probe { + initial_delay_seconds = 2 + period_seconds = 1 + timeout_seconds = 5 + failure_threshold = 2 + tcp_socket { + port = 8080 + } + } + } + } + } +} +`, context) +} + +func testAccCloudRunV2Job_cloudrunv2JobUpdateWithEmptyHTTPStartupProbe(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + startup_probe { + http_get {} + } + } + } + } +} +`, context) +} + +func testAccCloudRunV2Job_cloudrunv2JobUpdateWithHTTPStartupProbe(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + image = 
"us-docker.pkg.dev/cloudrun/container/hello" + startup_probe { + initial_delay_seconds = 3 + period_seconds = 2 + timeout_seconds = 6 + failure_threshold = 3 + http_get { + path = "/some-path" + port = 8080 + http_headers { + name = "User-Agent" + value = "magic-modules" + } + http_headers { + name = "Some-Name" + } + } + } + } + } + } +} +`, context) +} + +func testAccCloudRunV2Job_cloudRunJobUpdateWithEmptyGRPCStartupProbe(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + startup_probe { + grpc {} + } + } + } + } +} +`, context) +} + +func testAccCloudRunV2Job_cloudRunJobUpdateWithGRPCStartupProbe(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + startup_probe { + grpc { + port = 8080 + service = "grpc.health.v1.Health" + } + } + } + } + } +} +`, context) +} + +func TestAccCloudRunV2Job_cloudrunv2JobDependsOnUpdate(t *testing.T) { + t.Parallel() + + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + context := map[string]interface{}{ + "job_name": jobName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Job_cloudRunJobWithoutDependsOn(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: 
true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Job_cloudRunJobWithDependsOn(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Job_cloudRunJobWithDependsOnUpdate(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2Job_cloudRunJobWithoutDependsOn(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + name = "foo" + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + startup_probe { + grpc {} + } + } + } + } +} +`, context) +} + + + +func testAccCloudRunV2Job_cloudRunJobWithDependsOn(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + deletion_protection = false + + template { + template { + containers { + name = "foo" + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + startup_probe { + grpc {} + } + } + containers { + name = "bar" + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + depends_on = [ + "foo" + ] + } + } + } +} +`, context) +} + +func testAccCloudRunV2Job_cloudRunJobWithDependsOnUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_job" "default" { + name = "%{job_name}" + location = "us-central1" + 
deletion_protection = false + + template { + template { + containers { + name = "baz" + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + startup_probe { + grpc {} + } + } + containers { + name = "bar" + image = "us-docker.pkg.dev/cloudrun/container/hello" + ports { + container_port = 8080 + } + depends_on = [ + "baz" + ] + } + } + } +} +`, context) +} + + {{ if ne $.TargetVersionName `ga` -}} func TestAccCloudRunV2Job_cloudrunv2JobWithStartExecutionTokenUpdate(t *testing.T) { t.Parallel() From 68b97e43ea1c372f8ec51d68741ceb772fd4a3c2 Mon Sep 17 00:00:00 2001 From: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Date: Thu, 5 Jun 2025 14:14:12 -0700 Subject: [PATCH 314/884] fix multi-container test for worker pool (#14177) --- .../examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl index 51330b8353ae..1d8baaf3d142 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl @@ -17,10 +17,6 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { containers { name = "hello-2" image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - env { - name = "PORT" - value = "8081" - } } volumes { name = "empty-dir-volume" From 72dceeb6c5344fa731df64d3af30d4897bc89b6c Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Fri, 6 Jun 2025 02:45:24 +0530 Subject: [PATCH 315/884] added delay b/w storage pool creation & volume creation (#14191) --- .../services/netapp/resource_netapp_backup_test.go | 7 +++++++ .../services/netapp/resource_netapp_volume_test.go | 9 ++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git 
a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index 388f10fef1ac..210adf935100 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -256,6 +256,9 @@ func TestAccNetappBackup_NetappIntegratedBackup(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, Steps: []resource.TestStep{ { Config: testAccNetappBackup_IntegratedBackup(context), @@ -282,6 +285,10 @@ resource "google_netapp_storage_pool" "default" { capacity_gib = "2048" network = data.google_compute_network.default.id } +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go index ea1010fae9b9..44a2250b1528 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go @@ -705,6 +705,10 @@ resource "google_netapp_storage_pool" "default" { network = data.google_compute_network.default.id allow_auto_tiering = true } +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} resource "google_netapp_volume" "test_volume" { location = "us-west4" name = "tf-test-volume%{random_suffix}" @@ -733,7 +737,10 @@ resource "google_netapp_storage_pool" "default" { 
network = data.google_compute_network.default.id allow_auto_tiering = true } - +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} resource "google_netapp_volume" "test_volume" { location = "us-west4" name = "tf-test-volume%{random_suffix}" From cf0f9e10219dc411b8966a574664327a2f08ea08 Mon Sep 17 00:00:00 2001 From: veraz0818 Date: Thu, 5 Jun 2025 14:17:52 -0700 Subject: [PATCH 316/884] Add private_registry_config to VMware user and admin cluster resources. (#14165) --- mmv1/products/gkeonprem/VmwareAdminCluster.yaml | 10 ++++++++++ .../gkeonprem_vmware_admin_cluster_basic.tf.tmpl | 4 ++++ .../gkeonprem_vmware_admin_cluster_full.tf.tmpl | 4 ++++ .../gkeonprem_vmware_admin_cluster_metallb.tf.tmpl | 4 ++++ 4 files changed, 22 insertions(+) diff --git a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml index 74f6a344bea3..0963b1011da1 100644 --- a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml @@ -691,3 +691,13 @@ properties: name: enableAdvancedCluster description: If set, the advanced cluster feature is enabled. output: true + - type: NestedObject + name: privateRegistryConfig + description: Configuration for private registry. + properties: + - type: String + name: 'address' + description: The registry address. + - type: String + name: 'caCert' + description: The CA certificate public key for private registry. 
diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl index 33e2aa89c39a..c29c374ecb83 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl @@ -38,4 +38,8 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { snat_pool = "test-snat-pool" } } + private_registry_config { + address = "test-address" + ca_cert = "test-ca-cert" + } } diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl index b446a094452f..cbf9945bd06f 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl @@ -85,4 +85,8 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { platform_config { required_platform_version = "1.31.0" } + private_registry_config { + address = "test-address" + ca_cert = "test-ca-cert" + } } diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl index 7c63fca0f19f..b8f499ec7ec8 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl @@ -36,4 +36,8 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { enabled = true } } + private_registry_config { + address = "test-address" + ca_cert = "test-ca-cert" + } } From ac07bb1a4ab7ca16f3b0bd749d703c0610de59af Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 5 Jun 2025 14:20:44 -0700 Subject: [PATCH 317/884] Merge build env into go plus 
(#14209) --- .ci/containers/go-plus/Dockerfile | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/.ci/containers/go-plus/Dockerfile b/.ci/containers/go-plus/Dockerfile index dbe2c2e672a9..cd609168c0f8 100644 --- a/.ci/containers/go-plus/Dockerfile +++ b/.ci/containers/go-plus/Dockerfile @@ -3,6 +3,13 @@ FROM golang:1.23-bullseye AS builder ENV GOCACHE=/go/cache RUN apt-get update && apt-get install -y unzip + +# Download tpgtools dependencies (from build-env) +WORKDIR /app +ADD "https://raw.githubusercontent.com/GoogleCloudPlatform/magic-modules/main/tpgtools/go.mod" go.mod +ADD "https://raw.githubusercontent.com/GoogleCloudPlatform/magic-modules/main/tpgtools/go.sum" go.sum +RUN go mod download + WORKDIR /app1 # Add the source code and build ADD "https://github.com/GoogleCloudPlatform/magic-modules/archive/refs/heads/main.zip" source.zip @@ -20,8 +27,21 @@ ENV GOCACHE=/go/cache COPY --from=builder /go/pkg/mod /go/pkg/mod COPY --from=builder /go/cache /go/cache +# Add build-env environment variables +ENV GOPATH /go +ENV PATH /usr/local/go/bin:$PATH +ENV PATH $GOPATH/bin:$PATH +ENV GO111MODULE "on" +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +# Create GOPATH structure (from build-env) +RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 1777 "$GOPATH" + RUN apt-get update && \ apt-get install -y git jq unzip parallel curl && \ + # Add build-env packages + apt-get install -y openssh-client apt-transport-https ca-certificates netbase wget gcc make libjq1 && \ echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && \ apt-get update -y && \ @@ -29,7 +49,15 @@ RUN apt-get update && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* +# Add git configuration (from build-env) +RUN git config --global user.name 
"Modular Magician" +RUN git config --global user.email "magic-modules@google.com" + RUN wget https://releases.hashicorp.com/terraform/1.11.0/terraform_1.11.0_linux_amd64.zip \ && unzip terraform_1.11.0_linux_amd64.zip \ && rm terraform_1.11.0_linux_amd64.zip \ && mv ./terraform /bin/terraform + +# Install Go tools (from build-env) +RUN go install golang.org/x/tools/cmd/goimports@d088b475e3360caabc032aaee1dc66351d4e729a +RUN go install github.com/github/hub@v2.11.2+incompatible \ No newline at end of file From 20dfdd6ce74aabd0900b0761b16f9a965f2b78f1 Mon Sep 17 00:00:00 2001 From: tonybayvas Date: Thu, 5 Jun 2025 17:33:51 -0700 Subject: [PATCH 318/884] Add a custom role field for the Scope RBACRolebindings (#14168) --- .../gkehub2/ScopeRBACRoleBinding.yaml | 16 ++ ...ope_rbac_custom_role_binding_basic.tf.tmpl | 26 ++++ ...ehub_scope_rbac_role_binding_basic.tf.tmpl | 1 - ...ce_gke_hub_scope_rbac_role_binding_test.go | 141 +++++++++++++++++- 4 files changed, 176 insertions(+), 8 deletions(-) create mode 100644 mmv1/templates/terraform/examples/gkehub_scope_rbac_custom_role_binding_basic.tf.tmpl diff --git a/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml b/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml index 2b960e60a0fa..858bcd52cb8a 100644 --- a/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml +++ b/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml @@ -51,6 +51,12 @@ examples: primary_resource_name: 'fmt.Sprintf(\"tf-test-scope%s\", context[\"random_suffix\"]), fmt.Sprintf(\"tf-test-rbac-role-binding%s\", context[\"random_suffix\"])' test_env_vars: project: 'PROJECT_NAME' + - name: 'gkehub_scope_rbac_custom_role_binding_basic' + primary_resource_id: 'scope_rbac_role_binding' + primary_resource_name: 'fmt.Sprintf(\"tf-test-scope%s\", context[\"random_suffix\"]), fmt.Sprintf(\"tf-test-custom-rbac-role-binding%s\", context[\"random_suffix\"])' + test_env_vars: + project: 'PROJECT_NAME' + exclude_test: true parameters: - name: 'scope_id' type: String @@ -143,6 +149,16 @@ 
properties: - 'ADMIN' - 'EDIT' - 'VIEW' + exactly_one_of: + - 'role.0.predefined_role' + - 'role.0.custom_role' + - name: 'customRole' + type: String + description: | + CustomRole is the custom Kubernetes ClusterRole to be used. The custom role format must be allowlisted in the rbacrolebindingactuation feature and RFC 1123 compliant. + exactly_one_of: + - 'role.0.predefined_role' + - 'role.0.custom_role' - name: 'labels' type: KeyValueLabels description: | diff --git a/mmv1/templates/terraform/examples/gkehub_scope_rbac_custom_role_binding_basic.tf.tmpl b/mmv1/templates/terraform/examples/gkehub_scope_rbac_custom_role_binding_basic.tf.tmpl new file mode 100644 index 000000000000..5328d1bdb347 --- /dev/null +++ b/mmv1/templates/terraform/examples/gkehub_scope_rbac_custom_role_binding_basic.tf.tmpl @@ -0,0 +1,26 @@ +resource "google_gke_hub_scope" "scope" { + scope_id = "tf-test-scope%{random_suffix}" +} + +resource "google_gke_hub_feature" "rbacrolebindingactuation" { + name = "rbacrolebindingactuation" + location = "global" + spec { + rbacrolebindingactuation { + allowed_custom_roles = ["my-custom-role"] + } + } +} + +resource "google_gke_hub_scope_rbac_role_binding" "{{$.PrimaryResourceId}}" { + scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" + scope_id = google_gke_hub_scope.scope.scope_id + user = "test-email@gmail.com" + role { + custom_role = "my-custom-role" + } + labels = { + key = "value" + } + depends_on = [google_gke_hub_feature.rbacrolebindingactuation] +} diff --git a/mmv1/templates/terraform/examples/gkehub_scope_rbac_role_binding_basic.tf.tmpl b/mmv1/templates/terraform/examples/gkehub_scope_rbac_role_binding_basic.tf.tmpl index 1600645443bd..6f8d04fbb6dd 100644 --- a/mmv1/templates/terraform/examples/gkehub_scope_rbac_role_binding_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkehub_scope_rbac_role_binding_basic.tf.tmpl @@ -12,6 +12,5 @@ resource "google_gke_hub_scope_rbac_role_binding" "{{$.PrimaryResourceId}}" 
{ labels = { key = "value" } - depends_on = [google_gke_hub_scope.scope] } diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go index 8efb36aeb073..cae802710262 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" ) @@ -13,13 +12,16 @@ func TestAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacRoleBindingBasicExample_u t.Parallel() context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHub2ScopeRBACRoleBindingDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacRoleBindingBasicExample_basic(context), @@ -51,7 +53,7 @@ resource "google_gke_hub_scope" "scoperbacrolebinding" { resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" - scope_id = "tf-test-scope%{random_suffix}" + scope_id = google_gke_hub_scope.scoperbacrolebinding.scope_id user = "test-email@gmail.com" role { predefined_role = "ADMIN" @@ -59,7 +61,6 @@ resource 
"google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { labels = { key = "value" } - depends_on = [google_gke_hub_scope.scoperbacrolebinding] } `, context) } @@ -72,7 +73,7 @@ resource "google_gke_hub_scope" "scoperbacrolebinding" { resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" - scope_id = "tf-test-scope%{random_suffix}" + scope_id = google_gke_hub_scope.scoperbacrolebinding.scope_id group = "test-email2@gmail.com" role { predefined_role = "VIEW" @@ -80,7 +81,133 @@ resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { labels = { key = "updated_value" } - depends_on = [google_gke_hub_scope.scoperbacrolebinding] +} +`, context) +} + +func TestAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_update(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHub2ScopeRBACRoleBindingDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_basic(context), + }, + { + ResourceName: "google_gke_hub_scope_rbac_role_binding.scope_rbac_custom_role_binding", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "scope_id", "scope_rbac_role_binding_id", "terraform_labels"}, + }, + { + Config: testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_update(context), + }, + { + ResourceName: 
"google_gke_hub_scope_rbac_role_binding.scope_rbac_custom_role_binding", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"scope_rbac_role_binding_id", "scope_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_basic(context map[string]interface{}) string { + return gkeHubRRBActuationProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_scope" "scope" { + scope_id = "tf-test-scope%{random_suffix}" + depends_on = [google_project_service.anthos, google_project_service.gkehub] +} + +resource "google_gke_hub_feature" "rbacrolebindingactuation" { + name = "rbacrolebindingactuation" + location = "global" + spec { + rbacrolebindingactuation { + allowed_custom_roles = ["my-custom-role", "my-custom-role-2"] + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub] +} + +resource "google_gke_hub_scope_rbac_role_binding" "scope_rbac_custom_role_binding" { + scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" + scope_id = google_gke_hub_scope.scope.scope_id + user = "test-email@gmail.com" + role { + custom_role = "my-custom-role" + } + labels = { + key = "value" + } + depends_on = [google_gke_hub_feature.rbacrolebindingactuation] +} +`, context) +} + +func testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_update(context map[string]interface{}) string { + return gkeHubRRBActuationProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_scope" "scope" { + scope_id = "tf-test-scope%{random_suffix}" +} + +resource "google_gke_hub_feature" "rbacrolebindingactuation" { + name = "rbacrolebindingactuation" + location = "global" + spec { + rbacrolebindingactuation { + allowed_custom_roles = ["my-custom-role", "my-custom-role-2"] + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub] +} + +resource 
"google_gke_hub_scope_rbac_role_binding" "scope_rbac_custom_role_binding" { + scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" + scope_id = google_gke_hub_scope.scope.scope_id + user = "test-email@gmail.com" + role { + custom_role = "my-custom-role-2" + } + labels = { + key = "value" + } + depends_on = [google_gke_hub_feature.rbacrolebindingactuation] +} +`, context) +} + +func gkeHubRRBActuationProjectSetupForGA(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "tf-test-gkehub%{random_suffix}" + project_id = "tf-test-gkehub%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "anthos" { + project = google_project.project.project_id + service = "anthos.googleapis.com" +} + +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false } `, context) } From d9b6b3e9728bf707018bf9333d5d7ca96b868f49 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 6 Jun 2025 18:50:09 +0200 Subject: [PATCH 319/884] fix: bug on `google_compute_router_nat` where changes to `auto_network_tier` are always showed after initial apply (#14051) --- mmv1/products/compute/RouterNat.yaml | 1 + .../resource_compute_router_nat_test.go | 352 ++++++++++++------ 2 files changed, 242 insertions(+), 111 deletions(-) diff --git a/mmv1/products/compute/RouterNat.yaml b/mmv1/products/compute/RouterNat.yaml index 48920f446c6f..8c35ac4e8a62 100644 --- a/mmv1/products/compute/RouterNat.yaml +++ b/mmv1/products/compute/RouterNat.yaml @@ -512,6 +512,7 @@ properties: Must be one of: PREMIUM, STANDARD. If not specified, then the current project-level default tier is used. 
default_from_api: true + ignore_read: true enum_values: - 'PREMIUM' - 'STANDARD' diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go index 5dd18d65d99a..bc28459922b2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go @@ -31,27 +31,31 @@ func TestAccComputeRouterNat_basic(t *testing.T) { }, { // implicitly full ImportStateId - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, region, routerName, routerName), - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, region, routerName, routerName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s/%s", region, routerName, routerName), - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s/%s", region, routerName, routerName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s", routerName, routerName), - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s", routerName, routerName), + ImportState: true, + 
ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatKeepRouter(routerName), @@ -77,9 +81,10 @@ func TestAccComputeRouterNat_update(t *testing.T) { Config: testAccComputeRouterNatBasicBeforeUpdate(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatUpdated(routerName), @@ -90,9 +95,10 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatUpdateToNatIPsId(routerName), @@ -103,9 +109,10 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatUpdateToNatIPsName(routerName), @@ -116,9 +123,10 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatBasicBeforeUpdate(routerName), @@ -129,9 +137,10 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + 
ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, }, }) @@ -152,9 +161,10 @@ func TestAccComputeRouterNat_withManualIpAndSubnetConfiguration(t *testing.T) { Config: testAccComputeRouterNatWithManualIpAndSubnetConfiguration(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, }, }) @@ -175,49 +185,55 @@ func TestAccComputeRouterNat_withPortAllocationMethods(t *testing.T) { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, true), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, true, false), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, false), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, true, false), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: 
true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, true), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatWithAllocationMethodWithParameters(routerName, false, true, 256, 8192), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, }, }) @@ -244,9 +260,10 @@ func TestAccComputeRouterNat_withNatIpsAndDrainNatIps(t *testing.T) { Config: testAccComputeRouterNatWithNatIps(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, // (ERROR) - Should not allow draining IPs still in natIps { @@ -258,9 +275,10 @@ func TestAccComputeRouterNat_withNatIpsAndDrainNatIps(t *testing.T) { Config: testAccComputeRouterNatWithOneDrainOneRemovedNatIps(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, // (ERROR): Should not be able to drain previously removed natIps (#1) { @@ -290,105 +308,118 @@ func TestAccComputeRouterNat_withNatRules(t *testing.T) { Config: testAccComputeRouterNatRulesBasic_omitRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - 
ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 0, ruleDescription, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 65000, ruleDescription, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 100, ruleDescription, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 100, ruleDescriptionUpdate, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: 
testAccComputeRouterNatRulesWithSourceActiveAndDrainIps(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesWithDrainIps(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatMultiRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic_omitAction(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic_omitDescription(routerName, 100, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatMultiRulesWithIpId(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: 
"google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatRulesBasic_omitRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, }, }) @@ -413,9 +444,10 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatUpdateEndpointType(routerName, "ENDPOINT_TYPE_SWG"), @@ -424,9 +456,10 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatUpdateEndpointType(routerName, "ENDPOINT_TYPE_VM"), @@ -435,9 +468,10 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, }, { Config: testAccComputeRouterNatUpdateEndpointType(routerName, "ENDPOINT_TYPE_MANAGED_PROXY_LB"), @@ -446,9 +480,10 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"auto_network_tier"}, }, }, }) @@ -471,9 +506,64 @@ func TestAccComputeRouterNat_AutoNetworkTier(t *testing.T) { }, { // implicitly full ImportStateId - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, + }, + }, + }) +} + +func TestAccComputeRouterNat_withPrivateNatNetworkTierStandard(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + region := envvar.GetTestRegionFromEnv() + + testId := acctest.RandString(t, 10) + routerName := fmt.Sprintf("tf-test-router-nat-%s", testId) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRouterNatPrivateTypeNetworkTierStandard(routerName), + }, + { + // implicitly full ImportStateId + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, + }, + { + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, region, routerName, routerName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, + }, + { + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s/%s", region, routerName, routerName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, + }, + { + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s", routerName, routerName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"auto_network_tier"}, + }, + { + Config: 
testAccComputeRouterNatKeepRouter(routerName), + Check: testAccCheckComputeRouterNatDelete( + t, "google_compute_router_nat.foobar"), }, }, }) @@ -2080,6 +2170,46 @@ resource "google_compute_router_nat" "foobar" { `, testAccComputeRouterNatBaseResourcesWithPrivateNatSubnetworks(routerName, hubName), routerName) } +func testAccComputeRouterNatPrivateTypeNetworkTierStandard(routerName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "foobar" { + name = "%s-net" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "foobar" { + name = "%s-subnet" + network = google_compute_network.foobar.self_link + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + purpose = "PRIVATE_NAT" +} + +resource "google_compute_router" "foobar" { + name = "%s" + region = google_compute_subnetwork.foobar.region + network = google_compute_network.foobar.self_link +} + +resource "google_compute_router_nat" "foobar" { + name = "%s" + router = google_compute_router.foobar.name + region = google_compute_router.foobar.region + source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS" + type = "PRIVATE" + enable_dynamic_port_allocation = false + enable_endpoint_independent_mapping = false + auto_network_tier = "STANDARD" + min_ports_per_vm = 32 + + subnetwork { + name = google_compute_subnetwork.foobar.id + source_ip_ranges_to_nat = ["ALL_IP_RANGES"] + } +} +`, routerName, routerName, routerName, routerName) +} + func testAccComputeRouterNatWithNat64Configuration(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_dns_policy" "foobar" { From 5a00889c09b96290e6394c12517e8475669c3793 Mon Sep 17 00:00:00 2001 From: sahil-mahajan-google Date: Fri, 6 Jun 2025 22:52:33 +0530 Subject: [PATCH 320/884] Add Flex AutoTier support for storagePool and Volume (#14212) Co-authored-by: Zhenhua Li --- mmv1/products/netapp/StoragePool.yaml | 14 ++ mmv1/products/netapp/Volume.yaml | 7 + ...resource_netapp_storage_pool_test.go.tmpl} | 
143 ++++++++++++++++++ ...go => resource_netapp_volume_test.go.tmpl} | 117 +++++++++++++- 4 files changed, 279 insertions(+), 2 deletions(-) rename mmv1/third_party/terraform/services/netapp/{resource_netapp_storage_pool_test.go => resource_netapp_storage_pool_test.go.tmpl} (73%) rename mmv1/third_party/terraform/services/netapp/{resource_netapp_volume_test.go => resource_netapp_volume_test.go.tmpl} (86%) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index 4c95114f65f9..3eaa6a0f9722 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -194,3 +194,17 @@ properties: type: String description: | Optional. Custom Performance Total IOPS of the pool If not provided, it will be calculated based on the totalThroughputMibps + - name: 'hotTierSizeGib' + type: String + description: | + Total hot tier capacity for the Storage Pool. It is applicable only to Flex service level. + It should be less than the minimum storage pool size and cannot be more than the current storage pool size. It cannot be decreased once set. + min_version: 'beta' + - name: 'enableHotTierAutoResize' + type: Boolean + send_empty_value: true + ignore_read: true + description: | + Flag indicating that the hot-tier threshold will be auto-increased by 10% of the hot-tier when it hits 100%. Default is true. + The increment will kick in only if the new size after increment is still less than or equal to storage pool size. 
+ min_version: 'beta' diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 698689d07995..e3df1d42acdf 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -507,6 +507,7 @@ properties: update_mask_fields: - 'tiering_policy.cooling_threshold_days' - 'tiering_policy.tier_action' + - 'tiering_policy.hot_tier_bypass_mode_enabled' properties: - name: 'coolingThresholdDays' type: Integer @@ -521,6 +522,12 @@ properties: enum_values: - 'ENABLED' - 'PAUSED' + - name: 'hotTierBypassModeEnabled' + type: Boolean + description: | + Optional. Flag indicating that the hot tier bypass mode is enabled. Default is false. + Only applicable to Flex service level. + min_version: 'beta' - name: 'hybridReplicationParameters' type: NestedObject description: |- diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl similarity index 73% rename from mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go rename to mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index 8c1c493a2957..8863adf9d6a5 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -147,6 +147,149 @@ resource "google_netapp_storage_pool" "test_pool" { `, context) } +{{ if ne $.TargetVersionName `ga` -}} +func TestAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_full(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"enable_hot_tier_auto_resize", "location", "name", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"enable_hot_tier_auto_resize", "location", "name", "labels", "terraform_labels"}, + }, + + { + Config: testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update_2(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"enable_hot_tier_auto_resize", "location", "name", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} + +resource "google_netapp_storage_pool" "test_pool" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-south1-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + active_directory = "" + description = "this is a test description" + kms_config = "" + labels = { + key= "test" + value= "pool" + } + ldap_enabled = false + allow_auto_tiering = true + custom_performance_enabled = true + total_throughput_mibps = "64" + total_iops = 
"1024" + hot_tier_size_gib = "1024" + enable_hot_tier_auto_resize = false +} +`, context) +} + +func testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} + +resource "google_netapp_storage_pool" "test_pool" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-south1-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + active_directory = "" + description = "this is a test description" + kms_config = "" + labels = { + key= "test" + value= "pool" + } + ldap_enabled = false + allow_auto_tiering = true + custom_performance_enabled = true + total_throughput_mibps = "64" + total_iops = "1024" + hot_tier_size_gib = "1500" + enable_hot_tier_auto_resize = true +} +`, context) +} + +func testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update_2(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} + +resource "google_netapp_storage_pool" "test_pool" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-south1-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + active_directory = "" + description = "this is a test description" + kms_config = "" + labels = { + key= "test" + value= "pool" + } + ldap_enabled = false + allow_auto_tiering = true + custom_performance_enabled = true + total_throughput_mibps = "64" + total_iops = "1024" + hot_tier_size_gib = "1500" +} +`, context) +} +{{ end }} + func TestAccNetappStoragePool_FlexRegionalStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", 
acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl similarity index 86% rename from mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go rename to mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl index 44a2250b1528..88ea097e6554 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl @@ -599,7 +599,7 @@ func testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, if !ok { return fmt.Errorf("Not found: %v", vault) } - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{NetappBasePath}}projects/{{project}}/locations/{{location}}/backupVaults/{{name}}/backups") + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}NetappBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/backupVaults/{{"{{"}}name{{"}}"}}/backups") if err != nil { return fmt.Errorf("Error : %v", err) } @@ -636,7 +636,7 @@ func testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, return backupDataList[i].createTime.After(backupDataList[j].createTime) }) for i := range backupDataList { - baseUrl, err := tpgresource.ReplaceVarsForTest(config, rs, "{{NetappBasePath}}") + baseUrl, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}NetappBasePath{{"}}"}}") if err != nil { return fmt.Errorf("Error : %v", err) } @@ -759,3 +759,116 @@ data "google_compute_network" "default" { } `, context) } + + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccNetappVolume_flexAutoTierNetappVolume_update(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", 
acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckNetappVolumeDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccNetappVolume_flexAutoTierVolume_default(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_flexAutoTierVolume_update(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetappVolume_flexAutoTierVolume_default(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-south1-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + allow_auto_tiering = true + custom_performance_enabled = true + total_throughput_mibps = "64" + total_iops = "1024" + hot_tier_size_gib = "1024" + enable_hot_tier_auto_resize = true +} +resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-south1-a" + name = "tf-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + tiering_policy { + cooling_threshold_days = 31 + 
tier_action = "ENABLED" + hot_tier_bypass_mode_enabled = false + } +} +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} +`, context) +} + +func testAccNetappVolume_flexAutoTierVolume_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-south1-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + allow_auto_tiering = true + custom_performance_enabled = true + total_throughput_mibps = "64" + total_iops = "1024" + hot_tier_size_gib = "1024" + enable_hot_tier_auto_resize = true +} +resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-south1-a" + name = "tf-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + tiering_policy { + cooling_threshold_days = 20 + tier_action = "ENABLED" + hot_tier_bypass_mode_enabled = true + } +} +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} +`, context) +} +{{ end }} From 941242c9d4c5a802b784afccbad4a85677d79712 Mon Sep 17 00:00:00 2001 From: DavinaRen Date: Fri, 6 Jun 2025 13:29:40 -0400 Subject: [PATCH 321/884] Add `Tool` resource to product Dialgoflow CX (#14091) --- mmv1/products/dialogflowcx/Agent.yaml | 30 ++ mmv1/products/dialogflowcx/Tool.yaml | 306 +++++++++++++++++ .../custom_import/dialogflowcx_tool.go.tmpl | 18 + .../dialogflowcx_tool_data_store.tf.tmpl | 41 +++ .../dialogflowcx_tool_function.tf.tmpl | 40 +++ .../dialogflowcx_tool_open_api.tf.tmpl | 78 +++++ .../post_delete/dialogflowcx_agent.go.tmpl | 34 ++ .../pre_delete/dialogflowcx_agent.go.tmpl | 20 ++ .../resource_dialogflow_cx_tool_test.go | 310 ++++++++++++++++++ 9 files changed, 877 insertions(+) create 
mode 100644 mmv1/products/dialogflowcx/Tool.yaml create mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_tool_function.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_tool_open_api.tf.tmpl create mode 100644 mmv1/templates/terraform/post_delete/dialogflowcx_agent.go.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl create mode 100644 mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml index 7ae0704d3b32..ec0297c70b81 100644 --- a/mmv1/products/dialogflowcx/Agent.yaml +++ b/mmv1/products/dialogflowcx/Agent.yaml @@ -31,6 +31,17 @@ timeouts: update_minutes: 40 delete_minutes: 20 custom_code: + # An engine resource https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1/projects.locations.collections.engines + # will be automatically created when we specify dataStoreConnections in Flow, Page, or Tool resources associated with the Agent. + # When delete_chat_engine_on_destroy is set to true, we must + # 1. extract the engine ID from the agent in pre_delete. + # 2. delete the agent. + # 3. delete the engine in post_delete. + # We can't delete the chat engine first because the agent depends on the chat engine. Deleting chat engine then allows deleting the + # data store. The chain of dependency is agent -> engine -> data store. + # TODO: remove pre_delete and post_delete after the delete API is updated to clean up the engine resource. 
+ pre_delete: 'templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl' + post_delete: 'templates/terraform/post_delete/dialogflowcx_agent.go.tmpl' exclude_sweeper: true examples: - name: 'dialogflowcx_agent_full' @@ -42,6 +53,25 @@ examples: - 'git_integration_settings.0.github_settings.0.access_token' - 'enable_stackdriver_logging' - 'advanced_settings.0.logging_settings' +virtual_fields: + - name: 'delete_chat_engine_on_destroy' + description: | + If set to `true`, Terraform will delete the chat engine associated with the agent when the agent is destroyed. + Otherwise, the chat engine will persist. + + This virtual field addresses a critical dependency chain: `agent` -> `engine` -> `data store`. The chat engine is automatically + provisioned when a data store is linked to the agent, meaning Terraform doesn't have direct control over its lifecycle as a managed + resource. This creates a problem when both the agent and data store are managed by Terraform and need to be destroyed. Without + delete_chat_engine_on_destroy set to true, the data store's deletion would fail because the unmanaged chat engine would still be + using it. This setting ensures that the entire dependency chain can be properly torn down. + See `mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl` as an example. + + Data store can be linked to an agent through the `knowledgeConnectorSettings` field of a [flow](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.flows#resource:-flow) + or a [page](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.flows.pages#resource:-page) + or the `dataStoreSpec` field of a [tool](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#resource:-tool). 
+ The ID of the implicitly created engine is stored in the `genAppBuilderSettings` field of the [agent](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents#resource:-agent). + type: Boolean + default_value: false parameters: properties: - name: 'name' diff --git a/mmv1/products/dialogflowcx/Tool.yaml b/mmv1/products/dialogflowcx/Tool.yaml new file mode 100644 index 000000000000..07d48ceaa6a1 --- /dev/null +++ b/mmv1/products/dialogflowcx/Tool.yaml @@ -0,0 +1,306 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'Tool' +description: | + A tool provides a list of actions which are available to the Playbook to attain its goal. + A Tool consists of a description of the tool's usage and a specification of the tool which contains the schema and authentication information. 
+references: + guides: + 'Official Documentation': 'https://cloud.google.com/dialogflow/cx/docs' + api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools' +id_format: '{{parent}}/tools/{{name}}' +base_url: '{{parent}}/tools' +update_verb: 'PATCH' +update_mask: true +import_format: + - '{{parent}}/tools/{{name}}' +timeouts: + insert_minutes: 40 + update_minutes: 40 + delete_minutes: 20 +custom_code: + pre_create: 'templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl' + pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/dialogflowcx_set_location_skip_default_obj.go.tmpl' + custom_import: 'templates/terraform/custom_import/dialogflowcx_tool.go.tmpl' +exclude_sweeper: true +examples: + - name: 'dialogflowcx_tool_open_api' + primary_resource_id: 'open_api_tool' + vars: + agent_name: 'dialogflowcx-agent-open-api' + - name: 'dialogflowcx_tool_data_store' + primary_resource_id: 'data_store_tool' + vars: + agent_name: 'dialogflowcx-agent-data-store' + - name: 'dialogflowcx_tool_function' + primary_resource_id: 'function_tool' + vars: + agent_name: 'dialogflowcx-agent-fucntion' +parameters: + - name: 'parent' + type: String + description: | + The agent to create a Tool for. + Format: projects//locations//agents/. + url_param_only: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The unique identifier of the Tool. + Format: projects//locations//agents//tools/. + output: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + - name: 'displayName' + type: String + description: | + The human-readable name of the tool, unique within the agent. + required: true + - name: 'description' + type: String + description: | + High level description of the Tool and its usage. 
+ required: true + - name: 'toolType' + type: Enum + description: | + The tool type. + output: true + - name: 'openApiSpec' + type: NestedObject + description: | + OpenAPI specification of the Tool. + This field is part of a union field `specification`: Only one of `openApiSpec`, `dataStoreSpec`, or `functionSpec` may be set. + properties: + - name: 'authentication' + type: NestedObject + description: | + Optional. Authentication information required by the API. + properties: + - name: 'apiKeyConfig' + type: NestedObject + description: | + Config for API key auth. + This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. + properties: + - name: 'keyName' + type: String + description: | + The parameter name or the header name of the API key. + E.g., If the API request is "https://example.com/act?X-Api-Key=", "X-Api-Key" would be the parameter name. + required: true + - name: 'apiKey' + type: String + description: | + Optional. The API key. If the `secretVersionForApiKey`` field is set, this field will be ignored. + sensitive: true + ignore_read: true + - name: 'secretVersionForApiKey' + type: String + description: | + Optional. The name of the SecretManager secret version resource storing the API key. + If this field is set, the apiKey field will be ignored. + Format: projects/{project}/secrets/{secret}/versions/{version} + - name: 'requestLocation' + type: String + description: | + Key location in the request. + See [RequestLocation](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#requestlocation) for valid values. + required: true + - name: 'oauthConfig' + type: NestedObject + description: | + Config for OAuth. + This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. 
+            properties:
+              - name: 'oauthGrantType'
+                type: String
+                description: |
+                  OAuth grant types.
+                  See [OauthGrantType](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#oauthgranttype) for valid values.
+                required: true
+              - name: 'clientId'
+                type: String
+                description: |
+                  The client ID from the OAuth provider.
+                required: true
+              - name: 'clientSecret'
+                type: String
+                description: |
+                  Optional. The client secret from the OAuth provider. If the `secretVersionForClientSecret` field is set, this field will be ignored.
+                sensitive: true
+                ignore_read: true
+              - name: 'secretVersionForClientSecret'
+                type: String
+                description: |
+                  Optional. The name of the SecretManager secret version resource storing the client secret.
+                  If this field is set, the clientSecret field will be ignored.
+                  Format: projects/{project}/secrets/{secret}/versions/{version}
+              - name: 'tokenEndpoint'
+                type: String
+                description: |
+                  The token endpoint in the OAuth provider to exchange for an access token.
+                required: true
+              - name: 'scopes'
+                type: Array
+                description: |
+                  Optional. The OAuth scopes to grant.
+                item_type:
+                  type: String
+          - name: 'serviceAgentAuthConfig'
+            type: NestedObject
+            description: |
+              Config for [Dialogflow service agent](https://cloud.google.com/iam/docs/service-agents#dialogflow-service-agent) auth.
+              This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set.
+            properties:
+              - name: 'serviceAgentAuth'
+                type: String
+                description: |
+                  Optional. Indicates the auth token type generated from the Dialogflow service agent.
+                  The generated token is sent in the Authorization header.
+                  See [ServiceAgentAuth](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#serviceagentauth) for valid values.
+          - name: 'bearerTokenConfig'
+            type: NestedObject
+            description: |
+              Config for bearer token auth.
+ This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. + properties: + - name: 'token' + type: String + description: | + Optional. The text token appended to the text Bearer to the request Authorization header. + [Session parameters reference](https://cloud.google.com/dialogflow/cx/docs/concept/parameter#session-ref) can be used to pass the token dynamically, e.g. `$session.params.parameter-id`. + sensitive: true + ignore_read: true + - name: 'secretVersionForToken' + type: String + description: | + Optional. The name of the SecretManager secret version resource storing the Bearer token. If this field is set, the `token` field will be ignored. + Format: projects/{project}/secrets/{secret}/versions/{version} + - name: 'tlsConfig' + type: NestedObject + description: | + Optional. TLS configuration for the HTTPS verification. + properties: + - name: 'caCerts' + type: Array + description: | + Specifies a list of allowed custom CA certificates for HTTPS verification. + required: true + item_type: + type: NestedObject + properties: + - name: 'displayName' + type: String + description: | + The name of the allowed custom CA certificates. This can be used to disambiguate the custom CA certificates. + required: true + - name: 'cert' + type: String + description: | + The allowed custom CA certificates (in DER format) for HTTPS verification. This overrides the default SSL trust store. + If this is empty or unspecified, Dialogflow will use Google's default trust store to verify certificates. + N.B. Make sure the HTTPS server certificates are signed with "subject alt name". + For instance a certificate can be self-signed using the following command: + ``` + openssl x509 -req -days 200 -in example.com.csr \ + -signkey example.com.key \ + -out example.com.crt \ + -extfile <(printf "\nsubjectAltName='DNS:www.example.com'") + ``` + A base64-encoded string. 
+ required: true + - name: 'serviceDirectoryConfig' + type: NestedObject + description: | + Optional. Service Directory configuration. + properties: + - name: 'service' + type: String + description: | + The name of [Service Directory](https://cloud.google.com/service-directory/docs) service. + Format: projects//locations//namespaces//services/. LocationID of the service directory must be the same as the location of the agent. + required: true + - name: 'textSchema' + type: String + description: | + The OpenAPI schema specified as a text. + This field is part of a union field `schema`: only one of `textSchema` may be set. + required: true + - name: 'dataStoreSpec' + type: NestedObject + description: | + Data store search tool specification. + This field is part of a union field `specification`: Only one of `openApiSpec`, `dataStoreSpec`, or `functionSpec` may be set. + properties: + - name: 'dataStoreConnections' + type: Array + description: | + List of data stores to search. + required: true + item_type: + type: NestedObject + properties: + - name: 'dataStoreType' + type: String + description: | + The type of the connected data store. + See [DataStoreType](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/DataStoreConnection#datastoretype) for valid values. + - name: 'dataStore' + type: String + description: | + The full name of the referenced data store. Formats: projects/{project}/locations/{location}/collections/{collection}/dataStores/{dataStore} projects/{project}/locations/{location}/dataStores/{dataStore} + - name: 'documentProcessingMode' + type: String + description: | + The document processing mode for the data store connection. Should only be set for PUBLIC_WEB and UNSTRUCTURED data stores. If not set it is considered as DOCUMENTS, as this is the legacy mode. + See [DocumentProcessingMode](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/DataStoreConnection#documentprocessingmode) for valid values. 
+ - name: 'fallbackPrompt' + type: NestedObject + description: | + Fallback prompt configurations to use. + required: true + allow_empty_object: true + send_empty_value: true + properties: [] # Meant to be an empty object with no properties. + - name: 'functionSpec' + type: NestedObject + description: | + Client side executed function specification. + This field is part of a union field `specification`: Only one of `openApiSpec`, `dataStoreSpec`, or `functionSpec` may be set. + properties: + - name: 'inputSchema' + type: String + description: | + Optional. The JSON schema is encapsulated in a [google.protobuf.Struct](https://protobuf.dev/reference/protobuf/google.protobuf/#struct) to describe the input of the function. + This input is a JSON object that contains the function's parameters as properties of the object + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'outputSchema' + type: String + description: | + Optional. The JSON schema is encapsulated in a [google.protobuf.Struct](https://protobuf.dev/reference/protobuf/google.protobuf/#struct) to describe the output of the function. 
+ This output is a JSON object that contains the function's parameters as properties of the object + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl new file mode 100644 index 000000000000..ce7d263d3815 --- /dev/null +++ b/mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl @@ -0,0 +1,18 @@ +config := meta.(*transport_tpg.Config) + +// current import_formats can't import fields with forward slashes in their value and parent contains slashes +if err := tpgresource.ParseImportId([]string{ + "(?P.+)/tools/(?P[^/]+)", + "(?P.+)/(?P[^/]+)", +}, d, config); err != nil { + return nil, err +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/tools/{{"{{"}}name{{"}}"}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl new file mode 100644 index 000000000000..74da8056d4bb --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl @@ -0,0 +1,41 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." 
+ delete_chat_engine_on_destroy = true + depends_on = [ + google_discovery_engine_data_store.my_datastore + ] +} + +resource "google_dialogflow_cx_tool" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "Example Data Store Tool" + description = "Example Description" + data_store_spec { + data_store_connections { + data_store_type = "UNSTRUCTURED" + data_store = "projects/${data.google_project.project.number}/locations/global/collections/default_collection/dataStores/${google_discovery_engine_data_store.my_datastore.data_store_id}" + document_processing_mode = "DOCUMENTS" + } + fallback_prompt {} + } + depends_on = [ + google_discovery_engine_data_store.my_datastore, + google_dialogflow_cx_agent.agent + ] +} + +resource "google_discovery_engine_data_store" "my_datastore" { + location = "global" + data_store_id = "datastore-tool-test-%{random_suffix}" + display_name = "datastore for Tool test" + industry_vertical = "GENERIC" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_CHAT"] +} + +data "google_project" "project" { +} diff --git a/mmv1/templates/terraform/examples/dialogflowcx_tool_function.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_tool_function.tf.tmpl new file mode 100644 index 000000000000..e4b62f7790ab --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_tool_function.tf.tmpl @@ -0,0 +1,40 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." +} + +resource "google_dialogflow_cx_tool" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "Example Function Tool" + description = "Example Description" + function_spec { + input_schema = < 0 { + // An engine is linked to the Agent. Delete it. 
+ engineIDIntf, ok := settings[0].(map[string]interface{})["engine"] + if !ok { + return fmt.Errorf("Expected key 'engine' in map %+v", settings[0]) + } + engineID, ok = engineIDIntf.(string) + if !ok { + return fmt.Errorf("Can convert engine ID %s to string", engineIDIntf) + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go new file mode 100644 index 000000000000..03b640ea1591 --- /dev/null +++ b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go @@ -0,0 +1,310 @@ +package dialogflowcx_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDialogflowCXTool_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDialogflowCXTool_basic(context), + }, + { + ResourceName: "google_dialogflow_cx_tool.my_tool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDialogflowCXTool_full_api_key(context), + }, + { + ResourceName: "google_dialogflow_cx_tool.my_tool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"open_api_spec.0.authentication.0.api_key_config.0.api_key"}, + }, + { + Config: testAccDialogflowCXTool_full_service_agent_auth(context), + }, + { + ResourceName: "google_dialogflow_cx_tool.my_tool", + ImportState: true, + ImportStateVerify: true, 
+ }, + { + Config: testAccDialogflowCXTool_full_bearer_token(context), + }, + { + ResourceName: "google_dialogflow_cx_tool.my_tool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"open_api_spec.0.authentication.0.bearer_token_config.0.token"}, + }, + }, + }) +} + +func testAccDialogflowCXTool_basic(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dialogflow_cx_agent" "agent_tool" { + display_name = "tf-test-%{random_suffix}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "ageng for tool test" + } + + resource "google_dialogflow_cx_tool" "my_tool" { + parent = google_dialogflow_cx_agent.agent_tool.id + display_name = "Example" + description = "Example Description" + } + `, context) +} + +func testAccDialogflowCXTool_full_api_key(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dialogflow_cx_agent" "agent_tool" { + display_name = "tf-test-%{random_suffix}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "ageng for tool test" + } + + resource "google_dialogflow_cx_tool" "my_tool" { + parent = google_dialogflow_cx_agent.agent_tool.id + display_name = "Example Open API Tool with api_key_config" + description = "Example Description" + open_api_spec { + authentication { + api_key_config { + key_name = "example key name" + api_key = "example key" + secret_version_for_api_key = "projects/-/secrets/-/versions/-" + request_location = "HEADER" + } + } + tls_config { + ca_certs { + display_name = "example ca cert name" + cert = base64encode("example cert") + } + } + service_directory_config { + service = "projects/-/locations/-/namespaces/-/services/-" + } + text_schema = < Date: Fri, 6 Jun 2025 23:02:30 +0530 Subject: [PATCH 322/884] Update changes to BackupPlan with respect to disk and regional disk (#13840) --- mmv1/products/backupdr/BackupPlan.yaml | 8 
++++++++ 1 file changed, 8 insertions(+) diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index c045e0c46b90..d64e98677d05 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -71,6 +71,14 @@ properties: description: | The Google Cloud Platform Service Account to be used by the BackupVault for taking backups. output: true + - name: 'supportedResourceTypes' + type: Array + description: | + The list of all resource types to which the `BackupPlan` can be applied. + min_version: beta + item_type: + type: String + output: true - name: 'resourceType' type: String description: | From 8a16cd73656f0c05b3a1bb0ef655cbe7d3965368 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Fri, 6 Jun 2025 11:21:54 -0700 Subject: [PATCH 323/884] Document architecture field in disk. (#14210) --- mmv1/products/compute/Disk.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index 29435dbb62f7..ffc2ae3a61fb 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -494,10 +494,8 @@ properties: - name: 'architecture' ignore_read: true type: String - The architecture of the disk. - enum_values: - - 'X86_64' - - 'ARM64' + description: | + The architecture of the disk. Values include `X86_64`, `ARM64`. 
- name: 'params' type: NestedObject ignore_read: true From 4290b176cf0b30160947668a193c63c4ce38a3e7 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 6 Jun 2025 11:57:10 -0700 Subject: [PATCH 324/884] tgc-revival: cai2hcl resource converter template (#14185) --- mmv1/api/resource.go | 10 ++ mmv1/provider/template_data.go | 1 + mmv1/provider/terraform_tgc_next.go | 1 + .../cai2hcl/resource_converter.go.tmpl | 116 ++++++++++++++++++ 4 files changed, 128 insertions(+) create mode 100644 mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 7df33e7aa829..e2b0704cec63 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1976,3 +1976,13 @@ func (r Resource) CodeHeader(templatePath string) string { func (r Resource) MarkdownHeader(templatePath string) string { return strings.Replace(r.CodeHeader(templatePath), "//", "#", -1) } + +// ==================== +// TGC +// ==================== +// Filters out computed properties during cai2hcl +func (r Resource) ReadPropertiesForTgc() []*Type { + return google.Reject(r.AllUserProperties(), func(v *Type) bool { + return v.Output + }) +} diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index da29e65278c1..656f8d4270f5 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -196,6 +196,7 @@ func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, r "templates/terraform/expand_property_method.go.tmpl", "templates/terraform/schema_property.go.tmpl", "templates/terraform/schema_subresource.go.tmpl", + "templates/terraform/flatten_property_method.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) 
} diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index b6ffabe85afb..6304ecbc5f1a 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -93,6 +93,7 @@ func (tgc TerraformGoogleConversionNext) GenerateObject(object api.Resource, out if !object.IsExcluded() { tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "tfplan2cai") + tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "cai2hcl") } } diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl new file mode 100644 index 000000000000..3b5d198eb1bb --- /dev/null +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -0,0 +1,116 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */ -}} +{{$.CodeHeader TemplatePath}} + +package {{ lower $.ProductMetadata.Name }} + +import ( +{{/* We list all the v2 imports here and unstable imports, because we run 'goimports' to guess the correct + set of imports, which will never guess the major version correctly. 
*/ -}} + "github.com/apparentlymart/go-cidr/cidr" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" + "google.golang.org/api/bigtableadmin/v2" + "google.golang.org/api/googleapi" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" +) + +{{- $caiProductBaseUrl := $.CaiProductBaseUrl }} +{{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} +{{- $apiVersion := $.CaiApiVersion $productBackendName $caiProductBaseUrl}} + +{{if $.CustomCode.Constants -}} + {{- $.CustomTemplate $.CustomCode.Constants true -}} +{{- end}} + +const {{ $.ResourceName -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" + +type {{ $.ResourceName -}}Converter struct { + name string + schema map[string]*schema.Schema +} + +func New{{ $.ResourceName -}}Converter(provider *schema.Provider) models.Converter { + schema := provider.ResourcesMap[{{ $.ResourceName -}}SchemaName].Schema + + return &{{ $.ResourceName -}}Converter{ + name: {{ $.ResourceName 
-}}SchemaName, + schema: schema, + } +} + +// Convert converts asset to HCL resource blocks. +func (c *{{ $.ResourceName -}}Converter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { + var blocks []*models.TerraformResourceBlock + block, err := c.convertResourceData(asset) + if err != nil { + return nil, err + } + blocks = append(blocks, block) + return blocks, nil +} + +func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { + if asset.Resource == nil || asset.Resource.Data == nil { + return nil, fmt.Errorf("asset resource data is nil") + } + + res := asset.Resource.Data + config := utils.NewConfig() + d := &schema.ResourceData{} + + hclData := make(map[string]interface{}) + +{{ range $prop := $.ReadPropertiesForTgc }} +{{ if $prop.FlattenObject -}} +{{/* TODO */}} +{{- else -}} + hclData["{{ underscore $prop.Name -}}"] = flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config) +{{- end}} +{{- end}} + + ctyVal, err := utils.MapToCtyValWithSchema(hclData, c.schema) + if err != nil { + return nil, err + } + return &models.TerraformResourceBlock{ + Labels: []string{c.name, res["name"].(string)}, + Value: ctyVal, + }, nil +} + +{{- range $prop := $.ReadPropertiesForTgc }} + {{ if $prop.IsA "KeyValueLabels" }} +func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return utils.RemoveTerraformAttributionLabel(v) +} + {{ else }} + {{ template "flattenPropertyMethod" $prop -}} + {{- end }} +{{- end }} \ No newline at end of file From 72e6fe1da8894f2076d622342d6b6070e7c916a1 Mon Sep 17 00:00:00 2001 From: dishaagarwal03-google Date: Sat, 7 Jun 2025 01:59:00 +0530 Subject: [PATCH 325/884] Documentation changes to include "disk" resource type in backup_dr (#13922) --- mmv1/products/backupdr/BackupPlan.yaml | 11 
++++++- .../backupdr/BackupPlanAssociation.yaml | 6 +++- ...p_dr_backup_plan_for_disk_resource.tf.tmpl | 31 +++++++++++++++++++ 3 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index d64e98677d05..b02c79a1b5e6 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -35,6 +35,14 @@ examples: backup_plan_id: 'backup-plan-simple-test' test_env_vars: project: :PROJECT_NAME + - name: 'backup_dr_backup_plan_for_disk_resource' + primary_resource_id: 'my-disk-backup-plan-1' + min_version: 'beta' + vars: + backup_vault_id: 'backup-vault-disk-test' + backup_plan_id: 'backup-plan-disk-test' + test_env_vars: + project: :PROJECT_NAME parameters: - name: 'location' type: String @@ -82,7 +90,8 @@ properties: - name: 'resourceType' type: String description: | - The resource type to which the `BackupPlan` will be applied. Examples include, "compute.googleapis.com/Instance" and "storage.googleapis.com/Bucket". + The resource type to which the `BackupPlan` will be applied. + Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk", and "storage.googleapis.com/Bucket". required: true - name: 'createTime' type: String diff --git a/mmv1/products/backupdr/BackupPlanAssociation.yaml b/mmv1/products/backupdr/BackupPlanAssociation.yaml index 36909dab6d15..8885ad232ea7 100644 --- a/mmv1/products/backupdr/BackupPlanAssociation.yaml +++ b/mmv1/products/backupdr/BackupPlanAssociation.yaml @@ -68,12 +68,16 @@ properties: type: String description: | The BP with which resource needs to be created + Note: + - A Backup Plan configured for 'compute.googleapis.com/Instance', can only protect instance type resources. 
+ - A Backup Plan configured for 'compute.googleapis.com/Disk' can be used to protect both standard Disks and Regional Disks resources. required: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: 'resourceType' type: String description: | - The resource type of workload on which backupplan is applied + The resource type of workload on which backupplan is applied. + Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk", and "compute.googleapis.com/RegionDisk" required: true - name: 'createTime' type: String diff --git a/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl new file mode 100644 index 000000000000..f4185401e77d --- /dev/null +++ b/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl @@ -0,0 +1,31 @@ +resource "google_backup_dr_backup_vault" "my_backup_vault" { + provider = google-beta + location = "us-central1" + backup_vault_id = "{{index $.Vars "backup_vault_id"}}" + backup_minimum_enforced_retention_duration = "100000s" +} + +resource "google_backup_dr_backup_plan" "{{$.PrimaryResourceId}}" { + provider = google-beta + location = "us-central1" + backup_plan_id = "{{index $.Vars "backup_plan_id"}}" + resource_type = "compute.googleapis.com/Disk" + backup_vault = google_backup_dr_backup_vault.my_backup_vault.id + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 5 + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 1 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 6 + } + } + } +} + From 7a0d5056fd594075260f399dcaa3c18736db3e9a Mon Sep 17 00:00:00 2001 From: Margubur Rahman <150442997+googlyrahman@users.noreply.github.com> Date: Sat, 7 Jun 2025 02:45:30 +0530 Subject: [PATCH 326/884] Remove the hard coded timeout while doing bucket deletion. 
(#14085) --- .../storage/resource_storage_bucket.go.tmpl | 111 +++++++++--------- 1 file changed, 53 insertions(+), 58 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index aa5864bdbbeb..69232098a6ff 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -710,71 +710,63 @@ func getAnywhereCacheListResult(d *schema.ResourceData, config *transport_tpg.Co } func deleteAnywhereCacheIfAny(d *schema.ResourceData, config *transport_tpg.Config) error { - // Get the initial list of Anywhere Caches - cacheList, err := getAnywhereCacheListResult(d, config) - if err != nil { - return err - } - - // If no cache exists initially, return early - if len(cacheList) == 0 { - return nil - } - - // Iterate over each object in the resource list - for _, item := range cacheList { - // Ensure the item is a map - obj, ok := item.(map[string]interface{}) - if !ok { - return fmt.Errorf("unexpected type for resource list item: %T", item) - } - - // Check the state of the object - state, ok := obj["state"].(string) - if !ok { - continue // If state is not a string, skip this item - } - if !strings.EqualFold(state, "running") && !strings.EqualFold(state, "paused") { - continue - } - - // Disable the cache if state is running or paused - anywhereCacheId, ok := obj["anywhereCacheId"].(string) - if !ok { - return fmt.Errorf("missing or invalid anywhereCacheId: %v", obj) - } - anywhereCacheUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{StorageBasePath}}b/{{name}}/anywhereCaches/"}}") + for { + // Get the list of Anywhere Caches + cacheList, err := getAnywhereCacheListResult(d, config) if err != nil { return err } - disableUrl := anywhereCacheUrl + fmt.Sprintf("%s/disable", anywhereCacheId) - _, err = 
transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: config.Project, - RawURL: disableUrl, - UserAgent: config.UserAgent, - }) - if err != nil { - return err + // Check if the cache list is empty + if len(cacheList) == 0 { + break } - } - time.Sleep(80 * time.Minute) // It takes around 70 minutes of time for cache to finally delete post it disable time. - // Post this time, we check again! - // Get the list of Anywhere Caches after the sleep - cacheList, err = getAnywhereCacheListResult(d, config) - if err != nil { - return err - } + // Iterate over each object in the resource list + for _, item := range cacheList { + // Ensure the item is a map + obj, ok := item.(map[string]interface{}) + if !ok { + return fmt.Errorf("unexpected type for resource list item: %T", item) + } - // Check if the cache list is now empty - if len(cacheList) == 0 { - return nil + // Check the state of the object + state, ok := obj["state"].(string) + if !ok { + continue // If state is not a string, skip this item + } + if !strings.EqualFold(state, "running") && !strings.EqualFold(state, "paused") { + continue + } + + // Disable the cache if state is running or paused + anywhereCacheId, ok := obj["anywhereCacheId"].(string) + if !ok { + return fmt.Errorf("missing or invalid anywhereCacheId: %v", obj) + } + anywhereCacheUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{StorageBasePath}}b/{{name}}/anywhereCaches/"}}") + if err != nil { + return err + } + disableUrl := anywhereCacheUrl + fmt.Sprintf("%s/disable", anywhereCacheId) + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: config.Project, + RawURL: disableUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + return err + } + } + + // Sleep for 1 minute + time.Sleep(1 * time.Minute) } - return fmt.Errorf("Error while deleting the cache: caches still exists post 80mins of their disable time") + return nil } 
func resourceDataplexLabelDiffSuppress(k, old, new string, d *schema.ResourceData) bool { @@ -1179,7 +1171,7 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error // Get the bucket bucket := d.Get("name").(string) - var listError, deleteObjectError error + var listError, deleteObjectError, deleteCacheError error for deleteObjectError == nil { res, err := config.NewStorageClient(userAgent).Objects.List(bucket).Versions(true).Do() if err != nil { @@ -1239,7 +1231,7 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error wp.Submit(func() { err = deleteAnywhereCacheIfAny(d, config) if err != nil { - deleteObjectError = fmt.Errorf("error deleting the caches on the bucket %s : %w", bucket, err) + deleteCacheError = fmt.Errorf("error deleting the caches on the bucket %s : %w", bucket, err) } }) @@ -1279,6 +1271,9 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Message, "not empty") && deleteObjectError != nil { return fmt.Errorf("could not delete non-empty bucket due to error when deleting contents: %v", deleteObjectError) } + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Message, "Anywhere Caches") && deleteCacheError != nil { + return fmt.Errorf("could not delete bucket due to error when deleting anywhere caches on it: %v", deleteCacheError) + } if err != nil { log.Printf("Error deleting bucket %s: %v", bucket, err) return err From e287e7e3a78d059be3f5a6012dcea005aff5c7e9 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Fri, 6 Jun 2025 23:23:51 +0200 Subject: [PATCH 327/884] Add headers, expectedOutputUrl, and expectedRedirectResponseCode fields to URL map (#14118) --- mmv1/products/compute/UrlMap.yaml | 52 ++++++++++++- .../url_map_test_expected_output_url.tf.tmpl | 52 +++++++++++++ .../examples/url_map_test_headers.tf.tmpl | 58 +++++++++++++++ 
...rl_map_test_redirect_response_code.tf.tmpl | 73 +++++++++++++++++++ 4 files changed, 234 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/url_map_test_expected_output_url.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/url_map_test_headers.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/url_map_test_redirect_response_code.tf.tmpl diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index 74c57fef9542..984121d7c50f 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -118,6 +118,24 @@ examples: home_backend_service_name: 'home' mirror_backend_service_name: 'mirror' health_check_name: 'health-check' + - name: 'url_map_test_headers' + primary_resource_id: 'urlmap' + vars: + url_map_name: 'urlmap' + backend_service_name: 'backend' + health_check_name: 'health-check' + - name: 'url_map_test_expected_output_url' + primary_resource_id: 'urlmap' + vars: + url_map_name: 'urlmap' + backend_service_name: 'backend' + health_check_name: 'health-check' + - name: 'url_map_test_redirect_response_code' + primary_resource_id: 'urlmap' + vars: + url_map_name: 'urlmap' + backend_service_name: 'backend' + health_check_name: 'health-check' - name: 'external_http_lb_mig_backend' primary_resource_id: 'default' vars: @@ -2427,13 +2445,45 @@ properties: description: | Path portion of the URL. required: true + - name: 'headers' + type: Array + description: | + HTTP headers for this request. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Header name. + required: true + - name: 'value' + type: String + description: | + Header value. + required: true - name: 'service' type: ResourceRef description: The backend service or backend bucket link that should be matched by this test. 
- required: true custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' + - name: 'expectedOutputUrl' + type: String + description: | + The expected output URL evaluated by the load balancer containing the scheme, host, path and query parameters. + + For rules that forward requests to backends, the test passes only when expectedOutputUrl matches the request forwarded by the load balancer to backends. For rules with urlRewrite, the test verifies that the forwarded request matches hostRewrite and pathPrefixRewrite in the urlRewrite action. When service is specified, expectedOutputUrl`s scheme is ignored. + + For rules with urlRedirect, the test passes only if expectedOutputUrl matches the URL in the load balancer's redirect response. If urlRedirect specifies httpsRedirect, the test passes only if the scheme in expectedOutputUrl is also set to HTTPS. If urlRedirect specifies stripQuery, the test passes only if expectedOutputUrl does not contain any query parameters. + + expectedOutputUrl is optional when service is specified. + - name: 'expectedRedirectResponseCode' + type: Integer + description: | + For rules with urlRedirect, the test passes only if expectedRedirectResponseCode matches the HTTP status code in load balancer's redirect response. + + expectedRedirectResponseCode cannot be set when service is set. 
- name: 'defaultUrlRedirect' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/url_map_test_expected_output_url.tf.tmpl b/mmv1/templates/terraform/examples/url_map_test_expected_output_url.tf.tmpl new file mode 100644 index 000000000000..78eb119d47f9 --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_test_expected_output_url.tf.tmpl @@ -0,0 +1,52 @@ +resource "google_compute_health_check" "{{$.Vars.health_check_name}}" { + name = "{{index $.Vars "health_check_name"}}" + timeout_sec = 1 + check_interval_sec = 1 + + tcp_health_check { + port = "80" + } +} + +resource "google_compute_backend_service" "{{$.Vars.backend_service_name}}" { + name = "{{index $.Vars "backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.{{$.Vars.health_check_name}}.id] +} + +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "url_map_name"}}" + description = "URL map with expected output URL tests" + default_service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + test { + description = "Test with expected output URL" + host = "example.com" + path = "/" + service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + headers { + name = "User-Agent" + value = "TestBot/1.0" + } + + expected_output_url = "http://example.com/" + } + + test { + description = "Test API routing with expected output URL" + host = "api.example.com" + path = "/v1/users" + service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + headers { + name = "Authorization" + value = "Bearer token123" + } + + expected_output_url = "http://api.example.com/v1/users" + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/url_map_test_headers.tf.tmpl b/mmv1/templates/terraform/examples/url_map_test_headers.tf.tmpl new file mode 100644 index 000000000000..b672532762c5 --- /dev/null +++ 
b/mmv1/templates/terraform/examples/url_map_test_headers.tf.tmpl @@ -0,0 +1,58 @@ +resource "google_compute_health_check" "{{$.Vars.health_check_name}}" { + name = "{{index $.Vars "health_check_name"}}" + timeout_sec = 1 + check_interval_sec = 1 + + tcp_health_check { + port = "80" + } +} + +resource "google_compute_backend_service" "{{$.Vars.backend_service_name}}" { + name = "{{index $.Vars "backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.{{$.Vars.health_check_name}}.id] +} + +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "url_map_name"}}" + description = "URL map with test headers" + default_service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + test { + description = "Test with custom headers" + host = "example.com" + path = "/" + service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + headers { + name = "User-Agent" + value = "TestBot/1.0" + } + + headers { + name = "X-Custom-Header" + value = "test-value" + } + } + + test { + description = "Test with authorization headers" + host = "api.example.com" + path = "/v1/test" + service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + headers { + name = "Authorization" + value = "Bearer token123" + } + + headers { + name = "Content-Type" + value = "application/json" + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/url_map_test_redirect_response_code.tf.tmpl b/mmv1/templates/terraform/examples/url_map_test_redirect_response_code.tf.tmpl new file mode 100644 index 000000000000..24d9eb6a99c0 --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_test_redirect_response_code.tf.tmpl @@ -0,0 +1,73 @@ +resource "google_compute_health_check" "{{$.Vars.health_check_name}}" { + name = "{{index $.Vars "health_check_name"}}" + timeout_sec = 1 + check_interval_sec = 1 + + tcp_health_check { 
+ port = "80" + } +} + +resource "google_compute_backend_service" "{{$.Vars.backend_service_name}}" { + name = "{{index $.Vars "backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.{{$.Vars.health_check_name}}.id] +} + +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "url_map_name"}}" + description = "URL map with redirect response code tests" + default_service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + host_rule { + hosts = ["example.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.{{$.Vars.backend_service_name}}.id + + path_rule { + paths = ["/redirect/*"] + url_redirect { + host_redirect = "newsite.com" + path_redirect = "/new-path/" + https_redirect = true + redirect_response_code = "MOVED_PERMANENTLY_DEFAULT" + strip_query = false + } + } + } + + test { + description = "Test redirect with expected response code" + host = "example.com" + path = "/redirect/old-page" + + headers { + name = "Referer" + value = "https://oldsite.com" + } + + expected_output_url = "https://newsite.com/new-path/" + expected_redirect_response_code = 301 + } + + test { + description = "Test another redirect scenario" + host = "example.com" + path = "/redirect/another-page" + + headers { + name = "User-Agent" + value = "TestBot/1.0" + } + + expected_output_url = "https://newsite.com/new-path/" + expected_redirect_response_code = 301 + } +} \ No newline at end of file From 02dc58ca5c7772fe4222c9333743d609188b61f5 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Fri, 6 Jun 2025 14:44:27 -0700 Subject: [PATCH 328/884] fix mmv1 generation issue with PRODUCT specific (#14215) --- mmv1/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/main.go b/mmv1/main.go index 469fa990faba..4ca75098c8e6 100644 --- 
a/mmv1/main.go +++ b/mmv1/main.go @@ -286,6 +286,7 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro productApi.Objects = resources productApi.Validate() + providerToGenerate := newProvider(*forceProvider, *version, productApi, startTime) productsForVersionChannel <- productApi if !slices.Contains(productsToGenerate, productName) { @@ -295,7 +296,6 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro log.Printf("%s: Generating files", productName) - providerToGenerate := newProvider(*forceProvider, *version, productApi, startTime) providerToGenerate.Generate(*outputPath, productName, resourceToGenerate, generateCode, generateDocs) } From 2287e924c5ca5256dc45a37511353c4ed107f57c Mon Sep 17 00:00:00 2001 From: Ziting Date: Mon, 9 Jun 2025 10:43:19 -0700 Subject: [PATCH 329/884] Terraform should ignore "prodcurrent" and "prodprevious" when diff image subminor version for Dataproc clusters (#14216) --- .../services/dataproc/resource_dataproc_cluster.go | 9 ++++++--- .../dataproc/resource_dataproc_cluster_internal_test.go | 5 +++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go index 834f88fadc60..a7e2b57b25f1 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go @@ -5,6 +5,7 @@ import ( "fmt" "log" "regexp" + "slices" "strconv" "strings" "time" @@ -262,7 +263,7 @@ func ResourceDataprocCluster() *schema.Resource { Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, Description: `The list of the labels (key/value pairs) configured on the resource and to be applied to instances in the cluster. - + **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. 
Please refer to the field 'effective_labels' for all of the labels present on the resource.`, }, @@ -3442,8 +3443,10 @@ func dataprocImageVersionDiffSuppress(_, old, new string, _ *schema.ResourceData if newV.minor != oldV.minor { return false } - // Only compare subminor version if set in config version. - if newV.subminor != "" && newV.subminor != oldV.subminor { + + ignoreSubminor := []string{"", "prodcurrent", "prodprevious"} + // Only compare subminor version if set to a numeric value in config version. + if !slices.Contains(ignoreSubminor, newV.subminor) && newV.subminor != oldV.subminor { return false } // Only compare os if it is set in config version. diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go index 4b3458e89571..fb5aa82c4808 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go @@ -93,6 +93,8 @@ func TestDataprocDiffSuppress(t *testing.T) { {"1.3.10-debian9", "1.3-debian9"}, {"1.3.10", "1.3"}, {"1.3-debian9", "1.3"}, + {"1.3.10-debian9", "1.3.prodprevious-debian9"}, + {"1.3.10-debian9", "1.3.prodcurrent-debian9"}, } noSuppress := [][]string{ @@ -106,6 +108,9 @@ func TestDataprocDiffSuppress(t *testing.T) { {"1.3", "1.3.10"}, {"1.3", "1.3.10-debian9"}, {"1.3", "1.3-debian9"}, + {"1.3.prodprevious-debian9", "1.3.10-debian9"}, + {"1.3.prodcurrent-debian9", "1.3.10-debian9"}, + {"1.3.10-debian9", "1.3.randomstring-debian9"}, } for _, tup := range doSuppress { From 47c5bf76534b05f25f690cc01ba3261b10be2ad1 Mon Sep 17 00:00:00 2001 From: rlapin-pl <114071972+rlapin-pl@users.noreply.github.com> Date: Mon, 9 Jun 2025 20:16:33 +0200 Subject: [PATCH 330/884] Fix service account issue (#14153) Co-authored-by: rlapin-pl --- .../resource_composer_environment_test.go | 1629 
++++++----------- .../docs/r/composer_environment.html.markdown | 2 + 2 files changed, 566 insertions(+), 1065 deletions(-) diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go index 92113ecf2156..643d31d0224b 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go @@ -2,15 +2,15 @@ package composer_test import ( "fmt" + "log" + "regexp" + "strings" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-provider-google/google/services/composer" tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" - "testing" - - "log" - "regexp" - "strings" "github.com/hashicorp/go-multierror" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -47,26 +47,22 @@ func bootstrapComposerServiceAgents(t *testing.T) { }) } -// Checks environment creation with minimum required information. -func TestAccComposerEnvironment_basic(t *testing.T) { +// Checks private environment creation for composer 2. 
+func TestAccComposerEnvironmentComposer2_private(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_basic(envName, network, subnetwork), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.airflow_uri"), - resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.gke_cluster"), - resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_count"), - resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_config.0.zone"), - resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_config.0.machine_type")), + Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -85,20 +81,25 @@ func TestAccComposerEnvironment_basic(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_basic(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -// Checks private environment creation for composer 1 and 2. -func TestAccComposerEnvironmentComposer1_private(t *testing.T) { +// Checks environment creation with minimum required information. 
+func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { + acctest.SkipIfVcr(t) t.Parallel() + kms := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-composer2-key1") + pid := envvar.GetTestProjectFromEnv() + bootstrapComposerServiceAgents(t) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -106,38 +107,33 @@ func TestAccComposerEnvironmentComposer1_private(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), + Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer2_private(t *testing.T) { +func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -145,39 +141,33 @@ func TestAccComposerEnvironmentComposer2_private(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", ImportState: true, ImportStateVerify: true, }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), - ImportStateVerify: true, - }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -// Checks environment creation with minimum required information. -func TestAccComposerEnvironment_privateWithWebServerControl(t *testing.T) { +func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -185,25 +175,14 @@ func TestAccComposerEnvironment_privateWithWebServerControl(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_privateWithWebServerControl(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to 
succeed, @@ -212,18 +191,20 @@ func TestAccComposerEnvironment_privateWithWebServerControl(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), + Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) { +func TestAccComposerEnvironment_ComposerV2(t *testing.T) { t.Parallel() + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -231,10 +212,7 @@ func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_databaseCfg(envName, network, subnetwork), - }, - { - Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -243,26 +221,24 @@ func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_withEncryptionConfigComposer1(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { t.Parallel() - kms := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-composer1-key1") - pid := envvar.GetTestProjectFromEnv() - bootstrapComposerServiceAgents(t) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -270,7 +246,10 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer1(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), + Config: testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork, serviceAccount), + }, + { + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -283,23 +262,20 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer1(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), Check: 
testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { - acctest.SkipIfVcr(t) +func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { t.Parallel() - kms := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-composer2-key1") - pid := envvar.GetTestProjectFromEnv() - bootstrapComposerServiceAgents(t) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -307,7 +283,10 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), + }, + { + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -320,19 +299,20 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { +func 
TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -340,7 +320,7 @@ func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -353,19 +333,20 @@ func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -373,10 +354,10 @@ func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: 
testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -385,18 +366,18 @@ func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), + Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_ComposerV2(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) @@ -412,6 +393,9 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { { Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), }, + { + Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), + }, { ResourceName: "google_composer_environment.test", ImportState: true, @@ -423,14 +407,14 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), + 
Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { +func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) @@ -444,10 +428,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -460,14 +441,14 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { +func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) @@ -481,10 +462,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), - 
}, - { - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -497,14 +475,14 @@ func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { +func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) @@ -518,7 +496,10 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork, serviceAccount), + }, + { + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -531,14 +512,14 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork, serviceAccount), Check: 
testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { +func TestAccComposer2Environment_withNodeConfig(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) @@ -552,10 +533,7 @@ func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), + Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -564,20 +542,19 @@ func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), + Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { +func TestAccComposerEnvironmentAirflow2_withRecoveryConfig(t *testing.T) { t.Parallel() - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" @@ -589,10 +566,15 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_airflow2RecoveryCfg(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -601,52 +583,82 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { +// Checks behavior of config for creation for attributes that must +// be updated during create. +func TestAccComposerEnvironment_fixPyPiPackages(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), + Config: testAccComposerEnvironment_fixPyPiPackages(envName, network, subnetwork, serviceAccount), + ExpectError: regexp.MustCompile("Failed to install Python packages"), + }, + { + Config: testAccComposerEnvironment_fixPyPiPackagesUpdate(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", ImportState: true, ImportStateVerify: true, }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
- { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, }, }) } -func TestAccComposerEnvironment_composerV1MasterAuthNetworks(t *testing.T) { +func testAccComposerEnvironmentDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_composer_environment" { + continue + } + + idTokens := strings.Split(rs.Primary.ID, "/") + if len(idTokens) != 6 { + return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}", rs.Primary.ID) + } + envName := &composer.ComposerEnvironmentName{ + Project: idTokens[1], + Region: idTokens[3], + Environment: idTokens[5], + } + + _, err := config.NewComposerClient(config.UserAgent).Projects.Locations.Environments.Get(envName.ResourceName()).Do() + if err == nil { + return fmt.Errorf("environment %s still exists", envName.ResourceName()) + } + } + + return nil + } +} + +// Checks environment creation with custom bucket +func TestAccComposerEnvironment_customBucket(t *testing.T) { t.Parallel() + bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" @@ -656,7 +668,7 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworks(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), + Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), }, { ResourceName: 
"google_composer_environment.test", @@ -665,30 +677,33 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworks(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), + Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { +func TestAccComposerEnvironment_customBucketWithUrl(t *testing.T) { t.Parallel() + bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), + Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -697,33 +712,33 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left 
by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), + Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV1MasterAuthNetworksUpdate(t *testing.T) { +// Checks Composer 3 environment creation with new fields. +func TestAccComposerEnvironmentComposer3_basic(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), - }, - { - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -732,33 +747,36 @@ func TestAccComposerEnvironment_composerV1MasterAuthNetworksUpdate(t *testing.T) }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
+ // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) { +// Checks Composer 3 specific updatable fields. +func TestAccComposerEnvironmentComposer3_update(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -767,23 +785,24 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposer2Environment_withNodeConfig(t *testing.T) { +func TestAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ @@ -792,31 +811,29 @@ func TestAccComposer2Environment_withNodeConfig(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork, serviceAccount), + ExpectError: regexp.MustCompile("Conflicting configuration arguments"), }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), + ExpectNonEmptyPlan: true, + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentAirflow2_withRecoveryConfig(t *testing.T) { +func TestAccComposerEnvironmentComposer3_databaseRetention(t *testing.T) { t.Parallel() + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -824,15 +841,7 @@ func TestAccComposerEnvironmentAirflow2_withRecoveryConfig(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_airflow2RecoveryCfg(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -845,18 +854,22 @@ func TestAccComposerEnvironmentAirflow2_withRecoveryConfig(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_withSoftwareConfig(t *testing.T) { 
+func TestAccComposerEnvironmentComposer3_withNetworkAttachment(t *testing.T) { t.Parallel() + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -864,7 +877,7 @@ func TestAccComposerEnvironment_withSoftwareConfig(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -876,378 +889,9 @@ func TestAccComposerEnvironment_withSoftwareConfig(t *testing.T) { // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -func TestAccComposerEnvironmentAirflow2_withSoftwareConfig(t *testing.T) { - t.Parallel() - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironment_airflow2SoftwareCfg(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -// Checks behavior of config for creation for attributes that must -// be updated during create. 
-func TestAccComposerEnvironment_withUpdateOnCreate(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -func TestAccComposerEnvironment_fixPyPiPackages(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironment_fixPyPiPackages(envName, network, subnetwork, serviceAccount), - ExpectError: regexp.MustCompile("Failed to install Python packages"), - }, - { - Config: 
testAccComposerEnvironment_fixPyPiPackagesUpdate(envName, network, subnetwork, serviceAccount), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComposerEnvironmentDestroyProducer(t *testing.T) func(s *terraform.State) error { - return func(s *terraform.State) error { - config := acctest.GoogleProviderConfig(t) - - for _, rs := range s.RootModule().Resources { - if rs.Type != "google_composer_environment" { - continue - } - - idTokens := strings.Split(rs.Primary.ID, "/") - if len(idTokens) != 6 { - return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}", rs.Primary.ID) - } - envName := &composer.ComposerEnvironmentName{ - Project: idTokens[1], - Region: idTokens[3], - Environment: idTokens[5], - } - - _, err := config.NewComposerClient(config.UserAgent).Projects.Locations.Environments.Get(envName.ResourceName()).Do() - if err == nil { - return fmt.Errorf("environment %s still exists", envName.ResourceName()) - } - } - - return nil - } -} - -// Checks environment creation with custom bucket -func TestAccComposerEnvironment_customBucket(t *testing.T) { - t.Parallel() - - bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible 
clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -func TestAccComposerEnvironment_customBucketWithUrl(t *testing.T) { - t.Parallel() - - bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -// Checks Composer 3 environment creation with new fields. 
-func TestAccComposerEnvironmentComposer3_basic(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -// Checks Composer 3 specific updatable fields. 
-func TestAccComposerEnvironmentComposer3_update(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), - }, - { - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
- { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -func TestAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork), - ExpectError: regexp.MustCompile("Conflicting configuration arguments"), - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
- { - PlanOnly: true, - ExpectNonEmptyPlan: true, - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -func TestAccComposerEnvironmentComposer3_databaseRetention(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
- { - PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - }, - }, - }) -} - -func TestAccComposerEnvironmentComposer3_withNetworkAttachment(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) - subnetwork := network + "-1" - networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) - fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - // This is a terrible clean-up step in order to get destroy to succeed, - // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. - { - PlanOnly: true, - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - ExpectNonEmptyPlan: true, + ExpectNonEmptyPlan: true, }, }, }) @@ -1261,6 +905,7 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing. 
subnetwork := network + "-1" networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -1268,10 +913,10 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing. CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -1283,7 +928,7 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), ExpectNonEmptyPlan: true, }, @@ -1299,6 +944,7 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testi subnetwork := network + "-1" networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -1306,10 +952,10 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testi CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -1321,7 +967,7 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testi // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), ExpectNonEmptyPlan: true, }, @@ -1336,6 +982,7 @@ func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -1343,10 +990,10 @@ func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -1359,7 +1006,7 @@ func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -1373,6 +1020,7 @@ func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, 
acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -1380,10 +1028,10 @@ func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -1396,7 +1044,7 @@ func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -1410,6 +1058,7 @@ func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" errorRegExp, _ := regexp.Compile(".*upgrade to composer 3 is not yet supported.*") + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -1417,10 +1066,10 @@ func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_empty(envName, network, 
subnetwork), + Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork, serviceAccount), }, { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), ExpectError: errorRegExp, }, // This is a terrible clean-up step in order to get destroy to succeed, @@ -1429,7 +1078,7 @@ func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), + Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -1518,8 +1167,20 @@ resource "google_compute_subnetwork" "test" { `, bucketName, envName, network, subnetwork) } -func testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork string) string { +func testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_storage_bucket" "test" { name = "%s" location = "us-central1" @@ -1533,6 +1194,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name } software_config { image_version = "composer-2-airflow-2" @@ -1541,6 +1203,7 @@ resource "google_composer_environment" "test" 
{ storage_config { bucket = google_storage_bucket.test.url } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -1556,93 +1219,23 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, bucketName, envName, network, subnetwork) -} - -func testAccComposerEnvironment_basic(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - machine_type = "n1-standard-1" - ip_allocation_policy { - use_ip_aliases = true - cluster_ipv4_cidr_block = "10.0.0.0/16" - } - } - software_config { - image_version = "composer-1-airflow-2.3" - } - } -} - -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link -} -`, name, network, subnetwork) +`, serviceAccount, bucketName, envName, network, subnetwork) } -func testAccComposerEnvironmentComposer1_private(name, network, subnetwork string) string { +func testAccComposerEnvironmentComposer2_private(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - enable_ip_masq_agent = true - ip_allocation_policy { - use_ip_aliases = true - 
cluster_ipv4_cidr_block = "10.0.0.0/16" - } - } - software_config { - image_version = "composer-1-airflow-2" - } - private_environment_config { - enable_private_endpoint = true - enable_privately_used_public_ips = true - } - } -} - -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} +data "google_project" "project" {} -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link - private_ip_google_access = true +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" } -`, name, network, subnetwork) +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" } -func testAccComposerEnvironmentComposer2_private(name, network, subnetwork string) string { - return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -1651,6 +1244,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name enable_ip_masq_agent = true ip_allocation_policy { cluster_ipv4_cidr_block = "10.56.0.0/14" @@ -1659,195 +1253,13 @@ resource "google_composer_environment" "test" { software_config { image_version = "composer-2-airflow-2" } - private_environment_config { - connection_type = "VPC_PEERING" - enable_private_endpoint = true - enable_privately_used_public_ips = true - } - } -} - -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet 
-resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link - private_ip_google_access = true -} -`, name, network, subnetwork) -} - -func testAccComposerEnvironment_privateWithWebServerControl(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - ip_allocation_policy { - use_ip_aliases = true - cluster_ipv4_cidr_block = "10.56.0.0/14" - services_ipv4_cidr_block = "10.122.0.0/20" - } - } - private_environment_config { - enable_private_endpoint = false - web_server_ipv4_cidr_block = "172.30.240.0/24" - cloud_sql_ipv4_cidr_block = "10.32.0.0/12" - master_ipv4_cidr_block = "172.17.50.0/28" - } - software_config { - image_version = "composer-1-airflow-2" - } - web_server_network_access_control { - allowed_ip_range { - value = "192.168.0.1" - description = "my range1" - } - allowed_ip_range { - value = "0.0.0.0/0" - } - } - } -} - -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link - private_ip_google_access = true -} -`, name, network, subnetwork) -} - -func testAccComposerEnvironment_privateWithWebServerControlUpdated(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - - config 
{ - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - ip_allocation_policy { - use_ip_aliases = true - cluster_ipv4_cidr_block = "10.56.0.0/14" - services_ipv4_cidr_block = "10.122.0.0/20" - } - } - private_environment_config { - enable_private_endpoint = false - web_server_ipv4_cidr_block = "172.30.240.0/24" - cloud_sql_ipv4_cidr_block = "10.32.0.0/12" - master_ipv4_cidr_block = "172.17.50.0/28" - } - software_config { - image_version = "composer-1-airflow-2" - } - web_server_network_access_control { - allowed_ip_range { - value = "192.168.0.1" - description = "my range1" - } - allowed_ip_range { - value = "0.0.0.0/0" - } - } - } -} - -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link - private_ip_google_access = true -} -`, name, network, subnetwork) -} - -func testAccComposerEnvironment_databaseCfg(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - } - database_config { - machine_type = "db-n1-standard-4" - } - software_config { - image_version = "composer-1-airflow-2" - } - } -} - -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = 
"10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link -} -`, name, network, subnetwork) -} - -func testAccComposerEnvironment_databaseCfgUpdated(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - } - database_config { - machine_type = "db-n1-standard-8" - } - software_config { - image_version = "composer-1-airflow-2" + private_environment_config { + connection_type = "VPC_PEERING" + enable_private_endpoint = true + enable_privately_used_public_ips = true } } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -1858,19 +1270,29 @@ resource "google_compute_network" "test" { } resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link + private_ip_google_access = true } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } -func testAccComposerEnvironment_encryptionCfg(pid, compVersion, airflowVersion, name, kmsKey, network, subnetwork string) string { +func testAccComposerEnvironment_encryptionCfg(pid, compVersion, airflowVersion, name, kmsKey, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` data "google_project" "project" { project_id = "%s" } +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = 
"roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} resource "google_kms_crypto_key_iam_member" "iam" { crypto_key_id = "%s" @@ -1885,6 +1307,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name } software_config { @@ -1895,6 +1318,7 @@ resource "google_composer_environment" "test" { kms_key_name = "%s" } } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel // that use the default network/subnet @@ -1909,21 +1333,37 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } `, - pid, kmsKey, name, compVersion, airflowVersion, kmsKey, network, subnetwork) + pid, serviceAccount, kmsKey, name, compVersion, airflowVersion, kmsKey, network, subnetwork) } -func testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork string) string { +func testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { + node_config { + service_account = google_service_account.test.name + } maintenance_window { start_time = "2019-08-01T01:00:00Z" end_time = "2019-08-01T07:00:00Z" recurrence = "FREQ=WEEKLY;BYDAY=TU,WE" } } + depends_on = [google_project_iam_member.composer-worker] } resource 
"google_compute_network" "test" { @@ -1938,21 +1378,37 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork string) string { +func testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { + node_config { + service_account = google_service_account.test.name + } maintenance_window { start_time = "2019-08-01T01:00:00Z" end_time = "2019-08-01T07:00:00Z" recurrence = "FREQ=DAILY" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -1967,7 +1423,7 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } func testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount string) string { @@ -2322,8 +1778,20 @@ resource "google_compute_subnetwork" "test" { `, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork string) string { +func testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} 
+ +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2332,6 +1800,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name } software_config { @@ -2341,7 +1810,7 @@ resource "google_composer_environment" "test" { cloud_composer_connection_subnetwork = google_compute_subnetwork.test.self_link } } - + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2357,11 +1826,23 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, envName, network, subnetwork) +`, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_MasterAuthNetworks(compVersion, airflowVersion, envName, network, subnetwork string) string { +func testAccComposerEnvironment_MasterAuthNetworks(compVersion, airflowVersion, envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2370,6 +1851,7 @@ resource "google_composer_environment" "test" { node_config { network = 
google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name } software_config { @@ -2387,6 +1869,7 @@ resource "google_composer_environment" "test" { } } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2401,7 +1884,7 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } -`, envName, compVersion, airflowVersion, network, subnetwork) +`, serviceAccount, envName, compVersion, airflowVersion, network, subnetwork) } func testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount string) string { @@ -2482,8 +1965,20 @@ resource "google_compute_subnetwork" "test" { `, serviceAccount, envName, network, subnetwork) } -func testAccComposerEnvironment_MasterAuthNetworksUpdate(compVersion, airflowVersion, envName, network, subnetwork string) string { +func testAccComposerEnvironment_MasterAuthNetworksUpdate(compVersion, airflowVersion, envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2492,6 +1987,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name } software_config { @@ -2506,6 +2002,7 @@ resource "google_composer_environment" "test" { } } } + depends_on = 
[google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2520,7 +2017,7 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } -`, envName, compVersion, airflowVersion, network, subnetwork) +`, serviceAccount, envName, compVersion, airflowVersion, network, subnetwork) } func testAccComposerEnvironment_updateComposerV2(name, network, subnetwork, serviceAccount string) string { @@ -2581,165 +2078,23 @@ resource "google_composer_environment" "test" { triggerer { cpu = 0.75 memory_gb = 2 - count = 1 - } - } - environment_size = "ENVIRONMENT_SIZE_LARGE" - data_retention_config { - task_logs_retention_config { - storage_mode = "CLOUD_LOGGING_AND_CLOUD_STORAGE" - } - } - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" - } - } - depends_on = [google_project_iam_member.composer-worker] -} - -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-east1" - network = google_compute_network.test.self_link - private_ip_google_access = true -} -`, serviceAccount, name, network, subnetwork) -} - -func testAccComposer2Environment_nodeCfg(environment, network, subnetwork, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - - service_account = google_service_account.test.name - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" - } - tags = toset(["t1", "t2"]) - } - software_config { - image_version = "composer-2-airflow-2" - 
} - } - depends_on = [google_project_iam_member.composer-worker] -} - -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link -} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} - -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} -`, environment, network, subnetwork, serviceAccount) -} - -func testAccComposerEnvironment_airflow2RecoveryCfg(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" - } - } - - software_config { - image_version = "composer-2-airflow-2" - } - - recovery_config { - scheduled_snapshots_config { - enabled = true - snapshot_location = "gs://example-bucket/environment_snapshots" - snapshot_creation_schedule = "0 4 * * *" - time_zone = "UTC+01" - } - } - } - -} - -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link - private_ip_google_access = true -} -`, name, network, subnetwork) -} - -func testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" 
- - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" - } - } - - software_config { - image_version = "composer-2-airflow-2" - } - - recovery_config { - scheduled_snapshots_config { - enabled = true - snapshot_location = "gs://example-bucket/environment_snapshots2" - snapshot_creation_schedule = "1 2 * * *" - time_zone = "UTC+02" + count = 1 + } + } + environment_size = "ENVIRONMENT_SIZE_LARGE" + data_retention_config { + task_logs_retention_config { + storage_mode = "CLOUD_LOGGING_AND_CLOUD_STORAGE" } } + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } } - + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2750,15 +2105,17 @@ resource "google_compute_network" "test" { resource "google_compute_subnetwork" "test" { name = "%s" ip_cidr_range = "10.2.0.0/16" - region = "us-central1" + region = "us-east1" network = google_compute_network.test.self_link private_ip_google_access = true } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } -func testAccComposerEnvironment_softwareCfg(name, network, subnetwork string) string { +func testAccComposer2Environment_nodeCfg(environment, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2766,17 +2123,20 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" + + service_account = google_service_account.test.name + ip_allocation_policy { + 
cluster_ipv4_cidr_block = "10.0.0.0/16" + } + tags = toset(["t1", "t2"]) } software_config { - image_version = "composer-1-airflow-1" - python_version = "3" + image_version = "composer-2-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet resource "google_compute_network" "test" { name = "%s" auto_create_subnetworks = false @@ -2788,65 +2148,64 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, network, subnetwork) -} -func testAccComposerEnvironment_updateOnlyFields(name, network, subnetwork string) string { - return fmt.Sprintf(` -resource "google_composer_environment" "test" { - name = "%s" - region = "us-central1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" - } - software_config { - image_version = "composer-1-airflow-2" - pypi_packages = { - numpy = "" - } - } - } +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" } -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet -resource "google_compute_network" "test" { - name = "%s" - auto_create_subnetworks = false +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} +`, environment, network, subnetwork, serviceAccount) } -resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link +func testAccComposerEnvironment_airflow2RecoveryCfg(name, network, subnetwork, serviceAccount 
string) string { + return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" } -`, name, network, subnetwork) +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" } -func testAccComposerEnvironment_airflow2SoftwareCfg(name, network, subnetwork string) string { - return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" + config { node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.0.0.0/16" + } } + software_config { - image_version = "composer-1-airflow-2" - scheduler_count = 2 + image_version = "composer-2-airflow-2" + } + + recovery_config { + scheduled_snapshots_config { + enabled = true + snapshot_location = "gs://example-bucket/environment_snapshots" + snapshot_creation_schedule = "0 4 * * *" + time_zone = "UTC+01" + } } } + depends_on = [google_project_iam_member.composer-worker] } -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet resource "google_compute_network" "test" { name = "%s" auto_create_subnetworks = false @@ -2857,30 +2216,55 @@ resource "google_compute_subnetwork" "test" { ip_cidr_range = "10.2.0.0/16" region = "us-central1" network = google_compute_network.test.self_link + private_ip_google_access = true } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } -func testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(name, 
network, subnetwork string) string { +func testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" + config { node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - zone = "us-central1-a" + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.0.0.0/16" + } + service_account = google_service_account.test.name } + software_config { - image_version = "composer-1-airflow-2" - scheduler_count = 3 + image_version = "composer-2-airflow-2" + } + + recovery_config { + scheduled_snapshots_config { + enabled = true + snapshot_location = "gs://example-bucket/environment_snapshots2" + snapshot_creation_schedule = "1 2 * * *" + time_zone = "UTC+02" + } } } + depends_on = [google_project_iam_member.composer-worker] } -// use a separate network to avoid conflicts with other tests running in parallel -// that use the default network/subnet resource "google_compute_network" "test" { name = "%s" auto_create_subnetworks = false @@ -2891,8 +2275,9 @@ resource "google_compute_subnetwork" "test" { ip_cidr_range = "10.2.0.0/16" region = "us-central1" network = google_compute_network.test.self_link + private_ip_google_access = true } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } func testAccComposerEnvironment_fixPyPiPackages(environment, network, 
subnetwork, serviceAccount string) string { @@ -3052,16 +2437,32 @@ resource "google_project_iam_member" "composer-worker" { `, environment, network, subnetwork, serviceAccount) } -func testAccComposerEnvironmentComposer2_empty(name, network, subnetwork string) string { +func testAccComposerEnvironmentComposer2_empty(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { + node_config { + service_account = google_service_account.test.name + } software_config { image_version = "composer-2-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -3077,11 +2478,23 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } -func testAccComposerEnvironmentComposer3_empty(name, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_empty(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource 
"google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3092,8 +2505,10 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id + service_account = google_service_account.test.name } } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -3109,7 +2524,7 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } func testAccComposerEnvironmentComposer2_usesUnsupportedField(name string) string { @@ -3144,8 +2559,20 @@ resource "google_composer_environment" "test" { `, name) } -func testAccComposerEnvironmentComposer3_basic(name, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_basic(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3154,6 +2581,7 @@ resource "google_composer_environment" "test" { composer_internal_ipv4_cidr_block = "100.64.128.0/20" network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id + service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" @@ -3169,6 +2597,7 @@ resource "google_composer_environment" "test" { enable_private_environment = true enable_private_builds_only = true } + depends_on = 
[google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -3184,11 +2613,23 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } -func testAccComposerEnvironmentComposer3_update(name, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_update(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3197,6 +2638,7 @@ resource "google_composer_environment" "test" { network = google_compute_network.test_1.id subnetwork = google_compute_subnetwork.test_1.id composer_internal_ipv4_cidr_block = "100.64.128.0/20" + service_account = google_service_account.test.name } software_config { web_server_plugins_mode = "DISABLED" @@ -3213,6 +2655,7 @@ resource "google_composer_environment" "test" { enable_private_environment = false enable_private_builds_only = false } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -3240,22 +2683,36 @@ resource "google_compute_subnetwork" "test_1" { region = "us-central1" network = google_compute_network.test_1.self_link } -`, name, network, subnetwork, network+"-update", subnetwork+"update") +`, serviceAccount, name, network, subnetwork, network+"-update", subnetwork+"update") } -func 
testAccComposerEnvironmentComposer3_withNetworkAttachment(name, networkAttachment, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_withNetworkAttachment(name, networkAttachment, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { node_config { composer_network_attachment = google_compute_network_attachment.test.id + service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network_attachment" "test" { @@ -3294,11 +2751,23 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, networkAttachment, network, subnetwork, network, subnetwork) +`, serviceAccount, name, networkAttachment, network, subnetwork, network, subnetwork) } -func testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(name, networkAttachment, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(name, networkAttachment, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = 
"serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3306,11 +2775,13 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id + service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network_attachment" "test" { @@ -3349,11 +2820,23 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, networkAttachment, network, subnetwork, network, subnetwork) +`, serviceAccount, name, networkAttachment, network, subnetwork, network, subnetwork) } -func testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(name, networkAttachment, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(name, networkAttachment, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3362,11 +2845,13 @@ resource "google_composer_environment" "test" { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id composer_network_attachment = google_compute_network_attachment.test.id + service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" } 
} + depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network_attachment" "test" { @@ -3389,11 +2874,23 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, networkAttachment, network, subnetwork) +`, serviceAccount, name, networkAttachment, network, subnetwork) } -func testAccComposerEnvironmentComposer3_databaseRetention(name, network, subnetwork string) string { +func testAccComposerEnvironmentComposer3_databaseRetention(name, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -3404,6 +2901,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id + service_account = google_service_account.test.name } data_retention_config { airflow_metadata_retention_config { @@ -3412,6 +2910,7 @@ resource "google_composer_environment" "test" { } } } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -3427,7 +2926,7 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, network, subnetwork) +`, serviceAccount, name, network, subnetwork) } // WARNING: This is not actually a check and is a terrible clean-up step because Composer Environments diff --git 
a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown index 53fcc32c059a..62fd059fc110 100644 --- a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown @@ -1422,6 +1422,8 @@ The following arguments are supported: note that the service account must have `roles/composer.worker` for any GCP resources created under the Cloud Composer Environment. + This field is required for newly created environments. + * `tags` - (Optional) The list of instance tags applied to all node VMs. Tags are From d1bf3a336abe8b3cd04f71523781cf72a8d64d51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Taneli=20Lepp=C3=A4?= Date: Mon, 9 Jun 2025 23:48:34 +0200 Subject: [PATCH 331/884] Make capacity immutable in google_network_security_address_group (#13784) --- mmv1/products/networksecurity/AddressGroup.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/networksecurity/AddressGroup.yaml b/mmv1/products/networksecurity/AddressGroup.yaml index e9269dcccc72..b6ef870b8faf 100644 --- a/mmv1/products/networksecurity/AddressGroup.yaml +++ b/mmv1/products/networksecurity/AddressGroup.yaml @@ -135,6 +135,7 @@ properties: description: | Capacity of the Address Group. 
required: true + immutable: true - name: 'purpose' type: Array description: | From 99dfc9edea008ef880d10061a21a25c4f3344ddc Mon Sep 17 00:00:00 2001 From: martin-guillen Date: Mon, 9 Jun 2025 21:28:03 -0400 Subject: [PATCH 332/884] Improve Node Pool Reconciliation time by listing and caching node pools instead of getting them separately (#14175) --- .../resource_container_node_pool.go.tmpl | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index dcbfb2945708..f2229b4f3ccc 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -5,6 +5,9 @@ import ( "log" "regexp" "strings" +{{- if ne $.TargetVersionName `ga` }} + "sync" +{{- end }} "time" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -25,6 +28,78 @@ import ( var clusterIdRegex = regexp.MustCompile("projects/(?P[^/]+)/locations/(?P[^/]+)/clusters/(?P[^/]+)") +{{ if ne $.TargetVersionName `ga` }} +type nodePoolWithUpdateTime struct { + nodePool *container.NodePool + updateTime time.Time +} + +type nodePoolCache struct { + nodePools map[string]*nodePoolWithUpdateTime + ttl time.Duration + mutex sync.RWMutex +} + +func (nodePoolCache *nodePoolCache) get(nodePool string) (*container.NodePool, error) { + nodePoolCache.mutex.RLock() + defer nodePoolCache.mutex.RUnlock() + np, ok := nodePoolCache.nodePools[nodePool] + if !ok { + return nil, fmt.Errorf("NodePool %q was not found", nodePool) + } + return np.nodePool, nil +} + +func (nodePoolCache *nodePoolCache) refreshIfNeeded(d *schema.ResourceData, config *transport_tpg.Config, userAgent string, nodePoolInfo *NodePoolInformation, name string) error { + if !nodePoolCache.needsRefresh(nodePoolInfo.fullyQualifiedName(name)) { + return nil 
+ } + + nodePoolCache.mutex.Lock() + defer nodePoolCache.mutex.Unlock() + + parent := fmt.Sprintf("projects/%s/locations/%s/clusters/%s", nodePoolInfo.project, nodePoolInfo.location, nodePoolInfo.cluster) + clusterNodePoolsListCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.List(parent) + if config.UserProjectOverride { + clusterNodePoolsListCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) + } + listNodePoolsResponse, err := clusterNodePoolsListCall.Do() + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePools from cluster %q", nodePoolInfo.cluster)) + } + + updateTime := time.Now() + for _, nodePool := range listNodePoolsResponse.NodePools { + nodePoolCache.nodePools[nodePoolInfo.fullyQualifiedName(nodePool.Name)] = &nodePoolWithUpdateTime{ + nodePool: nodePool, + updateTime: updateTime, + } + } + return nil +} + +func (nodePoolCache *nodePoolCache) needsRefresh(nodePool string) bool { + nodePoolCache.mutex.RLock() + defer nodePoolCache.mutex.RUnlock() + np, ok := nodePoolCache.nodePools[nodePool] + if !ok { + return true + } + return time.Since(np.updateTime) > nodePoolCache.ttl +} + +func (nodePoolCache *nodePoolCache) remove(nodePool string) { + nodePoolCache.mutex.Lock() + defer nodePoolCache.mutex.Unlock() + delete(nodePoolCache.nodePools, nodePool) +} + +var npCache = &nodePoolCache{ + nodePools: make(map[string]*nodePoolWithUpdateTime), + ttl: 30 * time.Second, +} +{{- end }} + func ResourceContainerNodePool() *schema.Resource { return &schema.Resource{ Create: resourceContainerNodePoolCreate, @@ -702,11 +777,16 @@ func resourceContainerNodePoolRead(d *schema.ResourceData, meta interface{}) err name := getNodePoolName(d.Id()) +{{ if eq $.TargetVersionName `ga` }} clusterNodePoolsGetCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Get(nodePoolInfo.fullyQualifiedName(name)) if config.UserProjectOverride { 
clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } nodePool, err := clusterNodePoolsGetCall.Do() +{{- else }} + npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) + nodePool, err := npCache.get(nodePoolInfo.fullyQualifiedName(name)) +{{- end }} if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) } @@ -766,6 +846,10 @@ func resourceContainerNodePoolUpdate(d *schema.ResourceData, meta interface{}) e return err } +{{ if ne $.TargetVersionName `ga` }} + npCache.remove(nodePoolInfo.fullyQualifiedName(name)) +{{- end }} + return resourceContainerNodePoolRead(d, meta) } @@ -846,6 +930,10 @@ func resourceContainerNodePoolDelete(d *schema.ResourceData, meta interface{}) e d.SetId("") +{{ if ne $.TargetVersionName `ga` }} + npCache.remove(nodePoolInfo.fullyQualifiedName(name)) +{{- end }} + return nil } @@ -862,11 +950,16 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( } name := getNodePoolName(d.Id()) +{{- if eq $.TargetVersionName `ga` }} clusterNodePoolsGetCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Get(nodePoolInfo.fullyQualifiedName(name)) if config.UserProjectOverride { clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } _, err = clusterNodePoolsGetCall.Do() +{{- else }} + npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) + _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) +{{- end }} if err != nil { if err = transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Container NodePool %s", name)); err == nil { From 4e2b8f99d3002e8b01e76d7e4a0bb9917d13e731 Mon Sep 17 00:00:00 2001 From: FilipKubawskiOkta Date: Tue, 10 Jun 2025 17:31:58 +0200 Subject: [PATCH 333/884] Fix permadiff on `dataflow_flex_template_job` (#14038) --- ...esource_dataflow_flex_template_job.go.tmpl | 22 +++- 
...ce_dataflow_flex_template_job_test.go.tmpl | 112 ++++++++++++++++++ 2 files changed, 129 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl index a49ae520a043..2312ff2334b3 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl @@ -477,12 +477,24 @@ func resourceDataflowFlexTemplateJobRead(d *schema.ResourceData, meta interface{ } optionsMap := sdkPipelineOptions["options"].(map[string]interface{}) + // sdkPipelineOptions is not always populated with these values, hence the fallback + if _, ok := d.GetOk("num_workers"); !ok && optionsMap["numWorkers"] == nil || optionsMap["numWorkers"] == 0 { + optionsMap["numWorkers"] = job.Environment.WorkerPools[0].NumWorkers + } + if _, ok := d.GetOk("max_num_workers"); !ok && optionsMap["maxNumWorkers"] == nil || optionsMap["maxNumWorkers"] == 0 { + optionsMap["maxNumWorkers"] = job.Environment.WorkerPools[0].AutoscalingSettings.MaxNumWorkers + } + if _, ok := d.GetOk("machine_type"); !ok && optionsMap["workerMachineType"] == nil || optionsMap["workerMachineType"] == "" { + optionsMap["workerMachineType"] = job.Environment.WorkerPools[0].MachineType + } + if _, ok := d.GetOk("sdk_container_image"); !ok && optionsMap["sdkContainerImage"] == nil || optionsMap["sdkContainerImage"] == "" { + optionsMap["sdkContainerImage"] = job.Environment.WorkerPools[0].WorkerHarnessContainerImage + } + + if err := d.Set("temp_location", optionsMap["tempLocation"]); err != nil { return fmt.Errorf("Error setting temp_gcs_location: %s", err) } - if err := d.Set("network", optionsMap["network"]); err != nil { - return fmt.Errorf("Error setting network: %s", err) - } if err := d.Set("num_workers", optionsMap["numWorkers"]); err != nil { 
return fmt.Errorf("Error setting num_workers: %s", err) } @@ -495,10 +507,10 @@ func resourceDataflowFlexTemplateJobRead(d *schema.ResourceData, meta interface{ if err := d.Set("sdk_container_image", optionsMap["sdkContainerImage"]); err != nil { return fmt.Errorf("Error setting sdk_container_image: %s", err) } - if err := d.Set("network", optionsMap["network"]); err != nil { + if err := d.Set("network", job.Environment.WorkerPools[0].Network); err != nil { return fmt.Errorf("Error setting network: %s", err) } - if err := d.Set("subnetwork", optionsMap["subnetwork"]); err != nil { + if err := d.Set("subnetwork", job.Environment.WorkerPools[0].Subnetwork); err != nil { return fmt.Errorf("Error setting subnetwork: %s", err) } if err := d.Set("machine_type", optionsMap["workerMachineType"]); err != nil { diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl index cb7c2847a19d..b0daab35b0a4 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl @@ -643,6 +643,50 @@ func TestAccDataflowFlexTemplateJob_enableStreamingEngine(t *testing.T) { }) } +func TestAccDataflowFlexTemplateJob_workerPoolsFallback(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + context1 := map[string]interface{}{ + "random_id": acctest.RandString(t, 10), + "max_workers": 2, + "num_workers": 1, + "machine_type": `"n1-standard-1"`, + } + + context2 := map[string]interface{}{ + "random_id": context1["random_id"], + "max_workers": 3, + "num_workers": 2, + "machine_type": `"n1-standard-1"`, + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckDataflowJobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataflowFlexTemplateJob_workerPoolFallback(context1), + Check: resource.ComposeTestCheckFunc( + testAccDataflowFlexJobExists(t, "google_dataflow_flex_template_job.flex_job", false), + resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "num_workers", "1"), + resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "max_workers", "2"), + ), + }, + { + Config: testAccDataflowFlexTemplateJob_workerPoolFallback(context2), + Check: resource.ComposeTestCheckFunc( + testAccDataflowFlexJobExists(t, "google_dataflow_flex_template_job.flex_job", true), + resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "num_workers", "2"), + resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "max_workers", "3"), + ), + }, + }, + }) +} + + func testAccDataflowFlexTemplateJobHasNetwork(t *testing.T, res, expected string, wait bool) resource.TestCheckFunc { return func(s *terraform.State) error { instanceTmpl, err := testAccDataflowFlexTemplateGetGeneratedInstanceTemplate(t, s, res) @@ -2042,4 +2086,72 @@ resource "google_dataflow_flex_template_job" "flex_job" { } `, topicName, bucket, job) } + +func testAccDataflowFlexTemplateJob_workerPoolFallback(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_pubsub_topic" "example" { + name = "tf-test-topic-%{random_id}" +} + +data "google_storage_bucket_object" "flex_template" { + name = "latest/flex/Streaming_Data_Generator" + bucket = "dataflow-templates" +} + +resource "google_storage_bucket" "bucket" { + name = "tf-test-bucket-%{random_id}" + location = "US-CENTRAL1" + force_destroy = true + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "schema" { + name = "schema-%{random_id}.json" + bucket = google_storage_bucket.bucket.name + content = < Date: Tue, 10 Jun 2025 09:12:24 -0700 Subject: 
[PATCH 334/884] Add support for `managed_server_ca` to `google_redis_cluster` resource (#14172) --- mmv1/products/redis/Cluster.yaml | 19 ++++ .../terraform/decoders/redis_cluster.go.tmpl | 48 ++++++++++ .../redis_cluster_tls_enabled.tf.tmpl | 57 ++++++++++++ .../redis/resource_redis_cluster_test.go | 91 +++++++++++++++++++ 4 files changed, 215 insertions(+) create mode 100644 mmv1/templates/terraform/decoders/redis_cluster.go.tmpl create mode 100644 mmv1/templates/terraform/examples/redis_cluster_tls_enabled.tf.tmpl diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 2d0a32b65905..5b26b4639464 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -117,6 +117,7 @@ async: resource_inside_response: false custom_code: encoder: 'templates/terraform/encoders/redis_cluster.go.tmpl' + decoder: 'templates/terraform/decoders/redis_cluster.go.tmpl' sweeper: ensure_value: field: deletionProtectionEnabled @@ -811,3 +812,21 @@ properties: - name: 'kmsKey' type: String description: The KMS key used to encrypt the at-rest data of the cluster. + - name: 'managedServerCa' + type: NestedObject + output: true + description: Cluster's Certificate Authority. 
This field will only be populated if Redis Cluster's transit_encryption_mode is TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION + properties: + - name: 'caCerts' + type: Array + output: true + description: The PEM encoded CA certificate chains for redis managed server authentication + item_type: + type: NestedObject + properties: + - name: 'certificates' + type: Array + output: true + description: The certificates that form the CA chain, from leaf to root order + item_type: + type: String diff --git a/mmv1/templates/terraform/decoders/redis_cluster.go.tmpl b/mmv1/templates/terraform/decoders/redis_cluster.go.tmpl new file mode 100644 index 000000000000..2076004090c6 --- /dev/null +++ b/mmv1/templates/terraform/decoders/redis_cluster.go.tmpl @@ -0,0 +1,48 @@ +// Such custom code is necessary as the Cluster's certificate authority has to be retrieved via a dedicated +// getCertificateAuthority API. +// See https://cloud.google.com/memorystore/docs/cluster/reference/rest/v1/projects.locations.clusters/getCertificateAuthority#http-request +// for details about this API. 
+config := meta.(*transport_tpg.Config) + +userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) +if err != nil { + return nil, err +} + +// Only clusters with TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION mode have certificate authority set +if v, ok := res["transitEncryptionMode"].(string); !ok || v!="TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION" { + return res, nil +} + +url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}RedisBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}region{{"}}"}}/clusters/{{"{{"}}name{{"}}"}}/certificateAuthority") +if err != nil { + return nil, err +} + +billingProject := "" + +project, err := tpgresource.GetProject(d, config) +if err != nil { + return nil, fmt.Errorf("Error fetching project for Cluster: %s", err) +} + +billingProject = project + +// err == nil indicates that the billing_project value was found +if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp +} + +certificateAuthority, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, +}) +if err != nil { + return nil, fmt.Errorf("Error reading certificateAuthority: %s", err) +} + +res["managedServerCa"] = certificateAuthority["managedServerCa"] +return res, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/redis_cluster_tls_enabled.tf.tmpl b/mmv1/templates/terraform/examples/redis_cluster_tls_enabled.tf.tmpl new file mode 100644 index 000000000000..4d67e1fd7fe7 --- /dev/null +++ b/mmv1/templates/terraform/examples/redis_cluster_tls_enabled.tf.tmpl @@ -0,0 +1,57 @@ +resource "google_redis_cluster" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "cluster_name"}}" + shard_count = 3 + psc_configs { + network = google_compute_network.consumer_net.id + } + region = "us-central1" + replica_count = 1 + node_type = "REDIS_SHARED_CORE_NANO" + 
transit_encryption_mode = "TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION" + authorization_mode = "AUTH_MODE_DISABLED" + redis_configs = { + maxmemory-policy = "volatile-ttl" + } + deletion_protection_enabled = {{index $.Vars "deletion_protection_enabled"}} + + zone_distribution_config { + mode = "MULTI_ZONE" + } + maintenance_policy { + weekly_maintenance_window { + day = "MONDAY" + start_time { + hours = 1 + minutes = 0 + seconds = 0 + nanos = 0 + } + } + } + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "{{index $.Vars "policy_name"}}" + location = "us-central1" + service_class = "gcp-memorystore-redis" + description = "my basic service connection policy" + network = google_compute_network.consumer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.consumer_subnet.id] + } +} + +resource "google_compute_subnetwork" "consumer_subnet" { + name = "{{index $.Vars "subnet_name"}}" + ip_cidr_range = "10.0.0.248/29" + region = "us-central1" + network = google_compute_network.consumer_net.id +} + +resource "google_compute_network" "consumer_net" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go b/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go index 2c7ffdf39f29..e8938fa71046 100644 --- a/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go +++ b/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go @@ -1250,3 +1250,94 @@ func createRedisClusterResourceConfig(params *ClusterParams, isSecondaryCluster crossClusterReplicationConfigBlock, dependsOnBlock) } + +func TestAccRedisCluster_redisClusterTlsEnabled(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "deletion_protection_enabled": false, + "random_suffix": acctest.RandString(t, 
10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRedisCluster_redisClusterTlsEnabled(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_redis_cluster.cluster-tls", "managed_server_ca.0.ca_certs.0.certificates.0"), + ), + }, + { + ResourceName: "google_redis_cluster.cluster-tls", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"gcs_source", "managed_backup_source", "name", "psc_configs", "region"}, + }, + }, + }) +} + +func testAccRedisCluster_redisClusterTlsEnabled(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_redis_cluster" "cluster-tls" { + name = "tf-test-tls-cluster%{random_suffix}" + shard_count = 3 + psc_configs { + network = google_compute_network.consumer_net.id + } + region = "us-central1" + replica_count = 1 + node_type = "REDIS_SHARED_CORE_NANO" + transit_encryption_mode = "TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION" + authorization_mode = "AUTH_MODE_DISABLED" + redis_configs = { + maxmemory-policy = "volatile-ttl" + } + deletion_protection_enabled = %{deletion_protection_enabled} + + zone_distribution_config { + mode = "MULTI_ZONE" + } + maintenance_policy { + weekly_maintenance_window { + day = "MONDAY" + start_time { + hours = 1 + minutes = 0 + seconds = 0 + nanos = 0 + } + } + } + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "us-central1" + service_class = "gcp-memorystore-redis" + description = "my basic service connection policy" + network = google_compute_network.consumer_net.id + psc_config { + subnetworks = 
[google_compute_subnetwork.consumer_subnet.id] + } +} + +resource "google_compute_subnetwork" "consumer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "us-central1" + network = google_compute_network.consumer_net.id +} + +resource "google_compute_network" "consumer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} +`, context) +} From fc13b991857e2e3e59f97c55d9f8937a1b82efa8 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Tue, 10 Jun 2025 13:31:25 -0400 Subject: [PATCH 335/884] Add unknown changelog entries for changelog generation (#14030) --- .ci/magician/cmd/generate_downstream.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index d93834e68dbf..3f88ad6c5daf 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -383,6 +383,12 @@ func addChangelogEntry(downstreamRepo *source.Repo, pullRequest *github.PullRequ return err } } + // If changelog entry is missing, add an entry "unknown: ". 
+ if matches == nil { + if err := rnr.WriteFile(filepath.Join(".changelog", fmt.Sprintf("%d.txt", pullRequest.Number)), "unknown: "+pullRequest.Title); err != nil { + return err + } + } return rnr.PopDir() } From 7b98f9802701b5e9322f16ff112f3ee1b026c275 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 10 Jun 2025 10:49:49 -0700 Subject: [PATCH 336/884] tgc-revival: tgc encoders and decoders (#14222) --- mmv1/api/resource/custom_code.go | 7 ++++ mmv1/products/compute/Address.yaml | 1 + .../cai2hcl/resource_converter.go.tmpl | 34 ++++++++++++++++++- .../tgc_next/encoders/compute_address.go.tmpl | 6 ++++ .../tfplan2cai/resource_converter.go.tmpl | 17 +++++++++- .../tgc_next/pkg/tgcresource/utils.go | 23 +++++++++++++ 6 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/tgc_next/encoders/compute_address.go.tmpl create mode 100644 mmv1/third_party/tgc_next/pkg/tgcresource/utils.go diff --git a/mmv1/api/resource/custom_code.go b/mmv1/api/resource/custom_code.go index a7281b918136..50e4425f618a 100644 --- a/mmv1/api/resource/custom_code.go +++ b/mmv1/api/resource/custom_code.go @@ -139,4 +139,11 @@ type CustomCode struct { TestCheckDestroy string `yaml:"test_check_destroy"` ValidateRawResourceConfigFuncs string `yaml:"raw_resource_config_validation"` + + // ==================== + // TGC Encoders & Decoders + // ==================== + TgcEncoder string `yaml:"tgc_encoder"` + + TgcDecoder string `yaml:"tgc_decoder"` } diff --git a/mmv1/products/compute/Address.yaml b/mmv1/products/compute/Address.yaml index 37b751efcea7..c1f943d35c4e 100644 --- a/mmv1/products/compute/Address.yaml +++ b/mmv1/products/compute/Address.yaml @@ -51,6 +51,7 @@ async: collection_url_key: 'items' custom_code: post_create: 'templates/terraform/post_create/labels.tmpl' + tgc_encoder: 'templates/tgc_next/encoders/compute_address.go.tmpl' sweeper: url_substitutions: - region: "us-west2" diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl 
b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 3b5d198eb1bb..fe909e8b6064 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -35,6 +35,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" @@ -87,6 +88,25 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse hclData := make(map[string]interface{}) +{{ if $.CustomCode.TgcDecoder -}} + res, err = resource{{ $.ResourceName -}}TgcDecoder(d, meta, res) + if err != nil { + return nil, err + } +{{ end}} + +{{ if $.CustomCode.Decoder -}} + res, err = resource{{ $.ResourceName -}}Decoder(d, meta, res) + if err != nil { + return nil, err + } + + if res == nil { + // Decoding the object has resulted in it being gone. It may be marked deleted. 
+ return nil, nil + } +{{ end}} + {{ range $prop := $.ReadPropertiesForTgc }} {{ if $prop.FlattenObject -}} {{/* TODO */}} @@ -113,4 +133,16 @@ func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *sch {{ else }} {{ template "flattenPropertyMethod" $prop -}} {{- end }} -{{- end }} \ No newline at end of file +{{- end }} + +{{- if $.CustomCode.TgcDecoder }} +func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { + {{ $.CustomTemplate $.CustomCode.TgcDecoder false -}} +} +{{- end }} + +{{- if $.CustomCode.Decoder }} +func resource{{ $.ResourceName -}}Decoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { + {{ $.CustomTemplate $.CustomCode.Decoder false -}} +} +{{- end }} diff --git a/mmv1/templates/tgc_next/encoders/compute_address.go.tmpl b/mmv1/templates/tgc_next/encoders/compute_address.go.tmpl new file mode 100644 index 000000000000..34959285f3fe --- /dev/null +++ b/mmv1/templates/tgc_next/encoders/compute_address.go.tmpl @@ -0,0 +1,6 @@ +config := meta.(*transport_tpg.Config) + +obj["subnetwork"] = tgcresource.GetComputeSelfLink(config, obj["subnetwork"]) +obj["network"] = tgcresource.GetComputeSelfLink(config, obj["network"]) + +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 3270d0f7c0e1..e8c2565b275d 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -35,7 +35,9 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" ) @@ -134,7 +136,14 @@ func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, conf {{- end}} {{ if $.CustomCode.Encoder -}} - return resource{{ $.ResourceName -}}Encoder(d, config, obj) + obj, err = resource{{ $.ResourceName -}}Encoder(d, config, obj) + if err != nil { + return nil, err + } +{{- end }} + +{{- if $.CustomCode.TgcEncoder }} + return resource{{ $.ResourceName -}}TgcEncoder(d, config, obj) {{- else }} return obj, nil {{- end}} @@ -146,6 +155,12 @@ func resource{{ $.ResourceName -}}Encoder(d tpgresource.TerraformResourceData, m } {{- end}} +{{if $.CustomCode.TgcEncoder -}} +func resource{{ $.ResourceName -}}TgcEncoder(d tpgresource.TerraformResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { +{{ $.CustomTemplate $.CustomCode.TgcEncoder false -}} +} +{{- end}} + {{ range $prop := $.SettableProperties }} {{- template "expandPropertyMethod" $prop -}} {{- end}} \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go new file mode 100644 index 000000000000..ba7fecf54d59 --- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go @@ -0,0 +1,23 @@ +package tgcresource + +import ( + "fmt" + "strings" + + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" +) + +func GetComputeSelfLink(config *transport_tpg.Config, raw interface{}) interface{} { + if raw == nil { + return nil + } + + v := raw.(string) + if v != "" && !strings.HasPrefix(v, "https://") { + if config.UniverseDomain == "" || config.UniverseDomain == "googleapis.com" { + return 
fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", v) + } + } + + return v +} From dd00a3e19d1bfb39e63b454390ddd90c971bbd27 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 10 Jun 2025 20:44:12 +0200 Subject: [PATCH 337/884] integrationconnectors: added support for `log_config.level` for `google_integration_connectors_connection` (#14229) --- mmv1/products/integrationconnectors/Connection.yaml | 10 ++++++++++ .../resource_integration_connectors_connection_test.go | 1 + 2 files changed, 11 insertions(+) diff --git a/mmv1/products/integrationconnectors/Connection.yaml b/mmv1/products/integrationconnectors/Connection.yaml index b156e7b33787..8e97b7bed018 100644 --- a/mmv1/products/integrationconnectors/Connection.yaml +++ b/mmv1/products/integrationconnectors/Connection.yaml @@ -569,6 +569,16 @@ properties: description: | Enabled represents whether logging is enabled or not for a connection. required: true + - name: 'level' + type: Enum + description: | + Log configuration level. 
+ default_from_api: true + enum_values: + - 'LOG_LEVEL_UNSPECIFIED' + - 'ERROR' + - 'INFO' + - 'DEBUG' - name: 'sslConfig' type: NestedObject description: | diff --git a/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go b/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go index 15d96cf7c946..a52fa7aaa50e 100644 --- a/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go +++ b/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go @@ -219,6 +219,7 @@ resource "google_integration_connectors_connection" "zendeskconnection" { } log_config { enabled = true + level = "DEBUG" } node_config { min_node_count = 2 From e7a683e4ac3f7f29f4bfcee74fbd9b66d20b6813 Mon Sep 17 00:00:00 2001 From: Sepehr Javid <32390553+sepehrjavid@users.noreply.github.com> Date: Tue, 10 Jun 2025 21:15:37 +0200 Subject: [PATCH 338/884] add default_route_action to regional url map under path_matcher (#14081) Co-authored-by: Riley Karson --- mmv1/products/compute/RegionUrlMap.yaml | 396 +++++++++++++++++- ..._path_matcher_default_route_action.tf.tmpl | 179 ++++++++ 2 files changed, 571 insertions(+), 4 deletions(-) create mode 100644 mmv1/templates/terraform/examples/region_url_map_path_matcher_default_route_action.tf.tmpl diff --git a/mmv1/products/compute/RegionUrlMap.yaml b/mmv1/products/compute/RegionUrlMap.yaml index 5d6689b7dc0c..1ac370f5a6df 100644 --- a/mmv1/products/compute/RegionUrlMap.yaml +++ b/mmv1/products/compute/RegionUrlMap.yaml @@ -36,9 +36,9 @@ collection_url_key: 'items' custom_code: sweeper: url_substitutions: - - region: "us-central1" - - region: "europe-west1" - - region: "us-west1" + - region: 'us-central1' + - region: 'europe-west1' + - region: 'us-west1' examples: - name: 'region_url_map_basic' primary_resource_id: 'regionurlmap' @@ -120,7 +120,7 @@ 
examples: regional_l7_xlb_map: 'regional-l7-xlb-map' l7_xlb_proxy: 'l7-xlb-proxy' l7_xlb_forwarding_rule: 'l7-xlb-forwarding-rule' - # Similar to other samples + # Similar to other samples exclude_test: true exclude_docs: true - name: 'region_url_map_path_template_match' @@ -131,6 +131,13 @@ examples: cart_backend_service_name: 'cart-service' user_backend_service_name: 'user-service' health_check_name: 'health-check' + - name: 'region_url_map_path_matcher_default_route_action' + primary_resource_id: 'regionurlmap' + vars: + region_url_map_name: 'regionurlmap' + login_region_backend_service_name: 'login' + home_region_backend_service_name: 'home' + region_health_check_name: 'health-check' parameters: - name: 'region' type: ResourceRef @@ -244,6 +251,7 @@ properties: # exactly_one_of: # - path_matchers.0.default_service # - path_matchers.0.default_url_redirect + # - path_matchers.0.default_route_action.0.weighted_backend_services resource: 'RegionBackendService' imports: 'selfLink' - name: 'description' @@ -1460,6 +1468,7 @@ properties: # exactly_one_of: # - path_matchers.0.default_service # - path_matchers.0.default_url_redirect + # - path_matchers.0.default_route_action.0.weighted_backend_services description: | When none of the specified hostRules match, the request is redirected to a URL specified by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or @@ -1525,6 +1534,385 @@ properties: retained. This field is required to ensure an empty block is not set. The normal default value is false. required: true + - name: 'defaultRouteAction' + type: NestedObject + # TODO: (mbang) conflicts also won't work for array path matchers yet, uncomment here once supported. + # conflicts: + # - path_matcher.path_matcher.default_url_redirect + description: | + defaultRouteAction takes effect when none of the pathRules or routeRules match. The load balancer performs + advanced routing actions like URL rewrites, header transformations, etc. 
prior to forwarding the request + to the selected backend. If defaultRouteAction specifies any weightedBackendServices, defaultService must not be set. + Conversely if defaultService is set, defaultRouteAction cannot contain any weightedBackendServices. + + Only one of defaultRouteAction or defaultUrlRedirect must be set. + properties: + - name: 'weightedBackendServices' + type: Array + # TODO: (mbang) won't work for array path matchers yet, uncomment here once they are supported. + # (github.com/hashicorp/terraform-plugin-sdk/issues/470) + # exactly_one_of: + # - path_matchers.0.default_service + # - path_matchers.0.default_url_redirect + # - path_matchers.0.default_route_action.0.weighted_backend_services + description: | + A list of weighted backend services to send traffic to when a route match occurs. + The weights determine the fraction of traffic that flows to their corresponding backend service. + If all traffic needs to go to a single backend service, there must be one weightedBackendService + with weight set to a non-zero number. + + Once a backendService is identified and before forwarding the request to the backend service, + advanced routing actions like Url rewrites and header transformations are applied depending on + additional settings specified in this HttpRouteAction. + item_type: + type: NestedObject + properties: + - name: 'backendService' + type: ResourceRef + description: | + The full or partial URL to the default BackendService resource. Before forwarding the + request to backendService, the loadbalancer applies any relevant headerActions + specified as part of this backendServiceWeight. + custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' + resource: 'BackendService' + imports: 'selfLink' + - name: 'weight' + type: Integer + description: | + Specifies the fraction of traffic sent to backendService, computed as + weight / (sum of all weightedBackendService weights in routeAction) . 
+ + The selection of a backend service is determined only for new traffic. Once a user's request + has been directed to a backendService, subsequent requests will be sent to the same backendService + as determined by the BackendService's session affinity policy. + + The value must be between 0 and 1000 + validation: + function: 'validation.IntBetween(0, 1000)' + - name: 'headerAction' + type: NestedObject + description: | + Specifies changes to request and response headers that need to take effect for + the selected backendService. + + headerAction specified here take effect before headerAction in the enclosing + HttpRouteRule, PathMatcher and UrlMap. + properties: + - name: 'requestHeadersToRemove' + type: Array + description: | + A list of header names for headers that need to be removed from the request prior to + forwarding the request to the backendService. + item_type: + type: String + - name: 'requestHeadersToAdd' + type: Array + description: | + Headers to add to a matching request prior to forwarding the request to the backendService. + item_type: + type: NestedObject + properties: + - name: 'headerName' + type: String + description: | + The name of the header to add. + - name: 'headerValue' + type: String + description: | + The value of the header to add. + - name: 'replace' + type: Boolean + description: | + If false, headerValue is appended to any values that already exist for the header. + If true, headerValue is set for the header, discarding any values that were set for that header. + default_value: false + - name: 'responseHeadersToRemove' + type: Array + description: | + A list of header names for headers that need to be removed from the response prior to sending the + response back to the client. + item_type: + type: String + - name: 'responseHeadersToAdd' + type: Array + description: | + Headers to add the response prior to sending the response back to the client. 
+ item_type: + type: NestedObject + properties: + - name: 'headerName' + type: String + description: | + The name of the header to add. + - name: 'headerValue' + type: String + description: | + The value of the header to add. + - name: 'replace' + type: Boolean + description: | + If false, headerValue is appended to any values that already exist for the header. + If true, headerValue is set for the header, discarding any values that were set for that header. + default_value: false + - name: 'urlRewrite' + type: NestedObject + description: | + The spec to modify the URL of the request, prior to forwarding the request to the matched service. + properties: + - name: 'pathPrefixRewrite' + type: String + description: | + Prior to forwarding the request to the selected backend service, the matching portion of the + request's path is replaced by pathPrefixRewrite. + + The value must be between 1 and 1024 characters. + - name: 'hostRewrite' + type: String + description: | + Prior to forwarding the request to the selected service, the request's host header is replaced + with contents of hostRewrite. + + The value must be between 1 and 255 characters. + - name: 'pathTemplateRewrite' + type: string + description: | + If specified, the pattern rewrites the URL path (based on the :path header) using the HTTP template syntax. + + A corresponding pathTemplateMatch must be specified. Any template variables must exist in the pathTemplateMatch field. + + * At least one variable must be specified in the pathTemplateMatch field + * You can omit variables from the rewritten URL + * The * and ** operators cannot be matched unless they have a corresponding variable name - e.g. {format=*} or {var=**}. + + For example, a pathTemplateMatch of /static/{format=**} could be rewritten as /static/content/{format} to prefix + /content to the URL. Variables can also be re-ordered in a rewrite, so that /{country}/{format}/{suffix=**} can be + rewritten as /content/{format}/{country}/{suffix}. 
+ + At least one non-empty routeRules[].matchRules[].path_template_match is required. + + Only one of pathPrefixRewrite or pathTemplateRewrite may be specified. + # TODO: (mbang) won't work for array path matchers yet, uncomment here once they are supported. + # (github.com/hashicorp/terraform-plugin-sdk/issues/470) + # exactly_one_of: + # - path_matchers.0.default_route_action.0.url_rewrite.path_prefix_rewrite + # - path_matchers.0.default_route_action.0.url_rewrite.path_template_rewrite + - name: 'timeout' + type: NestedObject + description: | + Specifies the timeout for the selected route. Timeout is computed from the time the request has been + fully processed (i.e. end-of-stream) up until the response has been completely processed. Timeout includes all retries. + + If not specified, will use the largest timeout among all backend services associated with the route. + default_from_api: true + properties: + - name: 'seconds' + type: String + description: | + Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. + Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + - name: 'nanos' + type: Integer + description: | + Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented + with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'maxStreamDuration' + type: NestedObject + description: | + Specifies the maximum duration (timeout) for streams on the selected route. + Unlike the `Timeout` field where the timeout duration starts from the time the request + has been fully processed (known as end-of-stream), the duration in this field + is computed from the beginning of the stream until the response has been processed, + including all retries. A stream that does not complete in this duration is closed. 
+ default_from_api: true + properties: + - name: 'nanos' + type: Integer + description: | + Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented + with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'seconds' + type: String + description: | + Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. + Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + required: true + - name: 'retryPolicy' + type: NestedObject + description: | + Specifies the retry policy associated with this route. + properties: + - name: 'retryConditions' + type: Array + description: | + Specifies one or more conditions when this retry rule applies. Valid values are: + + * 5xx: Loadbalancer will attempt a retry if the backend service responds with any 5xx response code, + or if the backend service does not respond at all, example: disconnects, reset, read timeout, + * connection failure, and refused streams. + * gateway-error: Similar to 5xx, but only applies to response codes 502, 503 or 504. + * connect-failure: Loadbalancer will retry on failures connecting to backend services, + for example due to connection timeouts. + * retriable-4xx: Loadbalancer will retry for retriable 4xx response codes. + Currently the only retriable error supported is 409. + * refused-stream: Loadbalancer will retry if the backend service resets the stream with a REFUSED_STREAM error code. + This reset type indicates that it is safe to retry.
+ * cancelled: Loadbalancer will retry if the gRPC status code in the response header is set to cancelled + * deadline-exceeded: Loadbalancer will retry if the gRPC status code in the response header is set to deadline-exceeded + * resource-exhausted: Loadbalancer will retry if the gRPC status code in the response header is set to resource-exhausted + * unavailable: Loadbalancer will retry if the gRPC status code in the response header is set to unavailable + item_type: + type: String + - name: 'numRetries' + type: Integer + description: | + Specifies the allowed number retries. This number must be > 0. If not specified, defaults to 1. + validation: + function: 'validation.IntAtLeast(1)' + default_value: 1 + - name: 'perTryTimeout' + type: NestedObject + description: | + Specifies a non-zero timeout per retry attempt. + + If not specified, will use the timeout set in HttpRouteAction. If timeout in HttpRouteAction is not set, + will use the largest timeout among all backend services associated with the route. + properties: + - name: 'seconds' + type: String + description: | + Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. + Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + - name: 'nanos' + type: Integer + description: | + Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are + represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'requestMirrorPolicy' + type: NestedObject + description: | + Specifies the policy on how requests intended for the route's backends are shadowed to a separate mirrored backend service. + Loadbalancer does not wait for responses from the shadow service. Prior to sending traffic to the shadow service, + the host / authority header is suffixed with -shadow. 
+ properties: + - name: 'backendService' + type: ResourceRef + description: | + The full or partial URL to the BackendService resource being mirrored to. + required: true + custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' + resource: 'BackendService' + imports: 'selfLink' + - name: 'corsPolicy' + type: NestedObject + description: | + The specification for allowing client side cross-origin requests. Please see + [W3C Recommendation for Cross Origin Resource Sharing](https://www.w3.org/TR/cors/) + properties: + - name: 'allowOrigins' + type: Array + description: | + Specifies the list of origins that will be allowed to do CORS requests. + An origin is allowed if it matches either an item in allowOrigins or an item in allowOriginRegexes. + item_type: + type: String + - name: 'allowOriginRegexes' + type: Array + description: | + Specifies the regular expression patterns that match allowed origins. For regular expression grammar + please see en.cppreference.com/w/cpp/regex/ecmascript + An origin is allowed if it matches either an item in allowOrigins or an item in allowOriginRegexes. + item_type: + type: String + - name: 'allowMethods' + type: Array + description: | + Specifies the content for the Access-Control-Allow-Methods header. + item_type: + type: String + - name: 'allowHeaders' + type: Array + description: | + Specifies the content for the Access-Control-Allow-Headers header. + item_type: + type: String + - name: 'exposeHeaders' + type: Array + description: | + Specifies the content for the Access-Control-Expose-Headers header. + item_type: + type: String + - name: 'maxAge' + type: Integer + description: | + Specifies how long results of a preflight request can be cached in seconds. + This translates to the Access-Control-Max-Age header. + - name: 'allowCredentials' + type: Boolean + description: | + In response to a preflight request, setting this to true indicates that the actual request can include user credentials. 
+ This translates to the Access-Control-Allow-Credentials header. + default_value: false + - name: 'disabled' + type: Boolean + description: | + If true, specifies the CORS policy is disabled. The default value is false, which indicates that the CORS policy is in effect. + default_value: false + - name: 'faultInjectionPolicy' + type: NestedObject + description: | + The specification for fault injection introduced into traffic to test the resiliency of clients to backend service failure. + As part of fault injection, when clients send requests to a backend service, delays can be introduced by Loadbalancer on a + percentage of requests before sending those request to the backend service. Similarly requests from clients can be aborted + by the Loadbalancer for a percentage of requests. + + timeout and retryPolicy will be ignored by clients that are configured with a faultInjectionPolicy. + properties: + - name: 'delay' + type: NestedObject + description: | + The specification for how client requests are delayed as part of fault injection, before being sent to a backend service. + properties: + - name: 'fixedDelay' + type: NestedObject + description: | + Specifies the value of the fixed delay interval. + properties: + - name: 'seconds' + type: String + description: | + Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. + Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + - name: 'nanos' + type: Integer + description: | + Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are + represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'percentage' + type: Double + description: | + The percentage of traffic (connections/operations/requests) on which delay will be introduced as part of fault injection. + The value must be between 0.0 and 100.0 inclusive. 
+ validation: + function: 'validation.FloatBetween(0, 100)' + - name: 'abort' + type: NestedObject + description: | + The specification for how client requests are aborted as part of fault injection. + properties: + - name: 'httpStatus' + type: Integer + description: | + The HTTP status code used to abort the request. + The value must be between 200 and 599 inclusive. + validation: + function: 'validation.IntBetween(200, 599)' + - name: 'percentage' + type: Double + description: | + The percentage of traffic (connections/operations/requests) which will be aborted as part of fault injection. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'test' type: Array description: | diff --git a/mmv1/templates/terraform/examples/region_url_map_path_matcher_default_route_action.tf.tmpl b/mmv1/templates/terraform/examples/region_url_map_path_matcher_default_route_action.tf.tmpl new file mode 100644 index 000000000000..929b49bf9adf --- /dev/null +++ b/mmv1/templates/terraform/examples/region_url_map_path_matcher_default_route_action.tf.tmpl @@ -0,0 +1,179 @@ +resource "google_compute_region_url_map" "{{$.PrimaryResourceId}}" { + region = "us-central1" + + name = "{{index $.Vars "region_url_map_name"}}" + description = "a description" + default_service = google_compute_region_backend_service.home.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + + default_route_action { + cors_policy { + disabled = false + allow_credentials = true + allow_headers = [ + "foobar" + ] + allow_methods = [ + "GET", + "POST", + ] + allow_origins = [ + "example.com" + ] + expose_headers = [ + "foobar" + ] + max_age = 60 + } + fault_injection_policy { + abort { + http_status = 500 + percentage = 0.5 + } + delay { + fixed_delay { + nanos = 500 + seconds = 0 + } + percentage = 0.5 + } + } + request_mirror_policy { + backend_service = 
google_compute_region_backend_service.home.id + } + retry_policy { + num_retries = 3 + per_try_timeout { + nanos = 500 + seconds = 0 + } + retry_conditions = [ + "5xx", + "gateway-error", + ] + } + timeout { + nanos = 500 + seconds = 0 + } + url_rewrite { + host_rewrite = "dev.example.com" + path_prefix_rewrite = "/v1/api/" + } + weighted_backend_services { + backend_service = google_compute_region_backend_service.home.id + header_action { + request_headers_to_add { + header_name = "foo-request-1" + header_value = "bar" + replace = true + } + request_headers_to_add { + header_name = "foo-request-2" + header_value = "bar" + replace = true + } + request_headers_to_remove = ["fizz"] + response_headers_to_add { + header_name = "foo-response-1" + header_value = "bar" + replace = true + } + response_headers_to_add { + header_name = "foo-response-2" + header_value = "bar" + replace = true + } + response_headers_to_remove = ["buzz"] + } + weight = 100 + } + weighted_backend_services { + backend_service = google_compute_region_backend_service.login.id + header_action { + request_headers_to_add { + header_name = "foo-request-1" + header_value = "bar" + replace = true + } + request_headers_to_add { + header_name = "foo-request-2" + header_value = "bar" + replace = true + } + request_headers_to_remove = ["fizz"] + response_headers_to_add { + header_name = "foo-response-1" + header_value = "bar" + replace = true + } + response_headers_to_add { + header_name = "foo-response-2" + header_value = "bar" + replace = true + } + response_headers_to_remove = ["buzz"] + } + weight = 200 + } + } + + path_rule { + paths = ["/home"] + service = google_compute_region_backend_service.home.id + } + + path_rule { + paths = ["/login"] + service = google_compute_region_backend_service.login.id + } + } + + test { + service = google_compute_region_backend_service.home.id + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_region_backend_service" "login" { + region = "us-central1" 
+ + name = "{{index $.Vars "login_region_backend_service_name"}}" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + timeout_sec = 10 + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_backend_service" "home" { + region = "us-central1" + + name = "{{index $.Vars "home_region_backend_service_name"}}" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + timeout_sec = 10 + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_health_check" "default" { + region = "us-central1" + + name = "{{index $.Vars "region_health_check_name"}}" + check_interval_sec = 1 + timeout_sec = 1 + http_health_check { + port = 80 + request_path = "/" + } +} + From 263e5f102aa345f4dd122597ab422477a737da30 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Tue, 10 Jun 2025 12:16:03 -0700 Subject: [PATCH 339/884] Make VCR correctly handle compound tests (#13941) --- .../cmd/templates/vcr/record_replay.tmpl | 4 +- .ci/magician/cmd/test_terraform_vcr.go | 51 ++++++++++++++-- .ci/magician/vcr/tester.go | 41 ++++++++++--- .ci/magician/vcr/tester_test.go | 58 +++++++++++++++++++ 4 files changed, 138 insertions(+), 16 deletions(-) create mode 100644 .ci/magician/vcr/tester_test.go diff --git a/.ci/magician/cmd/templates/vcr/record_replay.tmpl b/.ci/magician/cmd/templates/vcr/record_replay.tmpl index 9a8b2859ac6a..d349b2bf4299 100644 --- a/.ci/magician/cmd/templates/vcr/record_replay.tmpl +++ b/.ci/magician/cmd/templates/vcr/record_replay.tmpl @@ -11,7 +11,7 @@ {{color "red" "Tests failed when rerunning REPLAYING mode:"}} {{range .ReplayingAfterRecordingResult.FailedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/replaying_build_after_recording/{{.}}_replaying_test.log)] {{/* remove trailing whitespace */ -}} + [[Error 
message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/replaying_build_after_recording/{{compoundTest .}}_replaying_test.log)] {{/* remove trailing whitespace */ -}} [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/replaying_after_recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} @@ -30,7 +30,7 @@ Please fix these to complete your PR. If you believe these test failures to be i {{color "red" "Tests failed during RECORDING mode:"}} {{range .RecordingResult.FailedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/recording_build/{{.}}_recording_test.log)] {{/* remove trailing whitespace */ -}} + [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/recording_build/{{compoundTest .}}_recording_test.log)] {{/* remove trailing whitespace */ -}} [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 4c12b8b5755f..cca81a302e57 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -235,12 +235,13 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, } notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) + postReplayData := postReplay{ RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), NotRunBetaTests: notRunBeta, NotRunGATests: notRunGa, - ReplayingResult: replayingResult, + ReplayingResult: 
subtestResult(replayingResult), ReplayingErr: replayingErr, LogBucket: "ci-vcr-logs", Version: provider.Beta.String(), @@ -318,8 +319,8 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, allRecordingPassed := len(recordingResult.FailedTests) == 0 && !hasTerminatedTests && recordingErr == nil recordReplayData := recordReplay{ - RecordingResult: recordingResult, - ReplayingAfterRecordingResult: replayingAfterRecordingResult, + RecordingResult: subtestResult(recordingResult), + ReplayingAfterRecordingResult: subtestResult(replayingAfterRecordingResult), RecordingErr: recordingErr, HasTerminatedTests: hasTerminatedTests, AllRecordingPassed: allRecordingPassed, @@ -386,6 +387,43 @@ func notRunTests(gaDiff, betaDiff string, result vcr.Result) ([]string, []string return notRunBeta, notRunGa } +func subtestResult(original vcr.Result) vcr.Result { + return vcr.Result{ + PassedTests: excludeCompoundTests(original.PassedTests, original.PassedSubtests), + FailedTests: excludeCompoundTests(original.FailedTests, original.FailedSubtests), + SkippedTests: excludeCompoundTests(original.SkippedTests, original.SkippedSubtests), + Panics: original.Panics, + } +} + +// Returns the name of the compound test that the given subtest belongs to. +func compoundTest(subtest string) string { + parts := strings.Split(subtest, "__") + if len(parts) != 2 { + return subtest + } + return parts[0] +} + +// Returns subtests and tests that are not compound tests. 
+func excludeCompoundTests(allTests, subtests []string) []string { + res := make([]string, 0, len(allTests)+len(subtests)) + compoundTests := make(map[string]struct{}, len(subtests)) + for _, subtest := range subtests { + if compound := compoundTest(subtest); compound != subtest { + compoundTests[compound] = struct{}{} + res = append(res, subtest) + } + } + for _, test := range allTests { + if _, ok := compoundTests[test]; !ok { + res = append(res, test) + } + } + sort.Strings(res) + return res +} + func modifiedPackages(changedFiles []string, version provider.Version) (map[string]struct{}, bool) { var goFiles []string for _, line := range changedFiles { @@ -468,9 +506,10 @@ func init() { func formatComment(fileName string, tmplText string, data any) (string, error) { funcs := template.FuncMap{ - "join": strings.Join, - "add": func(i, j int) int { return i + j }, - "color": color, + "join": strings.Join, + "add": func(i, j int) int { return i + j }, + "color": color, + "compoundTest": compoundTest, } tmpl, err := template.New(fileName).Funcs(funcs).Parse(tmplText) if err != nil { diff --git a/.ci/magician/vcr/tester.go b/.ci/magician/vcr/tester.go index 5b0d8975e122..54a6cf4a7839 100644 --- a/.ci/magician/vcr/tester.go +++ b/.ci/magician/vcr/tester.go @@ -13,10 +13,13 @@ import ( ) type Result struct { - PassedTests []string - SkippedTests []string - FailedTests []string - Panics []string + PassedTests []string + SkippedTests []string + FailedTests []string + PassedSubtests []string + SkippedSubtests []string + FailedSubtests []string + Panics []string } type Mode int @@ -66,6 +69,8 @@ const replayingTimeout = "240m" var testResultsExpression = regexp.MustCompile(`(?m:^--- (PASS|FAIL|SKIP): (TestAcc\w+))`) +var subtestResultsExpression = regexp.MustCompile(`(?m:^ --- (PASS|FAIL|SKIP): (TestAcc\w+)/(\w+))`) + var testPanicExpression = regexp.MustCompile(`^panic: .*`) var safeToLog = map[string]bool{ @@ -603,19 +608,39 @@ func collectResult(output string) Result { } 
resultSets[submatches[1]][submatches[2]] = struct{}{} } + matches = subtestResultsExpression.FindAllStringSubmatch(output, -1) + subtestResultSets := make(map[string]map[string]struct{}, 4) + for _, submatches := range matches { + if len(submatches) != 4 { + fmt.Printf("Warning: unexpected regex match found in test output: %v", submatches) + continue + } + if _, ok := subtestResultSets[submatches[1]]; !ok { + subtestResultSets[submatches[1]] = make(map[string]struct{}) + } + subtestResultSets[submatches[1]][fmt.Sprintf("%s__%s", submatches[2], submatches[3])] = struct{}{} + } results := make(map[string][]string, 4) results["PANIC"] = testPanicExpression.FindAllString(output, -1) sort.Strings(results["PANIC"]) + subtestResults := make(map[string][]string, 3) for _, kind := range []string{"FAIL", "PASS", "SKIP"} { for test := range resultSets[kind] { results[kind] = append(results[kind], test) } sort.Strings(results[kind]) + for subtest := range subtestResultSets[kind] { + subtestResults[kind] = append(subtestResults[kind], subtest) + } + sort.Strings(subtestResults[kind]) } return Result{ - FailedTests: results["FAIL"], - PassedTests: results["PASS"], - SkippedTests: results["SKIP"], - Panics: results["PANIC"], + FailedTests: results["FAIL"], + PassedTests: results["PASS"], + SkippedTests: results["SKIP"], + FailedSubtests: subtestResults["FAIL"], + PassedSubtests: subtestResults["PASS"], + SkippedSubtests: subtestResults["SKIP"], + Panics: results["PANIC"], } } diff --git a/.ci/magician/vcr/tester_test.go b/.ci/magician/vcr/tester_test.go new file mode 100644 index 000000000000..4ed29faa2c5a --- /dev/null +++ b/.ci/magician/vcr/tester_test.go @@ -0,0 +1,58 @@ +package vcr + +import ( + "testing" + + "github.com/google/go-cmp/cmp" +) + +func TestCollectResults(t *testing.T) { + for _, test := range []struct { + name string + output string + expected Result + }{ + { + name: "no compound tests", + output: `--- FAIL: TestAccServiceOneResourceOne (100.00s) +--- PASS: 
TestAccServiceOneResourceTwo (100.00s) +--- PASS: TestAccServiceTwoResourceOne (100.00s) +--- PASS: TestAccServiceTwoResourceTwo (100.00s) +`, + expected: Result{ + PassedTests: []string{"TestAccServiceOneResourceTwo", "TestAccServiceTwoResourceOne", "TestAccServiceTwoResourceTwo"}, + FailedTests: []string{"TestAccServiceOneResourceOne"}, + }, + }, + { + name: "compound tests", + output: `--- FAIL: TestAccServiceOneResourceOne (100.00s) +--- FAIL: TestAccServiceOneResourceTwo (100.00s) + --- PASS: TestAccServiceOneResourceTwo/test_one (100.00s) + --- FAIL: TestAccServiceOneResourceTwo/test_two (100.00s) +--- PASS: TestAccServiceTwoResourceOne (100.00s) + --- PASS: TestAccServiceTwoResourceOne/test_one (100.00s) + --- PASS: TestAccServiceTwoResourceOne/test_two (100.00s) +--- PASS: TestAccServiceTwoResourceTwo (100.00s) +`, + expected: Result{ + PassedTests: []string{ + "TestAccServiceTwoResourceOne", + "TestAccServiceTwoResourceTwo", + }, + FailedTests: []string{"TestAccServiceOneResourceOne", "TestAccServiceOneResourceTwo"}, + PassedSubtests: []string{ + "TestAccServiceOneResourceTwo__test_one", + "TestAccServiceTwoResourceOne__test_one", + "TestAccServiceTwoResourceOne__test_two", + }, + FailedSubtests: []string{"TestAccServiceOneResourceTwo__test_two"}, + }, + }, + } { + if diff := cmp.Diff(test.expected, collectResult(test.output)); diff != "" { + t.Errorf("collectResult(%q) got unexpected diff (-want +got):\n%s", test.output, diff) + } + } + +} From b4be65a8ee9eba1e2a53e3d7d140f669be0a5a4a Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 10 Jun 2025 14:33:43 -0700 Subject: [PATCH 340/884] disk sweeper pagination (#14231) --- .../compute/resource_compute_disk_sweeper.go | 108 ++++++++++-------- 1 file changed, 59 insertions(+), 49 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go b/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go index 
15971a309b88..838b0010bf8a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go @@ -33,64 +33,74 @@ func testSweepDisk(region string) error { zones := []string{"us-central1-a", "us-central1-b", "us-central1-c", "us-central1-f", "us-east1-b", "us-east1-c", "us-east1-d", "us-west1-a", "us-west1-b", "us-west1-c"} for _, zone := range zones { servicesUrl := "https://compute.googleapis.com/compute/v1/projects/" + config.Project + "/zones/" + zone + "/disks" - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: config.Project, - RawURL: servicesUrl, - UserAgent: config.UserAgent, - }) - if err != nil { - log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", servicesUrl, err) - return nil - } - - resourceList, ok := res["items"] - if !ok { - log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") - return nil - } + // Page zero's URL is the raw list URL. Successive pages will return the token for the next page. + pageUrl := servicesUrl + for { - rl := resourceList.([]interface{}) - - log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) - // Count items that weren't sweeped. - nonPrefixCount := 0 - for _, ri := range rl { - obj := ri.(map[string]interface{}) - if obj["id"] == nil { - log.Printf("[INFO][SWEEPER_LOG] %s resource id was nil", resourceName) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: pageUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", pageUrl, err) return nil } - id := obj["name"].(string) - // Increment count and skip if resource is not sweepable. 
- prefixes := []string{ - "pvc-", // b/291168201 + resourceList, ok := res["items"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil } - if !sweeper.IsSweepableTestResource(id) && !sweeper.HasAnyPrefix(id, prefixes) { - nonPrefixCount++ - continue + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Count items that weren't sweeped. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["id"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource id was nil", resourceName) + return nil + } + + id := obj["name"].(string) + // Increment count and skip if resource is not sweepable. + prefixes := []string{ + "pvc-", // b/291168201 + } + if !sweeper.IsSweepableTestResource(id) && !sweeper.HasAnyPrefix(id, prefixes) { + nonPrefixCount++ + continue + } + + deleteUrl := servicesUrl + "/" + id + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, id) + } } - deleteUrl := servicesUrl + "/" + id - // Don't wait on operations as we may have a lot to delete - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: config.Project, - RawURL: deleteUrl, - UserAgent: config.UserAgent, - }) - if err != nil { - log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) - } else { - log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, id) + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items 
without tf-test prefix remain for zone %s", nonPrefixCount, zone) } - } - if nonPrefixCount > 0 { - log.Printf("[INFO][SWEEPER_LOG] %d items without tf-test prefix remain for zone %s", nonPrefixCount, zone) + if res["nextPageToken"] == nil || res["nextPageToken"].(string) == "" { + break + } + pageUrl, err = transport_tpg.AddQueryParams(servicesUrl, map[string]string{"pageToken": res["nextPageToken"].(string)}) } } From 993035ebad64800af297ecf1abb43a480b881de8 Mon Sep 17 00:00:00 2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Wed, 11 Jun 2025 00:23:18 +0200 Subject: [PATCH 341/884] Promote workloadPolicy and groupPlacementPolicy.gpuTopology to GA (#14194) Signed-off-by: Cezary Sobczak --- mmv1/products/compute/ResourcePolicy.yaml | 6 ------ .../resource_policy_placement_policy_gpu_topology.tf.tmpl | 1 - .../examples/resource_policy_workload_policy.tf.tmpl | 1 - ...urce_policy_workload_policy_accelerator_topology.tf.tmpl | 1 - ...rce_policy_workload_policy_max_topology_distance.tf.tmpl | 1 - 5 files changed, 10 deletions(-) diff --git a/mmv1/products/compute/ResourcePolicy.yaml b/mmv1/products/compute/ResourcePolicy.yaml index c4b0f1c71199..9b3f3a15e35b 100644 --- a/mmv1/products/compute/ResourcePolicy.yaml +++ b/mmv1/products/compute/ResourcePolicy.yaml @@ -81,22 +81,18 @@ examples: name: 'gce-policy' - name: 'resource_policy_workload_policy' primary_resource_id: 'bar' - min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_workload_policy_accelerator_topology' primary_resource_id: 'bar' - min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_workload_policy_max_topology_distance' primary_resource_id: 'bar' - min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_placement_policy_gpu_topology' primary_resource_id: 'baz' - min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_placement_policy_tpu_topology' @@ -336,7 +332,6 @@ properties: conflicts: - 
group_placement_policy.0.max_distance immutable: true - min_version: 'beta' - name: 'tpuTopology' type: String description: | @@ -414,7 +409,6 @@ properties: type: NestedObject description: | Represents the workload policy. - min_version: 'beta' properties: - name: 'type' type: Enum diff --git a/mmv1/templates/terraform/examples/resource_policy_placement_policy_gpu_topology.tf.tmpl b/mmv1/templates/terraform/examples/resource_policy_placement_policy_gpu_topology.tf.tmpl index fc3eec4128d6..b15e1650b5a4 100644 --- a/mmv1/templates/terraform/examples/resource_policy_placement_policy_gpu_topology.tf.tmpl +++ b/mmv1/templates/terraform/examples/resource_policy_placement_policy_gpu_topology.tf.tmpl @@ -1,5 +1,4 @@ resource "google_compute_resource_policy" "baz" { - provider = google-beta name = "{{index $.Vars "name"}}" region = "europe-west9" group_placement_policy { diff --git a/mmv1/templates/terraform/examples/resource_policy_workload_policy.tf.tmpl b/mmv1/templates/terraform/examples/resource_policy_workload_policy.tf.tmpl index 67c9d0e625a3..fbd4277cf8e6 100644 --- a/mmv1/templates/terraform/examples/resource_policy_workload_policy.tf.tmpl +++ b/mmv1/templates/terraform/examples/resource_policy_workload_policy.tf.tmpl @@ -1,7 +1,6 @@ resource "google_compute_resource_policy" "bar" { name = "{{index $.Vars "name"}}" region = "europe-west1" - provider = google-beta workload_policy { type = "HIGH_AVAILABILITY" } diff --git a/mmv1/templates/terraform/examples/resource_policy_workload_policy_accelerator_topology.tf.tmpl b/mmv1/templates/terraform/examples/resource_policy_workload_policy_accelerator_topology.tf.tmpl index 4daf4af43dfb..9ebf95a8ee17 100644 --- a/mmv1/templates/terraform/examples/resource_policy_workload_policy_accelerator_topology.tf.tmpl +++ b/mmv1/templates/terraform/examples/resource_policy_workload_policy_accelerator_topology.tf.tmpl @@ -1,7 +1,6 @@ resource "google_compute_resource_policy" "bar" { name = "{{index $.Vars "name"}}" region = 
"europe-west1" - provider = google-beta workload_policy { type = "HIGH_THROUGHPUT" accelerator_topology = "SOME NEW TOPOLOGY" diff --git a/mmv1/templates/terraform/examples/resource_policy_workload_policy_max_topology_distance.tf.tmpl b/mmv1/templates/terraform/examples/resource_policy_workload_policy_max_topology_distance.tf.tmpl index 560dd2aecf12..d44328781fa7 100644 --- a/mmv1/templates/terraform/examples/resource_policy_workload_policy_max_topology_distance.tf.tmpl +++ b/mmv1/templates/terraform/examples/resource_policy_workload_policy_max_topology_distance.tf.tmpl @@ -1,7 +1,6 @@ resource "google_compute_resource_policy" "bar" { name = "{{index $.Vars "name"}}" region = "europe-west1" - provider = google-beta workload_policy { type = "HIGH_THROUGHPUT" max_topology_distance = "BLOCK" From b3bac9fd0e0f3325f68e7b5fa9d528ac579cd77a Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Tue, 10 Jun 2025 19:12:36 -0400 Subject: [PATCH 342/884] Fix runner to surface generic errors (#14214) Co-authored-by: Thomas Rodgers --- .ci/magician/exec/runner.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.ci/magician/exec/runner.go b/.ci/magician/exec/runner.go index 2de978885e98..dd79898d46f0 100644 --- a/.ci/magician/exec/runner.go +++ b/.ci/magician/exec/runner.go @@ -131,6 +131,9 @@ func (ar *Runner) Run(name string, args []string, env map[string]string) (string return "", fmt.Errorf("path error running %s: %v", name, typedErr) } + if err != nil { + return "", fmt.Errorf("error running %q: %v", name, err) + } return string(out), nil } From fe9d3179b0736a68e32b2324033a7c6dbb70af0a Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 10 Jun 2025 17:09:44 -0700 Subject: [PATCH 343/884] remove ignoredPorjectServices in google_project_service (#14234) --- .../resourcemanager/resource_google_project.go | 17 +++++++---------- .../resource_google_project_service.go.tmpl | 7 +------ ...urce_google_project_service_internal_test.go | 4 ---- 3 files changed, 8 insertions(+), 20 
deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go index 44bbc733b22b..a7cce1993af7 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go @@ -810,16 +810,13 @@ func ListCurrentlyEnabledServices(project, billingProject, userAgent string, con // services are returned as "projects/{{project}}/services/{{name}}" name := tpgresource.GetResourceNameFromSelfLink(v.Name) - // if name not in ignoredProjectServicesSet - if _, ok := ignoredProjectServicesSet[name]; !ok { - apiServices[name] = struct{}{} - - // if a service has been renamed, set both. We'll deal - // with setting the right values later. - if v, ok := renamedServicesByOldAndNewServiceNames[name]; ok { - log.Printf("[DEBUG] Adding service alias for %s to enabled services: %s", name, v) - apiServices[v] = struct{}{} - } + apiServices[name] = struct{}{} + + // if a service has been renamed, set both. We'll deal + // with setting the right values later. 
+ if v, ok := renamedServicesByOldAndNewServiceNames[name]; ok { + log.Printf("[DEBUG] Adding service alias for %s to enabled services: %s", name, v) + apiServices[v] = struct{}{} } } return nil diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl index ff1fd2a8fb7e..bf174baf2c47 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl @@ -21,11 +21,6 @@ import ( "google.golang.org/api/serviceusage/v1" ) -// These services can only be enabled as a side-effect of enabling other services, -// so don't bother storing them in the config or using them for diffing. -var ignoredProjectServices = []string{"dataproc-control.googleapis.com", "source.googleapis.com", "stackdriverprovisioning.googleapis.com"} -var ignoredProjectServicesSet = tpgresource.GolangSetFromStringSlice(ignoredProjectServices) - // Services that can't be user-specified but are otherwise valid. Renamed // services should be added to this set during major releases. 
var bannedProjectServices = []string{"bigquery-json.googleapis.com"} @@ -65,7 +60,7 @@ var renamedServicesByOldAndNewServiceNames = tpgresource.MergeStringMaps(Renamed const maxServiceUsageBatchSize = 20 func validateProjectServiceService(val interface{}, key string) (warns []string, errs []error) { - bannedServicesFunc := verify.StringNotInSlice(append(ignoredProjectServices, bannedProjectServices...), false) + bannedServicesFunc := verify.StringNotInSlice(bannedProjectServices, false) warns, errs = bannedServicesFunc(val, key) if len(errs) > 0 { return diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go index 3e6a67969342..72db52c3aa8c 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go @@ -9,10 +9,6 @@ func TestProjectServiceServiceValidateFunc(t *testing.T) { val interface{} ExpectValidationError bool }{ - "ignoredProjectService": { - val: "dataproc-control.googleapis.com", - ExpectValidationError: true, - }, "bannedProjectService": { val: "bigquery-json.googleapis.com", ExpectValidationError: true, From 7a2c1467b72a27b3b9ad53ee369ef56ec6d35f42 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Wed, 11 Jun 2025 04:07:33 +0200 Subject: [PATCH 344/884] Added Fingerprint to TargetHttpProxy and TartgetHttpsProxy (#14193) --- mmv1/products/compute/TargetHttpProxy.yaml | 16 ++++++ mmv1/products/compute/TargetHttpsProxy.yaml | 17 ++++++ .../target_http_proxy_fingerprint.tf.tmpl | 45 ++++++++++++++++ .../target_https_proxy_fingerprint.tf.tmpl | 54 +++++++++++++++++++ 4 files changed, 132 insertions(+) create mode 100644 mmv1/templates/terraform/examples/target_http_proxy_fingerprint.tf.tmpl create mode 100644 
mmv1/templates/terraform/examples/target_https_proxy_fingerprint.tf.tmpl diff --git a/mmv1/products/compute/TargetHttpProxy.yaml b/mmv1/products/compute/TargetHttpProxy.yaml index cd1ff2f7d3dc..d221a310f271 100644 --- a/mmv1/products/compute/TargetHttpProxy.yaml +++ b/mmv1/products/compute/TargetHttpProxy.yaml @@ -58,6 +58,13 @@ examples: vars: target_http_proxy_name: 'test-https-redirect-proxy' url_map_name: 'url-map' + - name: 'target_http_proxy_fingerprint' + primary_resource_id: 'default' + vars: + target_http_proxy_name: 'test-fingerprint-proxy' + url_map_name: 'url-map' + backend_service_name: 'backend-service' + http_health_check_name: 'http-health-check' parameters: properties: - name: 'creationTimestamp' @@ -112,3 +119,12 @@ properties: value is 600 seconds, the minimum allowed value is 5 seconds, and the maximum allowed value is 600 seconds. For Global external HTTP(S) load balancer (classic), this option is not available publicly. + - name: 'fingerprint' + type: Fingerprint + description: | + Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. + This field will be ignored when inserting a TargetHttpProxy. An up-to-date fingerprint must be provided in order to + patch/update the TargetHttpProxy; otherwise, the request will fail with error 412 conditionNotMet. + To see the latest fingerprint, make a get() request to retrieve the TargetHttpProxy. + A base64-encoded string. 
+ output: true diff --git a/mmv1/products/compute/TargetHttpsProxy.yaml b/mmv1/products/compute/TargetHttpsProxy.yaml index a9082e23415b..f0eed46422ae 100644 --- a/mmv1/products/compute/TargetHttpsProxy.yaml +++ b/mmv1/products/compute/TargetHttpsProxy.yaml @@ -79,6 +79,14 @@ examples: certificate_manager_certificate_name: 'my-certificate' url_map_name: 'url-map' backend_service_name: 'backend-service' + - name: 'target_https_proxy_fingerprint' + primary_resource_id: 'default' + vars: + target_https_proxy_name: 'test-fingerprint-proxy' + ssl_certificate_name: 'my-certificate' + url_map_name: 'url-map' + backend_service_name: 'backend-service' + http_health_check_name: 'http-health-check' parameters: properties: - name: 'creationTimestamp' @@ -238,3 +246,12 @@ properties: fingerprint_name: 'fingerprint' resource: 'ServerTlsPolicy' imports: 'selfLink' + - name: 'fingerprint' + type: Fingerprint + description: | + Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. + This field will be ignored when inserting a TargetHttpsProxy. An up-to-date fingerprint must be provided in order to + patch the TargetHttpsProxy; otherwise, the request will fail with error 412 conditionNotMet. + To see the latest fingerprint, make a get() request to retrieve the TargetHttpsProxy. + A base64-encoded string. 
+ output: true diff --git a/mmv1/templates/terraform/examples/target_http_proxy_fingerprint.tf.tmpl b/mmv1/templates/terraform/examples/target_http_proxy_fingerprint.tf.tmpl new file mode 100644 index 000000000000..839a65428c4b --- /dev/null +++ b/mmv1/templates/terraform/examples/target_http_proxy_fingerprint.tf.tmpl @@ -0,0 +1,45 @@ +resource "google_compute_target_http_proxy" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "target_http_proxy_name"}}" + url_map = google_compute_url_map.default.id +} + +resource "google_compute_url_map" "default" { + name = "{{index $.Vars "url_map_name"}}" + default_service = google_compute_backend_service.default.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.default.id + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.default.id + } + } +} + +resource "google_compute_backend_service" "default" { + name = "{{index $.Vars "backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + name = "{{index $.Vars "http_health_check_name"}}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +output "target_http_proxy_fingerprint" { + value = google_compute_target_http_proxy.{{$.PrimaryResourceId}}.fingerprint + description = "The fingerprint of the target HTTP proxy for optimistic locking" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/target_https_proxy_fingerprint.tf.tmpl b/mmv1/templates/terraform/examples/target_https_proxy_fingerprint.tf.tmpl new file mode 100644 index 000000000000..1dadef6bce5a --- /dev/null +++ b/mmv1/templates/terraform/examples/target_https_proxy_fingerprint.tf.tmpl @@ -0,0 +1,54 @@ +resource "google_compute_target_https_proxy" "{{$.PrimaryResourceId}}" { + name = 
"{{index $.Vars "target_https_proxy_name"}}" + url_map = google_compute_url_map.default.id + ssl_certificates = [google_compute_ssl_certificate.default.id] +} + +resource "google_compute_ssl_certificate" "default" { + name = "{{index $.Vars "ssl_certificate_name"}}" + private_key = file("path/to/private.key") + certificate = file("path/to/certificate.crt") +} + +resource "google_compute_url_map" "default" { + name = "{{index $.Vars "url_map_name"}}" + description = "a description" + + default_service = google_compute_backend_service.default.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_backend_service.default.id + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.default.id + } + } +} + +resource "google_compute_backend_service" "default" { + name = "{{index $.Vars "backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + name = "{{index $.Vars "http_health_check_name"}}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +output "target_https_proxy_fingerprint" { + value = google_compute_target_https_proxy.{{$.PrimaryResourceId}}.fingerprint + description = "The fingerprint of the target HTTPS proxy for optimistic locking" +} \ No newline at end of file From 25c43e934d64c4f930ba681230941045b1bb3692 Mon Sep 17 00:00:00 2001 From: Niharika <35183015+niharika-98@users.noreply.github.com> Date: Thu, 12 Jun 2025 01:56:57 +0530 Subject: [PATCH 345/884] Enable update bpa support on BPA resource (#14102) --- .../backupdr/BackupPlanAssociation.yaml | 4 +- ..._backup_dr_backup_plan_association_test.go | 248 ++++++++++++++++++ 2 files changed, 251 insertions(+), 1 deletion(-) create mode 100644 
mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go diff --git a/mmv1/products/backupdr/BackupPlanAssociation.yaml b/mmv1/products/backupdr/BackupPlanAssociation.yaml index 8885ad232ea7..edb94dd3cfdd 100644 --- a/mmv1/products/backupdr/BackupPlanAssociation.yaml +++ b/mmv1/products/backupdr/BackupPlanAssociation.yaml @@ -15,7 +15,8 @@ name: 'BackupPlanAssociation' base_url: projects/{{project}}/locations/{{location}}/backupPlanAssociations create_url: projects/{{project}}/locations/{{location}}/backupPlanAssociations/?backup_plan_association_id={{backup_plan_association_id}} self_link: projects/{{project}}/locations/{{location}}/backupPlanAssociations/{{backup_plan_association_id}} -immutable: true +update_verb: 'PATCH' +update_mask: true delete_url: projects/{{project}}/locations/{{location}}/backupPlanAssociations/{{backup_plan_association_id}} description: A Backup and DR BackupPlanAssociation. import_format: @@ -27,6 +28,7 @@ references: autogen_async: true timeouts: insert_minutes: 60 + update_minutes: 20 delete_minutes: 60 examples: - name: 'backup_dr_bpa' diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go new file mode 100644 index 000000000000..528d77f38f0b --- /dev/null +++ b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go @@ -0,0 +1,248 @@ +package backupdr_test + +import ( + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" + "time" +) + +func TestAccBackupDRBackupPlanAssociation_fullUpdate(t *testing.T) { + // Uses time.Now + acctest.SkipIfVcr(t) + + t.Parallel() + + timeNow := time.Now().UTC() + referenceTime := time.Date(timeNow.Year(), timeNow.Month(), 
timeNow.Day(), 0, 0, 0, 0, time.UTC) + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "effective_time": referenceTime.Add(24 * time.Hour).Format(time.RFC3339), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccBackupDRBackupPlanAssociation_fullCreate(context), + }, + { + ResourceName: "google_backup_dr_backup_plan_association.bpa", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"resource"}, + }, + { + Config: testAccBackupDRBackupPlanAssociation_fullUpdate(context), + }, + { + ResourceName: "google_backup_dr_backup_plan_association.bpa", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"resource"}, + }, + }, + }) +} + +func testAccBackupDRBackupPlanAssociation_fullCreate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_service_account" "default" { + account_id = "tf-test-my-custom-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + +resource "google_compute_instance" "default" { + name = "tf-test-compute-instance-%{random_suffix}" + machine_type = "n2-standard-2" + zone = "us-central1-a" + tags = ["foo", "bar"] + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + labels = { + my_label = "value" + } + } + } + // Local SSD disk + scratch_disk { + interface = "NVME" + } + network_interface { + network = "default" + access_config { + // Ephemeral public IP + } + } + service_account { + # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. 
+ email = google_service_account.default.email + scopes = ["cloud-platform"] + } +} +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a second backup vault built by Terraform." + backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + +resource "google_backup_dr_backup_plan" "foo" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "compute.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 2 + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 6 + } + } + } +} + +resource "google_backup_dr_backup_plan_association" "bpa" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = google_compute_instance.default.id + resource_type= "compute.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.foo.name +} +`, context) +} + +func testAccBackupDRBackupPlanAssociation_fullUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_service_account" "default" { + account_id = "tf-test-my-custom-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + +resource "google_compute_instance" "default" { + name = "tf-test-compute-instance-%{random_suffix}" + machine_type = "n2-standard-2" + zone = "us-central1-a" + tags = ["foo", "bar"] + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + labels = { + my_label = "value" + } + } + } + // Local SSD disk + scratch_disk { + interface = "NVME" + 
} + network_interface { + network = "default" + access_config { + // Ephemeral public IP + } + } + service_account { + # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. + email = google_service_account.default.email + scopes = ["cloud-platform"] + } +} + +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a second backup vault built by Terraform." + backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + +resource "google_backup_dr_backup_plan" "updated-bp" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-1-%{random_suffix}" + resource_type = "compute.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 4 + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 10 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 6 + } + } + } +} + +resource "google_backup_dr_backup_plan" "foo" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "compute.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 2 + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 6 + } + } + } +} + +resource "google_backup_dr_backup_plan_association" "bpa" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = google_compute_instance.default.id + 
resource_type= "compute.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.updated-bp.name +} +`, context) +} From d3f837c62de3f2a34752288500576edf6fc6de9f Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Thu, 12 Jun 2025 02:11:23 +0530 Subject: [PATCH 346/884] Adding support for log_linked_dataset_query_user_email attribute for BigQuery Analytics Hub listings. (#14225) --- .../bigqueryanalyticshub/Listing.yaml | 14 ++++++++++ ...ting_log_linked_dataset_query_user.tf.tmpl | 26 +++++++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_log_linked_dataset_query_user.tf.tmpl diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index a65d420ca9bf..29392b676f6e 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -67,6 +67,15 @@ examples: data_exchange_id: 'dcr_data_exchange' listing_id: 'dcr_listing' desc: 'example dcr data exchange' + - name: 'bigquery_analyticshub_listing_log_linked_dataset_query_user' + primary_resource_id: 'listing' + primary_resource_name: 'fmt.Sprintf("tf_test_log_email_de%s", context["random_suffix"]),fmt.Sprintf("tf_test_log_email_listing%s", context["random_suffix"])' + region_override: 'US' + vars: + data_exchange_id: 'tf_test_log_email_de' + listing_id: 'tf_test_log_email_listing' + dataset_id: 'tf_test_log_email_ds' + description: 'Example for log email test' parameters: properties: - name: 'name' @@ -194,3 +203,8 @@ properties: type: Boolean description: If true, restrict export of query result derived from restricted linked dataset table. + - name: 'logLinkedDatasetQueryUserEmail' + type: Boolean + description: + If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. 
+ immutable: true diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_log_linked_dataset_query_user.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_log_linked_dataset_query_user.tf.tmpl new file mode 100644 index 000000000000..8eec9b5459ef --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_log_linked_dataset_query_user.tf.tmpl @@ -0,0 +1,26 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}_log_email" { + location = "US" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "description"}}" +} + +resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.{{$.PrimaryResourceId}}_log_email.data_exchange_id + listing_id = "{{index $.Vars "listing_id"}}" + display_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "description"}}" + log_linked_dataset_query_user_email = true + + bigquery_dataset { + dataset = google_bigquery_dataset.{{$.PrimaryResourceId}}_log_email.id + } +} + +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}_log_email" { + dataset_id = "{{index $.Vars "dataset_id"}}" + friendly_name = "{{index $.Vars "dataset_id"}}" + description = "{{index $.Vars "description"}}" + location = "US" +} \ No newline at end of file From 6b30c5e9d5e4b3cbfb0201f6b427ca33a6e9262c Mon Sep 17 00:00:00 2001 From: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Date: Wed, 11 Jun 2025 18:06:26 -0300 Subject: [PATCH 347/884] Fixed ServiceAttachment target_service region/location inconsistency when referencing a Secure Web Proxy on creation (#13862) --- .../terraform/tpgresource/field_helpers.go | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git 
a/mmv1/third_party/terraform/tpgresource/field_helpers.go b/mmv1/third_party/terraform/tpgresource/field_helpers.go index 4a3c34aa864b..2fa1add6b666 100644 --- a/mmv1/third_party/terraform/tpgresource/field_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/field_helpers.go @@ -17,6 +17,8 @@ const ( RegionalLinkTemplate = "projects/%s/regions/%s/%s/%s" RegionalLinkBasePattern = "projects/(.+)/regions/(.+)/%s/(.+)" RegionalPartialLinkBasePattern = "regions/(.+)/%s/(.+)" + LocalLinkBasePattern = "projects/(.+)/locations/(.+)/%s/(.+)" + LocalPartialLinkBasePattern = "locations/(.+)/%s/(.+)" ProjectLinkTemplate = "projects/%s/%s/%s" ProjectBasePattern = "projects/(.+)/%s/(.+)" OrganizationLinkTemplate = "organizations/%s/%s/%s" @@ -414,6 +416,16 @@ func ParseRegionalFieldValue(resourceType, fieldValue, projectSchemaField, regio }, nil } + r = regexp.MustCompile(fmt.Sprintf(LocalLinkBasePattern, resourceType)) + if parts := r.FindStringSubmatch(fieldValue); parts != nil { + return &RegionalFieldValue{ + Project: parts[1], + Region: parts[2], + Name: parts[3], + resourceType: resourceType, + }, nil + } + project, err := GetProjectFromSchema(projectSchemaField, d, config) if err != nil { return nil, err @@ -429,6 +441,16 @@ func ParseRegionalFieldValue(resourceType, fieldValue, projectSchemaField, regio }, nil } + r = regexp.MustCompile(fmt.Sprintf(LocalPartialLinkBasePattern, resourceType)) + if parts := r.FindStringSubmatch(fieldValue); parts != nil { + return &RegionalFieldValue{ + Project: project, + Region: parts[1], + Name: parts[2], + resourceType: resourceType, + }, nil + } + region, err := GetRegionFromSchema(regionSchemaField, zoneSchemaField, d, config) if err != nil { return nil, err From 2afd13c285b2e9bcf8ae15075a3ce6e80736fbc7 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 11 Jun 2025 23:20:14 +0200 Subject: [PATCH 348/884] networkconnectivity: add `producer_instance_location` and `allowed_google_producers_resource_hierarchy_level` to 
`psc_config` for `google_network_connectivity_service_connection_policy` (#14170) --- .../ServiceConnectionPolicy.yaml | 23 +++++++++++++++++++ ...tivity_service_connection_policies_test.go | 10 ++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml b/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml index 83a39ea98884..abccb1705741 100644 --- a/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml +++ b/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml @@ -118,6 +118,29 @@ properties: required: true item_type: type: String + - name: 'producerInstanceLocation' + type: Enum + description: | + ProducerInstanceLocation is used to specify which authorization mechanism to use to determine which projects + the Producer instance can be within. + default_from_api: true + enum_values: + - 'PRODUCER_INSTANCE_LOCATION_UNSPECIFIED' + - 'CUSTOM_RESOURCE_HIERARCHY_LEVELS' + - name: 'allowedGoogleProducersResourceHierarchyLevel' + type: Array + description: | + List of Projects, Folders, or Organizations from where the Producer instance can be within. For example, + a network administrator can provide both 'organizations/foo' and 'projects/bar' as + allowed_google_producers_resource_hierarchy_levels. This allowlists this network to connect with any Producer + instance within the 'foo' organization or the 'bar' project. By default, + allowedGoogleProducersResourceHierarchyLevel is empty. The format for each + allowedGoogleProducersResourceHierarchyLevel is / where is one of 'projects', 'folders', or 'organizations' + and is either the ID or the number of the resource type. Format for each + allowedGoogleProducersResourceHierarchyLevel value: 'projects/' or 'folders/' or 'organizations/' Eg. 
+ [projects/my-project-id, projects/567, folders/891, organizations/123] + item_type: + type: String - name: 'limit' type: String description: | diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go index 2ce8136a7fdd..67c15b84cbe4 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go @@ -6,12 +6,14 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccNetworkConnectivityServiceConnectionPolicy_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), "networkProducerName": fmt.Sprintf("tf-test-network-%s", acctest.RandString(t, 10)), "subnetworkProducerName1": fmt.Sprintf("tf-test-subnet-producer-%s", acctest.RandString(t, 10)), "subnetworkProducerName2": fmt.Sprintf("tf-test-subnet-producer-%s", acctest.RandString(t, 10)), @@ -101,8 +103,12 @@ resource "google_network_connectivity_service_connection_policy" "default" { service_class = "gcp-memorystore-redis" network = google_compute_network.producer_net.id psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet1.id] - limit = 4 + producer_instance_location = "CUSTOM_RESOURCE_HIERARCHY_LEVELS" + subnetworks = [google_compute_subnetwork.producer_subnet1.id] + limit = 4 + allowed_google_producers_resource_hierarchy_level = [ + "organizations/%{org_id}", + ] } labels = { foo = "bar" From 4855b561930d8cd7c9f6500f4f215639ab2adc32 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 
11 Jun 2025 16:00:17 -0700 Subject: [PATCH 349/884] tgc-revival: support compute address resource (#14244) --- mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl | 1 + mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl | 1 + .../tgc_next/pkg/provider/provider_mmv1_resources.go | 3 +++ 3 files changed, 5 insertions(+) diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index 06160729863d..fe36339524e2 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -42,4 +42,5 @@ var provider *schema.Provider = tpg_provider.Provider() var ConverterMap = map[string]models.Converter{ resourcemanager.ProjectAssetType: resourcemanager.NewProjectConverter(provider), compute.ComputeInstanceAssetType: compute.NewComputeInstanceConverter(provider), + compute.ComputeAddressAssetType: compute.NewComputeAddressConverter(provider), } diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl index 5c4ef3a46ad5..d357f700efb3 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl @@ -36,4 +36,5 @@ import ( var ConverterMap = map[string]cai.ResourceConverter{ "google_project": resourcemanager.ResourceConverterProject(), "google_compute_instance": compute.ResourceConverterComputeInstance(), + "google_compute_address": compute.ResourceConverterComputeAddress(), } \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go b/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go index 07dc94951017..4623038a414a 100644 --- a/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go +++ b/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go @@ -12,4 +12,7 @@ var 
handwrittenTfplan2caiResources = map[string]*schema.Resource{ "google_compute_instance": compute.ResourceComputeInstance(), "google_project": resourcemanager.ResourceGoogleProject(), // ####### END handwritten resources ########### + + // TODO: will generate it automatically for MMv1 resources. + "google_compute_address": compute.ResourceComputeAddress(), } From 37d6e938f5169acbbd30a3651b1d46e0e85b1309 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Wed, 11 Jun 2025 16:08:51 -0700 Subject: [PATCH 350/884] Update for Docs for google_redis_cluster_user_created_connections to new url (#14240) --- mmv1/products/redis/ClusterUserCreatedConnections.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/redis/ClusterUserCreatedConnections.yaml b/mmv1/products/redis/ClusterUserCreatedConnections.yaml index 379d75834338..1c07c1c5ed54 100644 --- a/mmv1/products/redis/ClusterUserCreatedConnections.yaml +++ b/mmv1/products/redis/ClusterUserCreatedConnections.yaml @@ -19,7 +19,7 @@ description: | docs: note: | Please ensure your connections meet the requirements outlined at - https://cloud.devsite.corp.google.com/memorystore/docs/cluster/about-multiple-vpc-networking#application_connection_requirements. + https://cloud.google.com/memorystore/docs/cluster/about-multiple-vpc-networking. If you remove a connections item from the resource, the corresponding forwarding rule will no longer be functioning. If the corresponding forwarding rule is represented in your terraform configuration it is recommended to delete that `google_compute_forwarding_rule` resource at the same time. 
From 0b6839e7e34501a55ac94f19f03a7d8ab77651d2 Mon Sep 17 00:00:00 2001 From: Daniel Rieske Date: Thu, 12 Jun 2025 01:12:15 +0200 Subject: [PATCH 351/884] Fix `target_server` validation for service attachment (#14195) --- .../service_attachment_target_service.go.tmpl | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl b/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl index 512d7fcf22bf..2d22dfdd107a 100644 --- a/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl @@ -1,15 +1,16 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { resource := strings.Split(v.(string), "/") - resourceKind := resource[len(resource)-2] - resourceBound := resource[len(resource)-4] - if len(resource) < 4 { + if len(resource) < 4 { return nil, fmt.Errorf("invalid value for target_service") } - - _, err := tpgresource.ParseRegionalFieldValue(resourceKind, v.(string), "project", resourceBound, "zone", d, config, true) + + resourceKind := resource[len(resource)-2] + resourceBound := resource[len(resource)-4] + + _, err := tpgresource.ParseRegionalFieldValue(resourceKind, v.(string), "project", resourceBound, "zone", d, config, true) if err != nil { return nil, fmt.Errorf("invalid value for target_service: %w", err) } - - return v, nil + + return v, nil } From f8525c5fdcb1b73905bdbc610d21b57d5bdfc46b Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Wed, 11 Jun 2025 16:37:10 -0700 Subject: [PATCH 352/884] Fix issue on hubs in google_beyondcorp_security_gateway (#14049) --- mmv1/products/beyondcorp/SecurityGateway.yaml | 8 +++++-- .../beyondcorp_security_gateway.go.tmpl | 24 +++++++++++++++++++ 2 
files changed, 30 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl diff --git a/mmv1/products/beyondcorp/SecurityGateway.yaml b/mmv1/products/beyondcorp/SecurityGateway.yaml index 6eb4e755e96c..3a772561b28f 100644 --- a/mmv1/products/beyondcorp/SecurityGateway.yaml +++ b/mmv1/products/beyondcorp/SecurityGateway.yaml @@ -54,6 +54,8 @@ async: resource_inside_response: true include_project: false autogen_status: U2VjdXJpdHlHYXRld2F5 +custom_code: + constants: 'templates/terraform/constants/beyondcorp_security_gateway.go.tmpl' parameters: - name: location type: String @@ -104,15 +106,17 @@ properties: as a key. key_name: region key_description: The region to deploy the hub in. + set_hash_func: 'beyondcorpSecurityGatewayHubsHash' value_type: name: Hub type: NestedObject properties: - - name: internet_gateway + - name: internetGateway type: NestedObject description: Internet Gateway configuration. + default_from_api: true properties: - - name: assigned_ips + - name: assignedIps type: Array description: Output only. List of IP addresses assigned to the Cloud NAT. output: true diff --git a/mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl b/mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl new file mode 100644 index 000000000000..0274329133d3 --- /dev/null +++ b/mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl @@ -0,0 +1,24 @@ +{{/* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +func beyondcorpSecurityGatewayHubsHash(v interface{}) int { + if v == nil { + return 0 + } + + var buf bytes.Buffer + m := v.(map[string]interface{}) + + buf.WriteString(fmt.Sprintf("%s-", m["region"].(string))) + + return tpgresource.Hashcode(buf.String()) +} \ No newline at end of file From 4a31346b714001338e63d03075658e8ecc4619d9 Mon Sep 17 00:00:00 2001 From: Niharika <35183015+niharika-98@users.noreply.github.com> Date: Thu, 12 Jun 2025 21:33:22 +0530 Subject: [PATCH 353/884] Update backup schedule to yearly to avoid any flakiness in future (#14245) Signed-off-by: Cezary Sobczak Co-authored-by: FilipKubawskiOkta Co-authored-by: Calvin Liu Co-authored-by: Ryan Oaks Co-authored-by: Zhenhua Li Co-authored-by: Ramon Vermeulen Co-authored-by: Sepehr Javid <32390553+sepehrjavid@users.noreply.github.com> Co-authored-by: Riley Karson Co-authored-by: Thomas Rodgers Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Co-authored-by: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Co-authored-by: Dawid212 Co-authored-by: Arnav Dham Co-authored-by: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> --- ..._backup_dr_backup_plan_association_test.go | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go index 528d77f38f0b..492dcce0fc91 100644 --- a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go +++ b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go @@ -111,16 +111,17 @@ resource "google_backup_dr_backup_plan" "foo" { backup_rules { rule_id = "rule-1" - backup_retention_days = 2 + 
backup_retention_days = 366 standard_schedule { - recurrence_type = "HOURLY" - hourly_frequency = 6 - time_zone = "UTC" + recurrence_type = "YEARLY" + months = ["JANUARY"] + days_of_month = [15] + time_zone = "UTC" backup_window { - start_hour_of_day = 0 - end_hour_of_day = 6 + start_hour_of_day = 2 # Backup starts at 2:00 AM UTC + end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM } } } @@ -199,16 +200,17 @@ resource "google_backup_dr_backup_plan" "updated-bp" { backup_rules { rule_id = "rule-1" - backup_retention_days = 4 + backup_retention_days = 366 standard_schedule { - recurrence_type = "HOURLY" - hourly_frequency = 10 - time_zone = "UTC" + recurrence_type = "YEARLY" + months = ["JANUARY"] + days_of_month = [15] + time_zone = "UTC" backup_window { - start_hour_of_day = 0 - end_hour_of_day = 6 + start_hour_of_day = 2 # Backup starts at 2:00 AM UTC + end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM } } } @@ -222,16 +224,17 @@ resource "google_backup_dr_backup_plan" "foo" { backup_rules { rule_id = "rule-1" - backup_retention_days = 2 + backup_retention_days = 366 standard_schedule { - recurrence_type = "HOURLY" - hourly_frequency = 6 - time_zone = "UTC" + recurrence_type = "YEARLY" + months = ["JANUARY"] + days_of_month = [15] + time_zone = "UTC" backup_window { - start_hour_of_day = 0 - end_hour_of_day = 6 + start_hour_of_day = 2 # Backup starts at 2:00 AM UTC + end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM } } } From 19c720b131030c2bd740d8d8fe0a59c8b2b3c594 Mon Sep 17 00:00:00 2001 From: Luca Prete Date: Thu, 12 Jun 2025 18:04:57 +0200 Subject: [PATCH 354/884] [#23232] Add region url_parameter to google_vertex_ai_index_endpoint_deployed_index (#14236) Co-authored-by: Luca Prete --- .../vertexai/IndexEndpointDeployedIndex.yaml | 5 ++ ...deployed_index_automatic_resources.tf.tmpl | 76 ++++++++-------- ...ndex_endpoint_deployed_index_basic.tf.tmpl | 88 ++++++++++--------- ..._endpoint_deployed_index_basic_two.tf.tmpl | 85 
++++++++++-------- ...deployed_index_dedicated_resources.tf.tmpl | 80 +++++++++-------- 5 files changed, 180 insertions(+), 154 deletions(-) diff --git a/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml b/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml index e2f4f9306576..0018354b2e88 100644 --- a/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml +++ b/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml @@ -108,6 +108,11 @@ parameters: immutable: true resource: 'IndexEndpoint' imports: 'name' + - name: 'region' + type: String + description: The region of the index endpoint deployment. eg us-central1 + url_param_only: true + immutable: true properties: - name: 'name' type: String diff --git a/mmv1/templates/terraform/examples/vertex_ai_index_endpoint_deployed_index_automatic_resources.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_index_endpoint_deployed_index_automatic_resources.tf.tmpl index 42b020b82784..e54be719ddcc 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_index_endpoint_deployed_index_automatic_resources.tf.tmpl +++ b/mmv1/templates/terraform/examples/vertex_ai_index_endpoint_deployed_index_automatic_resources.tf.tmpl @@ -1,67 +1,69 @@ resource "google_vertex_ai_index_endpoint_deployed_index" "{{$.PrimaryResourceId}}" { - depends_on = [ google_vertex_ai_index_endpoint.vertex_endpoint ] - index_endpoint = google_vertex_ai_index_endpoint.vertex_endpoint.id - index = google_vertex_ai_index.index.id // this is the index that will be deployed onto an endpoint deployed_index_id = "{{index $.Vars "deployed_index_id"}}" - display_name = "{{index $.Vars "display_name"}}" - automatic_resources{ + display_name = "{{index $.Vars "display_name"}}" + region = "us-central1" + index = google_vertex_ai_index.index.id + index_endpoint = google_vertex_ai_index_endpoint.vertex_endpoint.id + deployment_group = "test" + + automatic_resources { max_replica_count = 2 min_replica_count = 1 } - deployment_group = "test" -} - -resource 
"google_storage_bucket" "bucket" { - name = "{{index $.Vars "bucket_name"}}" - location = "us-central1" - uniform_bucket_level_access = true -} - -# The sample data comes from the following link: -# https://cloud.google.com/vertex-ai/docs/matching-engine/filtering#specify-namespaces-tokens -resource "google_storage_bucket_object" "data" { - name = "contents/data.json" - bucket = google_storage_bucket.bucket.name - content = < Date: Thu, 12 Jun 2025 14:43:47 -0700 Subject: [PATCH 355/884] add deprecation message to google_notebook_runtime (#14260) --- mmv1/products/notebooks/Runtime.yaml | 5 +++++ .../services/notebooks/resource_notebooks_instance_test.go | 2 ++ 2 files changed, 7 insertions(+) diff --git a/mmv1/products/notebooks/Runtime.yaml b/mmv1/products/notebooks/Runtime.yaml index 4d73e00b2ff6..720d217f5e80 100644 --- a/mmv1/products/notebooks/Runtime.yaml +++ b/mmv1/products/notebooks/Runtime.yaml @@ -13,6 +13,11 @@ --- name: 'Runtime' +# This resource should not be removed until the 2025 major release or later. +# Check instance availability first before fully removing. +deprecation_message: >- + `google_notebook_runtime` is deprecated and will be removed in a future major release. + Use `google_workbench_instance` instead. description: | A Cloud AI Platform Notebook runtime. 
diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go index 819738664e66..b62361413ffd 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go @@ -32,6 +32,8 @@ func TestAccNotebooksInstance_create_vm_image(t *testing.T) { } func TestAccNotebooksInstance_update(t *testing.T) { + t.Skip() + context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), } From a2cd4476f236fb4a05ccd1f749d33493f715ce71 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 12 Jun 2025 17:54:09 -0700 Subject: [PATCH 356/884] Revert "Add Terraform Support for WireGroups." (#14258) --- mmv1/products/compute/WireGroup.yaml | 193 ------------------ .../cross_site_network.go.tmpl | 23 --- .../custom_check_destroy/wire_group.go.tmpl | 22 -- .../custom_delete/cross_site_network.go.tmpl | 48 ----- .../examples/compute_wire_group_basic.tf.tmpl | 25 --- .../resource_compute_wire_group_test.go.tmpl | 115 ----------- 6 files changed, 426 deletions(-) delete mode 100644 mmv1/products/compute/WireGroup.yaml delete mode 100644 mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl delete mode 100644 mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl diff --git a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml deleted file mode 100644 index 62416f7773dc..000000000000 --- a/mmv1/products/compute/WireGroup.yaml +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright 2024 Google Inc. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WireGroup' -kind: 'compute#wireGroup' -description: | - The WireGroup resource represents a group of redundant wires between interconnects in two different metros. Each WireGroup belongs to a CrossSiteNetwork. A wire group defines endpoints and the wires which exist between them. - -references: - guides: - 'Create a WireGroup': 'https://cloud.google.com/network-connectivity/docs/interconnect/how-to/cross-site/modify-network#add-wire-group' - api: 'https://cloud.google.com/compute/docs/reference/rest/beta/wireGroups' -min_version: beta -docs: -id_format: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' -base_url: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups' -self_link: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false -custom_code: - test_check_destroy: 'templates/terraform/custom_check_destroy/wire_group.go.tmpl' -examples: - - name: 'compute_wire_group_basic' - primary_resource_id: 'example-test-wire-group' - vars: - name: 
'test-wire-group' - description: 'Example Wire Group' - cross_site_network: 'test-cross-site-network' - min_version: 'beta' - test_env_vars: - project: 'PROJECT_NAME' -parameters: - - name: 'crossSiteNetwork' - type: ResourceRef - description: Required cross site network to which wire group belongs. - required: true - immutable: true - url_param_only: true - resource: 'CrossSiteNetwork' - imports: 'name' - diff_suppress_func: 'tpgresource.CompareResourceNames' - custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' - min_version: beta -properties: - - name: 'description' - type: String - description: | - An optional description of this resource. Provide this property when you create the resource. - - name: 'creationTimestamp' - type: Time - description: | - Creation timestamp in RFC3339 text format. - output: true - - name: 'name' - type: String - description: | - Name of the resource. Provided by the client when the resource is created. The name must be - 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters - long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first - character must be a lowercase letter, and all following characters must be a dash, - lowercase letter, or digit, except the last character, which cannot be a dash. - required: true - validation: - regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - - name: endpoints - type: KeyValuePairs - description: | - Endpoints grouped by location, each mapping to interconnect configurations. - properties: - - name: interconnects - type: KeyValuePairs - description: | - Map of interconnect details. - properties: - - name: interconnect - type: string - - name: vlan_tags - type: Array - description: | - VLAN tags for the interconnect. - item_type: - type: integer - - name: adminEnabled - type: boolean - description: | - Indicates whether the wire group is administratively enabled. 
- - name: wireGroupProperties - type: NestedObject - description: | - Properties specific to the wire group. - properties: - - name: type - type: enum - description: | - Type of wire group (enum). - WIRE: a single pseudowire over two Interconnect connections with no redundancy. - REDUNDANT: two pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. - BOX_AND_CROSS: four pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. - enum_values: - - 'WIRE' - - 'REDUNDANT' - - 'BOX_AND_CROSS' - - name: wireProperties - type: NestedObject - description: | - Default properties for wires within the group. - properties: - - name: bandwidthUnmetered - type: Integer - description: | - The unmetered bandwidth setting. - - name: faultResponse - type: enum - description: | - Response when a fault is detected in a pseudowire: - NONE: default. - DISABLE_PORT: set the port line protocol down when inline probes detect a fault. This setting is only permitted on port mode pseudowires. - enum_values: - - 'NONE' - - 'DISABLE_PORT' - - name: wires - type: Array - description: | - The single/redundant wire(s) managed by the wire group. - output: true - item_type: - type: NestedObject - properties: - - name: label - type: string - - name: endpoints - type: Array - description: | - 'Wire endpoints are specific Interconnect connections.' 
- item_type: - type: NestedObject - properties: - - name: interconnect - type: string - - name: vlanTag - type: integer - - name: wireProperties - type: NestedObject - output: true # This is redundant if the parent 'wires' is output: true, but harmless - properties: - - name: bandwidthUnmetered - type: Integer - - name: faultResponse - type: enum - enum_values: - - 'NONE' - - 'DISABLE_PORT' - - name: adminEnabled - type: boolean - - name: topology - type: NestedObject - description: | - Topology details for the wire group configuration. - output: true - properties: - - name: endpoints - type: Array - item_type: - type: NestedObject - properties: - - name: label - type: string - - name: city - type: string diff --git a/mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl deleted file mode 100644 index 901b6a90bc0d..000000000000 --- a/mmv1/templates/terraform/custom_check_destroy/cross_site_network.go.tmpl +++ /dev/null @@ -1,23 +0,0 @@ - config := acctest.GoogleProviderConfig(t) - - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/global/crossSiteNetworks") - if err != nil { - return err - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, - }) - if err == nil { - return fmt.Errorf("ComputeCrossSiteNetwork still exists at %s", url) - } \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl deleted file mode 100644 index baad2abccd4a..000000000000 --- a/mmv1/templates/terraform/custom_check_destroy/wire_group.go.tmpl +++ /dev/null @@ -1,22 +0,0 @@ - 
config := acctest.GoogleProviderConfig(t) - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/global/crossSiteNetworks/{{"{{"}}cross_site_network{{"}}"}}/wireGroups/{{"{{"}}name{{"}}"}}") - if err != nil { - return err - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, - }) - if err == nil { - return fmt.Errorf("ComputeWireGroup still exists at %s", url) - } \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl b/mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl deleted file mode 100644 index 6ae8ec5fdc84..000000000000 --- a/mmv1/templates/terraform/custom_delete/cross_site_network.go.tmpl +++ /dev/null @@ -1,48 +0,0 @@ - - billingProject := "" - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error fetching project for CrossSiteNetwork: %s", err) - } - billingProject = project - - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/global/crossSiteNetworks/{{"{{"}}crossSiteNetworks{{"}}"}}") - if err != nil { - return err - } - - var obj map[string]interface{} - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - headers := make(http.Header) - - log.Printf("[DEBUG] Deleting CrossSiteNetwork %q", d.Id()) - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: d.Timeout(schema.TimeoutDelete), - Headers: headers, - }) - if err != nil { 
- return transport_tpg.HandleNotFoundError(err, d, "CrossSiteNetwork") - } - - err = ComputeOperationWaitTime( - config, res, project, "Deleting CrossSiteNetwork", userAgent, - d.Timeout(schema.TimeoutDelete)) - - if err != nil { - return err - } - - log.Printf("[DEBUG] Finished deleting CrossSiteNetwork %q: %#v", d.Id(), res) - return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl deleted file mode 100644 index b4f19b51f3d3..000000000000 --- a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl +++ /dev/null @@ -1,25 +0,0 @@ -data "google_project" "project" { -provider = google-beta -} - -resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "{{index $.Vars "cross_site_network"}}" - description = "Example cross site network" - provider = google-beta -} - -resource "google_compute_wire_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "name"}}" - description = "{{index $.Vars "description"}}" - cross_site_network = "{{index $.Vars "cross_site_network"}}" - provider = google-beta - depends_on = [ - google_compute_cross_site_network.example-cross-site-network - ] - wire_properties { - bandwidth_unmetered = 10 - } - wire_group_properties { - type = "WIRE" - } -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl deleted file mode 100644 index 7b9fdc3b965f..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ /dev/null @@ -1,115 +0,0 @@ -package compute_test -{{ if ne $.TargetVersionName `ga` -}} -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - 
"github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccComputeWireGroup_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeWireGroupDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeWireGroup_basic(context), - }, - { - ResourceName: "google_compute_wire_group.example-test-wire-group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cross_site_network"}, - }, - { - Config: testAccComputeWireGroup_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_wire_group.example-test-wire-group", plancheck.ResourceActionUpdate), - }, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), - ), - }, - { - ResourceName: "google_compute_wire_group.example-test-wire-group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cross_site_network"}, - }, - }, - }) -} - -func testAccComputeWireGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { -provider = google-beta -} - -resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network%{random_suffix}" - description = "Example cross site network" - provider = google-beta -} - -resource "google_compute_wire_group" "example-test-wire-group" { - name = 
"tf-test-test-wire-group%{random_suffix}" - description = "Example Wire Group%{random_suffix}" - cross_site_network = google_compute_cross_site_network.example-cross-site-network.name - provider = google-beta - depends_on = [ - google_compute_cross_site_network.example-cross-site-network - ] - wire_properties { - bandwidth_unmetered = 1000 - } - wire_group_properties { - type = "REDUNDANT" - } -} -`, context) -} - -func testAccComputeWireGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { -provider = google-beta -} - -resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network%{random_suffix}" - description = "Example cross site network" - provider = google-beta -} - -resource "google_compute_wire_group" "example-test-wire-group" { - name = "tf-test-test-wire-group%{random_suffix}" - description = "Example Wire Group Updated%{random_suffix}" - cross_site_network = google_compute_cross_site_network.example-cross-site-network.name - provider = google-beta - depends_on = [ - google_compute_cross_site_network.example-cross-site-network - ] - wire_properties { - bandwidth_unmetered = 1000 - } - wire_group_properties { - type = "REDUNDANT" - } -} -`, context) -} -{{- end }} \ No newline at end of file From 67eb3a1bb44d954a25278932544bc808eec1ee00 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Fri, 13 Jun 2025 03:44:39 +0200 Subject: [PATCH 357/884] Added cipher_suite to VpnTunnel (#14248) --- mmv1/products/compute/VpnTunnel.yaml | 77 +++++++++++++++++ .../examples/vpn_tunnel_cipher_suite.tf.tmpl | 84 +++++++++++++++++++ 2 files changed, 161 insertions(+) create mode 100644 mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl diff --git a/mmv1/products/compute/VpnTunnel.yaml b/mmv1/products/compute/VpnTunnel.yaml index 63ec456d1ba1..6aa59f9b177a 100644 --- a/mmv1/products/compute/VpnTunnel.yaml +++ b/mmv1/products/compute/VpnTunnel.yaml @@ -52,6 
+52,18 @@ examples: udp500_forwarding_rule_name: 'fr-udp500' udp4500_forwarding_rule_name: 'fr-udp4500' route_name: 'route1' + - name: 'vpn_tunnel_cipher_suite' + min_version: 'beta' + primary_resource_id: 'tunnel1' + vars: + vpn_tunnel_name: 'tunnel-cipher' + target_vpn_gateway_name: 'vpn-1' + network_name: 'network-1' + address_name: 'vpn-static-ip' + esp_forwarding_rule_name: 'fr-esp' + udp500_forwarding_rule_name: 'fr-udp500' + udp4500_forwarding_rule_name: 'fr-udp4500' + route_name: 'route1' parameters: - name: 'region' type: ResourceRef @@ -219,3 +231,68 @@ properties: type: String description: 'Detailed status message for the VPN tunnel.' output: true + - name: 'cipherSuite' + type: NestedObject + min_version: 'beta' + description: | + User specified list of ciphers to use for the phase 1 and phase 2 of the IKE protocol. + properties: + - name: 'phase1' + type: NestedObject + description: 'Cipher configuration for phase 1 of the IKE protocol.' + min_version: 'beta' + properties: + - name: 'encryption' + type: Array + description: 'Encryption algorithms.' + is_set: true + min_version: 'beta' + item_type: + type: String + - name: 'integrity' + type: Array + description: 'Integrity algorithms.' + is_set: true + min_version: 'beta' + item_type: + type: String + - name: 'prf' + type: Array + description: 'Pseudo-random functions.' + is_set: true + min_version: 'beta' + item_type: + type: String + - name: 'dh' + type: Array + description: 'Diffie-Hellman groups.' + is_set: true + min_version: 'beta' + item_type: + type: String + - name: 'phase2' + type: NestedObject + description: 'Cipher configuration for phase 2 of the IKE protocol.' + min_version: 'beta' + properties: + - name: 'encryption' + type: Array + description: 'Encryption algorithms.' + is_set: true + min_version: 'beta' + item_type: + type: String + - name: 'integrity' + type: Array + description: 'Integrity algorithms.' 
+ is_set: true + min_version: 'beta' + item_type: + type: String + - name: 'pfs' + type: Array + description: 'Perfect forward secrecy groups.' + is_set: true + min_version: 'beta' + item_type: + type: String diff --git a/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl b/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl new file mode 100644 index 000000000000..2e1052da5374 --- /dev/null +++ b/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl @@ -0,0 +1,84 @@ +resource "google_compute_vpn_tunnel" "tunnel1" { + provider = google-beta + name = "{{index $.Vars "vpn_tunnel_name"}}" + peer_ip = "15.0.0.120" + shared_secret = "a secret message" + + target_vpn_gateway = google_compute_vpn_gateway.target_gateway.id + + cipher_suite { + phase1 { + encryption = ["AES-CBC-256"] + integrity = ["HMAC-SHA2-256-128"] + prf = ["PRF-HMAC-SHA2-256"] + dh = ["Group-14"] + } + phase2 { + encryption = ["AES-CBC-128"] + integrity = ["HMAC-SHA2-256-128"] + pfs = ["Group-14"] + } + } + + depends_on = [ + google_compute_forwarding_rule.fr_esp, + google_compute_forwarding_rule.fr_udp500, + google_compute_forwarding_rule.fr_udp4500, + ] + + labels = { + foo = "bar" + } +} + +resource "google_compute_vpn_gateway" "target_gateway" { + provider = google-beta + name = "{{index $.Vars "target_vpn_gateway_name"}}" + network = google_compute_network.network1.id +} + +resource "google_compute_network" "network1" { + provider = google-beta + name = "{{index $.Vars "network_name"}}" +} + +resource "google_compute_address" "vpn_static_ip" { + provider = google-beta + name = "{{index $.Vars "address_name"}}" +} + +resource "google_compute_forwarding_rule" "fr_esp" { + provider = google-beta + name = "{{index $.Vars "esp_forwarding_rule_name"}}" + ip_protocol = "ESP" + ip_address = google_compute_address.vpn_static_ip.address + target = google_compute_vpn_gateway.target_gateway.id +} + +resource "google_compute_forwarding_rule" "fr_udp500" { + provider = 
google-beta + name = "{{index $.Vars "udp500_forwarding_rule_name"}}" + ip_protocol = "UDP" + port_range = "500" + ip_address = google_compute_address.vpn_static_ip.address + target = google_compute_vpn_gateway.target_gateway.id +} + +resource "google_compute_forwarding_rule" "fr_udp4500" { + provider = google-beta + name = "{{index $.Vars "udp4500_forwarding_rule_name"}}" + ip_protocol = "UDP" + port_range = "4500" + ip_address = google_compute_address.vpn_static_ip.address + target = google_compute_vpn_gateway.target_gateway.id +} + +resource "google_compute_route" "route1" { + provider = google-beta + name = "{{index $.Vars "route_name"}}" + network = google_compute_network.network1.name + dest_range = "15.0.0.0/24" + priority = 1000 + + next_hop_vpn_tunnel = google_compute_vpn_tunnel.tunnel1.id +} From c4771becc50dbfd7554dbeba7382b06007a41653 Mon Sep 17 00:00:00 2001 From: Xian-Ji Chen <68801742+XianJiChen@users.noreply.github.com> Date: Fri, 13 Jun 2025 02:46:12 +0000 Subject: [PATCH 358/884] Add Terraform support for dataplex recommendation api (#13959) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...data_source_dataplex_data_quality_rules.go | 334 ++++++++++++++++++ ...source_dataplex_data_quality_rules_test.go | 190 ++++++++++ .../dataplex_data_quality_rules.html.markdown | 38 ++ 4 files changed, 563 insertions(+) create mode 100644 mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules.go create mode 100644 mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 1f1a3612f4de..0f159c666b36 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ 
b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -130,6 +130,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_container_engine_versions": container.DataSourceGoogleContainerEngineVersions(), "google_container_registry_image": containeranalysis.DataSourceGoogleContainerImage(), "google_container_registry_repository": containeranalysis.DataSourceGoogleContainerRepo(), + "google_dataplex_data_quality_rules": dataplex.DataSourceDataplexDataQualityRules(), "google_dataproc_metastore_service": dataprocmetastore.DataSourceDataprocMetastoreService(), "google_datastream_static_ips": datastream.DataSourceGoogleDatastreamStaticIps(), "google_dns_keys": dns.DataSourceDNSKeys(), diff --git a/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules.go b/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules.go new file mode 100644 index 000000000000..0d163149c218 --- /dev/null +++ b/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules.go @@ -0,0 +1,334 @@ +package dataplex + +import ( + "fmt" + "strings" + "unicode" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceDataplexDataQualityRules() *schema.Resource { + return &schema.Resource{ + Read: dataSourceDataplexDataQualityRulesRead, + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + }, + "location": { + Type: schema.TypeString, + Optional: true, + }, + "data_scan_id": { + Type: schema.TypeString, + Required: true, + }, + "rules": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "column": { + Type: schema.TypeString, + Computed: true, + Description: `The unnested column which this rule is evaluated 
against.`, + }, + "ignore_null": { + Type: schema.TypeBool, + Computed: true, + Description: `Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. + This field is only valid for the following type of rules: RangeExpectation, RegexExpectation, SetExpectation, UniquenessExpectation`, + }, + "dimension": { + Type: schema.TypeString, + Computed: true, + Description: `The dimension a rule belongs to. Supported dimensions are "COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "FRESHNESS", "VOLUME"`, + }, + "threshold": { + Type: schema.TypeFloat, + Computed: true, + Description: `The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0). This field is only valid for row-level type rules.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `A mutable name for the rule. + The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). + The maximum length is 63 characters. + Must start with a letter. + Must end with a number or a letter.`, + }, + "description": { + Type: schema.TypeString, + Computed: true, + Description: `Description of the rule. (The maximum length is 1,024 characters.)`, + }, + "suspended": { + Type: schema.TypeBool, + Computed: true, + Description: `Whether the Rule is active or suspended. 
Default is false.`, + }, + "range_expectation": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "min_value": { + Type: schema.TypeString, + Computed: true, + Description: `The minimum column value allowed for a row to pass this validation.`, + }, + "max_value": { + Type: schema.TypeString, + Computed: true, + Description: `The maximum column value allowed for a row to pass this validation.`, + }, + "strict_min_enabled": { + Type: schema.TypeBool, + Computed: true, + Description: `Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed.`, + }, + "strict_max_enabled": { + Type: schema.TypeBool, + Computed: true, + Description: ` Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed.`, + }, + }, + }, + Description: `Row-level rule which evaluates whether each column value lies between a specified range.`, + }, + "non_null_expectation": { + Type: schema.TypeList, + Computed: true, + Description: `Row-level rule which evaluates whether each column value is null.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{}, + }, + }, + "set_expectation": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "values": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `Expected values for the column value.`, + }, + }, + }, + Description: `Row-level rule which evaluates whether each column value is contained by a specified set.`, + }, + "regex_expectation": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "regex": { + Type: schema.TypeString, + Computed: true, + Description: `A regular expression the column value is expected to match.`, + }, + }, + }, + + Description: `Row-level rule which evaluates whether each column value matches a specified 
regex.`, + }, + "uniqueness_expectation": { + Type: schema.TypeList, + Computed: true, + Description: `Row-level rule which evaluates whether each column value is unique.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{}, + }, + }, + "statistic_range_expectation": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "statistic": { + Type: schema.TypeString, + Computed: true, + Description: `The list of aggregate metrics a rule can be evaluated against. + Possible values: ["STATISTIC_UNDEFINED", "MEAN", "MIN", "MAX"]`, + }, + "min_value": { + Type: schema.TypeString, + Computed: true, + Description: `The minimum column value allowed for a row to pass this validation.`, + }, + "max_value": { + Type: schema.TypeString, + Computed: true, + Description: `The maximum column value allowed for a row to pass this validation.`, + }, + "strict_min_enabled": { + Type: schema.TypeBool, + Computed: true, + Description: `Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed.`, + }, + "strict_max_enabled": { + Type: schema.TypeBool, + Computed: true, + Description: ` Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed.`, + }, + }, + }, + Description: `Aggregate rule which evaluates whether the column aggregate statistic lies between a specified range.`, + }, + "row_condition_expectation": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "sql_expression": { + Type: schema.TypeString, + Computed: true, + Description: `The SQL expression.`, + }, + }, + }, + Description: `Row-level rule which evaluates whether each row in a table passes the specified condition.`, + }, + "table_condition_expectation": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "sql_expression": { + Type: schema.TypeString, + Computed: 
true, + Description: `The SQL expression.`, + }, + }, + }, + Description: `Aggregate rule which evaluates whether the provided expression is true for a table.`, + }, + "sql_assertion": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "sql_statement": { + Type: schema.TypeString, + Computed: true, + Description: `The SQL expression.`, + }, + }, + }, + Description: `Aggregate rule which evaluates the number of rows returned for the provided statement. If any rows are returned, this rule fails.`, + }, + }, + }, + }, + }, + } +} + +func camelToSnake(s string) string { + var result strings.Builder + for i, ch := range s { + if unicode.IsUpper(ch) { + if i > 0 { + result.WriteByte('_') + } + result.WriteRune(unicode.ToLower(ch)) + } else { + result.WriteRune(ch) + } + } + return result.String() +} + +func flattenDataSourceDataplexDataQualityRulesExpectation(expectation interface{}) []interface{} { + expectationsToSet := make(map[string]interface{}) + + if expectation == nil { + return []interface{}{expectationsToSet} + } + + originalExpectation := expectation.(map[string]interface{}) + for k, v := range originalExpectation { + snakeCaseKey := camelToSnake(k) + expectationsToSet[snakeCaseKey] = v + } + return []interface{}{expectationsToSet} +} + +func flattenDataSourceDataplexDataQualityRulesRules(rules interface{}) []interface{} { + rulesToSet := make([]interface{}, 0) + + originalRules := rules.([]interface{}) + + for _, rule := range originalRules { + + newRuleMap := make(map[string]interface{}) + ruleMap := rule.(map[string]interface{}) + + for k, v := range ruleMap { + snakeCaseKey := camelToSnake(k) + if strings.HasSuffix(k, "Expectation") { + // For expectation fields, need extra flatten + newRuleMap[snakeCaseKey] = flattenDataSourceDataplexDataQualityRulesExpectation(v) + } else { + // For other fields (column, dimension, threshold, etc.), directly assign + newRuleMap[snakeCaseKey] = v + } + } + 
rulesToSet = append(rulesToSet, newRuleMap) + } + + return rulesToSet +} + +func dataSourceDataplexDataQualityRulesRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + location, err := tpgresource.GetLocation(d, config) + if err != nil { + return err + } + + data_scan_id := d.Get("data_scan_id").(string) + + url, err := tpgresource.ReplaceVars(d, config, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}:generateDataQualityRules") + if err != nil { + return err + } + + id := fmt.Sprintf("projects/%s/locations/%s/dataScans/%s", project, location, data_scan_id) + d.SetId(id) + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.Is429QuotaError}, + }) + + if err != nil { + return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("DataQualityRules %q", d.Id()), url) + } + + if err := d.Set("rules", flattenDataSourceDataplexDataQualityRulesRules(res["rule"])); err != nil { + return fmt.Errorf("Error setting rule: %s", err) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go b/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go new file mode 100644 index 000000000000..dc67c07ac24a --- /dev/null +++ b/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go @@ -0,0 +1,190 @@ +package dataplex_test + +import ( + "fmt" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + 
"github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccDataplexDataQualityRules(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "location": envvar.GetTestRegionFromEnv(), + "data_scan_id": "tf-test-datascan-profile-id", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataplexDataQualityRules_datascan_config(context), + }, + { + RefreshState: true, + Check: testAccDataplexDataScanJobTriggerRunAndWaitUntilComplete(t, "google_dataplex_datascan.tf_test_datascan_profile"), + }, + { + Config: testAccDataplexDataQualityRules_rules_config(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_dataplex_data_quality_rules.generated_dq_rules", "rules.#", "7"), + ), + }, + }, + }) +} + +func testAccDataplexDataQualityRules_datascan_config(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dataplex_datascan" "tf_test_datascan_profile" { + location = "%{location}" + data_scan_id = "%{data_scan_id}-%{random_suffix}" + + data { + resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" + } + + execution_spec { + trigger { + on_demand {} + } + } + + data_profile_spec {} + + project = "%{project}" + }`, context) +} + +func testAccDataplexDataScanJobTriggerRunAndWaitUntilComplete(t *testing.T, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + + rs, 
ok := s.RootModule().Resources[resourceName] + + if !ok { + return fmt.Errorf("Resource not found: %s", resourceName) + } + + config := acctest.GoogleProviderConfig(t) + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}:run") + if err != nil { + return fmt.Errorf("Failed to generate URL for triggering datascan run: %s", err) + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + + if err != nil { + return fmt.Errorf("Request for triggering data scan run failed: %s", err) + } + + dataScanJobId := extractDataScanJobId(res["job"]) + dataScanJobState := extractDataScanJobState(res["job"]) + + for dataScanJobState != "SUCCEEDED" { + dataScanJobState, err = getDataScanJobState(t, rs, dataScanJobId) + if err != nil { + return fmt.Errorf("Getting data scan job state failed: failed to get state: %s", err) + } + + switch dataScanJobState { + case "STATE_UNSPECIFIED", "RUNNING", "PENDING": + time.Sleep(10 * time.Second) // Pause for 10 seconds to prevend making too many api calls + case "CANCELING", "CANCELLED", "FAILED": + return fmt.Errorf("Data scan job failed: Invalid state: %s", dataScanJobState) + case "SUCCEEDED": + default: + return fmt.Errorf("Getting data scan job state failed: invalid state: %s", dataScanJobState) + } + } + + return nil + } +} + +func testAccDataplexDataQualityRules_rules_config(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dataplex_datascan" "tf_test_datascan_profile" { + location = "%{location}" + data_scan_id = "%{data_scan_id}-%{random_suffix}" + + data { + resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" + } + 
+ execution_spec { + trigger { + on_demand {} + } + } + + data_profile_spec {} + + project = "%{project}" + } + + data "google_dataplex_data_quality_rules" "generated_dq_rules" { + project = google_dataplex_datascan.tf_test_datascan_profile.project + location = google_dataplex_datascan.tf_test_datascan_profile.location + data_scan_id = google_dataplex_datascan.tf_test_datascan_profile.data_scan_id + }`, context) +} + +func getDataScanJobState(t *testing.T, rs *terraform.ResourceState, dataScanJobId string) (string, error) { + config := acctest.GoogleProviderConfig(t) + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}/jobs/"+dataScanJobId) + if err != nil { + return "", fmt.Errorf("Failed to generate URL for getting data scan job state: %s", err) + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + + if err != nil { + return "", fmt.Errorf("Request for getting data scan job state failed: %s", err) + } + + return extractDataScanJobState(res), nil +} + +func extractDataScanJobState(job interface{}) string { + dataScanJob := job.(map[string]interface{}) + return dataScanJob["state"].(string) +} + +func extractDataScanJobId(job interface{}) string { + dataScanJob := job.(map[string]interface{}) + return dataScanJob["uid"].(string) +} diff --git a/mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown b/mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown new file mode 100644 index 000000000000..15271c4f3576 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown @@ -0,0 +1,38 @@ +--- +subcategory: "Dataplex" +description: 
|- + A datasource to retrieve the data quality rules generated based on a data profile scan. +--- + + +# `google_dataplex_data_quality_rules` +Retrieves the generated data quality rules for the creating a new data quality scan. +For more information see +the [official documentation](https://cloud.google.com/dataplex/docs) +and [API](https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.dataScans/generateDataQualityRules). + +## example + +```hcl +data "google_dataplex_data_quality_rules" "dqrs" { + project = "my-project" + location = "use-central1" + data_scan_id = "my-datascan-profile" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `project` - (Required) The ID of the project in which the datascan belongs. + +* `location` - (Required) The location where the referenced data profile scan resides. + +* `data_scan_id` - (Required) The ID of the data profile scan which the generation of quality rules will be basing on. + +## Attributes Reference + +The attributes are exported: + +* `rules` - (Computed) The list of generated data quality rules. For more details, please see the [datascan page](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/dataplex_datascan#nested_data_quality_spec_rules). 
\ No newline at end of file From 1d994ecdeb6a95a1f62a9b4c8faebe462d7337a8 Mon Sep 17 00:00:00 2001 From: echiugoog Date: Fri, 13 Jun 2025 12:43:27 -0400 Subject: [PATCH 359/884] Add in performance_monitoring_unit for advanced_machine_features in node_config (#14144) --- mmv1/third_party/terraform/go.mod | 26 +++--- mmv1/third_party/terraform/go.sum | 40 ++++----- .../services/container/node_config.go.tmpl | 9 ++ .../resource_container_cluster_test.go.tmpl | 86 +++++++++++++++++++ .../resource_container_node_pool_test.go.tmpl | 61 +++++++++++++ .../docs/r/container_cluster.html.markdown | 2 + 6 files changed, 191 insertions(+), 33 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index c0b3994665ce..a05a5e76ccd3 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,6 +3,8 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( + cloud.google.com/go/auth v0.16.1 + cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 github.com/apparentlymart/go-cidr v1.1.0 @@ -32,22 +34,21 @@ require ( golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 golang.org/x/net v0.40.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.233.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 - google.golang.org/grpc v1.72.0 + google.golang.org/api v0.235.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250512202823-5a2f75b736a9 + google.golang.org/grpc v1.72.1 google.golang.org/protobuf v1.36.6 + gopkg.in/yaml.v2 v2.4.0 ) require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect cel.dev/expr v0.20.0 // indirect cloud.google.com/go v0.120.0 // indirect - cloud.google.com/go/auth v0.16.1 // indirect - cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect - cloud.google.com/go/compute/metadata v0.6.0 // indirect - cloud.google.com/go/iam 
v1.5.0 // indirect - cloud.google.com/go/longrunning v0.6.6 // indirect - cloud.google.com/go/monitoring v1.24.1 // indirect + cloud.google.com/go/compute/metadata v0.7.0 // indirect + cloud.google.com/go/iam v1.5.2 // indirect + cloud.google.com/go/longrunning v0.6.7 // indirect + cloud.google.com/go/monitoring v1.24.2 // indirect github.com/ProtonMail/go-crypto v1.1.3 // indirect github.com/agext/levenshtein v1.2.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect @@ -69,7 +70,7 @@ require ( github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.14.1 // indirect + github.com/googleapis/gax-go/v2 v2.14.2 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect github.com/hashicorp/go-plugin v1.6.2 // indirect @@ -115,8 +116,7 @@ require ( golang.org/x/time v0.11.0 // indirect golang.org/x/tools v0.22.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect + google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index f6095302d9e2..b66aa96556a4 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -11,14 +11,14 @@ cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIi cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= 
cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= -cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= -cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= -cloud.google.com/go/iam v1.5.0 h1:QlLcVMhbLGOjRcGe6VTGGTyQib8dRLK2B/kYNV0+2xs= -cloud.google.com/go/iam v1.5.0/go.mod h1:U+DOtKQltF/LxPEtcDLoobcsZMilSRwR7mgNL7knOpo= -cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= -cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= -cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= -cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= +cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= +cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= +cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= +cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= +cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= +cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= +cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -128,8 +128,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= -github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= +github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0= +github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= @@ -381,8 +381,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.233.0 h1:iGZfjXAJiUFSSaekVB7LzXl6tRfEKhUN7FkZN++07tI= -google.golang.org/api v0.233.0/go.mod h1:TCIVLLlcwunlMpZIhIp7Ltk77W+vUSdUKAAIlbxY44c= +google.golang.org/api v0.235.0 h1:C3MkpQSRxS1Jy6AkzTGKKrpSCOd2WOGrezZ+icKSkKo= +google.golang.org/api v0.235.0/go.mod h1:QpeJkemzkFKe5VCE/PMv7GsUfn9ZF+u+q1Q7w6ckxTg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -390,19 +390,19 @@ google.golang.org/appengine v1.6.8/go.mod 
h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= -google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= -google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e h1:UdXH7Kzbj+Vzastr5nVfccbmFsmYNygVLSPk1pEfDoY= -google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e/go.mod h1:085qFyf2+XaZlRdCgKNCIZ3afY2p4HHZdoIRpId8F4A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 h1:IqsN8hx+lWLqlN+Sc3DoMy/watjofWiU8sRFgQ8fhKM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78= +google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk= +google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 h1:vPV0tzlsK6EzEDHNNH5sa7Hs9bd7iXR7B1tSiPepkV0= +google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:pKLAc5OolXC3ViWGI62vvC0n10CpwAtRcTNCFwTKBEw= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250512202823-5a2f75b736a9 h1:IkAfh6J/yllPtpYFU0zZN1hUPYdT0ogkBT/9hMxHjvg= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250512202823-5a2f75b736a9/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc 
v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= -google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= +google.golang.org/grpc v1.72.1 h1:HR03wO6eyZ7lknl75XlxABNVLLFc2PAb6mHlYh756mA= +google.golang.org/grpc v1.72.1/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 6ccd2d65bcac..3a969bc4829d 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/container/v1" {{- else }} @@ -756,6 +757,12 @@ func schemaNodeConfig() *schema.Schema { ForceNew: true, Description: `Whether the node should have nested virtualization enabled.`, }, + "performance_monitoring_unit": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: verify.ValidateEnum([]string{"ARCHITECTURAL", "STANDARD", "ENHANCED"}), + Description: `Level of Performance Monitoring Unit (PMU) requested. 
If unset, no access to the PMU is assumed.`, + }, }, }, }, @@ -1255,6 +1262,7 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.AdvancedMachineFeatures = &container.AdvancedMachineFeatures{ ThreadsPerCore: int64(advanced_machine_features["threads_per_core"].(int)), EnableNestedVirtualization: advanced_machine_features["enable_nested_virtualization"].(bool), + PerformanceMonitoringUnit: advanced_machine_features["performance_monitoring_unit"].(string), } } @@ -1744,6 +1752,7 @@ func flattenAdvancedMachineFeaturesConfig(c *container.AdvancedMachineFeatures) result = append(result, map[string]interface{}{ "threads_per_core": c.ThreadsPerCore, "enable_nested_virtualization": c.EnableNestedVirtualization, + "performance_monitoring_unit": c.PerformanceMonitoringUnit, }) } return result diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 7a5ae0c9ef1a..7877ef3a89cd 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -13859,6 +13859,92 @@ resource "google_container_cluster" "primary" { `, clusterName, networkName, subnetworkName) } +func TestAccContainerCluster_withAdvancedMachineFeaturesPMU_Standard(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + clusterResourceName := "google_container_cluster.primary" + clusterName := fmt.Sprintf("tf-test-cluster-%s", suffix) + networkName := fmt.Sprintf("test-network-%s", suffix) + subnetworkName := fmt.Sprintf("test-subnetwork-%s", suffix) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccContainerCluster_withAdvancedMachineFeaturesPMU(clusterName, networkName, subnetworkName, "STANDARD"), + }, + { + ResourceName: clusterResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func TestAccContainerCluster_withAdvancedMachineFeaturesPMU_Architectural(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + clusterResourceName := "google_container_cluster.primary" + clusterName := fmt.Sprintf("tf-test-cluster-%s", suffix) + networkName := fmt.Sprintf("test-network-%s", suffix) + subnetworkName := fmt.Sprintf("test-subnetwork-%s", suffix) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withAdvancedMachineFeaturesPMU(clusterName, networkName, subnetworkName, "ARCHITECTURAL"), + }, + { + ResourceName: clusterResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerCluster_withAdvancedMachineFeaturesPMU(clusterName, networkName, subnetworkName, pmuLevel string) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s" + network = google_compute_network.default.name + ip_cidr_range = "10.9.0.0/16" + region = "us-central1" +} + +resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + network = google_compute_network.default.name + subnetwork = google_compute_subnetwork.default.name + deletion_protection = false + node_config { + machine_type = "c4-standard-2" + advanced_machine_features { + threads_per_core = 2 
+ performance_monitoring_unit = "%s" + } + } +} +`, networkName, subnetworkName, clusterName, pmuLevel) +} + func testAccContainerCluster_inTransitEncryptionConfig(name, networkName, subnetworkName, config string) string { return fmt.Sprintf(` resource "google_container_cluster" "primary" { diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 7d5b453ebc7c..ba3ba81b1728 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -2471,6 +2471,67 @@ resource "google_container_node_pool" "np" { `, cluster, networkName, subnetworkName, enableNV, np, enableNV) } +func TestAccContainerNodePool_performanceMonitoringUnit(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + np := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_performanceMonitoringUnit(cluster, np, networkName, subnetworkName, "ARCHITECTURAL"), + }, + { + ResourceName: "google_container_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerNodePool_performanceMonitoringUnit(cluster, np, networkName, subnetworkName, pmuLevel string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = 
"%s" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + network = "%s" + subnetwork = "%s" + + node_config { + machine_type = "c4-standard-4" + advanced_machine_features { + threads_per_core = 2 + performance_monitoring_unit = "%s" + } + } +} + +resource "google_container_node_pool" "np" { + name = "%s" + location = "us-central1-a" + cluster = google_container_cluster.cluster.name + initial_node_count = 2 + + node_config { + machine_type = "c4-standard-4" + advanced_machine_features { + threads_per_core = 2 + performance_monitoring_unit = "%s" + } + } +} +`, cluster, networkName, subnetworkName, pmuLevel, np, pmuLevel) +} func testAccCheckContainerNodePoolDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 57ee6cf26e19..931a758b1a17 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1066,6 +1066,8 @@ sole_tenant_config { * `enable_nested_virtualization`- (Optional) Defines whether the instance should have nested virtualization enabled. Defaults to false. +* `performance_monitoring_unit` - (Optional) Defines the performance monitoring unit [PMU](https://cloud.google.com/compute/docs/pmu-overview) level. Valid values are `ARCHITECTURAL`, `STANDARD`, or `ENHANCED`. Defaults to off. + The `ephemeral_storage_config` block supports: * `local_ssd_count` (Required) - Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, it means to disable using local SSDs as ephemeral storage. 
From 414e570c6b89a048139fd7ec8813e02bac2205f9 Mon Sep 17 00:00:00 2001 From: Justin Scofield <47263509+scawful@users.noreply.github.com> Date: Fri, 13 Jun 2025 16:55:12 +0000 Subject: [PATCH 360/884] Add View resource for contactcenterinsights (#13766) --- mmv1/products/contactcenterinsights/View.yaml | 64 +++++++++++++++++ .../contactcenterinsights/product.yaml | 21 ++++++ ...contact_center_insights_view_basic.tf.tmpl | 5 ++ .../contact_center_insights_view_full.tf.tmpl | 5 ++ .../components/inputs/services_beta.kt | 5 ++ .../components/inputs/services_ga.kt | 5 ++ ...ource_contact_center_insights_view_test.go | 72 +++++++++++++++++++ 7 files changed, 177 insertions(+) create mode 100644 mmv1/products/contactcenterinsights/View.yaml create mode 100644 mmv1/products/contactcenterinsights/product.yaml create mode 100644 mmv1/templates/terraform/examples/contact_center_insights_view_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/contact_center_insights_view_full.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go diff --git a/mmv1/products/contactcenterinsights/View.yaml b/mmv1/products/contactcenterinsights/View.yaml new file mode 100644 index 000000000000..f89f3e8abd56 --- /dev/null +++ b/mmv1/products/contactcenterinsights/View.yaml @@ -0,0 +1,64 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+---
+name: View
+description: |
+  Insights View resource for filtering conversations.
+references:
+  api: 'https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations.views'
+base_url: projects/{{project}}/locations/{{location}}/views
+self_link: projects/{{project}}/locations/{{location}}/views/{{name}}
+create_url: projects/{{project}}/locations/{{location}}/views
+delete_url: projects/{{project}}/locations/{{location}}/views/{{name}}
+update_verb: PATCH
+update_mask: true
+id_format: projects/{{project}}/locations/{{location}}/views/{{name}}
+import_format:
+  - projects/{{project}}/locations/{{location}}/views/{{name}}
+examples:
+  - name: 'contact_center_insights_view_basic'
+    primary_resource_id: 'basic_view'
+  - name: 'contact_center_insights_view_full'
+    primary_resource_id: 'full_view'
+parameters:
+  - name: location
+    type: String
+    description: Location of the resource.
+    immutable: true
+    url_param_only: true
+    required: true
+properties:
+  - name: name
+    type: String
+    description: |-
+      The resource name of the view. Randomly generated by Insights.
+    output: true
+    custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl'
+  - name: displayName
+    type: String
+    description: The human-readable display name of the view.
+  - name: createTime
+    type: String
+    description: Output only. The time at which this view was created.
+    output: true
+  - name: updateTime
+    type: String
+    description: Output only. The most recent time at which the view was updated.
+    output: true
+  - name: value
+    type: String
+    description: |-
+      A filter to reduce conversation results to a specific subset.
+      Refer to https://cloud.google.com/contact-center/insights/docs/filtering
+      for details.
diff --git a/mmv1/products/contactcenterinsights/product.yaml b/mmv1/products/contactcenterinsights/product.yaml new file mode 100644 index 000000000000..4c4c79a150b0 --- /dev/null +++ b/mmv1/products/contactcenterinsights/product.yaml @@ -0,0 +1,21 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'ContactCenterInsights' +display_name: 'Contact Center AI Insights' +scopes: + - 'https://www.googleapis.com/auth/cloud-platform' +versions: + - base_url: 'https://contactcenterinsights.googleapis.com/v1/' + name: 'ga' diff --git a/mmv1/templates/terraform/examples/contact_center_insights_view_basic.tf.tmpl b/mmv1/templates/terraform/examples/contact_center_insights_view_basic.tf.tmpl new file mode 100644 index 000000000000..80c00b2b60be --- /dev/null +++ b/mmv1/templates/terraform/examples/contact_center_insights_view_basic.tf.tmpl @@ -0,0 +1,5 @@ +resource "google_contact_center_insights_view" "{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "view-display-name" + value = "medium=\"CHAT\"" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/contact_center_insights_view_full.tf.tmpl b/mmv1/templates/terraform/examples/contact_center_insights_view_full.tf.tmpl new file mode 100644 index 000000000000..ee36ab1756d2 --- /dev/null +++ b/mmv1/templates/terraform/examples/contact_center_insights_view_full.tf.tmpl @@ -0,0 +1,5 @@ +resource "google_contact_center_insights_view" 
"{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "view-display-name" + value = "medium=\"PHONE_CALL\"" +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index da21829d3144..a3572562a7dc 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -236,6 +236,11 @@ var ServicesListBeta = mapOf( "displayName" to "Compute", "path" to "./google-beta/services/compute" ), + "contactcenterinsights" to mapOf( + "name" to "contactcenterinsights", + "displayName" to "Contact Center AI Insights", + "path" to "./google-beta/services/contactcenterinsights" + ), "container" to mapOf( "name" to "container", "displayName" to "Container", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 375189e3d45a..c9a6fbf7ca8a 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -236,6 +236,11 @@ var ServicesListGa = mapOf( "displayName" to "Compute", "path" to "./google/services/compute" ), + "contactcenterinsights" to mapOf( + "name" to "contactcenterinsights", + "displayName" to "Contact Center AI Insights", + "path" to "./google/services/contactcenterinsights" + ), "container" to mapOf( "name" to "container", "displayName" to "Container", diff --git a/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go b/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go new file mode 100644 index 000000000000..f2ddc54cd7b8 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go @@ -0,0 +1,72 @@ +package contactcenterinsights_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccContactCenterInsightsView_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_name": envvar.GetTestProjectFromEnv(), + "region": "us-central1", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccContactCenterInsightsView_full(context), + }, + { + ResourceName: "google_contact_center_insights_view.full_view", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + { + Config: testAccContactCenterInsightsView_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_contact_center_insights_view.full_view", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_contact_center_insights_view.full_view", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + }, + }) +} + +func testAccContactCenterInsightsView_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_contact_center_insights_view" "full_view" { + project = "%{project_name}" + location = "%{region}" + display_name = "view-display-name-%{random_suffix}" + value = "medium=\"PHONE_CALL\"" +} +`, context) +} + +func testAccContactCenterInsightsView_update(context map[string]interface{}) 
string { + return acctest.Nprintf(` +resource "google_contact_center_insights_view" "full_view" { + project = "%{project_name}" + location = "%{region}" + display_name = "view-display-name-%{random_suffix}-updated" + value = "medium=\"CHAT\"" +} +`, context) +} From dda748e7966c6fda8f885c8aace746c3b93e31bd Mon Sep 17 00:00:00 2001 From: liaoaohaha Date: Fri, 13 Jun 2025 10:05:09 -0700 Subject: [PATCH 361/884] Update dimension description to match the behavior (#14232) --- mmv1/products/dataplex/Datascan.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/dataplex/Datascan.yaml b/mmv1/products/dataplex/Datascan.yaml index 52f0d4a84c15..6cdabefd75e8 100644 --- a/mmv1/products/dataplex/Datascan.yaml +++ b/mmv1/products/dataplex/Datascan.yaml @@ -335,7 +335,7 @@ properties: - name: 'dimension' type: String description: | - The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"] + The dimension name a rule belongs to. Custom dimension name is supported with all uppercase letters and maximum length of 30 characters. required: true - name: 'threshold' type: Double From 3dda30b030822c122c5a9adad0ab0a3b4e246514 Mon Sep 17 00:00:00 2001 From: Wonje Kang <96211823+wonjekang@users.noreply.github.com> Date: Fri, 13 Jun 2025 13:20:00 -0400 Subject: [PATCH 362/884] Launch domainName forwarding target to GA (#14253) --- mmv1/products/dns/ManagedZone.yaml | 1 - .../services/dns/resource_dns_managed_zone_test.go.tmpl | 6 ++---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/mmv1/products/dns/ManagedZone.yaml b/mmv1/products/dns/ManagedZone.yaml index 7781d6e75e08..08ba42d0df10 100644 --- a/mmv1/products/dns/ManagedZone.yaml +++ b/mmv1/products/dns/ManagedZone.yaml @@ -358,7 +358,6 @@ properties: - name: 'domainName' type: String description: 'Fully qualified domain name for the forwarding target.' 
- min_version: 'beta' - name: 'forwardingPath' type: Enum description: | diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl b/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl index 3db0dcdef66c..ad473a06dc1b 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl @@ -662,7 +662,6 @@ resource "google_dns_managed_zone" "foobar" { `, suffix, suffix, description, project) } -{{ if not (or (eq $.TargetVersionName ``) (eq $.TargetVersionName `ga`)) }} func TestAccDNSManagedZone_privateForwardingWithDomainNameUpdate(t *testing.T) { t.Parallel() @@ -670,7 +669,7 @@ func TestAccDNSManagedZone_privateForwardingWithDomainNameUpdate(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), Steps: []resource.TestStep{ { @@ -696,7 +695,6 @@ func TestAccDNSManagedZone_privateForwardingWithDomainNameUpdate(t *testing.T) { func testAccDnsManagedZone_privateForwardingWithDomainNameUpdate(suffix, domain_name, forwarding_path string) string { return fmt.Sprintf(` resource "google_dns_managed_zone" "private" { - provider = google-beta name = "private-zone-%s" dns_name = "private.example.com." 
description = "Example private DNS zone" @@ -716,13 +714,13 @@ resource "google_dns_managed_zone" "private" { } resource "google_compute_network" "network-1" { - provider = google-beta name = "tf-test-net-1-%s" auto_create_subnetworks = false } `, suffix, domain_name, forwarding_path, suffix) } +{{ if not (or (eq $.TargetVersionName ``) (eq $.TargetVersionName `ga`)) }} func TestAccDNSManagedZone_dnsManagedZoneUpdateWithServiceDirectory(t *testing.T) { t.Parallel() From 8b3badf77336c324c94ac5ac65e45148c57581b8 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Fri, 13 Jun 2025 15:05:58 -0400 Subject: [PATCH 363/884] Update uses of ubuntu-2004-lts to 2204-lts (#14273) --- ...te_instance_from_machine_image_test.go.tmpl | 8 ++++---- ...compute_instance_from_template_test.go.tmpl | 4 ++-- ...urce_compute_instance_template_test.go.tmpl | 10 +++++----- .../resource_compute_instance_test.go.tmpl | 18 +++++++++--------- ...mpute_region_instance_template_test.go.tmpl | 10 +++++----- .../docs/r/compute_instance.html.markdown | 2 +- .../r/compute_instance_template.html.markdown | 2 +- 7 files changed, 27 insertions(+), 27 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl index 7b4849dbe01c..589e37d1ec69 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl @@ -428,7 +428,7 @@ resource "google_compute_instance" "vm1" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2004-lts" + image = "ubuntu-os-cloud/ubuntu-2204-lts" } } @@ -485,7 +485,7 @@ resource "google_compute_instance" "vm2" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2004-lts" + image = "ubuntu-os-cloud/ubuntu-2204-lts" } } @@ -541,7 
+541,7 @@ resource "google_compute_instance" "vm3" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2004-lts" + image = "ubuntu-os-cloud/ubuntu-2204-lts" } } @@ -600,7 +600,7 @@ resource "google_compute_instance" "vm4" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2004-lts" + image = "ubuntu-os-cloud/ubuntu-2204-lts" } } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl index 5ccbfae30768..15a420339f63 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl @@ -1731,7 +1731,7 @@ resource "google_compute_instance_from_template" "inst" { func testAccComputeInstanceFromTemplate_confidentialInstanceConfigEnable(templateDisk string, image string, template string, instance string, template2 string, instance2 string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image1" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -1824,7 +1824,7 @@ resource "google_compute_instance_from_template" "inst2" { func testAccComputeInstanceFromTemplate_confidentialInstanceConfigNoConfigSevSnp(templateDisk string, image string, template string, instance string, template2 string, instance2 string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image1" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl index 75d6c1553bfd..1a4441e1582b 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl @@ -4189,7 +4189,7 @@ resource "google_compute_instance_template" "foobar" { func testAccComputeInstanceTemplateConfidentialInstanceConfigEnable(suffix string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -4247,7 +4247,7 @@ resource "google_compute_instance_template" "foobar2" { func testAccComputeInstanceTemplateConfidentialInstanceConfigNoEnable(suffix string, minCpuPlatform, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image2" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -4341,7 +4341,7 @@ resource "google_compute_instance_template" "foobar5" { func testAccComputeInstanceTemplateAdvancedMachineFeatures(suffix string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -4377,7 +4377,7 @@ resource "google_compute_instance_template" "foobar" { func testAccComputeInstanceTemplate_performanceMonitoringUnit(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -4403,7 +4403,7 @@ resource "google_compute_instance_template" "foobar" { func testAccComputeInstanceTemplate_enableUefiNetworking(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index a47e3c26963a..7b7a4c336b04 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -9759,7 +9759,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstanceConfidentialInstanceConfigEnable(instance string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -9874,7 +9874,7 @@ resource "google_compute_instance" "foobar4" { func testAccComputeInstanceConfidentialInstanceConfigNoEnable(instance string, minCpuPlatform, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image2" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -10773,7 +10773,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_spotVM(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -10805,7 +10805,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_standardVM_maxRunDuration(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -10840,7 +10840,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_standardVM_maxRunDurationUpdated(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -10916,7 +10916,7 @@ resource 
"google_compute_instance" "foobar" { func testAccComputeInstance_spotVM_maxRunDuration(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -10952,7 +10952,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_localSsdRecoveryTimeout(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -10986,7 +10986,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_partnerMetadata_empty(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -11011,7 +11011,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_partnerMetadata(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl index c0534475d920..989eb0c2b802 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl @@ -3390,7 +3390,7 @@ resource "google_compute_region_instance_template" "foobar" { func testAccComputeRegionInstanceTemplateConfidentialInstanceConfigEnable(suffix string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = 
"ubuntu-os-cloud" } @@ -3450,7 +3450,7 @@ resource "google_compute_region_instance_template" "foobar2" { func testAccComputeRegionInstanceTemplateConfidentialInstanceConfigNoEnable(suffix string, minCpuPlatform, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image2" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -3547,7 +3547,7 @@ resource "google_compute_region_instance_template" "foobar5" { func testAccComputeRegionInstanceTemplateAdvancedMachineFeatures(suffix string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -3584,7 +3584,7 @@ resource "google_compute_region_instance_template" "foobar" { func testAccComputeRegionInstanceTemplate_performanceMonitoringUnit(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } @@ -3611,7 +3611,7 @@ resource "google_compute_region_instance_template" "foobar" { func testAccComputeRegionInstanceTemplate_enableUefiNetworking(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2004-lts" + family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" } diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown index f334b798825f..dca0f3569ded 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown @@ -85,7 +85,7 @@ resource "google_compute_instance" "confidential_instance" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2004-lts" + image = "ubuntu-os-cloud/ubuntu-2204-lts" labels = { my_label = 
"value" } diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index f9ba186d4af1..f8862f1c3379 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -246,7 +246,7 @@ resource "google_compute_instance_template" "confidential_instance_template" { } disk { - source_image = "ubuntu-os-cloud/ubuntu-2004-lts" + source_image = "ubuntu-os-cloud/ubuntu-2204-lts" } network_interface { From ce86dc91e78dd3df90bbdd62a5e8ec3ca3cfaa0c Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Fri, 13 Jun 2025 20:08:23 +0000 Subject: [PATCH 364/884] fix: (storage) added source_md5hash field in bucket object (#14117) --- .../storage/resource_storage_bucket_object.go | 30 ++- .../resource_storage_bucket_object_test.go | 196 ++++++++++++++++++ .../r/storage_bucket_object.html.markdown | 2 + 3 files changed, 226 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go index 249d5bf7fcc9..53de5580f67c 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go @@ -10,6 +10,7 @@ import ( "os" "time" + "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -132,9 +133,16 @@ func ResourceStorageBucketObject() *schema.Resource { Description: `A path to the data you want to upload. 
Must be defined if content is not.`, }, + "source_md5hash": { + Type: schema.TypeString, + Optional: true, + Description: `User-provided md5hash, Base 64 MD5 hash of the object data.`, + }, + // Detect changes to local file or changes made outside of Terraform to the file stored on the server. "detect_md5hash": { - Type: schema.TypeString, + Type: schema.TypeString, + Deprecated: "`detect_md5hash` is deprecated and will be removed in future release. Start using `source_md5hash` instead", // This field is not Computed because it needs to trigger a diff. Optional: true, // Makes the diff message nicer: @@ -147,6 +155,12 @@ func ResourceStorageBucketObject() *schema.Resource { // 3. Don't suppress the diff iff they don't match DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { localMd5Hash := "" + if d.GetRawConfig().GetAttr("source_md5hash") == cty.UnknownVal(cty.String) { + return true + } + if v, ok := d.GetOk("source_md5hash"); ok && v != "" { + return true + } if source, ok := d.GetOkExists("source"); ok { localMd5Hash = tpgresource.GetFileMd5Hash(source.(string)) } @@ -392,7 +406,7 @@ func resourceStorageBucketObjectUpdate(d *schema.ResourceData, meta interface{}) bucket := d.Get("bucket").(string) name := d.Get("name").(string) - if d.HasChange("content") || d.HasChange("detect_md5hash") { + if d.HasChange("content") || d.HasChange("source_md5hash") || d.HasChange("detect_md5hash") { // The KMS key name are not able to be set on create : // or you get error: Error uploading object test-maarc: googleapi: Error 400: Malformed Cloud KMS crypto key: projects/myproject/locations/myregion/keyRings/mykeyring/cryptoKeys/mykeyname/cryptoKeyVersions/1, invalid d.Set("kms_key_name", nil) @@ -480,6 +494,9 @@ func resourceStorageBucketObjectRead(d *schema.ResourceData, meta interface{}) e if err := d.Set("detect_md5hash", res.Md5Hash); err != nil { return fmt.Errorf("Error setting detect_md5hash: %s", err) } + if err := d.Set("source_md5hash", 
d.Get("source_md5hash")); err != nil { + return fmt.Errorf("Error setting source_md5hash: %s", err) + } if err := d.Set("generation", res.Generation); err != nil { return fmt.Errorf("Error setting generation: %s", err) } @@ -626,6 +643,11 @@ func flattenObjectRetention(objectRetention *storage.ObjectRetention) []map[stri func resourceStorageBucketObjectCustomizeDiff(ctx context.Context, d *schema.ResourceDiff, meta interface{}) error { localMd5Hash := "" + + if (d.GetRawConfig().GetAttr("source_md5hash") == cty.UnknownVal(cty.String)) || d.HasChange("source_md5hash") { + return showDiff(d) + } + if source, ok := d.GetOkExists("source"); ok { localMd5Hash = tpgresource.GetFileMd5Hash(source.(string)) } @@ -640,7 +662,10 @@ func resourceStorageBucketObjectCustomizeDiff(ctx context.Context, d *schema.Res if ok && oldMd5Hash == localMd5Hash { return nil } + return showDiff(d) +} +func showDiff(d *schema.ResourceDiff) error { err := d.SetNewComputed("md5hash") if err != nil { return fmt.Errorf("Error re-setting md5hash: %s", err) @@ -653,5 +678,6 @@ func resourceStorageBucketObjectCustomizeDiff(ctx context.Context, d *schema.Res if err != nil { return fmt.Errorf("Error re-setting generation: %s", err) } + return nil } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go index 92fc58b2b487..dadf2976f75d 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go @@ -509,6 +509,93 @@ func TestResourceStorageBucketObjectUpdate_ContentChange(t *testing.T) { }) } +func TestAccStorageObject_sourceMd5Hash(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + data := []byte("data data data") + + writeMd5 := func(data []byte) string { + h := md5.New() + if _, err := h.Write(data); err != nil { + 
t.Errorf("error calculating md5: %v", err) + } + dataMd5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + return dataMd5 + } + + dataMd5 := writeMd5(data) + + updatedata := []byte("datum") + updatedDataMd5 := writeMd5(updatedata) + + testFile := getNewTmpTestFile(t, "tf-test") + if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil { + t.Errorf("error writing file: %v", err) + } + + updateMd5 := []byte("sample") + newMd5 := writeMd5(updateMd5) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageObjectDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()), + Check: testAccCheckGoogleStorageObject(t, bucketName, objectName, dataMd5), + }, + { + PreConfig: func() { + if err := ioutil.WriteFile(testFile.Name(), updatedata, 0644); err != nil { + t.Errorf("error writing file: %v", err) + } + }, + Config: testGoogleStorageBucketsObjectFileMd5(bucketName, testFile.Name(), updatedDataMd5), + Check: testAccCheckGoogleStorageObject(t, bucketName, objectName, updatedDataMd5), + }, + { + Config: testGoogleStorageBucketsObjectFileMd5(bucketName, testFile.Name(), newMd5), + Check: testAccCheckGoogleStorageObject(t, bucketName, objectName, updatedDataMd5), + }, + }, + }) +} + +func TestAccStorageObject_knownAfterApply(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + destinationFilePath := getNewTmpTestFile(t, "tf-test-apply-") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageObjectDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "local": resource.ExternalProvider{ + VersionConstraint: "> 2.5.0", + }, + }, + Steps: []resource.TestStep{ + { + Config: 
testGoogleStorageBucketObject(bucketName, "first", destinationFilePath.Name()), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageValidOutput(t), + ), + }, + { + Config: testGoogleStorageBucketObjectKnownAfterApply(bucketName, "second", destinationFilePath.Name()), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageValidOutput(t), + ), + }, + }, + }) +} + func testAccCheckGoogleStorageObject(t *testing.T, bucket, object, md5 string) resource.TestCheckFunc { return testAccCheckGoogleStorageObjectWithEncryption(t, bucket, object, md5, "") } @@ -847,3 +934,112 @@ func getNewTmpTestFile(t *testing.T, prefix string) *os.File { } return testFile } + +func testGoogleStorageBucketsObjectFileMd5(bucketName, sourceFilename, md5hash string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" +} + +resource "google_storage_bucket_object" "bo_1861894" { + name = "%s" + source_md5hash = "%s" + bucket = google_storage_bucket.bucket.name + source = "%s" +} +`, bucketName, objectName, md5hash, sourceFilename) +} + +func testAccCheckGoogleStorageValidOutput(t *testing.T) resource.TestCheckFunc { + return func(s *terraform.State) error { + var root = s.Modules[0] + var outputs, ok = root.Outputs["valid"] + + if !ok { + return fmt.Errorf("Error: `valid` output missing") + } + + if outputs == nil { + return fmt.Errorf("Terraform output `valid` does not exists") + } + + if outputs.Value == false { + return fmt.Errorf("File content is not valid") + } + return nil + } +} + +func testGoogleStorageBucketObject(bucketName, content, filename string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" +} + +resource "google_storage_bucket_object" "changing" { + bucket = google_storage_bucket.bucket.name + name = "dynamic" + content = "%s" +} + +resource "local_file" "test" { + content = jsonencode(google_storage_bucket_object.changing.content) + 
filename = "%s" +} + +resource "google_storage_bucket_object" "bo" { + source = local_file.test.filename + bucket = google_storage_bucket.bucket.name + name = "test-file-bucket" +} + +data "google_storage_bucket_object_content" "bo" { + bucket = google_storage_bucket_object.bo.bucket + name = google_storage_bucket_object.bo.name + depends_on = [google_storage_bucket_object.bo] +} + +output "valid" { + value = nonsensitive(local_file.test.content) == data.google_storage_bucket_object_content.bo.content +} +`, bucketName, content, filename) +} + +func testGoogleStorageBucketObjectKnownAfterApply(bucketName, content, filename string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" +} + +resource "google_storage_bucket_object" "changing" { + bucket = google_storage_bucket.bucket.name + name = "dynamic" + content = "%s" +} + +resource "local_file" "test" { + content = jsonencode(google_storage_bucket_object.changing.content) + filename = "%s" +} + +resource "google_storage_bucket_object" "bo" { + source = local_file.test.filename + source_md5hash = local_file.test.content_md5 + bucket = google_storage_bucket.bucket.name + name = "test-file-bucket" +} + +data "google_storage_bucket_object_content" "bo" { + bucket = google_storage_bucket_object.bo.bucket + name = google_storage_bucket_object.bo.name + depends_on = [google_storage_bucket_object.bo] +} + +output "valid" { + value = nonsensitive(local_file.test.content) == data.google_storage_bucket_object_content.bo.content +} +`, bucketName, content, filename) +} diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 36e1e4f33259..3416c6551ec4 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -87,6 +87,8 @@ One of the 
following is required: * `kms_key_name` - (Optional) The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object. +* `source_md5hash` - (Optional) User-provided md5hash to trigger replacement of object in storage bucket, Must be Base 64 MD5 hash of the object data. The usual way to set this is filemd5("file.zip"), where "file.zip" is the local filename + --- The `customer_encryption` block supports: From ff76c77f36764a57eb145bfe357246cecbdd83cf Mon Sep 17 00:00:00 2001 From: Pradeep Rao <84025829+pradeepcrao@users.noreply.github.com> Date: Fri, 13 Jun 2025 17:09:55 -0400 Subject: [PATCH 365/884] Promote CustomErrorResponsePolicy from beta (#14270) Signed-off-by: pcrao --- mmv1/products/compute/UrlMap.yaml | 5 ----- .../examples/url_map_custom_error_response_policy.tf.tmpl | 5 ----- 2 files changed, 10 deletions(-) diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index 984121d7c50f..ca0e6baf653d 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -165,7 +165,6 @@ examples: storage_bucket_name: 'static-asset-bucket' - name: 'url_map_custom_error_response_policy' primary_resource_id: 'urlmap' - min_version: 'beta' vars: url_map_name: 'urlmap' backend_service_name: 'login' @@ -394,7 +393,6 @@ properties: When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. 
- min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -555,7 +553,6 @@ properties: A PathRule for /coming_soon/ is configured for the error code 404. If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in PathRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. customErrorResponsePolicy is supported only for global external Application Load Balancers. - min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -1816,7 +1813,6 @@ properties: type: NestedObject description: | customErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendService or BackendBucket responds with an error. - min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -2373,7 +2369,6 @@ properties: When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. 
- min_version: 'beta' properties: - name: 'errorResponseRule' type: Array diff --git a/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.tmpl b/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.tmpl index 1716f20d06e9..ef95b4470daa 100644 --- a/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.tmpl +++ b/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.tmpl @@ -1,5 +1,4 @@ resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "url_map_name"}}" description = "a description" @@ -54,7 +53,6 @@ resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { } resource "google_compute_backend_service" "example" { - provider = google-beta name = "{{index $.Vars "backend_service_name"}}" port_name = "http" protocol = "HTTP" @@ -65,7 +63,6 @@ resource "google_compute_backend_service" "example" { } resource "google_compute_http_health_check" "default" { - provider = google-beta name = "{{index $.Vars "http_health_check_name"}}" request_path = "/" check_interval_sec = 1 @@ -73,14 +70,12 @@ resource "google_compute_http_health_check" "default" { } resource "google_compute_backend_bucket" "error" { - provider = google-beta name = "{{index $.Vars "error_backend_bucket_name"}}" bucket_name = google_storage_bucket.error.name enable_cdn = true } resource "google_storage_bucket" "error" { - provider = google-beta name = "{{index $.Vars "storage_bucket_name"}}" location = "US" } From b7beaedd3cff71ce520f3ffa231a3fbd0cdaec03 Mon Sep 17 00:00:00 2001 From: Tlaquetzal Date: Fri, 13 Jun 2025 17:45:56 -0400 Subject: [PATCH 366/884] Updated the bigquery_table.html schema description (#14242) --- .../terraform/website/docs/r/bigquery_table.html.markdown | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown 
b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index 859215167d13..94d2362d862b 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -133,9 +133,10 @@ The following arguments are supported: ~>**NOTE:** Because this field expects a JSON string, any changes to the string will create a diff, even if the JSON itself hasn't changed. If the API returns a different value for the same schema, e.g. it - switched the order of values or replaced `STRUCT` field type with `RECORD` - field type, we currently cannot suppress the recurring diff this causes. - As a workaround, we recommend using the schema as returned by the API. + switched the order of values or replaced a field data type (`STRUCT` with + `RECORD`, `DECIMAL` with `NUMERIC`, etc.), we currently cannot suppress + the recurring diff this causes. As a workaround, we recommend using the + schema as returned by the API. 
~>**NOTE:** If you use `external_data_configuration` [documented below](#nested_external_data_configuration) and do **not** set From 661e1bc3cf76c94f3fda9f69f2129daa4c432a84 Mon Sep 17 00:00:00 2001 From: StealthyCoder Date: Sat, 14 Jun 2025 00:18:39 +0100 Subject: [PATCH 367/884] fix(securitypolicy): add ja4 fingerprint (#14151) Signed-off-by: Eric Bode --- .../compute/RegionSecurityPolicy.yaml | 4 ++ .../compute/RegionSecurityPolicyRule.yaml | 4 ++ mmv1/products/compute/SecurityPolicyRule.yaml | 4 ++ ...ompute_region_security_policy_test.go.tmpl | 57 +++++++++++++++++++ .../resource_compute_security_policy.go.tmpl | 4 +- ...ource_compute_security_policy_rule_test.go | 56 ++++++++++++++++++ ...ource_compute_security_policy_test.go.tmpl | 53 +++++++++++++++++ .../r/compute_security_policy.html.markdown | 2 + 8 files changed, 182 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/RegionSecurityPolicy.yaml b/mmv1/products/compute/RegionSecurityPolicy.yaml index 26ea54d916d6..48131b3db496 100644 --- a/mmv1/products/compute/RegionSecurityPolicy.yaml +++ b/mmv1/products/compute/RegionSecurityPolicy.yaml @@ -461,6 +461,7 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. 
enum_values: - 'ALL' @@ -472,6 +473,7 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' + - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String @@ -501,6 +503,7 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -512,6 +515,7 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' + - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String diff --git a/mmv1/products/compute/RegionSecurityPolicyRule.yaml b/mmv1/products/compute/RegionSecurityPolicyRule.yaml index b4ec0ffd7fb9..1f0f8ab7a71b 100644 --- a/mmv1/products/compute/RegionSecurityPolicyRule.yaml +++ b/mmv1/products/compute/RegionSecurityPolicyRule.yaml @@ -354,6 +354,7 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. 
If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -365,6 +366,7 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' + - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String @@ -394,6 +396,7 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -405,6 +408,7 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' + - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String diff --git a/mmv1/products/compute/SecurityPolicyRule.yaml b/mmv1/products/compute/SecurityPolicyRule.yaml index 3ccaeb449e14..1b80486b6dbc 100644 --- a/mmv1/products/compute/SecurityPolicyRule.yaml +++ b/mmv1/products/compute/SecurityPolicyRule.yaml @@ -370,6 +370,7 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. 
* REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -381,6 +382,7 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' + - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String @@ -410,6 +412,7 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. 
enum_values: - 'ALL' @@ -421,6 +424,7 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' + - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl index 790f174c5de4..b20b84882513 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl @@ -722,6 +722,63 @@ func testAccComputeRegionSecurityPolicy_withMultipleEnforceOnKeyConfigs_update(c `, context) } +func testAccComputeRegionSecurityPolicy_withMultipleEnforceOnKeyConfigs_ja4(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_compute_region_security_policy" "policy" { + name = "tf-test%{random_suffix}" + type = "CLOUD_ARMOR" + region = "us-west2" + + rules { + priority = "100" + action = "throttle" + rate_limit_options { + conform_action = "allow" + exceed_action = "deny(429)" + + rate_limit_threshold { + count = 10 + interval_sec = 60 + } + + enforce_on_key_configs { + enforce_on_key_type = "USER_IP" + } + + enforce_on_key_configs { + enforce_on_key_type = "TLS_JA4_FINGERPRINT" + } + + enforce_on_key_configs { + enforce_on_key_type = "REGION_CODE" + } + } + match { + config { + src_ip_ranges = [ + "*" + ] + } + versioned_expr = "SRC_IPS_V1" + } + } + + rules { + action = "allow" + priority = "2147483647" + preview = false + match { + versioned_expr = "SRC_IPS_V1" + config { + src_ip_ranges = ["*"] + } + } + description = "default rule" + } + } + `, context) +} + func TestAccComputeRegionSecurityPolicy_regionSecurityPolicyRuleOrderingWithMultipleRules(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl index cbbe297d3c83..4ef570415e62 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl @@ -338,7 +338,7 @@ func ResourceComputeSecurityPolicy() *schema.Resource { Type: schema.TypeString, Optional: true, Description: `Determines the key to enforce the rateLimitThreshold on`, - ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "USER_IP", ""}, false), + ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "TLS_JA4_FINGERPRINT", "USER_IP", ""}, false), }, "enforce_on_key_name": { @@ -357,7 +357,7 @@ func ResourceComputeSecurityPolicy() *schema.Resource { Type: schema.TypeString, Optional: true, Description: `Determines the key to enforce the rate_limit_threshold on`, - ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "USER_IP"}, false), + ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "TLS_JA4_FINGERPRINT", "USER_IP"}, false), }, "enforce_on_key_name": { Type: schema.TypeString, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go index 14e340833c97..bed35a86c641 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go @@ -237,6 +237,14 @@ 
func TestAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnK ImportState: true, ImportStateVerify: true, }, + { + Config: testAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName), + }, + { + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + ImportStateVerify: true, + }, }, }) } @@ -1022,6 +1030,54 @@ resource "google_compute_security_policy_rule" "policy_rule" { `, spName) } +func testAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName string) string { + return fmt.Sprintf(` +resource "google_compute_security_policy" "policy" { + name = "%s" + description = "basic policy base" +} + +resource "google_compute_security_policy_rule" "policy_rule" { + security_policy = google_compute_security_policy.policy.name + description = "throttle rule withMultipleEnforceOnKeyConfigs3" + action = "throttle" + priority = "100" + + match { + versioned_expr = "SRC_IPS_V1" + config { + src_ip_ranges = ["*"] + } + } + + rate_limit_options { + conform_action = "allow" + exceed_action = "deny(429)" + + rate_limit_threshold { + count = 10 + interval_sec = 60 + } + + enforce_on_key = "" + + enforce_on_key_configs { + enforce_on_key_type = "REGION_CODE" + } + + enforce_on_key_configs { + enforce_on_key_type = "TLS_JA4_FINGERPRINT" + } + + enforce_on_key_configs { + enforce_on_key_type = "USER_IP" + } + } +} + +`, spName) +} + func testAccComputeSecurityPolicyRule_withRateLimitOptions_withoutRateLimitOptions(spName string) string { return fmt.Sprintf(` resource "google_compute_security_policy" "policy" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl index 1345b1d7f4fc..bfe19233e0c2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl @@ -523,6 +523,14 @@ func TestAccComputeSecurityPolicy_withRateLimitOption_withMultipleEnforceOnKeyCo ImportState: true, ImportStateVerify: true, }, + { + Config: testAccComputeSecurityPolicy_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName), + }, + { + ResourceName: "google_compute_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, }, }) } @@ -1936,6 +1944,51 @@ resource "google_compute_security_policy" "policy" { `, spName) } +func testAccComputeSecurityPolicy_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName string) string { + return fmt.Sprintf(` +resource "google_compute_security_policy" "policy" { + name = "%s" + description = "throttle rule with enforce_on_key_configs" + + rule { + action = "throttle" + priority = "2147483647" + match { + versioned_expr = "SRC_IPS_V1" + config { + src_ip_ranges = ["*"] + } + } + description = "default rule withMultipleEnforceOnKeyConfigs3" + + rate_limit_options { + conform_action = "allow" + exceed_action = "deny(429)" + + rate_limit_threshold { + count = 10 + interval_sec = 60 + } + + enforce_on_key = "" + + enforce_on_key_configs { + enforce_on_key_type = "REGION_CODE" + } + + enforce_on_key_configs { + enforce_on_key_type = "TLS_JA4_FINGERPRINT" + } + + enforce_on_key_configs { + enforce_on_key_type = "USER_IP" + } + } + } +} +`, spName) +} + func TestAccComputeSecurityPolicy_withRedirectOptionsRecaptcha(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown index 7debcf485cfd..c1c4d8267d9d 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown @@ -351,6 +351,7 @@ The following arguments are supported: * `SNI`: 
Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to `ALL` on a HTTP session. * `REGION_CODE`: The country/region from which the request originates. * `TLS_JA3_FINGERPRINT`: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * `TLS_JA4_FINGERPRINT`: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * `USER_IP`: The IP address of the originating client, which is resolved based on "user_ip_request_headers" configured with the securitypolicy. If there is no "user_ip_request_headers" configuration or an IP address cannot be resolved from it, the key type defaults to IP. * `enforce_on_key_name` - (Optional) Rate limit key name applicable only for the following key types: @@ -380,6 +381,7 @@ The following arguments are supported: * `SNI`: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to `ALL` on a HTTP session. * `REGION_CODE`: The country/region from which the request originates. * `TLS_JA3_FINGERPRINT`: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. + * `TLS_JA4_FINGERPRINT`: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * `USER_IP`: The IP address of the originating client, which is resolved based on "user_ip_request_headers" configured with the securitypolicy. If there is no "user_ip_request_headers" configuration or an IP address cannot be resolved from it, the key type defaults to IP. * `exceed_redirect_options` - (Optional) Parameters defining the redirect action that is used as the exceed action. Cannot be specified if the exceed action is not redirect. 
Structure is [documented below](#nested_exceed_redirect_options). From 0cb2857aaae36dfabbfaae286126f0c6da0d272d Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Sat, 14 Jun 2025 05:07:31 +0530 Subject: [PATCH 368/884] Adding support for log_linked_dataset_query_user_email attribute for BigQuery Analytics Hub Data exchanges. (#14266) --- mmv1/products/bigqueryanalyticshub/DataExchange.yaml | 12 ++++++++++++ ...ta_exchange_log_linked_dataset_query_user.tf.tmpl | 7 +++++++ 2 files changed, 19 insertions(+) create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl diff --git a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml index 03915fb20529..66aef2bf7760 100644 --- a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml +++ b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml @@ -59,6 +59,13 @@ examples: vars: data_exchange_id: 'dcr_data_exchange' desc: 'example dcr data exchange' + - name: 'bigquery_analyticshub_data_exchange_log_linked_dataset_query_user' + primary_resource_id: 'data_exchange' + primary_resource_name: 'fmt.Sprintf("tf_test_log_email_data_exchange%s", context["random_suffix"])' + region_override: 'US' + vars: + data_exchange_id: 'tf_test_log_email_data_exchange' + description: 'Example for log email test for data exchange' parameters: properties: - name: 'name' @@ -139,3 +146,8 @@ properties: - 'sharing_environment_config.0.dcr_exchange_config' properties: [] + - name: 'logLinkedDatasetQueryUserEmail' + type: Boolean + description: + If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. 
+ immutable: true diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl new file mode 100644 index 000000000000..a1d37d294881 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "description"}}" + log_linked_dataset_query_user_email = true +} \ No newline at end of file From 3297fddb4aea6583d2cafb33ac31a143e31b9cae Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 16 Jun 2025 12:08:07 -0400 Subject: [PATCH 369/884] Fix TestAccSpannerInstance_noNodeCountSpecified to skip VCR (#14281) --- .../services/spanner/resource_spanner_instance_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go index 5277a16f4674..dac13ad9f4e6 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go @@ -74,6 +74,8 @@ func TestAccSpannerInstance_basicUpdateWithProviderDefaultLabels(t *testing.T) { } func TestAccSpannerInstance_noNodeCountSpecified(t *testing.T) { + // Cannot be run in VCR because no API calls are made + acctest.SkipIfVcr(t) t.Parallel() idName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) From 5784ed61288a41c0dd2d13bfe0c0b8eb0a070f4b Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Mon, 16 Jun 2025 21:40:52 +0530 Subject: [PATCH 370/884] Adding support for 
log_linked_dataset_query_user_email attribute for BigQuery Analytics Hub Listing Subscriptions. (#14277) --- mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml b/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml index ccbdc6bc6e07..3deabba27755 100644 --- a/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml +++ b/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml @@ -194,3 +194,7 @@ properties: type: string description: Output only. Name of the linked dataset, e.g. projects/subscriberproject/datasets/linkedDataset output: true + - name: 'logLinkedDatasetQueryUserEmail' + type: Boolean + description: 'Output only. By default, false. If true, the Subscriber agreed to the email sharing mandate that is enabled for Listing.' + output: true From 0377cfcc00365d575d4b0a8a146e8bd5be7738fe Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 16 Jun 2025 12:28:49 -0400 Subject: [PATCH 371/884] Fix the logs that are uploaded during EAP VCR to match OSS (#14252) --- .ci/magician/cmd/test_eap_vcr.go | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.ci/magician/cmd/test_eap_vcr.go b/.ci/magician/cmd/test_eap_vcr.go index 7534814914c7..e45deaf7dd6a 100644 --- a/.ci/magician/cmd/test_eap_vcr.go +++ b/.ci/magician/cmd/test_eap_vcr.go @@ -173,6 +173,15 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath return fmt.Errorf("error uploading cassettes: %w", err) } + if err := vt.UploadLogs(vcr.UploadLogsOptions{ + Head: head, + Parallel: true, + Mode: vcr.Recording, + Version: provider.Private, + }); err != nil { + return fmt.Errorf("error uploading recording logs: %w", err) + } + if hasPanics, err := handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath, recordingResult, vcr.Recording, rnr); err != nil { return fmt.Errorf("error handling panics: %w", err) } else if 
hasPanics { @@ -191,10 +200,10 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath Head: head, Parallel: true, AfterRecording: true, - Mode: vcr.Recording, + Mode: vcr.Replaying, Version: provider.Private, }); err != nil { - return fmt.Errorf("error uploading recording logs: %w", err) + return fmt.Errorf("error uploading replaying after recording logs: %w", err) } } hasTerminatedTests := (len(recordingResult.PassedTests) + len(recordingResult.FailedTests)) < len(replayingResult.FailedTests) From ba5414bea0be2b8ae5db1bce2e75084c6c4d2a12 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 16 Jun 2025 12:29:32 -0400 Subject: [PATCH 372/884] Fix TestAccDataSourceGoogleGkeHubFeature_basic failing in VCR (#14282) --- .../gkehub2/data_source_google_gke_hub_feature_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go b/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go index da4cf8be7dfa..c585194fc0a0 100644 --- a/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go @@ -20,9 +20,9 @@ func TestAccDataSourceGoogleGkeHubFeature_basic(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - Providers: acctest.TestAccProviders, - CheckDestroy: testAccCheckGoogleGkeHubFeatureDestroyProducer(t), + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGoogleGkeHubFeatureDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccDataSourceGoogleGkeHubFeature_basic(context), From a5308d7d93e23a2256eedadb7e7c97780641ddd1 Mon Sep 17 00:00:00 2001 From: animeshnandanwar Date: Mon, 16 Jun 2025 10:02:39 -0700 Subject: [PATCH 373/884] Add 
resource Dataproc Session Template (#14201) Co-authored-by: Cameron Thornton --- mmv1/products/dataproc/SessionTemplate.yaml | 236 ++++++++++++++++++ ...dataproc_session_templates_jupyter.tf.tmpl | 22 ++ ...roc_session_templates_jupyter_full.tf.tmpl | 113 +++++++++ ...oc_session_templates_spark_connect.tf.tmpl | 17 ++ ...resource_dataproc_session_template_test.go | 198 +++++++++++++++ 5 files changed, 586 insertions(+) create mode 100644 mmv1/products/dataproc/SessionTemplate.yaml create mode 100644 mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataproc_session_templates_spark_connect.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go diff --git a/mmv1/products/dataproc/SessionTemplate.yaml b/mmv1/products/dataproc/SessionTemplate.yaml new file mode 100644 index 000000000000..8d1688319663 --- /dev/null +++ b/mmv1/products/dataproc/SessionTemplate.yaml @@ -0,0 +1,236 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: 'SessionTemplate' +api_variant_patterns: + - 'projects/{project}/locations/{location}/sessionTemplates/{sessionTemplate}' +description: | + A Dataproc Serverless session template defines the configuration settings for + creating one or more Dataproc Serverless interactive sessions. +references: + guides: + 'Dataproc Serverless Session Templates': 'https://cloud.google.com/dataproc-serverless/docs/guides/create-serverless-sessions-templates#create-dataproc-serverless-session-template' + api: 'https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.sessionTemplates' +docs: +id_format: '{{name}}' +base_url: 'projects/{{project}}/locations/{{location}}/sessionTemplates' +self_link: '{{name}}' +update_verb: 'PATCH' +autogen_async: true +import_format: + - '{{name}}' +custom_code: + custom_import: 'templates/terraform/custom_import/set_id_name_with_slashes.go.tmpl' +collection_url_key: 'sessionTemplates' +examples: + - name: 'dataproc_session_templates_jupyter' + primary_resource_id: 'example_session_templates_jupyter' + primary_resource_name: 'fmt.Sprintf("projects/%s/locations/%s/sessionTemplates/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-session-templates-jupyter%s", context["random_suffix"]))' + vars: + name: 'jupyter-session-template' + subnetwork_name: 'default' + prevent_destroy: 'true' + test_env_vars: + project_name: 'PROJECT_NAME' + test_vars_overrides: + 'subnetwork_name': 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "jupyer-session-test-network", "jupyter-session-test-subnetwork")' + 'prevent_destroy': 'false' + ignore_read_extra: + - 'runtime_config.0.properties' + - name: 'dataproc_session_templates_jupyter_full' + primary_resource_id: 'dataproc_session_templates_jupyter_full' + primary_resource_name: 'fmt.Sprintf("projects/%s/locations/%s/sessionTemplates/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), 
fmt.Sprintf("tf-test-session-templates-jupyter%s", context["random_suffix"]))' + vars: + name: 'jupyter-session-template' + subnetwork_name: 'default' + prevent_destroy: 'true' + kms_key_name: 'example-key' + bucket_name: 'dataproc-bucket' + test_env_vars: + project_name: 'PROJECT_NAME' + test_vars_overrides: + 'subnetwork_name': 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "jupyer-session-test-network", "jupyter-session-test-subnetwork")' + 'prevent_destroy': 'false' + 'kms_key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-dataproc-session-template-key1").CryptoKey.Name' + ignore_read_extra: + - 'runtime_config.0.properties' + - name: 'dataproc_session_templates_spark_connect' + primary_resource_id: 'example_session_templates_spark_connect' + primary_resource_name: 'fmt.Sprintf("projects/%s/locations/%s/sessionTemplates/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-session-templates-jupyter%s", context["random_suffix"]))' + vars: + name: 'sc-session-template' + subnetwork_name: 'default' + prevent_destroy: 'true' + test_env_vars: + project_name: 'PROJECT_NAME' + test_vars_overrides: + 'subnetwork_name': 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "spark-connect-session-test-network", "spark-connect-session-test-subnetwork")' + 'prevent_destroy': 'false' + ignore_read_extra: + - 'runtime_config.0.properties' +parameters: + - name: 'location' + type: String + description: | + The location in which the session template will be created in. + url_param_only: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The resource name of the session template in the following format: + projects/{project}/locations/{location}/sessionTemplates/{template_id} + required: true + immutable: true + - name: 'uuid' + type: String + description: | + A session template UUID (Unique Universal Identifier). 
The service generates this value when it creates the session template. + output: true + - name: 'createTime' + type: String + description: | + The time when the session template was created. + output: true + - name: 'updateTime' + type: String + description: | + The time when the session template was updated. + output: true + - name: 'creator' + type: String + description: | + The email address of the user who created the session template. + output: true + - name: 'labels' + type: KeyValueLabels + description: | + The labels to associate with this session template. + - name: 'runtimeConfig' + type: NestedObject + description: | + Runtime configuration for the session template. + properties: + - name: 'version' + type: String + description: | + Version of the session runtime. + - name: 'containerImage' + type: String + description: | + Optional custom container image for the job runtime environment. If not specified, a default container image will be used. + - name: 'properties' + type: KeyValuePairs + description: | + A mapping of property names to values, which are used to configure workload execution. + - name: 'effective_properties' + type: KeyValuePairs + description: | + A mapping of property names to values, which are used to configure workload execution. + output: true + - name: 'environmentConfig' + type: NestedObject + description: | + Environment configuration for the session execution. + properties: + - name: 'executionConfig' + type: NestedObject + description: | + Execution configuration for a workload. + properties: + - name: 'serviceAccount' + type: String + description: | + Service account that used to execute workload. + default_from_api: true + - name: 'networkTags' + type: Array + description: | + Tags used for network traffic control. + item_type: + type: String + - name: 'kmsKey' + type: String + description: | + The Cloud KMS key to use for encryption. 
+ - name: 'ttl' + type: String + description: | + The duration after which the workload will be terminated. + When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing + work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it + exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, + it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. + Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), + the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or + when ttl has been exceeded, whichever occurs first. + default_from_api: true + - name: 'stagingBucket' + type: String + description: | + A Cloud Storage bucket used to stage workload dependencies, config files, and store + workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, + Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, + and then create and manage project-level, per-location staging and temporary buckets. + This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket. + - name: 'subnetworkUri' + type: String + description: | + Subnetwork configuration for workload execution. + - name: 'peripheralsConfig' + type: NestedObject + description: | + Peripherals configuration that workload has access to. + default_from_api: true + allow_empty_object: true + properties: + - name: 'metastoreService' + type: String + description: | + Resource name of an existing Dataproc Metastore service. + - name: 'sparkHistoryServerConfig' + type: NestedObject + description: | + The Spark History Server configuration for the workload. 
+ properties: + - name: 'dataprocCluster' + type: String + description: | + Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload. + - name: 'jupyterSession' + type: NestedObject + description: | + Jupyter configuration for an interactive session. + properties: + - name: 'kernel' + type: Enum + description: | + Kernel to be used with Jupyter interactive session. + enum_values: + - 'PYTHON' + - 'SCALA' + - name: 'displayName' + type: String + description: | + Display name, shown in the Jupyter kernelspec card. + - name: 'sparkConnectSession' + type: NestedObject + description: | + Spark connect configuration for an interactive session. + diff_suppress_func: 'tpgresource.EmptyOrUnsetBlockDiffSuppress' + allow_empty_object: true + send_empty_value: true + properties: + [] # Meant to be an empty object with no properties. diff --git a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl new file mode 100644 index 000000000000..bd42b7948feb --- /dev/null +++ b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl @@ -0,0 +1,22 @@ +resource "google_dataproc_session_template" "{{$.PrimaryResourceId}}" { + name = "projects/{{index $.TestEnvVars "project_name"}}/locations/us-central1/sessionTemplates/{{index $.Vars "name"}}" + location = "us-central1" + labels = {"session_template_test": "terraform"} + + runtime_config { + properties = { "spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } + } + + environment_config { + execution_config { + subnetwork_uri = "{{index $.Vars "subnetwork_name"}}" + ttl = "3600s" + network_tags = ["tag1"] + } + } + + jupyter_session { + kernel = "PYTHON" + display_name = "tf python kernel" + } +} diff --git a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl 
b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl new file mode 100644 index 000000000000..81d7c2f4b01c --- /dev/null +++ b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl @@ -0,0 +1,113 @@ +data "google_project" "project" { +} + +data "google_storage_project_service_account" "gcs_account" { +} + +resource "google_dataproc_session_template" "{{$.PrimaryResourceId}}" { + name = "projects/{{index $.TestEnvVars "project_name"}}/locations/us-central1/sessionTemplates/{{index $.Vars "name"}}" + location = "us-central1" + labels = {"session_template_test": "terraform"} + + runtime_config { + properties = { "spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } + version = "2.2" + container_image = "us-docker.pkg.dev/{{index $.TestEnvVars "project_name"}}/s8s-spark-test-images/s8s-spark:latest" + } + + environment_config { + execution_config { + ttl = "3600s" + network_tags = ["tag1"] + kms_key = "{{index $.Vars "kms_key_name"}}" + subnetwork_uri = "{{index $.Vars "subnetwork_name"}}" + service_account = "${data.google_project.project.number}-compute@developer.gserviceaccount.com" + staging_bucket = google_storage_bucket.bucket.name + } + peripherals_config { + metastore_service = google_dataproc_metastore_service.ms.name + spark_history_server_config { + dataproc_cluster = google_dataproc_cluster.basic.id + } + } + } + + jupyter_session { + kernel = "PYTHON" + display_name = "tf python kernel" + } + + depends_on = [ + google_kms_crypto_key_iam_member.crypto_key_member_1, + ] +} + +resource "google_storage_bucket" "bucket" { + uniform_bucket_level_access = true + name = "{{index $.Vars "bucket_name"}}" + location = "US" + force_destroy = true +} + +resource "google_kms_crypto_key_iam_member" "crypto_key_member_1" { + crypto_key_id = "{{index $.Vars "kms_key_name"}}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = 
"serviceAccount:service-${data.google_project.project.number}@dataproc-accounts.iam.gserviceaccount.com" +} + +resource "google_dataproc_cluster" "basic" { + name = "{{index $.Vars "name"}}" + region = "us-central1" + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + "spark:spark.history.fs.logDirectory" = "gs://${google_storage_bucket.bucket.name}/*/spark-job-history" + } + } + + gce_cluster_config { + subnetwork = "{{index $.Vars "subnetwork_name"}}" + } + + endpoint_config { + enable_http_port_access = true + } + + master_config { + num_instances = 1 + machine_type = "e2-standard-2" + disk_config { + boot_disk_size_gb = 35 + } + } + + metastore_config { + dataproc_metastore_service = google_dataproc_metastore_service.ms.name + } + } +} + +resource "google_dataproc_metastore_service" "ms" { + service_id = "{{index $.Vars "name"}}" + location = "us-central1" + port = 9080 + tier = "DEVELOPER" + + maintenance_window { + hour_of_day = 2 + day_of_week = "SUNDAY" + } + + hive_metastore_config { + version = "3.1.2" + } + + network_config { + consumers { + subnetwork = "projects/{{index $.TestEnvVars "project_name"}}/regions/us-central1/subnetworks/{{index $.Vars "subnetwork_name"}}" + } + } +} diff --git a/mmv1/templates/terraform/examples/dataproc_session_templates_spark_connect.tf.tmpl b/mmv1/templates/terraform/examples/dataproc_session_templates_spark_connect.tf.tmpl new file mode 100644 index 000000000000..2e40d7161c04 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataproc_session_templates_spark_connect.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_dataproc_session_template" "{{$.PrimaryResourceId}}" { + name = "projects/{{index $.TestEnvVars "project_name"}}/locations/us-central1/sessionTemplates/{{index $.Vars "name"}}" + location = "us-central1" + labels = {"session_template_test": "terraform"} + + runtime_config { + 
properties = { "spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } + } + + environment_config { + execution_config { + subnetwork_uri = "{{index $.Vars "subnetwork_name"}}" + ttl = "3600s" + network_tags = ["tag1"] + } + } +} diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go new file mode 100644 index 000000000000..c7f58783b3ca --- /dev/null +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go @@ -0,0 +1,198 @@ +package dataproc_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataprocSessionTemplate_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_name": envvar.GetTestProjectFromEnv(), + "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-dataproc-session-template-key1").CryptoKey.Name, + "prevent_destroy": false, + "subnetwork_name": acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "jupyer-session-test-network", "jupyter-session-test-subnetwork"), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocSessionTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocSessionTemplate_preupdate(context), + }, + { + ResourceName: "google_dataproc_session_template.dataproc_session_templates_jupyter_update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"labels", "location", "runtime_config.0.properties", "terraform_labels"}, + }, + { + Config: testAccDataprocSessionTemplate_updated(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_dataproc_session_template.dataproc_session_templates_jupyter_update", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_dataproc_session_template.dataproc_session_templates_jupyter_update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "runtime_config.0.properties", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocSessionTemplate_preupdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_session_template" "dataproc_session_templates_jupyter_update" { + name = "projects/%{project_name}/locations/us-central1/sessionTemplates/tf-test-jupyter-session-template%{random_suffix}" + location = "us-central1" + labels = {"session_template_test": "terraform"} + + runtime_config { + properties = { "spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } + } + + environment_config { + execution_config { + subnetwork_uri = "%{subnetwork_name}" + ttl = "3600s" + network_tags = ["tag1"] + } + } + + jupyter_session { + kernel = "PYTHON" + display_name = "tf python kernel" + } +} +`, context) +} + +func testAccDataprocSessionTemplate_updated(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { +} + +data "google_storage_project_service_account" "gcs_account" { +} + +resource "google_dataproc_session_template" "dataproc_session_templates_jupyter_update" { + name = "projects/%{project_name}/locations/us-central1/sessionTemplates/tf-test-jupyter-session-template%{random_suffix}" + location = "us-central1" + labels = {"session_template_test": "terraform"} + + runtime_config { + properties = { 
"spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } + version = "2.2" + } + + environment_config { + execution_config { + ttl = "4800s" + network_tags = ["tag2"] + kms_key = "%{kms_key_name}" + subnetwork_uri = "%{subnetwork_name}" + service_account = "${data.google_project.project.number}-compute@developer.gserviceaccount.com" + staging_bucket = google_storage_bucket.bucket.name + } + peripherals_config { + metastore_service = google_dataproc_metastore_service.ms.name + spark_history_server_config { + dataproc_cluster = google_dataproc_cluster.basic.id + } + } + } + + jupyter_session { + kernel = "SCALA" + display_name = "tf scala kernel" + } + + depends_on = [ + google_kms_crypto_key_iam_member.crypto_key_member_1, + ] +} + +resource "google_storage_bucket" "bucket" { + uniform_bucket_level_access = true + name = "tf-test-dataproc-bucket%{random_suffix}" + location = "US" + force_destroy = true +} + +resource "google_kms_crypto_key_iam_member" "crypto_key_member_1" { + crypto_key_id = "%{kms_key_name}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@dataproc-accounts.iam.gserviceaccount.com" +} + +resource "google_dataproc_cluster" "basic" { + name = "tf-test-jupyter-session-template%{random_suffix}" + region = "us-central1" + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + "spark:spark.history.fs.logDirectory" = "gs://${google_storage_bucket.bucket.name}/*/spark-job-history" + } + } + + gce_cluster_config { + subnetwork = "%{subnetwork_name}" + } + + endpoint_config { + enable_http_port_access = true + } + + master_config { + num_instances = 1 + machine_type = "e2-standard-2" + disk_config { + boot_disk_size_gb = 35 + } + } + + metastore_config { + dataproc_metastore_service = google_dataproc_metastore_service.ms.name + } + } 
+} + +resource "google_dataproc_metastore_service" "ms" { + service_id = "tf-test-jupyter-session-template%{random_suffix}" + location = "us-central1" + port = 9080 + tier = "DEVELOPER" + + maintenance_window { + hour_of_day = 2 + day_of_week = "SUNDAY" + } + + hive_metastore_config { + version = "3.1.2" + } + + network_config { + consumers { + subnetwork = "projects/%{project_name}/regions/us-central1/subnetworks/%{subnetwork_name}" + } + } +} +`, context) +} From 520cc8a0df02292749dde03f5e7ab7d811066252 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 16 Jun 2025 13:36:05 -0400 Subject: [PATCH 374/884] Fix gerrit comments to be properly formed for EAP VCR (#14251) --- .ci/magician/cmd/test_eap_vcr.go | 73 +++++++++++++++++++++++++------- 1 file changed, 57 insertions(+), 16 deletions(-) diff --git a/.ci/magician/cmd/test_eap_vcr.go b/.ci/magician/cmd/test_eap_vcr.go index e45deaf7dd6a..9fe0ece0900f 100644 --- a/.ci/magician/cmd/test_eap_vcr.go +++ b/.ci/magician/cmd/test_eap_vcr.go @@ -2,6 +2,7 @@ package cmd import ( _ "embed" + "encoding/json" "fmt" "magician/exec" "magician/provider" @@ -46,6 +47,42 @@ var tevOptionalEnvironmentVariables = [...]string{ "GOOGLE_VMWAREENGINE_PROJECT", } +// GerritComment is a single inline comment for a Gerrit CL. +// See go/kokoro-gob-scm#gerrit-inline-comments. +type GerritComment struct { + Path string `json:"path"` + Message string `json:"message"` +} + +// GerritCommenter is used to add comments to a Gerrit CL. +type GerritCommenter struct { + gerritCommentsFilename string + rnr ExecRunner + comments []GerritComment +} + +func NewGerritCommenter(gerritCommentsFilename string, rnr ExecRunner) *GerritCommenter { + return &GerritCommenter{ + gerritCommentsFilename: gerritCommentsFilename, + rnr: rnr, + } +} + +// Add adds a comment to the gerrit_comments_file json file. If a path is not +// specified, the comment is added at the patchset level, just like other +// kokoro messages. 
+func (g *GerritCommenter) Add(c GerritComment) error { + if c.Path == "" { + c.Path = "/PATCHSET_LEVEL" + } + g.comments = append(g.comments, c) + b, err := json.Marshal(g.comments) + if err != nil { + return err + } + return g.rnr.WriteFile(g.gerritCommentsFilename, string(b)) +} + var testEAPVCRCmd = &cobra.Command{ Use: "test-eap-vcr", Short: "Run vcr tests for affected packages in EAP", @@ -131,7 +168,10 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath return fmt.Errorf("error uploading replaying logs: %w", err) } - if hasPanics, err := handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath, replayingResult, vcr.Replaying, rnr); err != nil { + // Comments for VCR must go in the gerrit_comments_acctest.json json file. + commenter := NewGerritCommenter(filepath.Join(kokoroArtifactsDir, "gerrit_comments_acctest.json"), rnr) + + if hasPanics, err := handleEAPVCRPanics(head, replayingResult, vcr.Replaying, commenter); err != nil { return fmt.Errorf("error handling panics: %w", err) } else if hasPanics { return nil @@ -154,8 +194,11 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath if err != nil { return fmt.Errorf("error formatting post replay comment: %w", err) } - if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { - return fmt.Errorf("error posting comment: %w", err) + c := GerritComment{ + Message: comment, + } + if err := commenter.Add(c); err != nil { + return fmt.Errorf("error adding comment: %w", err) } if len(replayingResult.FailedTests) > 0 { recordingResult, recordingErr := vt.RunParallel(vcr.RunOptions{ @@ -182,7 +225,7 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath return fmt.Errorf("error uploading recording logs: %w", err) } - if hasPanics, err := handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath, recordingResult, vcr.Recording, rnr); err != nil { + if hasPanics, err := 
handleEAPVCRPanics(head, recordingResult, vcr.Recording, commenter); err != nil { return fmt.Errorf("error handling panics: %w", err) } else if hasPanics { return nil @@ -222,31 +265,29 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath if err != nil { return fmt.Errorf("error formatting record replay comment: %w", err) } - if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, recordReplayComment, rnr); err != nil { - return fmt.Errorf("error posting comment: %w", err) + c = GerritComment{ + Message: recordReplayComment, + } + if err := commenter.Add(c); err != nil { + return fmt.Errorf("error adding comment: %w", err) } } return nil } -func handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath string, result vcr.Result, mode vcr.Mode, rnr ExecRunner) (bool, error) { +func handleEAPVCRPanics(head string, result vcr.Result, mode vcr.Mode, commenter *GerritCommenter) (bool, error) { if len(result.Panics) > 0 { - comment := fmt.Sprintf(`The provider crashed while running the VCR tests in %s mode. + c := GerritComment{ + Message: fmt.Sprintf(`The provider crashed while running the VCR tests in %s mode. 
Please fix it to complete your CL View the [build log](https://storage.cloud.google.com/ci-vcr-logs/%s/refs/heads/%s/build-log/%s_test.log)`, - provider.Private.String(), mode.Upper(), head, mode.Lower()) - if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { - return true, fmt.Errorf("error posting comment: %v", err) + provider.Private.String(), mode.Upper(), head, mode.Lower()), } - return true, nil + return true, commenter.Add(c) } return false, nil } -func postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment string, rnr ExecRunner) error { - return rnr.AppendFile(filepath.Join(kokoroArtifactsDir, "gerrit_comments.json"), fmt.Sprintf("\n{path: \"%s\", message: \"%s\"}", modifiedFilePath, comment)) -} - func init() { rootCmd.AddCommand(testEAPVCRCmd) } From 07abfd25eefea8947d6bf9bc8250584b2f6c08ed Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 16 Jun 2025 17:21:45 -0400 Subject: [PATCH 375/884] Fix links used for EAP VCR by omitting artifacts/buildId (#14254) --- .ci/magician/cmd/templates/vcr/record_replay.tmpl | 14 +++++++------- .ci/magician/cmd/test_terraform_vcr.go | 8 ++++++++ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/.ci/magician/cmd/templates/vcr/record_replay.tmpl b/.ci/magician/cmd/templates/vcr/record_replay.tmpl index d349b2bf4299..e618d2d63b4a 100644 --- a/.ci/magician/cmd/templates/vcr/record_replay.tmpl +++ b/.ci/magician/cmd/templates/vcr/record_replay.tmpl @@ -2,7 +2,7 @@ {{color "green" "Tests passed during RECORDING mode:"}} {{range .RecordingResult.PassedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] + [[Debug log]({{$.LogBaseUrl}}/recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} @@ -11,8 +11,8 @@ {{color "red" "Tests failed when rerunning REPLAYING mode:"}} {{range 
.ReplayingAfterRecordingResult.FailedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/replaying_build_after_recording/{{compoundTest .}}_replaying_test.log)] {{/* remove trailing whitespace */ -}} - [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/replaying_after_recording/{{.}}.log)] + [[Error message]({{$.LogBaseUrl}}/build-log/replaying_build_after_recording/{{compoundTest .}}_replaying_test.log)] {{/* remove trailing whitespace */ -}} + [[Debug log]({{$.LogBaseUrl}}/replaying_after_recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} @@ -30,8 +30,8 @@ Please fix these to complete your PR. If you believe these test failures to be i {{color "red" "Tests failed during RECORDING mode:"}} {{range .RecordingResult.FailedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/recording_build/{{compoundTest .}}_recording_test.log)] {{/* remove trailing whitespace */ -}} - [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] + [[Error message]({{$.LogBaseUrl}}/build-log/recording_build/{{compoundTest .}}_recording_test.log)] {{/* remove trailing whitespace */ -}} + [[Debug log]({{$.LogBaseUrl}}/recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} {{end}} {{- /* end of if gt (len .RecordingResult.FailedTests) 0 */ -}} @@ -42,5 +42,5 @@ Please fix these to complete your PR. 
If you believe these test failures to be i {{if .AllRecordingPassed}}{{color "green" "All tests passed!"}}{{end}} -View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/recording_test.log) {{/* remove trailing whitespace */ -}} -or the [debug log](https://console.cloud.google.com/storage/browser/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/recording) for each test +View the [build log]({{.LogBaseUrl}}/build-log/recording_test.log) {{/* remove trailing whitespace */ -}} +or the [debug log]({{.BrowseLogBaseUrl}}/recording) for each test diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index cca81a302e57..74f7673744d7 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -77,6 +77,8 @@ type recordReplay struct { Version string Head string BuildID string + LogBaseUrl string + BrowseLogBaseUrl string } var testTerraformVCRCmd = &cobra.Command{ @@ -528,5 +530,11 @@ func formatPostReplay(data postReplay) (string, error) { } func formatRecordReplay(data recordReplay) (string, error) { + logBasePath := fmt.Sprintf("%s/%s/refs/heads/%s/artifacts/%s", data.LogBucket, data.Version, data.Head, data.BuildID) + if data.BuildID == "" { + logBasePath = fmt.Sprintf("%s/%s/refs/heads/%s", data.LogBucket, data.Version, data.Head) + } + data.LogBaseUrl = fmt.Sprintf("https://storage.cloud.google.com/%s", logBasePath) + data.BrowseLogBaseUrl = fmt.Sprintf("https://console.cloud.google.com/storage/browser/%s", logBasePath) return formatComment("record_replay.tmpl", recordReplayTmplText, data) } From f23ddf01fafcbce9430001793156d3277641343c Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 16 Jun 2025 14:25:59 -0700 Subject: [PATCH 376/884] tgc-revival: modify testing Version and DiscoveryDocumentURI (#14262) --- mmv1/third_party/tgc_next/go.mod | 4 ++++ mmv1/third_party/tgc_next/go.sum | 
20 +++++++++++++++++++ .../tgc_next/test/assert_test_files.go | 15 +++++++++++++- mmv1/third_party/tgc_next/test/setup.go | 1 - 4 files changed, 38 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/tgc_next/go.mod b/mmv1/third_party/tgc_next/go.mod index e9144f378cbb..1041133f8920 100644 --- a/mmv1/third_party/tgc_next/go.mod +++ b/mmv1/third_party/tgc_next/go.mod @@ -48,6 +48,9 @@ require ( cloud.google.com/go/longrunning v0.6.6 // indirect cloud.google.com/go/monitoring v1.24.1 // indirect github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect @@ -95,6 +98,7 @@ require ( github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.34.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect go.opentelemetry.io/otel v1.35.0 // indirect diff --git a/mmv1/third_party/tgc_next/go.sum b/mmv1/third_party/tgc_next/go.sum index 5bcb088ceb12..3dcd2b906557 100644 --- a/mmv1/third_party/tgc_next/go.sum +++ b/mmv1/third_party/tgc_next/go.sum @@ -15,13 +15,27 @@ cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4 cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= cloud.google.com/go/iam v1.5.0 
h1:QlLcVMhbLGOjRcGe6VTGGTyQib8dRLK2B/kYNV0+2xs= cloud.google.com/go/iam v1.5.0/go.mod h1:U+DOtKQltF/LxPEtcDLoobcsZMilSRwR7mgNL7knOpo= +cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= +cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= +cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= +cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= +cloud.google.com/go/trace v1.11.3 h1:c+I4YFjxRQjvAhRmSsmjpASUKq88chOX854ied0K/pE= +cloud.google.com/go/trace v1.11.3/go.mod h1:pt7zCYiDSQjC9Y2oqCsh9jF4GStB/hmjrYLsxRR27q8= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0/go.mod h1:obipzmGjfSjam60XLwGfqUkJsfiheAl+TUjG+4yzyPM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 h1:5IT7xOdq17MtcdtL/vtl6mGfzhaq4m4vpollPRmlsBQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0/go.mod h1:ZV4VOm0/eHR06JLrXWe09068dHpr3TRpY9Uo7T+anuA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0 
h1:nNMpRpnkWDAaqcpxMJvxa/Ud98gjbYwayJY4/9bdjiU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 h1:ig/FpDD2JofP/NExKQUbn7uOSZzJAQqogfqluZK4ed4= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= @@ -103,6 +117,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -251,12 +267,16 @@ github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6 github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= 
+go.opentelemetry.io/contrib/detectors/gcp v1.34.0 h1:JRxssobiPg23otYU5SbWtQC//snGVIM3Tx6QRzlQBao= +go.opentelemetry.io/contrib/detectors/gcp v1.34.0/go.mod h1:cV4BMFcscUR/ckqLkbfQmF0PRsq8w/lMGzdbCSveBHo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I= go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index f81ee8811e81..39f0ec922236 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -21,6 +21,7 @@ import ( "go.uber.org/zap/zaptest" "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" ) var ( @@ -169,7 +170,19 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if err := compareAssetName(asset.Name, roundtripAsset.Name); err != nil { return err } - if diff := 
cmp.Diff(asset.Resource, roundtripAsset.Resource); diff != "" { + if diff := cmp.Diff( + asset.Resource, + roundtripAsset.Resource, + cmpopts.IgnoreFields(caiasset.AssetResource{}, "Version", "Data"), + // Consider DiscoveryDocumentURI equal if they have the same number of path segments when split by "/". + cmp.FilterPath(func(p cmp.Path) bool { + return p.Last().String() == ".DiscoveryDocumentURI" + }, cmp.Comparer(func(x, y string) bool { + parts1 := strings.Split(x, "/") + parts2 := strings.Split(y, "/") + return len(parts1) == len(parts2) + })), + ); diff != "" { return fmt.Errorf("differences found between exported asset and roundtrip asset (-want +got):\n%s", diff) } } diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index 4a6ec086e7ec..24b0daccef7a 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -242,7 +242,6 @@ func convertToAssetMap(assets []caiasset.Asset) map[string]caiasset.Asset { assetMap := make(map[string]caiasset.Asset) for _, asset := range assets { - asset.Resource.Data = nil assetMap[asset.Type] = asset } return assetMap From b7f51d3cbb73b913749fc4f354fd5e8296b1661c Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 16 Jun 2025 16:31:24 -0700 Subject: [PATCH 377/884] tgc-revival: test template (#14235) Co-authored-by: Zhenhua Li --- mmv1/api/resource.go | 22 ++++++++++-- mmv1/api/resource/examples.go | 8 +++++ mmv1/provider/template_data.go | 8 +++++ mmv1/provider/terraform_tgc_next.go | 26 +++++++++++++- .../templates/tgc_next/test/test_file.go.tmpl | 36 +++++++++++++++++++ 5 files changed, 97 insertions(+), 3 deletions(-) create mode 100644 mmv1/templates/tgc_next/test/test_file.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index e2b0704cec63..6fc73240fc22 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1977,9 +1977,27 @@ func (r Resource) MarkdownHeader(templatePath string) string { return 
strings.Replace(r.CodeHeader(templatePath), "//", "#", -1) } +// TGC Methods // ==================== -// TGC -// ==================== +// Lists fields that test.BidirectionalConversion should ignore +func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string { + var props []string + for _, tp := range r.VirtualFields { + props = append(props, google.Underscore(tp.Name)) + } + for _, tp := range r.AllUserProperties() { + if tp.UrlParamOnly { + props = append(props, google.Underscore(tp.Name)) + } + } + for _, tp := range e.TGCTestIgnoreExtra { + props = append(props, tp) + } + + slices.Sort(props) + return props +} + // Filters out computed properties during cai2hcl func (r Resource) ReadPropertiesForTgc() []*Type { return google.Reject(r.AllUserProperties(), func(v *Type) bool { diff --git a/mmv1/api/resource/examples.go b/mmv1/api/resource/examples.go index 89a981d9f329..884effd882e6 100644 --- a/mmv1/api/resource/examples.go +++ b/mmv1/api/resource/examples.go @@ -175,6 +175,14 @@ type Examples struct { DocumentationHCLText string `yaml:"-"` TestHCLText string `yaml:"-"` OicsHCLText string `yaml:"-"` + + // ==================== + // TGC + // ==================== + // Extra properties for tests to ignore. + // These properties are present in Terraform resources schema, but not in CAI assets. + // Virtual Fields and url parameters are already ignored by default and do not need to be duplicated here. + TGCTestIgnoreExtra []string `yaml:"tgc_test_ignore_extra,omitempty"` } // Set default value for fields diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 656f8d4270f5..2adcaa281edf 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -209,6 +209,14 @@ func (td *TemplateData) GenerateTGCIamResourceFile(filePath string, resource api td.GenerateFile(filePath, templatePath, resource, true, templates...) 
} +func (td *TemplateData) GenerateTGCNextTestFile(filePath string, resource api.Resource) { + templatePath := "templates/tgc_next/test/test_file.go.tmpl" + templates := []string{ + templatePath, + } + td.GenerateFile(filePath, templatePath, resource, true, templates...) +} + func (td *TemplateData) GenerateFile(filePath, templatePath string, input any, goFormat bool, templates ...string) { templateFileName := filepath.Base(templatePath) diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 6304ecbc5f1a..7d1d79dff92e 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -31,7 +31,7 @@ import ( "github.com/otiai10/copy" ) -// This proivder is for both tfplan2cai and cai2hcl conversions, +// TerraformGoogleConversionNext is for both tfplan2cai and cai2hcl conversions // and copying other files, such as transport.go type TerraformGoogleConversionNext struct { TargetVersionName string @@ -94,6 +94,7 @@ func (tgc TerraformGoogleConversionNext) GenerateObject(object api.Resource, out if !object.IsExcluded() { tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "tfplan2cai") tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "cai2hcl") + tgc.GenerateResourceTests(object, *templateData, outputFolder) } } @@ -113,6 +114,29 @@ func (tgc TerraformGoogleConversionNext) GenerateResource(object api.Resource, t func (tgc TerraformGoogleConversionNext) GenerateCaiToHclObjects(outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { } +func (tgc *TerraformGoogleConversionNext) GenerateResourceTests(object api.Resource, templateData TemplateData, outputFolder string) { + eligibleExample := false + for _, example := range object.Examples { + if !example.ExcludeTest { + if object.ProductMetadata.VersionObjOrClosest(tgc.Version.Name).CompareTo(object.ProductMetadata.VersionObjOrClosest(example.MinVersion)) >= 0 { + 
eligibleExample = true + break + } + } + } + if !eligibleExample { + return + } + + productName := tgc.Product.ApiName + targetFolder := path.Join(outputFolder, "test", "services", productName) + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s_generated_test.go", productName, google.Underscore(object.Name))) + templateData.GenerateTGCNextTestFile(targetFilePath, object) +} + func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { resourceConverters := map[string]string{ // common diff --git a/mmv1/templates/tgc_next/test/test_file.go.tmpl b/mmv1/templates/tgc_next/test/test_file.go.tmpl new file mode 100644 index 000000000000..e9a08b298434 --- /dev/null +++ b/mmv1/templates/tgc_next/test/test_file.go.tmpl @@ -0,0 +1,36 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package {{$.PackageName}}_test + +import ( + "testing" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/test" +) + +{{ range $e := $.TestExamples }} +func TestAcc{{ $e.TestSlug $.ProductMetadata.Name $.Name }}(t *testing.T) { + t.Parallel() + + test.BidirectionalConversion( + t, + []string{ +{{- range $field := $.TGCTestIgnorePropertiesToStrings $e }} + "{{ $field }}", +{{- end }} + }, + ) +} +{{- end }} From 98a73be978b6bb7ffeec86f8da12f82620236fd6 Mon Sep 17 00:00:00 2001 From: Xian-Ji Chen <68801742+XianJiChen@users.noreply.github.com> Date: Mon, 16 Jun 2025 16:40:05 -0700 Subject: [PATCH 378/884] Add Terraform Support for Dataplex Discovery Scans (#14274) --- mmv1/products/dataplex/Datascan.yaml | 110 +++++++++++++++++- .../dataplex_datascan_basic_discovery.tf.tmpl | 24 ++++ .../dataplex_datascan_full_discovery.tf.tmpl | 68 +++++++++++ ...ataplex_datascan_full_quality_test.tf.tmpl | 4 +- 4 files changed, 203 insertions(+), 3 deletions(-) create mode 100644 mmv1/templates/terraform/examples/dataplex_datascan_basic_discovery.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dataplex_datascan_full_discovery.tf.tmpl diff --git a/mmv1/products/dataplex/Datascan.yaml b/mmv1/products/dataplex/Datascan.yaml index 6cdabefd75e8..00f3c8ace7ed 100644 --- a/mmv1/products/dataplex/Datascan.yaml +++ b/mmv1/products/dataplex/Datascan.yaml @@ -92,6 +92,20 @@ examples: test_env_vars: project_name: 'PROJECT_NAME' exclude_docs: true + - name: 'dataplex_datascan_basic_discovery' + primary_resource_id: 'basic_discovery' + vars: + datascan_name: 'datadiscovery-basic' + test_env_vars: + project_name: 'PROJECT_NAME' + location: 'REGION' + - name: 'dataplex_datascan_full_discovery' + primary_resource_id: 'full_discovery' + vars: + datascan_name: 'datadiscovery-full' + test_env_vars: + project_name: 'PROJECT_NAME' + location: 'REGION' parameters: - name: 'location' type: 
String @@ -170,7 +184,7 @@ properties: type: String description: | The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: - (Cloud Storage bucket for DataDiscoveryScan)BigQuery table of type "TABLE" for DataProfileScan/DataQualityScan. + Cloud Storage bucket (//storage.googleapis.com/projects/PROJECT_ID/buckets/BUCKET_ID) for DataDiscoveryScan OR BigQuery table of type "TABLE" (/bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID) for DataProfileScan/DataQualityScan. immutable: true exactly_one_of: - 'data.0.entity' @@ -242,6 +256,7 @@ properties: - 'DATA_SCAN_TYPE_UNSPECIFIED' - 'DATA_QUALITY' - 'DATA_PROFILE' + - 'DATA_DISCOVERY' - name: 'dataQualitySpec' type: NestedObject description: | @@ -249,6 +264,7 @@ properties: exactly_one_of: - 'data_quality_spec' - 'data_profile_spec' + - 'data_discovery_spec' properties: - name: 'samplingPercent' type: Double @@ -498,6 +514,7 @@ properties: exactly_one_of: - 'data_quality_spec' - 'data_profile_spec' + - 'data_discovery_spec' properties: - name: 'samplingPercent' type: Double @@ -552,3 +569,94 @@ properties: For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'. item_type: type: String + - name: 'dataDiscoverySpec' + type: NestedObject + description: | + DataDiscoveryScan related setting. + send_empty_value: true + allow_empty_object: true + exactly_one_of: + - 'data_quality_spec' + - 'data_profile_spec' + - 'data_discovery_spec' + properties: + - name: 'bigqueryPublishingConfig' + type: NestedObject + description: | + Configuration for metadata publishing. + properties: + - name: tableType + type: Enum + description: | + Determines whether to publish discovered tables as BigLake external tables or non-BigLake external tables. 
+ enum_values: + - TABLE_TYPE_UNSPECIFIED + - EXTERNAL + - BIGLAKE + - name: connection + type: String + description: | + The BigQuery connection used to create BigLake tables. Must be in the form `projects/{projectId}/locations/{locationId}/connections/{connection_id}`. + - name: location + type: String + description: | + The location of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. + - name: project + type: String + description: | + The project of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. If not specified, the project of the Cloud Storage bucket will be used. The format is "projects/{project_id_or_number}". + - name: 'storageConfig' + type: NestedObject + description: | + Configurations related to Cloud Storage as the data source. + properties: + - name: includePatterns + type: Array + description: | + Defines the data to include during discovery when only a subset of the data should be considered. Provide a list of patterns that identify the data to include. For Cloud Storage bucket assets, these patterns are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these patterns are interpreted as patterns to match table names. + item_type: + type: String + - name: excludePatterns + type: Array + description: | + Defines the data to exclude during discovery. Provide a list of patterns that identify the data to exclude. For Cloud Storage bucket assets, these patterns are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these patterns are interpreted as patterns to match table names. + item_type: + type: String + - name: csvOptions + type: NestedObject + description: | + Configuration for CSV data. + properties: + - name: headerRows + type: Integer + description: | + The number of rows to interpret as header rows that should be skipped when reading data rows. 
+ - name: delimiter + type: String + description: | + The delimiter that is used to separate values. The default is `,` (comma). + - name: encoding + type: String + description: | + The character encoding of the data. The default is UTF-8. + - name: typeInferenceDisabled + type: Boolean + description: | + Whether to disable the inference of data types for CSV data. If true, all columns are registered as strings. + - name: quote + type: String + description: | + The character used to quote column values. Accepts `"` (double quotation mark) or `'` (single quotation mark). If unspecified, defaults to `"` (double quotation mark). + - name: jsonOptions + type: NestedObject + description: | + Configuration for JSON data. + properties: + - name: encoding + type: String + description: | + The character encoding of the data. The default is UTF-8. + - name: typeInferenceDisabled + type: Boolean + description: | + Whether to disable the inference of data types for JSON data. If true, all columns are registered as their primitive types (strings, number, or boolean). 
diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_basic_discovery.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_datascan_basic_discovery.tf.tmpl new file mode 100644 index 000000000000..f32628fb3c45 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_basic_discovery.tf.tmpl @@ -0,0 +1,24 @@ +resource "google_dataplex_datascan" "{{$.PrimaryResourceId}}" { + location = "us-central1" + data_scan_id = "{{index $.Vars "datascan_name"}}" + + data { + resource = "//storage.googleapis.com/projects/${google_storage_bucket.tf_test_bucket.project}/buckets/${google_storage_bucket.tf_test_bucket.name}" + } + + execution_spec { + trigger { + on_demand {} + } + } + + data_discovery_spec {} + + project = "{{index $.TestEnvVars "project_name"}}" +} + +resource "google_storage_bucket" "tf_test_bucket" { + name = "tf-test-bucket-name-%{random_suffix}" + location = "{{index $.TestEnvVars "location"}}" + uniform_bucket_level_access = true +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_full_discovery.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_datascan_full_discovery.tf.tmpl new file mode 100644 index 000000000000..21e365d20878 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_full_discovery.tf.tmpl @@ -0,0 +1,68 @@ +resource "google_dataplex_datascan" "{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "Full Datascan Discovery" + data_scan_id = "{{index $.Vars "datascan_name"}}" + description = "Example resource - Full Datascan Discovery" + labels = { + author = "billing" + } + + data { + resource = "//storage.googleapis.com/projects/${google_storage_bucket.tf_test_bucket.project}/buckets/${google_storage_bucket.tf_test_bucket.name}" + } + + execution_spec { + trigger { + schedule { + cron = "TZ=America/New_York 1 1 * * *" + } + } + } + + data_discovery_spec { + bigquery_publishing_config { + table_type = "BIGLAKE" + connection = 
"projects/${google_bigquery_connection.tf_test_connection.project}/locations/${google_bigquery_connection.tf_test_connection.location}/connections/${google_bigquery_connection.tf_test_connection.connection_id}" + location = "${google_storage_bucket.tf_test_bucket.location}" + project = "projects/${google_storage_bucket.tf_test_bucket.project}" + } + + storage_config { + include_patterns = [ + "ai*", + "ml*", + ] + exclude_patterns = [ + "doc*", + "gen*", + ] + csv_options { + header_rows = 5 + delimiter = "," + encoding = "UTF-8" + type_inference_disabled = false + quote = "'" + } + json_options { + encoding = "UTF-8" + type_inference_disabled = false + } + } + } + + project = "{{index $.TestEnvVars "project_name"}}" +} + +resource "google_storage_bucket" "tf_test_bucket" { + name = "tf-test-bucket-name-%{random_suffix}" + location = "{{index $.TestEnvVars "location"}}" + uniform_bucket_level_access = true +} + +resource "google_bigquery_connection" "tf_test_connection" { + connection_id = "tf-test-connection-%{random_suffix}" + location = "us-central1" + friendly_name = "tf-test-connection-%{random_suffix}" + description = "a bigquery connection for tf test" + cloud_resource {} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl index 8ab23feeeb23..de0f595f1279 100644 --- a/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl @@ -1,11 +1,11 @@ resource "google_bigquery_dataset" "tf_test_dataset" { - dataset_id = "tf_test_dataset_id" + dataset_id = "tf_test_dataset_id_%{random_suffix}" default_table_expiration_ms = 3600000 } resource "google_bigquery_table" "tf_test_table" { dataset_id = google_bigquery_dataset.tf_test_dataset.dataset_id - table_id = "tf_test_table_id" + table_id = 
"tf_test_table_id_%{random_suffix}" deletion_protection = false schema = < Date: Tue, 17 Jun 2025 12:24:19 -0500 Subject: [PATCH 379/884] update `default_from_api` and `send_empty_value` interaction in docs (#14029) --- docs/content/reference/field.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index a35abe372240..29428458cecd 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -174,7 +174,6 @@ value for the field. This attribute is useful for complex or frequently-changed API-side defaults, but provides less useful information at plan time than `default_value` and causes the provider to ignore user configurations that explicitly set the field to an "empty" value. -`default_from_api` and `send_empty_value` cannot both be true on the same field. Example: @@ -189,7 +188,10 @@ strings) to the API if set explicitly in the user's configuration. If false, This attribute is useful for fields where the API would behave differently for an "empty" value vs no value for a particular field - for example, boolean fields that have an API-side default of true. -`send_empty_value` and `default_from_api` cannot both be true on the same field. + +If true simulataneously with `default_from_api`, the provider will send empty values +explicitly set in configuration. If the field is unset, the provider will +accept API values as the default as usual with `default_from_api`. 
Due to a [bug](https://github.com/hashicorp/terraform-provider-google/issues/13201), NestedObject fields will currently be sent as `null` if unset (rather than being From e409cc3054cf55c83d942f56383e505bf41c5e19 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal Date: Tue, 17 Jun 2025 23:35:56 +0530 Subject: [PATCH 380/884] Add Support for CSI (#14238) Co-authored-by: Thomas Rodgers --- mmv1/products/compute/Interconnect.yaml | 13 +++++++++++++ .../examples/compute_interconnect_basic.tf.tmpl | 2 +- .../compute_interconnect_basic_test.tf.tmpl | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index 87038eb0bcd3..019498527e14 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -401,6 +401,7 @@ properties: interconnects.list of features requested for this Interconnect connection enum_values: - 'MACSEC' + - 'CROSS_SITE_NETWORK' - 'IF_MACSEC' - name: 'availableFeatures' type: Array @@ -410,6 +411,18 @@ properties: ports. If not present then the Interconnect connection is provisioned on non-MACsec capable ports and MACsec isn't supported and enabling MACsec fails). output: true + item_type: + type: Enum + enum_values: + - 'IF_CROSS_SITE_NETWORK' + - 'IF_MACSEC' + - 'MACSEC' + - name: 'wireGroups' + type: Array + min_version: beta + description: | + A list of the URLs of all CrossSiteNetwork WireGroups configured to use this Interconnect. The Interconnect cannot be deleted if this list is non-empty. 
+ output: true item_type: type: String - name: 'interconnectGroups' diff --git a/mmv1/templates/terraform/examples/compute_interconnect_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_interconnect_basic.tf.tmpl index cdbf39957e21..4e5452560fbc 100644 --- a/mmv1/templates/terraform/examples/compute_interconnect_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_interconnect_basic.tf.tmpl @@ -5,6 +5,6 @@ resource "google_compute_interconnect" "{{$.PrimaryResourceId}}" { customer_name = "{{index $.Vars "customer_name"}}" interconnect_type = "DEDICATED" link_type = "LINK_TYPE_ETHERNET_10G_LR" - location = "https://www.googleapis.com/compute/v1/projects/${data.google_project.project.name}/global/interconnectLocations/iad-zone1-1" + location = "https://www.googleapis.com/compute/v1/${data.google_project.project.id}/global/interconnectLocations/iad-zone1-1" requested_link_count = 1 } diff --git a/mmv1/templates/terraform/examples/compute_interconnect_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/compute_interconnect_basic_test.tf.tmpl index 03c9440e5067..44de3435ced5 100644 --- a/mmv1/templates/terraform/examples/compute_interconnect_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_interconnect_basic_test.tf.tmpl @@ -5,7 +5,7 @@ resource "google_compute_interconnect" "{{$.PrimaryResourceId}}" { customer_name = "internal_customer" # Special customer only available for Google testing. interconnect_type = "DEDICATED" link_type = "LINK_TYPE_ETHERNET_10G_LR" - location = "https://www.googleapis.com/compute/v1/projects/${data.google_project.project.name}/global/interconnectLocations/z2z-us-east4-zone1-lciadl-a" # Special location only available for Google testing. + location = "https://www.googleapis.com/compute/v1/${data.google_project.project.id}/global/interconnectLocations/z2z-us-east4-zone1-lciadl-a" # Special location only available for Google testing. 
requested_link_count = 1 admin_enabled = true description = "example description" From 9e4fd0e68bfda7fef4dc6769e8841819965ee4c2 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Tue, 17 Jun 2025 16:43:24 -0400 Subject: [PATCH 381/884] Ignore newly set computed values for Dataflow FlexTemplateJob (#14285) --- ...ce_dataflow_flex_template_job_test.go.tmpl | 45 ++++++++++--------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl index b0daab35b0a4..a0659cc90aed 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl @@ -44,7 +44,7 @@ func TestAccDataflowFlexTemplateJob_basic(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -83,7 +83,7 @@ func TestAccDataflowFlexTemplateJob_streamUpdate(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "transform_name_mapping", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "transform_name_mapping", "state", "container_spec_gcs_path", "labels", "terraform_labels", 
"machine_type", "sdk_container_image"}, }, }, }) @@ -148,7 +148,7 @@ func TestAccDataflowFlexTemplateJob_FullUpdate(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccDataflowFlexTemplateJob_dataflowFlexTemplateJobFullUpdate(job, bucket, topic, randStr), @@ -157,7 +157,7 @@ func TestAccDataflowFlexTemplateJob_FullUpdate(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -192,7 +192,7 @@ func TestAccDataflowFlexTemplateJob_withNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_network", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccDataflowFlexTemplateJob_networkUpdate(job, network1, network2, bucket, topic), @@ -205,7 +205,7 @@ func 
TestAccDataflowFlexTemplateJob_withNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_network", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -241,7 +241,7 @@ func TestAccDataflowFlexTemplateJob_withSubNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_subnetwork", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccDataflowFlexTemplateJob_subnetworkUpdate(job, network, subnetwork1, subnetwork2, bucket, topic), @@ -254,7 +254,7 @@ func TestAccDataflowFlexTemplateJob_withSubNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_subnetwork", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -288,7 +288,7 @@ func TestAccDataflowFlexTemplateJob_withIpConfig(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_ipconfig", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "ip_configuration", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "ip_configuration", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -333,7 +333,7 @@ func TestAccDataflowFlexTemplateJob_withKmsKey(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_kms", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -367,7 +367,7 @@ func TestAccDataflowFlexTemplateJob_withAdditionalExperiments(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_experiments", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_experiments", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_experiments", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -401,7 +401,7 @@ func TestAccDataflowFlexTemplateJob_withAdditionalPipelineOptions(t *testing.T) ResourceName: "google_dataflow_flex_template_job.flex_job_pipeline_options", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_pipeline_options", "container_spec_gcs_path", 
"labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_pipeline_options", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -422,6 +422,9 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckDataflowJobDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, Steps: []resource.TestStep{ { Config: testAccDataflowFlexTemplateJob_withProviderDefaultLabels(job, bucket, topic, randStr), @@ -433,7 +436,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccComputeAddress_resourceLabelsOverridesProviderDefaultLabels(job, bucket, topic, randStr), @@ -445,7 +448,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", 
"sdk_container_image"}, }, { Config: testAccComputeAddress_moveResourceLabelToProviderDefaultLabels(job, bucket, topic, randStr), @@ -457,7 +460,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccComputeAddress_resourceLabelsOverridesProviderDefaultLabels(job, bucket, topic, randStr), @@ -469,7 +472,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccDataflowFlexTemplateJob_dataflowFlexTemplateJobFull(job, bucket, topic, randStr), @@ -481,7 +484,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", 
"labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -521,7 +524,7 @@ func TestAccDataflowJob_withAttributionLabelCreationOnly(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccDataflowJob_attributionLabelUpdate(bucket, job, add, strategy), @@ -540,7 +543,7 @@ func TestAccDataflowJob_withAttributionLabelCreationOnly(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) @@ -578,7 +581,7 @@ func TestAccDataflowJob_withAttributionLabelProactive(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, { Config: testAccDataflowJob_attributionLabelUpdate(bucket, job, "true", strategy), @@ -597,7 +600,7 @@ func TestAccDataflowJob_withAttributionLabelProactive(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", 
"skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, }, }, }) From 35732ad957dc3edccc85e0e197060db0d0b88715 Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Tue, 17 Jun 2025 20:50:23 +0000 Subject: [PATCH 382/884] Update google.golang.org/api package to the latest version (#14276) --- mmv1/third_party/terraform/go.mod | 42 ++++++++-------- mmv1/third_party/terraform/go.sum | 84 +++++++++++++++---------------- 2 files changed, 63 insertions(+), 63 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index a05a5e76ccd3..d4778ece3f3f 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,7 +3,7 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.1 + cloud.google.com/go/auth v0.16.2 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 @@ -32,18 +32,18 @@ require ( github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 - golang.org/x/net v0.40.0 + golang.org/x/net v0.41.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.235.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250512202823-5a2f75b736a9 - google.golang.org/grpc v1.72.1 + google.golang.org/api v0.237.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 + google.golang.org/grpc v1.73.0 google.golang.org/protobuf v1.36.6 gopkg.in/yaml.v2 v2.4.0 ) require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect - cel.dev/expr v0.20.0 // indirect + cel.dev/expr v0.23.0 // indirect cloud.google.com/go v0.120.0 // indirect 
cloud.google.com/go/compute/metadata v0.7.0 // indirect cloud.google.com/go/iam v1.5.2 // indirect @@ -55,13 +55,13 @@ require ( github.com/cenkalti/backoff v2.2.1+incompatible // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cloudflare/circl v1.3.7 // indirect - github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect + github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/fatih/color v1.16.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4 // indirect - github.com/go-jose/go-jose/v4 v4.0.4 // indirect + github.com/go-jose/go-jose/v4 v4.0.5 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/glog v1.2.4 // indirect @@ -101,20 +101,20 @@ require ( github.com/zclconf/go-cty v1.16.2 // indirect github.com/zeebo/errs v1.4.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect - go.opentelemetry.io/otel v1.35.0 // indirect - go.opentelemetry.io/otel/metric v1.35.0 // indirect - go.opentelemetry.io/otel/sdk v1.35.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect - go.opentelemetry.io/otel/trace v1.35.0 // indirect - golang.org/x/crypto v0.38.0 // indirect - golang.org/x/mod v0.22.0 // indirect - golang.org/x/sync v0.14.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.36.0 // indirect + go.opentelemetry.io/otel/metric v1.36.0 // indirect + go.opentelemetry.io/otel/sdk 
v1.36.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect + go.opentelemetry.io/otel/trace v1.36.0 // indirect + golang.org/x/crypto v0.39.0 // indirect + golang.org/x/mod v0.25.0 // indirect + golang.org/x/sync v0.15.0 // indirect golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.25.0 // indirect - golang.org/x/time v0.11.0 // indirect - golang.org/x/tools v0.22.0 // indirect + golang.org/x/text v0.26.0 // indirect + golang.org/x/time v0.12.0 // indirect + golang.org/x/tools v0.33.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index b66aa96556a4..c3842716dd87 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -1,12 +1,12 @@ bitbucket.org/creachadair/stringset v0.0.8 h1:gQqe4vs8XWgMyijfyKE6K8o4TcyGGrRXe0JvHgx5H+M= bitbucket.org/creachadair/stringset v0.0.8/go.mod h1:AgthVMyMxC/6FK1KBJ2ALdqkZObGN8hOetgpwXyMn34= -cel.dev/expr v0.20.0 h1:OunBvVCfvpWlt4dN7zg3FM6TDkzOePe1+foGJ9AXeeI= -cel.dev/expr v0.20.0/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cel.dev/expr v0.23.0 h1:wUb94w6OYQS4uXraxo9U+wUAs9jT47Xvl4iPgAwM2ss= +cel.dev/expr v0.23.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= -cloud.google.com/go/auth v0.16.1 h1:XrXauHMd30LhQYVRHLGvJiYeczweKQXZxsTbV9TiguU= -cloud.google.com/go/auth v0.16.1/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= +cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= +cloud.google.com/go/auth v0.16.2/go.mod 
h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= @@ -46,8 +46,8 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= -github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/creachadair/staticfile v0.1.2/go.mod h1:a3qySzCIXEprDGxk6tSxSI+dBBdLzqeBOMhZ+o2d3pM= github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= @@ -85,8 +85,8 @@ github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cj github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM= github.com/go-git/go-git/v5 v5.13.0 h1:vLn5wlGIh/X78El6r3Jr+30W16Blk0CTcxTYcYPWi5E= github.com/go-git/go-git/v5 v5.13.0/go.mod h1:Wjo7/JyVKtQgUNdXYXIepzWfJQkUEIGvkvVkiXRR/zw= -github.com/go-jose/go-jose/v4 v4.0.4 h1:VsjPI33J0SB9vQM6PLmNjoHqMQNGPiZ0rHL7Ni7Q6/E= -github.com/go-jose/go-jose/v4 v4.0.4/go.mod h1:NKb5HO1EZccyMpiZNbdUw/14tiXNyUJh188dfnMCAfc= 
+github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= +github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= @@ -275,20 +275,20 @@ github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= -go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= -go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= -go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= -go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= -go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= -go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= -go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod 
h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= -go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= -go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= +go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= +go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= +go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= +go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= @@ -298,8 +298,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod 
h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= -golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 h1:ESSUROHIBHg7USnszlcdmjBEwdMj9VUvU+OPk4yl2mc= golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= @@ -309,8 +309,8 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= -golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -321,8 
+321,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= -golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= @@ -332,8 +332,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= -golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys 
v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -362,10 +362,10 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= -golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= -golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= -golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -375,14 +375,14 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= -golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +golang.org/x/tools v0.33.0 
h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc= +golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.235.0 h1:C3MkpQSRxS1Jy6AkzTGKKrpSCOd2WOGrezZ+icKSkKo= -google.golang.org/api v0.235.0/go.mod h1:QpeJkemzkFKe5VCE/PMv7GsUfn9ZF+u+q1Q7w6ckxTg= +google.golang.org/api v0.237.0 h1:MP7XVsGZesOsx3Q8WVa4sUdbrsTvDSOERd3Vh4xj/wc= +google.golang.org/api v0.237.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -394,15 +394,15 @@ google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRx google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk= google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 h1:vPV0tzlsK6EzEDHNNH5sa7Hs9bd7iXR7B1tSiPepkV0= google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:pKLAc5OolXC3ViWGI62vvC0n10CpwAtRcTNCFwTKBEw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250512202823-5a2f75b736a9 h1:IkAfh6J/yllPtpYFU0zZN1hUPYdT0ogkBT/9hMxHjvg= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250512202823-5a2f75b736a9/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 
h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.72.1 h1:HR03wO6eyZ7lknl75XlxABNVLLFc2PAb6mHlYh756mA= -google.golang.org/grpc v1.72.1/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= +google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= +google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= From 52ed15d764d3c00c24155ff1613e75414408f715 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Tue, 17 Jun 2025 14:24:01 -0700 Subject: [PATCH 383/884] Adjust cloud_asset_resource datasource with universe_domain handling (#14288) --- ...ource_google_cloud_asset_resources_search_all.go.tmpl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_resources_search_all.go.tmpl b/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_resources_search_all.go.tmpl index 03de0caaf356..0335c4b0a469 100644 --- a/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_resources_search_all.go.tmpl +++ 
b/mmv1/third_party/terraform/services/cloudasset/data_source_google_cloud_asset_resources_search_all.go.tmpl @@ -97,7 +97,14 @@ func datasourceGoogleCloudAssetResourcesSearchAllRead(d *schema.ResourceData, me query := d.Get("query").(string) assetTypes := d.Get("asset_types").([]interface{}) - url := fmt.Sprintf("https://cloudasset.googleapis.com/v1p1beta1/%s/resources:searchAll", scope) + universeDomain := config.UniverseDomain + url := "" + if universeDomain != "" && universeDomain != "googleapis.com" { + url = fmt.Sprintf("https://cloudasset.%s/v1p1beta1/%s/resources:searchAll", universeDomain, scope) + } else { + url = fmt.Sprintf("https://cloudasset.googleapis.com/v1p1beta1/%s/resources:searchAll", scope) + } + params["query"] = query url, err = transport_tpg.AddArrayQueryParams(url, "asset_types", assetTypes) From e0c92989345be99ed8f470b28397009476784f7c Mon Sep 17 00:00:00 2001 From: Stephane Charite Date: Tue, 17 Jun 2025 15:48:21 -0700 Subject: [PATCH 384/884] Add lustre instance performance tiers (#14180) --- mmv1/products/lustre/Instance.yaml | 84 ++++++++++--------- .../examples/lustre_instance_basic.tf.tmpl | 15 ++-- .../data_source_lustre_instance_test.go | 16 ++-- .../lustre/resource_lustre_instance_test.go | 38 +++++---- 4 files changed, 82 insertions(+), 71 deletions(-) diff --git a/mmv1/products/lustre/Instance.yaml b/mmv1/products/lustre/Instance.yaml index 0a63e7bbeba3..69e24a432e36 100644 --- a/mmv1/products/lustre/Instance.yaml +++ b/mmv1/products/lustre/Instance.yaml @@ -27,6 +27,7 @@ id_format: projects/{{project}}/locations/{{location}}/instances/{{instance_id}} import_format: - projects/{{project}}/locations/{{location}}/instances/{{instance_id}} examples: + - name: lustre_instance_basic primary_resource_id: 'instance' vars: @@ -63,7 +64,7 @@ parameters: - name: instanceId type: String description: |- - Required. The name of the Managed Lustre instance. + The name of the Managed Lustre instance. 
* Must contain only lowercase letters, numbers, and hyphens. * Must start with a letter. @@ -73,10 +74,41 @@ parameters: url_param_only: true required: true properties: + - name: capacityGib + type: String + description: |- + The storage capacity of the instance in gibibytes (GiB). Allowed values + are from `18000` to `954000`, in increments of 9000. + required: true + - name: updateTime + type: String + description: Timestamp when the instance was last updated. + output: true + - name: gkeSupportEnabled + type: Boolean + description: |- + Indicates whether you want to enable support for GKE clients. By default, + GKE clients are not supported. + - name: filesystem + type: String + description: |- + The filesystem name for this instance. This name is used by client-side + tools, including when mounting the instance. Must be eight characters or + less and can only contain letters and numbers. + immutable: true + required: true + - name: network + type: String + description: |- + The full name of the VPC network to which the instance is connected. + Must be in the format + `projects/{project_id}/global/networks/{network_name}`. + immutable: true + required: true - name: state type: String description: |- - Output only. The state of the instance. + The state of the instance. Possible values: STATE_UNSPECIFIED ACTIVE @@ -88,50 +120,26 @@ properties: output: true - name: mountPoint type: String - description: Output only. Mount point of the instance in the format `IP_ADDRESS@tcp:/FILESYSTEM`. + description: Mount point of the instance in the format `IP_ADDRESS@tcp:/FILESYSTEM`. output: true - - name: labels - type: KeyValueLabels - description: Optional. Labels as key value pairs. - - name: capacityGib - type: String - description: |- - Required. The storage capacity of the instance in gibibytes (GiB). Allowed values - are from 18000 to 954000, in increments of 9000. - required: true - name: createTime type: String - description: Output only. 
Timestamp when the instance was created. - output: true - - name: updateTime - type: String - description: Output only. Timestamp when the instance was last updated. + description: Timestamp when the instance was created. output: true - name: description type: String - description: Optional. A user-readable description of the instance. - - name: gkeSupportEnabled - type: Boolean + description: A user-readable description of the instance. + - name: labels + type: KeyValueLabels + description: Labels as key value pairs. + - name: perUnitStorageThroughput + type: String description: |- - Optional. Indicates whether you want to enable support for GKE clients. By default, - GKE clients are not supported. + The throughput of the instance in MB/s/TiB. + Valid values are 125, 250, 500, 1000. + required: true + immutable: true - name: name type: String description: Identifier. The name of the instance. output: true - - name: filesystem - type: String - description: |- - Required. Immutable. The filesystem name for this instance. This name is used by client-side - tools, including when mounting the instance. Must be 8 characters or less - and may only contain letters and numbers. - immutable: true - required: true - - name: network - type: String - description: |- - Required. Immutable. The full name of the VPC network to which the instance is connected. - Must be in the format - `projects/{project_id}/global/networks/{network_name}`. 
- immutable: true - required: true diff --git a/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl index 3381a3b7a57b..d9b68469417c 100644 --- a/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl @@ -1,11 +1,12 @@ resource "google_lustre_instance" "{{$.PrimaryResourceId}}" { - instance_id = "{{index $.Vars "name"}}" - location = "us-central1-a" - description = "test lustre instance" - filesystem = "testfs" - capacity_gib = 18000 - network = data.google_compute_network.lustre-network.id - labels = { + instance_id = "{{index $.Vars "name"}}" + location = "us-central1-a" + description = "test lustre instance" + filesystem = "testfs" + capacity_gib = 18000 + network = data.google_compute_network.lustre-network.id + per_unit_storage_throughput = 1000 + labels = { test = "value" } timeouts { diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go index c443c5969ef5..af0256f45088 100644 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go @@ -37,14 +37,14 @@ func TestAccLustreInstanceDatasource_basic(t *testing.T) { func testAccLustreInstanceDatasource_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_lustre_instance" "instance" { - instance_id = "my-instance-%{random_suffix}" - location = "us-central1-a" - filesystem = "testfs" - capacity_gib = 18000 - network = google_compute_network.producer_net.id - gke_support_enabled = false - - depends_on = [ google_service_networking_connection.service_con ] + instance_id = "my-instance-%{random_suffix}" + location = "us-central1-a" + filesystem = "testfs" + capacity_gib = 18000 + network = 
google_compute_network.producer_net.id + gke_support_enabled = false + per_unit_storage_throughput = 1000 + depends_on = [ google_service_networking_connection.service_con ] } resource "google_compute_subnetwork" "producer_subnet" { diff --git a/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go index 433a68a57d68..cb5cbc6a264f 100644 --- a/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go @@ -54,15 +54,16 @@ func TestAccLustreInstance_update(t *testing.T) { func testAccLustreInstance_full(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_lustre_instance" "instance" { - instance_id = "tf-test-my-instance%{random_suffix}" - location = "us-central1-a" - filesystem = "testfs" - network = data.google_compute_network.lustre-network.id - gke_support_enabled = false - capacity_gib = 18000 - timeouts { - create = "120m" - } + instance_id = "tf-test-my-instance%{random_suffix}" + location = "us-central1-a" + filesystem = "testfs" + network = data.google_compute_network.lustre-network.id + gke_support_enabled = false + capacity_gib = 18000 + per_unit_storage_throughput = 1000 + timeouts { + create = "120m" + } } // This example assumes this network already exists. 
@@ -82,17 +83,18 @@ data "google_compute_network" "lustre-network" { func testAccLustreInstance_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_lustre_instance" "instance" { - instance_id = "tf-test-my-instance%{random_suffix}" - location = "us-central1-a" - filesystem = "testfs" - capacity_gib = 18000 - network = data.google_compute_network.lustre-network.id - description = "test-description" - labels = { + instance_id = "tf-test-my-instance%{random_suffix}" + location = "us-central1-a" + filesystem = "testfs" + capacity_gib = 18000 + network = data.google_compute_network.lustre-network.id + description = "test-description" + per_unit_storage_throughput = 1000 + labels = { test = "test-label" } - timeouts { - create = "120m" + timeouts { + create = "120m" } } From 799d0328ce3b1754b2f49cf04a906f15df89fae6 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Tue, 17 Jun 2025 17:32:59 -0700 Subject: [PATCH 385/884] Support adc impersonation in different universes (#14290) --- .../terraform/transport/config.go.tmpl | 29 +++++++++++++++++-- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index fdc75f177038..5d3a60bee1ac 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -1358,11 +1358,34 @@ func (c *Config) GetCredentials(clientScopes []string, initialCredentialsOnly bo var creds *googleoauth.Credentials var err error if c.ImpersonateServiceAccount != "" && !initialCredentialsOnly { - opts := option.ImpersonateCredentials(c.ImpersonateServiceAccount, c.ImpersonateServiceAccountDelegates...) 
- creds, err = transport.Creds(context.TODO(), opts, option.WithScopes(clientScopes...)) + defaultCreds, err := credentials.DetectDefault(&credentials.DetectOptions{ + Scopes: clientScopes, + }) if err != nil { - return googleoauth.Credentials{}, err + return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) + } + + impersonateOpts := &impersonate.CredentialsOptions{ + TargetPrincipal: c.ImpersonateServiceAccount, + Scopes: clientScopes, + Delegates: c.ImpersonateServiceAccountDelegates, + Credentials: defaultCreds, + } + + if c.UniverseDomain != "" && c.UniverseDomain != "googleapis.com" { + impersonateOpts.UniverseDomain = c.UniverseDomain + } + + authCred, err := impersonate.NewCredentials(impersonateOpts) + if err != nil { + return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) + } + + creds := oauth2adapt.Oauth2CredentialsFromAuthCredentials(authCred) + if err != nil { + return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) } + return *creds, nil } else { log.Printf("[INFO] Authenticating using DefaultClient...") log.Printf("[INFO] -- Scopes: %s", clientScopes) From f728b9c9e5405102689073fed048d8eab11930d7 Mon Sep 17 00:00:00 2001 From: Steven Davidovitz <13248+steved@users.noreply.github.com> Date: Wed, 18 Jun 2025 07:44:49 -0700 Subject: [PATCH 386/884] allow sending propagatedConnectionLimit zero value for ServiceAttachment (#14052) --- mmv1/products/compute/ServiceAttachment.yaml | 12 +++++++- .../compute_service_attachment.go.tmpl | 8 +++++ .../compute_service_attachment.go.tmpl | 7 +++++ ...esource_compute_service_attachment_test.go | 29 +++++++++++++++++-- 4 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl diff --git a/mmv1/products/compute/ServiceAttachment.yaml b/mmv1/products/compute/ServiceAttachment.yaml index 4813b6d0f765..24c11fef7796 100644 --- 
a/mmv1/products/compute/ServiceAttachment.yaml +++ b/mmv1/products/compute/ServiceAttachment.yaml @@ -38,6 +38,7 @@ async: custom_code: constants: 'templates/terraform/constants/compute_service_attachment.go.tmpl' update_encoder: 'templates/terraform/update_encoder/compute_service_attachment.go.tmpl' + encoder: 'templates/terraform/encoders/compute_service_attachment.go.tmpl' sweeper: url_substitutions: - region: "us-west2" @@ -264,5 +265,14 @@ properties: If the connection preference of the service attachment is ACCEPT_MANUAL, the limit applies to each project or network that is listed in the consumer accept list. If the connection preference of the service attachment is ACCEPT_AUTOMATIC, the limit applies to each project that contains a connected endpoint. - If unspecified, the default propagated connection limit is 250. + If unspecified, the default propagated connection limit is 250. To explicitly send a zero value, set `send_propagated_connection_limit_if_zero = true`. default_from_api: true +virtual_fields: + - name: 'send_propagated_connection_limit_if_zero' + description: | + Controls the behavior of propagated_connection_limit. + When false, setting propagated_connection_limit to zero causes the provider to use to the API's default value. + When true, the provider will set propagated_connection_limit to zero. + Defaults to false. 
+ type: Boolean + default_value: false diff --git a/mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl b/mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl new file mode 100644 index 000000000000..ee3175eb5ede --- /dev/null +++ b/mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl @@ -0,0 +1,8 @@ +propagatedConnectionLimitProp := d.Get("propagated_connection_limit") +if sv, ok := d.GetOk("send_propagated_connection_limit_if_zero"); ok && sv.(bool) { + if v, ok := d.GetOkExists("propagated_connection_limit"); ok || !reflect.DeepEqual(v, propagatedConnectionLimitProp) { + obj["propagatedConnectionLimit"] = propagatedConnectionLimitProp + } +} + +return obj, nil diff --git a/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl b/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl index aca47912a356..867547c9e1d8 100644 --- a/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl @@ -23,4 +23,11 @@ if v, ok := d.GetOkExists("enable_proxy_protocol"); !tpgresource.IsEmptyValue(re obj["enableProxyProtocol"] = enableProxyProtocolProp } +propagatedConnectionLimitProp := d.Get("propagated_connection_limit") +if sv, ok := d.GetOk("send_propagated_connection_limit_if_zero"); ok && sv.(bool) { + if v, ok := d.GetOkExists("propagated_connection_limit"); ok || !reflect.DeepEqual(v, propagatedConnectionLimitProp) { + obj["propagatedConnectionLimit"] = propagatedConnectionLimitProp + } +} + return obj, nil diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go index ca1cbca38a37..014eec069952 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go @@ -1,9 +1,11 @@ package compute_test import ( + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -29,7 +31,7 @@ func TestAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(t *test ImportStateVerifyIgnore: []string{"target_service", "region"}, }, { - Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, true), + Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, true, -1), }, { ResourceName: "google_compute_service_attachment.psc_ilb_service_attachment", @@ -38,7 +40,7 @@ func TestAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(t *test ImportStateVerifyIgnore: []string{"target_service", "region"}, }, { - Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, false), + Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, false, -1), }, { ResourceName: "google_compute_service_attachment.psc_ilb_service_attachment", @@ -46,6 +48,14 @@ func TestAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(t *test ImportStateVerify: true, ImportStateVerifyIgnore: []string{"target_service", "region"}, }, + { + Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, false, 0), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectNonEmptyPlan(), + }, + }, + }, }, }) } @@ -179,7 +189,7 @@ resource "google_compute_subnetwork" "psc_ilb_nat" { `, context) } -func testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context map[string]interface{}, preventDestroy bool) string { +func testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context map[string]interface{}, 
preventDestroy bool, propagatedConnectionLimit int) string { context["lifecycle_block"] = "" if preventDestroy { context["lifecycle_block"] = ` @@ -188,6 +198,18 @@ func testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context }` } + switch { + case propagatedConnectionLimit == 0: + context["propagated_connection_limit"] = ` + propagated_connection_limit = 0 + send_propagated_connection_limit_if_zero = true + ` + case propagatedConnectionLimit > 0: + context["propagated_connection_limit"] = fmt.Sprintf("propagated_connection_limit = %d", propagatedConnectionLimit) + default: + context["propagated_connection_limit"] = "" + } + return acctest.Nprintf(` resource "google_compute_service_attachment" "psc_ilb_service_attachment" { name = "tf-test-my-psc-ilb%{random_suffix}" @@ -206,6 +228,7 @@ resource "google_compute_service_attachment" "psc_ilb_service_attachment" { } reconcile_connections = false %{lifecycle_block} + %{propagated_connection_limit} } resource "google_compute_address" "psc_ilb_consumer_address" { From 4b1b1b6a89694a6f593118b40d14db29b9d5c629 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 18 Jun 2025 09:22:17 -0700 Subject: [PATCH 387/884] tgc-revival: fix getting primary resource bug (#14296) --- mmv1/third_party/terraform/acctest/tgc_utils.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 46fdfa762923..0f2f20e60ac5 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -129,8 +129,7 @@ func determineImportMetadata(steps []resource.TestStep, currentStepIndex int, re nextStep := steps[currentStepIndex+1] // Check if it's an import step for our resource - if nextStep.ImportState && (nextStep.ResourceName == resourceName || - strings.HasSuffix(nextStep.ResourceName, "."+strings.Split(resourceName, ".")[1])) { + if nextStep.ImportState && 
nextStep.ResourceName == resourceName { // Capture ignored fields if present if nextStep.ImportStateVerify && len(nextStep.ImportStateVerifyIgnore) > 0 { metadata.IgnoredFields = nextStep.ImportStateVerifyIgnore From b131c20591a7e28912ae906d6c9f58574e1ae955 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 18 Jun 2025 20:01:48 +0200 Subject: [PATCH 388/884] compute: fixed `google_compute_firewall_policy_rule` staying disabled after apply with `disabled = false` (#14182) --- mmv1/products/compute/FirewallPolicyRule.yaml | 1 + ...ource_compute_firewall_policy_rule_test.go | 73 +++++++++++++++++++ 2 files changed, 74 insertions(+) diff --git a/mmv1/products/compute/FirewallPolicyRule.yaml b/mmv1/products/compute/FirewallPolicyRule.yaml index 4395443f3f18..5196c1d57f63 100644 --- a/mmv1/products/compute/FirewallPolicyRule.yaml +++ b/mmv1/products/compute/FirewallPolicyRule.yaml @@ -276,6 +276,7 @@ properties: type: String - name: 'disabled' type: Boolean + send_empty_value: true description: | Denotes whether the firewall policy rule is disabled. When set to true, the firewall policy rule is not enforced and traffic behaves as if it did not exist. 
diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go index 6699ec98e280..0f825d51d481 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go @@ -173,6 +173,43 @@ func TestAccComputeFirewallPolicyRule_basic(t *testing.T) { }) } +func TestAccComputeFirewallPolicyRule_disabled_enabled(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFirewallPolicyRule_disabled(context, true), + }, + { + ResourceName: "google_compute_firewall_policy_rule.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_disabled(context, false), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_firewall_policy_rule.default", "disabled", "false"), + ), + }, + { + ResourceName: "google_compute_firewall_policy_rule.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + }, + }) +} + func testAccComputeFirewallPolicyRule_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_folder" "folder" { @@ -778,3 +815,39 @@ resource "google_compute_firewall_policy_rule" "fw_policy_rule3" { } `, context) } + +func testAccComputeFirewallPolicyRule_disabled(context map[string]interface{}, disabled bool) string { + context["disabled"] 
= fmt.Sprintf("%t", disabled) + return acctest.Nprintf(` +resource "google_folder" "default" { + display_name = "tf-test-folder-%{random_suffix}" + parent = "%{org_name}" + deletion_protection = false +} + +resource "google_compute_firewall_policy" "default" { + parent = google_folder.default.name + short_name = "tf-test-policy-%{random_suffix}" + description = "Resource created for Terraform acceptance testing" +} + +resource "google_compute_firewall_policy_rule" "default" { + firewall_policy = google_compute_firewall_policy.default.name + description = "Resource created for Terraform acceptance testing" + priority = 9000 + enable_logging = true + action = "allow" + direction = "EGRESS" + disabled = %{disabled} + + match { + dest_ip_ranges = ["35.235.240.0/20"] + + layer4_configs { + ip_protocol = "tcp" + ports = [22] + } + } +} +`, context) +} From c0a3bac121592d3091beeda7ec6b7eb4ec2f3c21 Mon Sep 17 00:00:00 2001 From: vmiglani <142545940+vmiglani@users.noreply.github.com> Date: Wed, 18 Jun 2025 11:04:14 -0700 Subject: [PATCH 389/884] [AlloyDB] Support for PSA Allocated IP Range Override (#14247) Co-authored-by: Cameron Thornton --- mmv1/products/alloydb/Instance.yaml | 7 +++ .../alloydb/resource_alloydb_instance_test.go | 63 +++++++++++++++++++ 2 files changed, 70 insertions(+) diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index f93d3f089b4e..069500936ff0 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -443,6 +443,13 @@ properties: type: Boolean description: | Enabling outbound public ip for the instance. + - name: 'allocatedIpRangeOverride' + type: String + immutable: true + description: | + Name of the allocated IP range for the private IP AlloyDB instance, for example: "google-managed-services-default". + If set, the instance IPs will be created from this allocated range and will override the IP range used by the parent cluster. + The range name must comply with RFC 1035. 
Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?. - name: 'publicIpAddress' type: String description: | diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index f34ff12f7d78..72cec88fa08c 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -1082,3 +1082,66 @@ resource "google_alloydb_cluster" "default" { data "google_project" "project" {} `, context) } + +func TestAccAlloydbInstance_createPrimaryAndReadPoolInstanceWithAllocatedIpRangeOverride(t *testing.T) { + t.Parallel() + + testId := "alloydb-1" + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, testId), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, testId), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckAlloydbInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccAlloydbInstance_createPrimaryAndReadPoolInstanceWithAllocatedIpRangeOverride(context), + }, + }, + }) +} + +func testAccAlloydbInstance_createPrimaryAndReadPoolInstanceWithAllocatedIpRangeOverride(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_alloydb_instance" "primary" { + cluster = google_alloydb_cluster.default.name + instance_id = "tf-test-alloydb-instance%{random_suffix}" + instance_type = "PRIMARY" +} + +resource "google_alloydb_instance" "read_pool" { + cluster = google_alloydb_cluster.default.name + instance_id = "tf-test-alloydb-instance%{random_suffix}-read" + instance_type = "READ_POOL" + read_pool_config { + 
node_count = 4 + } + network_config { + allocated_ip_range_override = data.google_compute_global_address.private_ip_alloc.name + } + depends_on = [google_alloydb_instance.primary] +} + +resource "google_alloydb_cluster" "default" { + cluster_id = "tf-test-alloydb-cluster%{random_suffix}" + location = "us-central1" + network_config { + network = data.google_compute_network.default.id + } +} + +data "google_project" "project" {} + +data "google_compute_network" "default" { + name = "%{network_name}" +} + +data "google_compute_global_address" "private_ip_alloc" { + name = "%{address_name}" +} +`, context) +} From 4b0f7cbcaaa72cdfef1a1a2f92e21c9ce2116efe Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 18 Jun 2025 20:26:22 +0200 Subject: [PATCH 390/884] tags: fixed permadiff for `parent` argument on `google_tags_location_tag_binding` (#14220) --- ...resource_tags_location_tag_binding.go.tmpl | 1 + .../services/tags/resource_tags_test.go | 70 +++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl b/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl index c729994f45b5..88e4c2f5898a 100644 --- a/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl +++ b/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl @@ -35,6 +35,7 @@ func ResourceTagsLocationTagBinding() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, + DiffSuppressFunc: tpgresource.ProjectNumberDiffSuppress, Description: `The full resource name of the resource the TagValue is bound to. E.g. 
//cloudresourcemanager.googleapis.com/projects/123`, }, "tag_value": { diff --git a/mmv1/third_party/terraform/services/tags/resource_tags_test.go b/mmv1/third_party/terraform/services/tags/resource_tags_test.go index a81bb3a1fa96..3501e8f3a342 100644 --- a/mmv1/third_party/terraform/services/tags/resource_tags_test.go +++ b/mmv1/third_party/terraform/services/tags/resource_tags_test.go @@ -873,6 +873,76 @@ resource "google_tags_location_tag_binding" "binding" { `, context) } +func TestAccTagsLocationTagBinding_locationTagBindingBasicWithProjectId(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + }, + CheckDestroy: testAccCheckTagsLocationTagBindingDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccTagsLocationTagBinding_locationTagBindingBasicExampleWithProjectId(context), + }, + { + ResourceName: "google_tags_location_tag_binding.binding", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccTagsLocationTagBinding_locationTagBindingBasicExampleWithProjectId(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { +} + +resource "google_tags_tag_key" "key" { + parent = "organizations/${data.google_project.project.org_id}" + short_name = "keyname%{random_suffix}" + description = "For a certain set of resources." +} + +resource "google_tags_tag_value" "value" { + parent = google_tags_tag_key.key.id + short_name = "foo%{random_suffix}" + description = "For foo%{random_suffix} resources." 
+} + +resource "google_cloud_run_service" "default" { + name = "tf-test-cloudrun-srv%{random_suffix}" + location = "us-central1" + + template { + spec { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } + } + + traffic { + percent = 100 + latest_revision = true + } +} + +resource "google_tags_location_tag_binding" "binding" { + parent = "//run.googleapis.com/projects/${data.google_project.project.project_id}/locations/${google_cloud_run_service.default.location}/services/${google_cloud_run_service.default.name}" + tag_value = google_tags_tag_value.value.id + location = "us-central1" +} +`, context) +} + func testAccCheckTagsLocationTagBindingDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for name, rs := range s.RootModule().Resources { From 75bf4f157e22a4a2041d39696fdccee2d41fc18f Mon Sep 17 00:00:00 2001 From: xuebaoZ Date: Wed, 18 Jun 2025 11:49:59 -0700 Subject: [PATCH 391/884] Fix the bug that sql instance can not enable mcp (#14263) --- .../resource_sql_database_instance.go.tmpl | 6 ++-- ...esource_sql_database_instance_test.go.tmpl | 32 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index ffbbb540f401..541219048dc8 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -1453,12 +1453,12 @@ func expandSqlDatabaseInstanceSettings(configured []interface{}, databaseVersion UserLabels: tpgresource.ConvertStringMap(_settings["user_labels"].(map[string]interface{})), BackupConfiguration: expandBackupConfiguration(_settings["backup_configuration"].([]interface{})), DatabaseFlags: expandDatabaseFlags(_settings["database_flags"].(*schema.Set).List()), - 
ConnectionPoolConfig: expandConnectionPoolConfig(_settings["connection_pool_config"].(*schema.Set).List()), IpConfiguration: expandIpConfiguration(_settings["ip_configuration"].([]interface{}), databaseVersion), LocationPreference: expandLocationPreference(_settings["location_preference"].([]interface{})), MaintenanceWindow: expandMaintenanceWindow(_settings["maintenance_window"].([]interface{})), InsightsConfig: expandInsightsConfig(_settings["insights_config"].([]interface{})), PasswordValidationPolicy: expandPasswordValidationPolicy(_settings["password_validation_policy"].([]interface{})), + ConnectionPoolConfig: expandConnectionPoolConfig(_settings["connection_pool_config"].(*schema.Set).List()), } resize := _settings["disk_autoresize"].(bool) @@ -2192,8 +2192,8 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) } // Database Version is required for all calls with Google ML integration enabled or it will be rejected by the API. - if d.Get("settings.0.enable_google_ml_integration").(bool) { - instance.DatabaseVersion = databaseVersion + if d.Get("settings.0.enable_google_ml_integration").(bool) || len(_settings["connection_pool_config"].(*schema.Set).List()) > 0 { + instance.DatabaseVersion = databaseVersion } failoverDrReplicaName := d.Get("replication_cluster.0.failover_dr_replica_name").(string) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 4e11fff17e6a..e7656525a336 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -897,6 +897,38 @@ func TestAccSqlDatabaseInstance_withoutMCPEnabled(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_updateMCPEnabled(t *testing.T) { + t.Parallel() + + instanceName := "tf-test-" + acctest.RandString(t, 10) + 
resourceName := "google_sql_database_instance.instance" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName), + }, + { + Config: testAccSqlDatabaseInstance_withMCPEnabled(instanceName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "settings.0.connection_pool_config.0.connection_pooling_enabled", "true"), + resource.TestCheckResourceAttr(resourceName, "settings.0.connection_pool_config.0.flags.#", "1"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + + func TestAccSqlDatabaseInstance_withPSCEnabled_withoutAllowedConsumerProjects(t *testing.T) { t.Parallel() From f5a781fb120f33a10e8cd50728d86133afb36d72 Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Thu, 19 Jun 2025 01:29:56 +0530 Subject: [PATCH 392/884] Add Pub/Sub Topic Source Support to Analytics Hub Listing (#14302) --- .../bigqueryanalyticshub/Listing.yaml | 36 ++++++++++- ...gquery_analyticshub_listing_pubsub.tf.tmpl | 26 ++++++++ .../bigqueryanalyticshub_listing.go.tmpl | 20 ++++++ ...rce_bigquery_analytics_hub_listing_test.go | 62 +++++++++++++++++++ 4 files changed, 143 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_pubsub.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index 29392b676f6e..5a29b6597626 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -40,6 +40,7 @@ iam_policy: - 
'projects/{{project}}/locations/{{location}}/dataExchanges/{{data_exchange_id}}/listings/{{listing_id}}' - '{{listing_id}}' custom_code: + pre_update: 'templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl' # Skipping the sweeper due to the non-standard base_url exclude_sweeper: true examples: @@ -76,6 +77,15 @@ examples: listing_id: 'tf_test_log_email_listing' dataset_id: 'tf_test_log_email_ds' description: 'Example for log email test' + - name: 'bigquery_analyticshub_listing_pubsub' + primary_resource_id: 'listing' + primary_resource_name: 'fmt.Sprintf("tf_test_pubsub_de%s", context["random_suffix"]),fmt.Sprintf("tf_test_listing%s", context["random_suffix"])' + region_override: 'US' + vars: + data_exchange_id: 'tf_test_pubsub_data_exchange' + listing_id: 'tf_test_pubsub_listing' + pubsub_topic_name: 'test_pubsub' + description: 'Example for pubsub topic source' parameters: properties: - name: 'name' @@ -162,8 +172,10 @@ properties: - name: 'bigqueryDataset' type: NestedObject description: Shared dataset i.e. BigQuery dataset source. - required: true immutable: true + exactly_one_of: + - 'pubsubTopic' + - 'bigqueryDataset' properties: - name: 'dataset' type: String @@ -186,6 +198,28 @@ properties: Format: For table: projects/{projectId}/datasets/{datasetId}/tables/{tableId} Example:"projects/test_project/datasets/test_dataset/tables/test_table" immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: 'pubsubTopic' + type: NestedObject + description: Pub/Sub topic source. + exactly_one_of: + - 'pubsubTopic' + - 'bigqueryDataset' + properties: + - name: 'topic' + type: String + description: |- + Resource name of the Pub/Sub topic source for this listing. e.g. projects/myproject/topics/topicId + required: true + immutable: true + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: 'dataAffinityRegions' + type: Array + is_set: true + description: |- + Region hint on where the data might be published. 
Data affinity regions are modifiable. + See https://cloud.google.com/about/locations for full listing of possible Cloud regions. + item_type: + type: String - name: 'restrictedExportConfig' type: NestedObject description: If set, restricted export configuration will be propagated and enforced on the linked dataset. diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_pubsub.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_pubsub.tf.tmpl new file mode 100644 index 000000000000..fd51f87eb84f --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_pubsub.tf.tmpl @@ -0,0 +1,26 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "description"}}" +} + +resource "google_pubsub_topic" "tf_test_pubsub_topic" { + name = "{{index $.Vars "pubsub_topic_name"}}" +} + +resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.{{$.PrimaryResourceId}}.data_exchange_id + listing_id = "{{index $.Vars "listing_id"}}" + display_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "description"}}" + + pubsub_topic { + topic = google_pubsub_topic.tf_test_pubsub_topic.id + data_affinity_regions = [ + "us-central1", + "europe-west1" + ] + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl b/mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl new file mode 100644 index 000000000000..de4b967c57d4 --- /dev/null +++ b/mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl @@ -0,0 +1,20 @@ +if d.HasChange("pubsub_topic.0.data_affinity_regions"){ + // Split URL into base and query parts + parts := 
strings.SplitN(url, "?", 2) + if len(parts) == 2 { + base := parts[0] + query := parts[1] + + query = strings.ReplaceAll(query, "%2C", ",") + query = strings.ReplaceAll(query, "%2c", ",") + + // Replace "pubsubTopic" with "pubsubTopic.dataAffinityRegions" + query = strings.ReplaceAll(query, "pubsubTopic", "pubsubTopic.dataAffinityRegions") + + // Re-encode commas back + query = strings.ReplaceAll(query, ",", "%2C") + + url = base + "?" + query + } +} + diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go index 095bd50a0476..7f2a686fd797 100644 --- a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go @@ -30,6 +30,26 @@ func TestAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingUpdate(t *tes { Config: testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingUpdate(context), }, + { + ResourceName: "google_bigquery_analytics_hub_listing.listing", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigqueryAnalyticsHubListing_pubsubListingUpdateConfig(context, `["us-central1"]`, "Example for pubsub topic source - initial"), + Check: resource.ComposeTestCheckFunc( + // Verify initial state for Pub/Sub listing + resource.TestCheckResourceAttr("google_bigquery_analytics_hub_listing.listing_pubsub", "pubsub_topic.0.data_affinity_regions.#", "1"), + resource.TestCheckResourceAttr("google_bigquery_analytics_hub_listing.listing_pubsub", "pubsub_topic.0.data_affinity_regions.0", "us-central1"), + resource.TestCheckResourceAttr("google_bigquery_analytics_hub_listing.listing_pubsub", "description", "Example for pubsub topic source - initial"), + ), + }, + // Step 7: Import the updated Pub/Sub Topic listing to 
verify import after update. + { + ResourceName: "google_bigquery_analytics_hub_listing.listing_pubsub", + ImportState: true, + ImportStateVerify: true, + }, }, }) } @@ -63,3 +83,45 @@ resource "google_bigquery_dataset" "listing" { } `, context) } +func testAccBigqueryAnalyticsHubListing_pubsubListingUpdateConfig(context map[string]interface{}, dataAffinityRegionsHCL string, description string) string { + // Create a mutable copy of the context map + updatedContext := make(map[string]interface{}) + for k, v := range context { + updatedContext[k] = v + } + + // Directly assign the HCL string for data_affinity_regions and the description. + // dataAffinityRegionsHCL will be something like `["us-central1"]` or `["us-central1", "europe-west1"]` + updatedContext["data_affinity_regions_hcl"] = dataAffinityRegionsHCL + updatedContext["description_hcl"] = description + + return acctest.Nprintf(` +# Separate Data Exchange for the Pub/Sub listing to prevent conflicts +resource "google_bigquery_analytics_hub_data_exchange" "listing_pubsub" { + location = "US" + data_exchange_id = "tf_test_pubsub_data_exchange_update_%{random_suffix}" + display_name = "tf_test_pubsub_data_exchange_update_%{random_suffix}" + description = "Example for pubsub topic source - data exchange%{random_suffix}" +} + +# Pub/Sub Topic used as the source for the listing +resource "google_pubsub_topic" "tf_test_pubsub_topic" { + name = "tf_test_test_pubsub_update_%{random_suffix}" +} + +# BigQuery Analytics Hub Listing sourced from the Pub/Sub Topic +resource "google_bigquery_analytics_hub_listing" "listing_pubsub" { + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.listing_pubsub.data_exchange_id + listing_id = "tf_test_pubsub_listing_update_%{random_suffix}" + display_name = "tf_test_pubsub_listing_update_%{random_suffix}" + description = "%{description_hcl}" + primary_contact = "test_pubsub_contact@example.com" + + pubsub_topic { + topic = 
google_pubsub_topic.tf_test_pubsub_topic.id + data_affinity_regions = %{data_affinity_regions_hcl} + } +} +`, updatedContext) +} From 7289315b4ada4dfe3aac2e6d438ebdd2a3019731 Mon Sep 17 00:00:00 2001 From: zhihaos Date: Wed, 18 Jun 2025 16:06:49 -0400 Subject: [PATCH 393/884] Adding KMS CMEK support for Dialogflow (#14206) --- mmv1/products/dialogflow/EncryptionSpec.yaml | 78 +++++++++++++++++ .../dialogflow_encryption_spec_basic.tf.tmpl | 73 ++++++++++++++++ .../dialogflow_set_endpoint.go.tmpl | 6 ++ .../dialogflow/dialogflow_operation.go | 87 +++++++++++++++++++ 4 files changed, 244 insertions(+) create mode 100644 mmv1/products/dialogflow/EncryptionSpec.yaml create mode 100644 mmv1/templates/terraform/examples/dialogflow_encryption_spec_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl create mode 100644 mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go diff --git a/mmv1/products/dialogflow/EncryptionSpec.yaml b/mmv1/products/dialogflow/EncryptionSpec.yaml new file mode 100644 index 000000000000..cc9a8fda9cfc --- /dev/null +++ b/mmv1/products/dialogflow/EncryptionSpec.yaml @@ -0,0 +1,78 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'EncryptionSpec' +description: | + Initializes a location-level encryption key specification. 
+references: + guides: + 'Official ES Documentation': 'https://cloud.google.com/dialogflow/es/docs' + 'Official CX Documentation': 'https://cloud.google.com/dialogflow/cx/docs' + api: 'https://cloud.google.com/dialogflow/es/docs/reference/rest/v2/projects.locations.encryptionSpec' + +base_url: 'projects/{{project}}/locations/{{location}}/encryptionSpec' + +create_url: 'projects/{{project}}/locations/{{location}}/encryptionSpec:initialize' + +exclude_read: true +exclude_delete: true +exclude_import: true +exclude_sweeper: true + +immutable: true + +custom_code: + pre_create: 'templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl' + +async: + actions: ['create'] + operation: + base_url: '{{op_id}}' + +parameters: + - name: 'location' + type: String + required: true + ignore_read: true + description: | + The location in which the encryptionSpec is to be initialized. + +properties: + - name: 'encryptionSpec' + type: NestedObject + required: true + properties: + - name: 'kmsKey' + type: String + required: true + description: | + The name of customer-managed encryption key that is used to secure a resource and its sub-resources. + If empty, the resource is secured by the default Google encryption key. + Only the key in the same location as this resource is allowed to be used for encryption. 
+ Format: projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{key} + +examples: + - name: "dialogflow_encryption_spec_basic" + primary_resource_id: "my-encryption-spec" + min_version: "beta" + exclude_import_test: true + vars: + project_id: 'my-proj' + kms_keyring: 'my-keyring' + kms_key: 'my-key' + test_env_vars: + org_id: 'ORG_ID' + billing_acct: 'BILLING_ACCT' + external_providers: + - "time" diff --git a/mmv1/templates/terraform/examples/dialogflow_encryption_spec_basic.tf.tmpl b/mmv1/templates/terraform/examples/dialogflow_encryption_spec_basic.tf.tmpl new file mode 100644 index 000000000000..c01afead0cf3 --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflow_encryption_spec_basic.tf.tmpl @@ -0,0 +1,73 @@ +resource "google_project" "project" { + provider = google-beta + project_id = "{{index $.Vars "project_id"}}" + name = "{{index $.Vars "project_id"}}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_acct"}}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "cloudkms" { + provider = google-beta + project = google_project.project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "dialogflow" { + provider = google-beta + project = google_project.project.project_id + service = "dialogflow.googleapis.com" +} + +resource "time_sleep" "wait_enable_service_api" { + depends_on = [ + google_project_service.cloudkms, + google_project_service.dialogflow + ] + create_duration = "30s" +} + +resource "google_project_service_identity" "gcp_sa" { + provider = google-beta + service = "dialogflow.googleapis.com" + project = google_project.project.project_id + depends_on = [time_sleep.wait_enable_service_api] +} + +resource "time_sleep" "wait_create_sa" { + depends_on = [google_project_service_identity.gcp_sa] + create_duration = "30s" +} + +resource "google_kms_key_ring" "keyring" { + provider = google-beta + name = "{{index $.Vars 
"kms_keyring"}}" + location = "us-central1" + project = google_project.project.project_id + depends_on = [time_sleep.wait_enable_service_api] +} + +resource "google_kms_crypto_key" "key" { + provider = google-beta + name = "{{index $.Vars "kms_key"}}" + key_ring = google_kms_key_ring.keyring.id + purpose = "ENCRYPT_DECRYPT" +} + +resource "google_kms_crypto_key_iam_member" "crypto_key" { + provider = google-beta + crypto_key_id = google_kms_crypto_key.key.id + member = "${replace(google_project_service_identity.gcp_sa.member, "@gcp-sa-dialogflow.iam", "@gcp-sa-ccai-cmek.iam")}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + depends_on = [time_sleep.wait_create_sa] +} + +resource "google_dialogflow_encryption_spec" "{{$.PrimaryResourceId}}" { + provider = google-beta + project = google_project.project.project_id + location = "us-central1" + encryption_spec { + kms_key = google_kms_crypto_key.key.id + } + depends_on = [google_kms_crypto_key_iam_member.crypto_key] +} diff --git a/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl new file mode 100644 index 000000000000..87dc02e5c6a1 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl @@ -0,0 +1,6 @@ +location := d.Get("location").(string) + +// insert location into url for a different endpoint. 
+if strings.HasPrefix(url, "https://dialogflow.googleapis.com/v2/") { + url = strings.Replace(url, "https://dialogflow", fmt.Sprintf("https://%s-dialogflow", location), 1) +} diff --git a/mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go b/mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go new file mode 100644 index 000000000000..51c9efba9c6e --- /dev/null +++ b/mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go @@ -0,0 +1,87 @@ +package dialogflow + +import ( + "encoding/json" + "errors" + "fmt" + "regexp" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type DialogflowOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + tpgresource.CommonOperationWaiter +} + +func (w *DialogflowOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + // Returns the proper get. 
+ location := "" + if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(w.CommonOperationWaiter.Op.Name); parts != nil { + location = parts[1] + } else { + return nil, fmt.Errorf( + "Saw %s when the op name is expected to contains location %s", + w.CommonOperationWaiter.Op.Name, + "projects/{{project}}/locations/{{location}}/...", + ) + } + + url := fmt.Sprintf("https://%s-dialogflow.googleapis.com/v2/%s", location, w.CommonOperationWaiter.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func createDialogflowWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*DialogflowOperationWaiter, error) { + w := &DialogflowOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func DialogflowOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createDialogflowWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + rawResponse := []byte(w.CommonOperationWaiter.Op.Response) + if len(rawResponse) == 0 { + return errors.New("`resource` not set in operation response") + } + return json.Unmarshal(rawResponse, response) +} + +func DialogflowOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. 
+ return nil + } + w, err := createDialogflowWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} From f90739d09c8c06bd63a491c5bd930584d6e3c7ff Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 18 Jun 2025 23:16:52 +0200 Subject: [PATCH 394/884] container: added `release_channel_upgrade_target_version` to `google_container_engine_versions` data source (#14287) --- .../data_source_google_container_engine_versions.go | 10 ++++++++++ ...ata_source_google_container_engine_versions_test.go | 5 +++++ .../docs/d/container_engine_versions.html.markdown | 1 + 3 files changed, 16 insertions(+) diff --git a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go index 8f4006428741..551eebd8a4e0 100644 --- a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go +++ b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go @@ -58,6 +58,11 @@ func DataSourceGoogleContainerEngineVersions() *schema.Resource { Computed: true, Elem: &schema.Schema{Type: schema.TypeString}, }, + "release_channel_upgrade_target_version": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, }, } } @@ -126,8 +131,10 @@ func dataSourceGoogleContainerEngineVersionsRead(d *schema.ResourceData, meta in releaseChannelDefaultVersion := map[string]string{} releaseChannelLatestVersion := map[string]string{} + releaseChannelUpgradeTargetVersion := map[string]string{} for _, channelResp := range resp.Channels { releaseChannelDefaultVersion[channelResp.Channel] = channelResp.DefaultVersion + releaseChannelUpgradeTargetVersion[channelResp.Channel] = channelResp.UpgradeTargetVersion for _, v := range 
channelResp.ValidVersions { if strings.HasPrefix(v, d.Get("version_prefix").(string)) { releaseChannelLatestVersion[channelResp.Channel] = v @@ -142,6 +149,9 @@ func dataSourceGoogleContainerEngineVersionsRead(d *schema.ResourceData, meta in if err := d.Set("release_channel_latest_version", releaseChannelLatestVersion); err != nil { return fmt.Errorf("Error setting release_channel_latest_version: %s", err) } + if err := d.Set("release_channel_upgrade_target_version", releaseChannelUpgradeTargetVersion); err != nil { + return fmt.Errorf("Error setting release_channel_upgrade_target_version: %s", err) + } d.SetId(time.Now().UTC().String()) return nil diff --git a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go index a8297604d920..152ead1957cf 100644 --- a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go +++ b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go @@ -122,6 +122,11 @@ func testAccCheckGoogleContainerEngineVersionsMeta(n string) resource.TestCheckF return errors.New("failed to read latest STABLE version") } + _, ok = rs.Primary.Attributes["release_channel_upgrade_target_version.STABLE"] + if !ok { + return errors.New("failed to read latest STABLE version") + } + return nil } } diff --git a/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown b/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown index 60d564479d86..e7e6f8e4b952 100644 --- a/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown @@ -72,3 +72,4 @@ The following attributes are exported: * `default_cluster_version` - Version of Kubernetes the service deploys by default. 
* `release_channel_default_version` - A map from a release channel name to the channel's default version. See the docs on [available release channel names](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters#Cluster.Channel_1) for more details. * `release_channel_latest_version` - A map from a release channel name to the channel's latest version. See the docs on [available release channel names](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters#Cluster.Channel_1) for more details. +* `release_channel_upgrade_target_version` - A map from a release channel name to the channel's auto upgrade target version. See the docs on [available release channel names](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters#Cluster.Channel_1) for more details. From e01f6c66ac027c446ec64f684e071b0f7d94a7ab Mon Sep 17 00:00:00 2001 From: Hoang Pham Date: Wed, 18 Jun 2025 17:26:34 -0400 Subject: [PATCH 395/884] feat(bigtable): support row key schema for table. (#13614) --- .../bigtable/resource_bigtable_table.go | 60 +++++++ .../resource_bigtable_table_meta.yaml | 1 + .../bigtable/resource_bigtable_table_test.go | 149 ++++++++++++++++++ 3 files changed, 210 insertions(+) diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index afbaede5ed3a..a426976f3dae 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -156,6 +156,17 @@ func ResourceBigtableTable() *schema.Resource { }, Description: `Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, either omit the automated_backup_policy argument, or set both Retention Period and Frequency properties to "0". 
To disable automated backup on an _existing_ table that has automated backup enabled, set _both_ Retention Period and Frequency properties to "0". When updating an existing table, to modify the Retention Period or Frequency properties of the resource's automated backup policy, set the respective property to a non-zero value. If the automated_backup_policy argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified.`, }, + "row_key_schema": { + Type: schema.TypeString, + Optional: true, + DiffSuppressFunc: typeDiffFunc, + Description: `Defines the row key schema of a table. To create or update a table with a row key schema, specify this argument. + Note that in-place update is not supported, and any in-place modification to the schema will lead to failure. + To update a schema, please clear it (by omitting the field), and update the resource again with a new schema.\n + + The schema must be a valid JSON encoded string representing a Type's struct protobuf message. Note that for bytes sequence (like delimited_bytes.delimiter) + the delimiter must be base64 encoded. For example, if you want to set a delimiter to a single byte character "#", it should be set to "Iw==", which is the base64 encoding of the byte sequence "#".`, + }, }, UseJSONNumber: true, } @@ -307,6 +318,15 @@ func resourceBigtableTableCreate(d *schema.ResourceData, meta interface{}) error } tblConf.ColumnFamilies = columnFamilies + // Set the row key schema if given + if rks, ok := d.GetOk("row_key_schema"); ok { + parsedSchema, err := getRowKeySchema(rks) + if err != nil { + return err + } + tblConf.RowKeySchema = parsedSchema + } + // This method may return before the table's creation is complete - we may need to wait until // it exists in the future. // Set a longer timeout as creating table and adding column families can be pretty slow. 
@@ -409,6 +429,17 @@ func resourceBigtableTableRead(d *schema.ResourceData, meta interface{}) error { } } + if table.RowKeySchema != nil { + marshalledRowKey, err := bigtable.MarshalJSON(*table.RowKeySchema) + if err != nil { + return err + } + d.Set("row_key_schema", string(marshalledRowKey)) + } else { + // String value is default to empty string, so need to set it to nil to specify that the row key schema is not set. + d.Set("row_key_schema", nil) + } + return nil } @@ -579,6 +610,23 @@ func resourceBigtableTableUpdate(d *schema.ResourceData, meta interface{}) error } } + if d.HasChange("row_key_schema") { + changedRks := d.Get("row_key_schema").(string) + if len(changedRks) == 0 { + if err := c.UpdateTableRemoveRowKeySchema(ctxWithTimeout, name); err != nil { + return fmt.Errorf("error removing row key schema on table %v: %v", name, err) + } + } else { + rks, err := getRowKeySchema(changedRks) + if err != nil { + return fmt.Errorf("failed to parse row key schema string %v: %v", changedRks, err) + } + if err = c.UpdateTableWithRowKeySchema(ctxWithTimeout, name, *rks); err != nil { + return fmt.Errorf("failed to update row key schema for table %v: %v", name, err) + } + } + } + return resourceBigtableTableRead(d, meta) } @@ -689,3 +737,15 @@ func getType(input interface{}) (bigtable.Type, error) { } return output, nil } + +func getRowKeySchema(input interface{}) (*bigtable.StructType, error) { + rks, err := getType(input) + if err != nil { + return nil, err + } + structRks, ok := rks.(bigtable.StructType) + if !ok { + return nil, fmt.Errorf("only struct type is accepted as row key schema") + } + return &structRks, nil +} diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml index 18341ecb1a02..0cd79f2f2123 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml +++ 
b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml @@ -13,4 +13,5 @@ fields: - field: 'instance_name' - field: 'name' - field: 'project' + - field: 'row_key_schema' - field: 'split_keys' diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go index 20f8f4a66af8..2cdb5930380e 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go @@ -147,6 +147,97 @@ func TestAccBigtableTable_familyType(t *testing.T) { }) } +func TestAccBigtableTable_testTableWithRowKeySchema(t *testing.T) { + // bigtable instance does not use the shared HTTP client, this test creates an instance + acctest.SkipIfVcr(t) + t.Parallel() + + instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + family := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigtableTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigtableTable_rowKeySchema(instanceName, tableName, family, `{ + "structType": { + "fields": [{ + "fieldName": "myfield", + "type": { + "stringType": { "encoding": { "utf8Bytes": { } } } + } + }], + "encoding": { "orderedCodeBytes": { } } + } + }`), + Check: resource.ComposeTestCheckFunc( + testAccBigtableRowKeySchemaExists(t, "google_bigtable_table.table", true), + ), + }, + { + ResourceName: "google_bigtable_table.table", + ImportState: true, + ImportStateVerify: true, + }, + { + // In-place modification is not accepted + Config: testAccBigtableTable_rowKeySchema(instanceName, tableName, family, `{ + "structType": { + "fields": [{ + "fieldName": 
"newfieldname", + "type": { + "stringType": { "encoding": { "utf8Bytes": { } } } + } + }], + "encoding": { "orderedCodeBytes": { } } + } + }`), + ExpectError: regexp.MustCompile(".*Row key schema in-place modification is not allowed.*"), + }, + { + // Removing the schema is ok + Config: testAccBigtableTable_family(instanceName, tableName, family), + Check: resource.ComposeTestCheckFunc( + testAccBigtableRowKeySchemaExists(t, "google_bigtable_table.table", false), + ), + }, + { + ResourceName: "google_bigtable_table.table", + ImportState: true, + ImportStateVerify: true, + }, + // Set the schema to a new one is ok + { + Config: testAccBigtableTable_rowKeySchema(instanceName, tableName, family, `{ + "structType": { + "fields": [ + { + "fieldName": "mystringfield", + "type": { + "stringType": { "encoding": { "utf8Bytes": { } } } + } + }, + { + "fieldName": "myintfield", + "type": { + "int64Type": { "encoding": { "bigEndianBytes": { } } } + } + } + ], + "encoding": { "delimitedBytes": { "delimiter": "Iw==" } } + } + }`), + Check: resource.ComposeTestCheckFunc( + testAccBigtableRowKeySchemaExists(t, "google_bigtable_table.table", true), + ), + }, + }, + }) +} + func TestAccBigtableTable_deletion_protection_protected(t *testing.T) { // bigtable instance does not use the shared HTTP client, this test creates an instance acctest.SkipIfVcr(t) @@ -692,6 +783,35 @@ func testAccBigtableColumnFamilyExists(t *testing.T, table_name_space, family st } } +func testAccBigtableRowKeySchemaExists(t *testing.T, table_name_space string, expected_has_schema bool) resource.TestCheckFunc { + ctx := context.Background() + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[table_name_space] + if !ok { + return fmt.Errorf("Table not found during schema check: %v", table_name_space) + } + + config := acctest.GoogleProviderConfig(t) + c, err := config.BigTableClientFactory(config.UserAgent).NewAdminClient(config.Project, rs.Primary.Attributes["instance_name"]) + if err 
!= nil { + return fmt.Errorf("Error starting admin client %s", err) + } + defer c.Close() + + table, err := c.TableInfo(ctx, rs.Primary.Attributes["name"]) + if err != nil { + return fmt.Errorf("Error retrieving table. Could not find %s in %s", rs.Primary.Attributes["name"], rs.Primary.Attributes["instance_name"]) + } + + actual_has_schema := (table.RowKeySchema != nil) + if actual_has_schema != expected_has_schema { + return fmt.Errorf("expecting table to have row key schema to be %v, got %v", expected_has_schema, actual_has_schema) + } + + return nil + } +} + func testAccBigtableChangeStreamDisabled(t *testing.T) resource.TestCheckFunc { var ctx = context.Background() return func(s *terraform.State) error { @@ -846,6 +966,35 @@ EOF `, instanceName, instanceName, tableName, family, familyType) } +func testAccBigtableTable_rowKeySchema(instanceName, tableName, family, rowKeySchema string) string { + return fmt.Sprintf(` +resource "google_bigtable_instance" "instance" { + name = "%s" + + cluster { + cluster_id = "%s" + zone = "us-central1-b" + } + + instance_type = "DEVELOPMENT" + deletion_protection = false +} + +resource "google_bigtable_table" "table" { + name = "%s" + instance_name = google_bigtable_instance.instance.name + + column_family { + family = "%s" + } + + row_key_schema = < Date: Wed, 18 Jun 2025 15:02:02 -0700 Subject: [PATCH 396/884] Revert combining the docker image split. 
add unzip && zip (#14308) --- .ci/containers/build-environment/Dockerfile | 2 +- .ci/containers/go-plus/Dockerfile | 32 ++------------------- 2 files changed, 3 insertions(+), 31 deletions(-) diff --git a/.ci/containers/build-environment/Dockerfile b/.ci/containers/build-environment/Dockerfile index 93e0c125c4ab..ab6ad04fdd31 100644 --- a/.ci/containers/build-environment/Dockerfile +++ b/.ci/containers/build-environment/Dockerfile @@ -25,7 +25,7 @@ ENV GO111MODULE "on" ENV LC_ALL=C.UTF-8 ENV LANG=C.UTF-8 -RUN apt-get update && apt-get install -y --no-install-recommends git openssh-client apt-transport-https ca-certificates curl netbase wget gcc make jq libjq1 +RUN apt-get update && apt-get install -y --no-install-recommends git openssh-client apt-transport-https ca-certificates curl netbase wget gcc make jq libjq1 unzip zip RUN git config --global user.name "Modular Magician" RUN git config --global user.email "magic-modules@google.com" diff --git a/.ci/containers/go-plus/Dockerfile b/.ci/containers/go-plus/Dockerfile index cd609168c0f8..b8348b3a61ea 100644 --- a/.ci/containers/go-plus/Dockerfile +++ b/.ci/containers/go-plus/Dockerfile @@ -3,13 +3,6 @@ FROM golang:1.23-bullseye AS builder ENV GOCACHE=/go/cache RUN apt-get update && apt-get install -y unzip - -# Download tpgtools dependencies (from build-env) -WORKDIR /app -ADD "https://raw.githubusercontent.com/GoogleCloudPlatform/magic-modules/main/tpgtools/go.mod" go.mod -ADD "https://raw.githubusercontent.com/GoogleCloudPlatform/magic-modules/main/tpgtools/go.sum" go.sum -RUN go mod download - WORKDIR /app1 # Add the source code and build ADD "https://github.com/GoogleCloudPlatform/magic-modules/archive/refs/heads/main.zip" source.zip @@ -27,21 +20,8 @@ ENV GOCACHE=/go/cache COPY --from=builder /go/pkg/mod /go/pkg/mod COPY --from=builder /go/cache /go/cache -# Add build-env environment variables -ENV GOPATH /go -ENV PATH /usr/local/go/bin:$PATH -ENV PATH $GOPATH/bin:$PATH -ENV GO111MODULE "on" -ENV 
LC_ALL=C.UTF-8 -ENV LANG=C.UTF-8 - -# Create GOPATH structure (from build-env) -RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 1777 "$GOPATH" - RUN apt-get update && \ - apt-get install -y git jq unzip parallel curl && \ - # Add build-env packages - apt-get install -y openssh-client apt-transport-https ca-certificates netbase wget gcc make libjq1 && \ + apt-get install -y git jq unzip zip parallel curl && \ echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && \ apt-get update -y && \ @@ -49,15 +29,7 @@ RUN apt-get update && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* -# Add git configuration (from build-env) -RUN git config --global user.name "Modular Magician" -RUN git config --global user.email "magic-modules@google.com" - RUN wget https://releases.hashicorp.com/terraform/1.11.0/terraform_1.11.0_linux_amd64.zip \ && unzip terraform_1.11.0_linux_amd64.zip \ && rm terraform_1.11.0_linux_amd64.zip \ - && mv ./terraform /bin/terraform - -# Install Go tools (from build-env) -RUN go install golang.org/x/tools/cmd/goimports@d088b475e3360caabc032aaee1dc66351d4e729a -RUN go install github.com/github/hub@v2.11.2+incompatible \ No newline at end of file + && mv ./terraform /bin/terraform \ No newline at end of file From 33fb506e0db83e6a7185891eb245367d5de95155 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 18 Jun 2025 15:05:36 -0700 Subject: [PATCH 397/884] tgc-revival: add IncludeInTGCNext field (#14284) --- mmv1/api/resource.go | 9 ++- mmv1/api/type.go | 3 + mmv1/products/compute/Address.yaml | 1 + mmv1/provider/terraform_tgc_next.go | 59 +++++++++++++++---- .../cai2hcl/resource_converters.go.tmpl | 13 +++- .../provider/provider_mmv1_resources.go.tmpl} | 12 +++- .../tfplan2cai/resource_converters.go.tmpl | 13 +++- 
.../terraform/provider/provider.go.tmpl | 1 + .../tgc_next/test/assert_test_files.go | 32 +++++++--- 9 files changed, 114 insertions(+), 29 deletions(-) rename mmv1/{third_party/tgc_next/pkg/provider/provider_mmv1_resources.go => templates/tgc_next/provider/provider_mmv1_resources.go.tmpl} (67%) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 6fc73240fc22..fbd229866694 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -230,6 +230,9 @@ type Resource struct { // (i.e. terraform-provider-conversion) ExcludeTgc bool `yaml:"exclude_tgc,omitempty"` + // If true, include resource in the new package of TGC (terraform-provider-conversion) + IncludeInTGCNext bool `yaml:"include_in_tgc_next_DO_NOT_USE,omitempty"` + // If true, skip sweeper generation for this resource ExcludeSweeper bool `yaml:"exclude_sweeper,omitempty"` @@ -1988,11 +1991,11 @@ func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string for _, tp := range r.AllUserProperties() { if tp.UrlParamOnly { props = append(props, google.Underscore(tp.Name)) + } else if tp.IsMissingInCai { + props = append(props, tp.MetadataLineage()) } } - for _, tp := range e.TGCTestIgnoreExtra { - props = append(props, tp) - } + props = append(props, e.TGCTestIgnoreExtra...) 
slices.Sort(props) return props diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 75d41ddcb78f..66ac90255b17 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -293,6 +293,9 @@ type Type struct { // The prefix used as part of the property expand/flatten function name // flatten{{$.GetPrefix}}{{$.TitlelizeProperty}} Prefix string `yaml:"prefix,omitempty"` + + // The field is not present in CAI asset + IsMissingInCai bool `yaml:"is_missing_in_cai,omitempty"` } const MAX_NAME = 20 diff --git a/mmv1/products/compute/Address.yaml b/mmv1/products/compute/Address.yaml index c1f943d35c4e..814a52ed7db0 100644 --- a/mmv1/products/compute/Address.yaml +++ b/mmv1/products/compute/Address.yaml @@ -49,6 +49,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: post_create: 'templates/terraform/post_create/labels.tmpl' tgc_encoder: 'templates/tgc_next/encoders/compute_address.go.tmpl' diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 7d1d79dff92e..3dbf1f8384eb 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -23,6 +23,7 @@ import ( "os" "path" "path/filepath" + "strings" "time" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" @@ -34,6 +35,10 @@ import ( // TerraformGoogleConversionNext is for both tfplan2cai and cai2hcl conversions // and copying other files, such as transport.go type TerraformGoogleConversionNext struct { + ResourceCount int + + ResourcesForVersion []ResourceIdentifier + TargetVersionName string Version product.Version @@ -43,6 +48,12 @@ type TerraformGoogleConversionNext struct { StartTime time.Time } +type ResourceIdentifier struct { + ServiceName string + TerraformName string + ResourceName string +} + func NewTerraformGoogleConversionNext(product *api.Product, versionName string, startTime time.Time) TerraformGoogleConversionNext { t := TerraformGoogleConversionNext{ Product: 
product, @@ -75,17 +86,7 @@ func (tgc TerraformGoogleConversionNext) Generate(outputFolder, productPath, res } func (tgc TerraformGoogleConversionNext) GenerateObject(object api.Resource, outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { - if object.ExcludeTgc { - log.Printf("Skipping fine-grained resource %s", object.Name) - return - } - - // TODO: remove it after supporting most of resources. - supportList := map[string]bool{ - "ComputeAddress": true, - } - - if ok := supportList[object.ResourceName()]; !ok { + if !object.IncludeInTGCNext { return } @@ -138,6 +139,8 @@ func (tgc *TerraformGoogleConversionNext) GenerateResourceTests(object api.Resou } func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { + tgc.generateResourcesForVersion(products) + resourceConverters := map[string]string{ // common "pkg/transport/config.go": "third_party/terraform/transport/config.go.tmpl", @@ -145,6 +148,7 @@ func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, "pkg/tpgresource/common_diff_suppress.go": "third_party/terraform/tpgresource/common_diff_suppress.go", "pkg/provider/provider.go": "third_party/terraform/provider/provider.go.tmpl", "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go", + "pkg/provider/provider_mmv1_resources.go": "templates/tgc_next/provider/provider_mmv1_resources.go.tmpl", // tfplan2cai "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", @@ -294,6 +298,39 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target } } +// Generates the list of resources, and gets the count of resources. 
+// The resource object has the format +// +// { +// terraform_name: +// resource_name: +// } +// +// The variable resources_for_version is used to generate resources in file +// mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl +func (tgc *TerraformGoogleConversionNext) generateResourcesForVersion(products []*api.Product) { + for _, productDefinition := range products { + service := strings.ToLower(productDefinition.Name) + for _, object := range productDefinition.Objects { + if object.Exclude || object.NotInVersion(productDefinition.VersionObjOrClosest(tgc.TargetVersionName)) { + continue + } + + if !object.IncludeInTGCNext { + continue + } + + tgc.ResourceCount++ + + tgc.ResourcesForVersion = append(tgc.ResourcesForVersion, ResourceIdentifier{ + ServiceName: service, + TerraformName: object.TerraformName(), + ResourceName: object.ResourceName(), + }) + } + } +} + type TgcWithProducts struct { TerraformGoogleConversionNext Compiler string diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index fe36339524e2..bfbcb0b80abf 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -29,7 +29,9 @@ package converters import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/compute" + {{- range $service := $.Products }} + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/{{ lower $service.Name }}" + {{- end }} "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/resourcemanager" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -40,7 +42,14 @@ var provider *schema.Provider = tpg_provider.Provider() // ConverterMap is a collection of converters instances, indexed 
by cai asset type. var ConverterMap = map[string]models.Converter{ + // ####### START handwritten resources ########### resourcemanager.ProjectAssetType: resourcemanager.NewProjectConverter(provider), compute.ComputeInstanceAssetType: compute.NewComputeInstanceConverter(provider), - compute.ComputeAddressAssetType: compute.NewComputeAddressConverter(provider), + // ####### END handwritten resources ########### + + {{- range $object := $.ResourcesForVersion }} + {{- if $object.ResourceName }} + {{ $object.ServiceName }}.{{ $object.ResourceName }}AssetType: {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Converter(provider), + {{- end }} + {{- end }} } diff --git a/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl similarity index 67% rename from mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go rename to mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl index 4623038a414a..967e3d3f7adf 100644 --- a/mmv1/third_party/tgc_next/pkg/provider/provider_mmv1_resources.go +++ b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl @@ -12,7 +12,13 @@ var handwrittenTfplan2caiResources = map[string]*schema.Resource{ "google_compute_instance": compute.ResourceComputeInstance(), "google_project": resourcemanager.ResourceGoogleProject(), // ####### END handwritten resources ########### - - // TODO: will generate it automatically for MMv1 resources. 
- "google_compute_address": compute.ResourceComputeAddress(), } + +// Generated resources: {{ $.ResourceCount }} +var generatedResources = map[string]*schema.Resource{ + {{- range $object := $.ResourcesForVersion }} + {{- if $object.ResourceName }} + "{{ $object.TerraformName }}": {{ $object.ServiceName }}.Resource{{ $object.ResourceName -}}(), + {{- end }} + {{- end }} +} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl index d357f700efb3..3245a97681f8 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl @@ -29,12 +29,21 @@ package converters import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/compute" + {{- range $service := $.Products }} + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/{{ lower $service.Name }}" + {{- end }} "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/resourcemanager" ) var ConverterMap = map[string]cai.ResourceConverter{ + // ####### START handwritten resources ########### "google_project": resourcemanager.ResourceConverterProject(), "google_compute_instance": compute.ResourceConverterComputeInstance(), - "google_compute_address": compute.ResourceConverterComputeAddress(), + // ####### END handwritten resources ########### + + {{- range $object := $.ResourcesForVersion }} + {{- if $object.ResourceName }} + "{{ $object.TerraformName }}": {{ $object.ServiceName }}.ResourceConverter{{ $object.ResourceName -}}(), + {{- end }} + {{- end }} } \ No newline at end of file diff --git a/mmv1/third_party/terraform/provider/provider.go.tmpl b/mmv1/third_party/terraform/provider/provider.go.tmpl 
index 76f0eb3f7246..828254a7b757 100644 --- a/mmv1/third_party/terraform/provider/provider.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider.go.tmpl @@ -250,6 +250,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { {{- else }} return mergeResourceMaps( handwrittenTfplan2caiResources, + generatedResources, ) {{- end }} } diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 39f0ec922236..770e2be6b0e2 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -51,13 +51,14 @@ func BidirectionalConversion(t *testing.T, ignoredFields []string) { // If the primary resource is available, only test the primary resource. // Otherwise, test all of the resources in the test. if primaryResource != "" { - err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, logger) + t.Logf("Test for the primary resource %s begins.", primaryResource) + err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, logger, true) if err != nil { t.Fatal("Test fails:", err) } } else { for _, testData := range resourceTestData { - err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, logger) + err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, logger, false) if err != nil { t.Fatal("Test fails: ", err) } @@ -66,11 +67,11 @@ func BidirectionalConversion(t *testing.T, ignoredFields []string) { } // Tests a single resource -func testSingleResource(t *testing.T, testName string, testData ResourceTestData, tfDir string, ignoredFields []string, logger *zap.Logger) error { +func testSingleResource(t *testing.T, testName string, testData ResourceTestData, tfDir string, ignoredFields []string, logger *zap.Logger, primaryResource bool) error { resourceType := testData.ResourceType - if _, ok := 
tfplan2caiconverters.ConverterMap[resourceType]; !ok { - log.Printf("Test for %s is skipped as it is not supported in tfplan2cai conversion.", resourceType) - return nil + var tfplan2caiSupported, cai2hclSupported bool + if _, tfplan2caiSupported = tfplan2caiconverters.ConverterMap[resourceType]; !tfplan2caiSupported { + log.Printf("%s is not supported in tfplan2cai conversion.", resourceType) } assets := make([]caiasset.Asset, 0) @@ -80,12 +81,24 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if assetType == "" { return fmt.Errorf("cai asset is unavailable for %s", assetName) } - if _, ok := cai2hclconverters.ConverterMap[assetType]; !ok { - log.Printf("Test for %s is skipped as it is not supported in cai2hcl conversion.", assetType) + if _, cai2hclSupported = cai2hclconverters.ConverterMap[assetType]; !cai2hclSupported { + log.Printf("%s is not supported in cai2hcl conversion.", assetType) + } + } + + if !tfplan2caiSupported && !cai2hclSupported { + if primaryResource { + return fmt.Errorf("conversion of the primary resource %s is not supported in tgc", testData.ResourceAddress) + } else { + log.Printf("Test for %s is skipped as conversion of the resource is not supported in tgc.", resourceType) return nil } } + if !(tfplan2caiSupported && cai2hclSupported) { + return fmt.Errorf("resource %s is supported in either tfplan2cai or cai2hcl within tgc, but not in both", resourceType) + } + // Uncomment these lines when debugging issues locally // assetFile := fmt.Sprintf("%s.json", t.Name()) // writeJSONFile(assetFile, assets) @@ -133,6 +146,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if len(missingKeys) > 0 { return fmt.Errorf("missing fields in address %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) } + log.Printf("Step 1 passes for resource %s. 
All of the fields in raw config are in export config", testData.ResourceAddress) // Step 2 // Run a terraform plan using export_config. @@ -158,6 +172,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData log.Printf("Roundtrip config is different from the export config.\nroundtrip config:\n%s\nexport config:\n%s", string(roundtripConfigData), string(exportConfigData)) return fmt.Errorf("test %s got diff (-want +got): %s", testName, diff) } + log.Printf("Step 2 passes for resource %s. Roundtrip config and export config are identical", testData.ResourceAddress) // Step 3 // Compare most fields between the exported asset and roundtrip asset, except for "data" field for resource @@ -187,6 +202,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData } } } + log.Printf("Step 3 passes for resource %s. Exported asset and roundtrip asset are identical", testData.ResourceAddress) return nil } From 03f6f8e0a157b7fa0aafc1f9f39ead3e96ba53ad Mon Sep 17 00:00:00 2001 From: Iris Chen <10179943+iyabchen@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:36:31 -0700 Subject: [PATCH 398/884] Detect malformed frontmatter in changed doc files (#13609) --- .ci/magician/cmd/generate_comment.go | 55 ++++++++++++++++ .ci/magician/cmd/generate_comment_test.go | 76 +++++++++++++++++++++++ .ci/magician/go.mod | 3 + .ci/magician/go.sum | 6 ++ 4 files changed, 140 insertions(+) diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index 322c732d667a..ccdbd8bc28e9 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -16,6 +16,7 @@ package cmd import ( + "bytes" "encoding/json" "fmt" "os" @@ -35,6 +36,9 @@ import ( "github.com/GoogleCloudPlatform/magic-modules/tools/issue-labeler/labeler" "github.com/spf13/cobra" + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/parser" + "go.abhg.dev/goldmark/frontmatter" "golang.org/x/exp/maps" _ "embed" @@ -345,6 
+349,11 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, errors[repo.Title] = append(errors[repo.Title], "The missing doc detector failed to run.") } data.MissingDocs = missingDocs + + errStrs := checkDocumentFrontmatter(repo) + if len(errStrs) > 0 { + errors[repo.Title] = append(errors[repo.Title], errStrs...) + } } simpleDiff, err := computeAffectedResources(diffProcessorPath, rnr, repo) @@ -670,3 +679,49 @@ func pathChanged(path string, changedFiles []string) bool { func init() { rootCmd.AddCommand(generateCommentCmd) } + +// checkDocumentFrontmatter checks changed markdown files' frontmatter +// structure in the repo and returns error strings when applicable. +func checkDocumentFrontmatter(repo source.Repo) []string { + var errs []string + for _, f := range repo.ChangedFiles { + if !strings.HasSuffix(f, ".markdown") { + continue + } + src, err := os.ReadFile(filepath.Join(repo.Path, f)) + if err != nil { + errs = append(errs, "Error reading file "+f) + continue + } + + md := goldmark.New( + goldmark.WithExtensions(&frontmatter.Extender{}), + ) + + ctx := parser.NewContext() + var buff bytes.Buffer + + err = md.Convert(src, &buff, parser.WithContext(ctx)) + if err != nil { + errs = append(errs, "Error parsing file "+f) + continue + } + data := frontmatter.Get(ctx) + if data == nil { + errs = append(errs, fmt.Sprintf("No frontmatter found in file %s. This is usually due to an incorrect structure in the frontmatter.", f)) + continue + } + + var metadata struct { + Subcategory string + } + if err := data.Decode(&metadata); err != nil { + errs = append(errs, fmt.Sprintf("Failed to decode frontmatter in file %s. 
This is usually due to an incorrect structure in the frontmatter.", f)) + continue + } + if metadata.Subcategory == "" { + errs = append(errs, fmt.Sprintf("Failed to detect subcategory in the frontmatter in file %s.", f)) + } + } + return errs +} diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index 6cc1eb029b5d..f73683709ef8 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -18,6 +18,7 @@ package cmd import ( "fmt" "os" + "path/filepath" "reflect" "testing" @@ -550,3 +551,78 @@ func TestPathChanged(t *testing.T) { }) } } + +func TestCheckDocumentFrontmatter(t *testing.T) { + tmpDir := t.TempDir() + files := map[string]string{ + "malformed.markdown": ` +subcategory: Example Subcategory +--- +`, + "sample.markdown": ` +--- +subcategory: Example Subcategory +--- +`, + "missingsubcategory.markdown": ` +--- +random: Example Subcategory +--- +`, + } + for name, content := range files { + fullPath := filepath.Join(tmpDir, name) + err := os.WriteFile(fullPath, []byte(content), 0644) + if err != nil { + t.Fatalf("Failed to create file %s: %v", name, err) + } + } + + tests := []struct { + name string + changedFiles []string + wantErr bool + }{ + { + name: "No changed markdown files", + changedFiles: []string{"abc.txt"}, + wantErr: false, + }, + { + name: "malformed markdown", + changedFiles: []string{"malformed.markdown"}, + wantErr: true, + }, + { + name: "not exist markdown", + changedFiles: []string{"abc.markdown"}, + wantErr: true, + }, + { + name: "Changed files with no frontmatter", + changedFiles: []string{"sample.markdown"}, + wantErr: false, + }, + { + name: "Missing subcategory in frontmatter", + changedFiles: []string{"missingsubcategory.markdown"}, + wantErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + repo := source.Repo{ + Path: tmpDir, + ChangedFiles: tc.changedFiles, + } + got := checkDocumentFrontmatter(repo) + 
if tc.wantErr && len(got) == 0 { + t.Errorf("checkDocumentFrontmatter() = %v, want error", got) + } + if !tc.wantErr && len(got) > 0 { + t.Errorf("checkDocumentFrontmatter() = %v, want no error", got) + } + }) + } +} diff --git a/.ci/magician/go.mod b/.ci/magician/go.mod index c6e029ad9c04..f422afca2320 100644 --- a/.ci/magician/go.mod +++ b/.ci/magician/go.mod @@ -19,6 +19,8 @@ require ( github.com/google/go-github/v68 v68.0.0 github.com/otiai10/copy v1.12.0 github.com/stretchr/testify v1.10.0 + github.com/yuin/goldmark v1.7.8 + go.abhg.dev/goldmark/frontmatter v0.2.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -30,6 +32,7 @@ require ( cloud.google.com/go/compute/metadata v0.6.0 // indirect cloud.google.com/go/iam v1.2.2 // indirect cloud.google.com/go/monitoring v1.21.2 // indirect + github.com/BurntSushi/toml v1.2.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 // indirect diff --git a/.ci/magician/go.sum b/.ci/magician/go.sum index ffc9c5757f12..d5db267de18d 100644 --- a/.ci/magician/go.sum +++ b/.ci/magician/go.sum @@ -22,6 +22,8 @@ cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv cloud.google.com/go/trace v1.11.2 h1:4ZmaBdL8Ng/ajrgKqY5jfvzqMXbrDcBsUGXOT9aqTtI= cloud.google.com/go/trace v1.11.2/go.mod h1:bn7OwXd4pd5rFuAnTrzBuoZ4ax2XQeG3qNgYmfCy0Io= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= +github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw= 
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0/go.mod h1:obipzmGjfSjam60XLwGfqUkJsfiheAl+TUjG+4yzyPM= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 h1:UQ0AhxogsIRZDkElkblfnwjc3IaltCm2HUMvezQaL7s= @@ -130,6 +132,10 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic= +github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= +go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= +go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/detectors/gcp v1.29.0 h1:TiaiXB4DpGD3sdzNlYQxruQngn5Apwzi1X0DRhuGvDQ= From 32a6666a4ac3523708ae837e04bcedeceadf4b5a Mon Sep 17 00:00:00 2001 From: vbhadoriaB <150216360+vbhadoriaB@users.noreply.github.com> Date: Thu, 19 Jun 2025 00:09:17 +0000 Subject: [PATCH 399/884] Add new fields in QAS resource (#14204) Co-authored-by: Stephen Lewis (Burrows) --- .../cloudquotas/QuotaAdjusterSettings.yaml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml b/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml index 481fefd92bce..362472b02523 100644 --- a/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml +++ b/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml @@ -67,15 +67,25 @@ properties: type: String description: | The resource container 
that determines if the quota adjuster is set for this project. - Expect this field to be empty currently. output: true - name: 'effectiveEnablement' type: Enum description: | Based on the effective container`s setting above, determines Whether this resource container has the quota adjuster enabled. - Expect this field to be empty currently. output: true enum_values: - 'DEFAULT' - 'ENABLED' - 'DISABLED' + - name: 'inherited' + type: Boolean + description: | + Indicates whether the setting is inherited or explicitly specified. + output: true + - name: 'inheritedFrom' + type: String + description: | + The resource container from which the setting is inherited. This refers to the nearest ancestor with enablement set (either ENABLED or DISABLED). + The value can be `organizations/{organization_id}`, `folders/{folder_id}`, or can be `default` if no ancestor exists with enablement set. + The value will be empty when `enablement` is specified on this resource container. + output: true From c1d16d2e625d08c80417d68a865c17ba8bc7970d Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Thu, 19 Jun 2025 06:19:40 -0700 Subject: [PATCH 400/884] updated google_compute_node_group and google_compute_node_template name property to be required (#14272) Co-authored-by: luckyswaminathan --- mmv1/products/compute/NodeGroup.yaml | 1 + mmv1/products/compute/NodeTemplate.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/mmv1/products/compute/NodeGroup.yaml b/mmv1/products/compute/NodeGroup.yaml index 326a1c13d986..3a854983c1db 100644 --- a/mmv1/products/compute/NodeGroup.yaml +++ b/mmv1/products/compute/NodeGroup.yaml @@ -95,6 +95,7 @@ properties: type: String description: | Name of the resource. 
+ required: true - name: 'nodeTemplate' type: ResourceRef description: | diff --git a/mmv1/products/compute/NodeTemplate.yaml b/mmv1/products/compute/NodeTemplate.yaml index e23d4317d85d..c5eb4ebb2461 100644 --- a/mmv1/products/compute/NodeTemplate.yaml +++ b/mmv1/products/compute/NodeTemplate.yaml @@ -79,6 +79,7 @@ properties: - name: 'name' type: String description: 'Name of the resource.' + required: true - name: 'nodeAffinityLabels' type: KeyValuePairs description: | From 817ef58a80eaf2d685f292007acc834520305e5b Mon Sep 17 00:00:00 2001 From: Balanagu Harsha Vardhan Date: Thu, 19 Jun 2025 23:52:53 +0530 Subject: [PATCH 401/884] Add Plugin Instance resource of API hub (#14280) --- mmv1/products/apihub/PluginInstance.yaml | 332 ++++++++++++++++++ .../apihub_plugin_instance_basic.tf.tmpl | 10 + 2 files changed, 342 insertions(+) create mode 100644 mmv1/products/apihub/PluginInstance.yaml create mode 100644 mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl diff --git a/mmv1/products/apihub/PluginInstance.yaml b/mmv1/products/apihub/PluginInstance.yaml new file mode 100644 index 000000000000..0f1c823ca9f0 --- /dev/null +++ b/mmv1/products/apihub/PluginInstance.yaml @@ -0,0 +1,332 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: PluginInstance +description: Description +base_url: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances +immutable: true +self_link: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}} +create_url: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances?pluginInstanceId={{plugin_instance_id}} +id_format: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}} +import_format: + - projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}} +examples: + - name: apihub_plugin_instance_basic + primary_resource_id: apihub_plugin_instance_basic + vars: + curation_id: 'test' + # API hub instance needs to be created before this, and end to end creation of that resource using Terraform is not yet supported. + exclude_test: true + external_providers: ["time"] +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + base_url: '{{op_id}}' + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: UGx1Z2luSW5zdGFuY2U= +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: plugin + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: pluginInstanceId + type: String + description: |- + The ID to use for the plugin instance, which will become the final + component of the plugin instance's resource name. This field is optional. 
+ + * If provided, the same will be used. The service will throw an error if + the specified id is already used by another plugin instance in the plugin + resource. + * If not provided, a system generated id will be used. + + This value should be 4-63 characters, and valid characters + are /a-z[0-9]-_/. + immutable: true + url_param_only: true + required: true +properties: + - name: name + type: String + description: |- + Identifier. The unique name of the plugin instance resource. + Format: + `projects/{project}/locations/{location}/plugins/{plugin}/instances/{instance}` + output: true + - name: disable + type: Boolean + default_value: false + update_url: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}}:enableAction + description: The display name for this plugin instance. Max length is 255 characters. + - name: authConfig + type: NestedObject + description: AuthConfig represents the authentication information. + properties: + - name: googleServiceAccountConfig + type: NestedObject + description: Config for Google service account authentication. + properties: + - name: serviceAccount + type: String + description: |- + The service account to be used for authenticating request. + + The `iam.serviceAccounts.getAccessToken` permission should be granted on + this service account to the impersonator service account. + required: true + - name: userPasswordConfig + type: NestedObject + description: Parameters to support Username and Password Authentication. + properties: + - name: username + type: String + description: Username. + required: true + - name: password + type: NestedObject + description: Secret provides a reference to entries in Secret Manager. + required: true + properties: + - name: secretVersion + type: String + description: |- + The resource name of the secret version in the format, + format as: `projects/*/secrets/*/versions/*`. 
+ required: true + - name: apiKeyConfig + type: NestedObject + description: Config for authentication with API key. + properties: + - name: name + type: String + description: |- + The parameter name of the API key. + E.g. If the API request is "https://example.com/act?api_key=", + "api_key" would be the parameter name. + required: true + - name: apiKey + type: NestedObject + description: Secret provides a reference to entries in Secret Manager. + required: true + properties: + - name: secretVersion + type: String + description: |- + The resource name of the secret version in the format, + format as: `projects/*/secrets/*/versions/*`. + required: true + - name: httpElementLocation + type: String + description: |- + The location of the API key. + The default value is QUERY. + Possible values: + HTTP_ELEMENT_LOCATION_UNSPECIFIED + QUERY + HEADER + PATH + BODY + COOKIE + required: true + - name: oauth2ClientCredentialsConfig + type: NestedObject + description: |- + Parameters to support Oauth 2.0 client credentials grant authentication. + See https://tools.ietf.org/html/rfc6749#section-1.3.4 for more details. + properties: + - name: clientSecret + type: NestedObject + description: Secret provides a reference to entries in Secret Manager. + required: true + properties: + - name: secretVersion + type: String + description: |- + The resource name of the secret version in the format, + format as: `projects/*/secrets/*/versions/*`. + required: true + - name: clientId + type: String + description: The client identifier. + required: true + - name: authType + type: String + description: |2- + + Possible values: + AUTH_TYPE_UNSPECIFIED + NO_AUTH + GOOGLE_SERVICE_ACCOUNT + USER_PASSWORD + API_KEY + OAUTH2_CLIENT_CREDENTIALS + required: true + - name: state + type: String + description: |- + The current state of the plugin instance (e.g., enabled, disabled, + provisioning). 
+ Possible values: + STATE_UNSPECIFIED + CREATING + ACTIVE + APPLYING_CONFIG + ERROR + FAILED + DELETING + output: true + - name: createTime + type: String + description: Timestamp indicating when the plugin instance was created. + output: true + - name: updateTime + type: String + description: Timestamp indicating when the plugin instance was last updated. + output: true + - name: displayName + type: String + description: The display name for this plugin instance. Max length is 255 characters. + required: true + - name: errorMessage + type: String + description: |- + Error message describing the failure, if any, during Create, Delete or + ApplyConfig operation corresponding to the plugin instance.This field will + only be populated if the plugin instance is in the ERROR or FAILED state. + output: true + - name: actions + type: Array + description: The action status for the plugin instance. + required: true + default_from_api: true + item_type: + type: NestedObject + properties: + - name: hubInstanceAction + type: NestedObject + output: true + description: The execution status for the plugin instance. + properties: + - name: currentExecutionState + type: String + description: |- + The current state of the execution. + Possible values: + CURRENT_EXECUTION_STATE_UNSPECIFIED + RUNNING + NOT_RUNNING + output: true + - name: lastExecution + type: NestedObject + description: The result of the last execution of the plugin instance. + output: true + properties: + - name: result + type: String + description: |- + The result of the last execution of the plugin instance. + Possible values: + RESULT_UNSPECIFIED + SUCCEEDED + FAILED + output: true + - name: errorMessage + type: String + description: |- + Error message describing the failure, if any, during the last + execution. + output: true + - name: startTime + type: String + description: The last execution start time of the plugin instance. 
+ output: true + - name: endTime + type: String + description: The last execution end time of the plugin instance. + output: true + - name: actionId + type: String + description: |- + This should map to one of the action id specified + in actions_config in the plugin. + required: true + - name: state + type: String + description: |- + The current state of the plugin action in the plugin instance. + Possible values: + STATE_UNSPECIFIED + ENABLED + DISABLED + ENABLING + DISABLING + ERROR + output: true + - name: scheduleCronExpression + type: String + default_from_api: true + description: |- + The schedule for this plugin instance action. This can only be set if the + plugin supports API_HUB_SCHEDULE_TRIGGER mode for this action. + - name: curationConfig + type: NestedObject + default_from_api: true + description: The curation information for this plugin instance. + properties: + - name: customCuration + type: NestedObject + description: Custom curation information for this plugin instance. + properties: + - name: curation + type: String + description: |- + The unique name of the curation resource. This will be the name of the + curation resource in the format: + `projects/{project}/locations/{location}/curations/{curation}` + required: true + - name: curationType + type: String + default_from_api: true + description: |2- + + Possible values: + CURATION_TYPE_UNSPECIFIED + DEFAULT_CURATION_FOR_API_METADATA + CUSTOM_CURATION_FOR_API_METADATA + required: true + - name: scheduleTimeZone + type: String + default_from_api: true + description: |- + The time zone for the schedule cron expression. If not provided, UTC will + be used. 
diff --git a/mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl new file mode 100644 index 000000000000..c2b19c82a6cc --- /dev/null +++ b/mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl @@ -0,0 +1,10 @@ +resource "google_apihub_plugin_instance" "{{$.PrimaryResourceId}}" { + location = "us-central1" + plugin = "existing-plugin-id" + plugin_instance_id = "{{index $.Vars "curation_id"}}" + display_name = "Sample Plugin Instance Display Name" + disable = false + actions { + action_id = "existing-action-id" + } +} \ No newline at end of file From c089f586c4df6f9621646cf568ea7a8e2f28db00 Mon Sep 17 00:00:00 2001 From: iamkonohamaru Date: Sat, 21 Jun 2025 02:17:39 +0530 Subject: [PATCH 402/884] Update AutokeyConfig.yaml (#14299) --- mmv1/products/kms/AutokeyConfig.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmv1/products/kms/AutokeyConfig.yaml b/mmv1/products/kms/AutokeyConfig.yaml index d25d82d5fb73..c50d9381d6cc 100644 --- a/mmv1/products/kms/AutokeyConfig.yaml +++ b/mmv1/products/kms/AutokeyConfig.yaml @@ -85,3 +85,8 @@ properties: CryptoKey for any new KeyHandle the Developer creates. Should have the form `projects/`. min_version: 'beta' + - name: 'etag' + type: String + description: 'The etag of the AutokeyConfig for optimistic concurrency control.' 
+ min_version: 'beta' + output: true From 9c13e8626ca76c9a642bb297a8693aa42584ec07 Mon Sep 17 00:00:00 2001 From: Xian-Ji Chen <68801742+XianJiChen@users.noreply.github.com> Date: Fri, 20 Jun 2025 15:27:15 -0700 Subject: [PATCH 403/884] Fix for Dataplex full quality test failure (#14306) --- .../dataplex/resource_dataplex_datascan_test.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go index 07de8368dffc..d76a6dbca40c 100644 --- a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go +++ b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go @@ -54,13 +54,13 @@ func testAccDataplexDatascanDataplexDatascanFullQuality_full(context map[string] return acctest.Nprintf(` resource "google_bigquery_dataset" "tf_test_dataset" { - dataset_id = "tf_test_dataset_id" + dataset_id = "tf_test_dataset_id_%{random_suffix}" default_table_expiration_ms = 3600000 } resource "google_bigquery_table" "tf_test_table" { dataset_id = google_bigquery_dataset.tf_test_dataset.dataset_id - table_id = "tf_test_table" + table_id = "tf_test_table_%{random_suffix}" deletion_protection = false schema = < Date: Mon, 23 Jun 2025 10:43:56 -0500 Subject: [PATCH 404/884] Add create_ignore_already_exists to google_cloud_identity_group_membership (#14250) --- .../cloudidentity/GroupMembership.yaml | 7 + .../cloud_identity_group_membership.go.tmpl | 69 +++++++ ...oud_identity_group_membership_test.go.tmpl | 175 ++++++++++++++++++ 3 files changed, 251 insertions(+) create mode 100644 mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl diff --git a/mmv1/products/cloudidentity/GroupMembership.yaml b/mmv1/products/cloudidentity/GroupMembership.yaml index 9b0376a33642..1790f8d8869a 100644 --- a/mmv1/products/cloudidentity/GroupMembership.yaml +++ 
b/mmv1/products/cloudidentity/GroupMembership.yaml @@ -37,6 +37,7 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: + pre_create: templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_update: 'templates/terraform/custom_update/cloud_identity_group_membership.go.tmpl' post_import: 'templates/terraform/post_import/cloud_identity_group_membership.go.tmpl' @@ -62,6 +63,12 @@ examples: identity_user: 'IDENTITY_USER' # Has a handwritten test due to CloudIdentityGroup-related tests needing to run synchronously exclude_test: true +virtual_fields: + - name: 'create_ignore_already_exists' + description: | + If set to true, skip group member creation if a membership with the same name already exists. Defaults to false. + type: Boolean + default_value: false parameters: - name: 'group' type: ResourceRef diff --git a/mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl b/mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl new file mode 100644 index 000000000000..5e8ca2a772d5 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl @@ -0,0 +1,69 @@ +if d.Get("create_ignore_already_exists").(bool) { + log.Printf("[DEBUG] Calling get GroupMembership to check if membership already exists") + preferredMemberKeyPropTyped := tpgresource.CheckStringMap(preferredMemberKeyProp) + + params := map[string]string{ + "memberKey.id": preferredMemberKeyPropTyped["id"], + } + if ns, ok := preferredMemberKeyPropTyped["namespace"]; ok && ns != "" { + params["memberKey.namespace"] = ns + } + getUrl, err := transport_tpg.AddQueryParams(url+":lookup", params) + if err != nil { + return err + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: getUrl, + UserAgent: userAgent, + Headers: headers, + }) + // Do normal 
create if membership does not exist + + if err != nil { + gerr, ok := err.(*googleapi.Error) + notFound := ok && gerr.Code == 404 + // If group membership is not found, we can proceed with the create operation. + if !notFound { + return fmt.Errorf("Error checking if GroupMembership exists: %s", err) + } + } else { + // Set computed resource properties from create API response so that they're available on the subsequent Read call. + err = resourceCloudIdentityGroupMembershipPostCreateSetComputedFields(d, meta, res) + if err != nil { + return fmt.Errorf("setting computed ID format fields: %w", err) + } + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "{{"{{name}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + // `name` is autogenerated from the api so needs to be set post-create + name, ok := res["name"] + if !ok { + respBody, ok := res["response"] + if !ok { + return fmt.Errorf("Create response didn't contain critical fields. Create may not have succeeded.") + } + + name, ok = respBody.(map[string]interface{})["name"] + if !ok { + return fmt.Errorf("Create response didn't contain critical fields. 
Create may not have succeeded.") + } + } + if err := d.Set("name", name.(string)); err != nil { + return fmt.Errorf("Error setting name: %s", err) + } + d.SetId(name.(string)) + + log.Printf("[DEBUG] Finished creating GroupMembership %q: %#v", d.Id(), res) + + return resourceCloudIdentityGroupMembershipRead(d, meta) + } +} diff --git a/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl b/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl index 6d03d0d1c2cc..f299956ee9d5 100644 --- a/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl @@ -561,3 +561,178 @@ func testAccCheckCloudIdentityGroupMembershipDestroyProducer(t *testing.T) func( return nil } } + +// Test setting create_ignore_already_exists on an existing resource +func testAccCloudIdentityGroupMembership_existingResourceCreateIgnoreAlreadyExists(t *testing.T) { + context := map[string]interface{}{ + "org_domain": envvar.GetTestOrgDomainFromEnv(t), + "cust_id": envvar.GetTestCustIdFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + id := "groups/groupId/memberships/membershipId" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudIdentityGroupMembershipDestroyProducer(t), + Steps: []resource.TestStep{ + // The first step creates a new resource with create_ignore_already_exists=false + { + Config: testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context, false), + Check: resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), + }, + { + ResourceName: "google_cloud_identity_group_membership.acceptance", + ImportStateId: id, + ImportState: true, + 
ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"create_ignore_already_exists"}, // Import leaves this field out when false + }, + // The second step updates the resource to have create_ignore_already_exists=true + { + Config: testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context, true), + Check: resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), + }, + }, + }) +} + +// Test the option to ignore ALREADY_EXISTS error from creating a Source Repository. +func testAccCloudIdentityGroupMembership_createIgnoreAlreadyExists(t *testing.T) { + context := map[string]interface{}{ + "org_domain": envvar.GetTestOrgDomainFromEnv(t), + "cust_id": envvar.GetTestCustIdFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + id := "groups/groupId/memberships/membershipId" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudIdentityGroupMembershipDestroyProducer(t), + Steps: []resource.TestStep{ + // The first step creates a group membership + { + Config: testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context, false), + Check: resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), + }, + { + ResourceName: "google_cloud_identity_group_membership.acceptance", + ImportStateId: id, + ImportState: true, + ImportStateVerify: true, + }, + // The second step creates a new resource that duplicates with the existing group membership + { + Config: testAccCloudIdentityGroupMembershipDuplicateIgnoreAlreadyExists(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), + resource.TestCheckResourceAttr("google_cloud_identity_group_membership.duplicate", "id", id), + ), + }, + }, + }) +} + +func 
testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context map[string]interface{}, ignore_already_exists bool) string { + context["create_ignore_already_exists"] = fmt.Sprintf("%t", ignore_already_exists) + return acctest.Nprintf(` +resource "google_cloud_identity_group" "group" { + display_name = "tf-test-my-identity-group%{random_suffix}" + + parent = "customers/%{cust_id}" + + group_key { + id = "tf-test-my-identity-group%{random_suffix}@%{org_domain}" + } + + labels = { + "cloudidentity.googleapis.com/groups.discussion_forum" = "" + } +} +resource "google_cloud_identity_group" "child-group" { + display_name = "tf-test-my-identity-group%{random_suffix}-child" + + parent = "customers/%{cust_id}" + + group_key { + id = "tf-test-my-identity-group%{random_suffix}-child@%{org_domain}" + } + + labels = { + "cloudidentity.googleapis.com/groups.discussion_forum" = "" + } +} + +resource "google_cloud_identity_group_membership" "acceptance" { + group = google_cloud_identity_group.group.id + + preferred_member_key { + id = google_cloud_identity_group.child-group.group_key[0].id + } + + roles { + name = "MEMBER" + } + + create_ignore_already_exists = %{create_ignore_already_exists} +} +`, context) +} + +func testAccCloudIdentityGroupMembershipDuplicateIgnoreAlreadyExists(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_identity_group" "group" { + display_name = "tf-test-my-identity-group%{random_suffix}" + + parent = "customers/%{cust_id}" + + group_key { + id = "tf-test-my-identity-group%{random_suffix}@%{org_domain}" + } + + labels = { + "cloudidentity.googleapis.com/groups.discussion_forum" = "" + } +} +resource "google_cloud_identity_group" "child-group" { + display_name = "tf-test-my-identity-group%{random_suffix}-child" + + parent = "customers/%{cust_id}" + + group_key { + id = "tf-test-my-identity-group%{random_suffix}-child@%{org_domain}" + } + + labels = { + "cloudidentity.googleapis.com/groups.discussion_forum" 
= "" + } +} + +resource "google_cloud_identity_group_membership" "acceptance" { + group = google_cloud_identity_group.group.id + + preferred_member_key { + id = google_cloud_identity_group.child-group.group_key[0].id + } + + roles { + name = "MEMBER" + } +} + +resource "google_cloud_identity_group_membership" "duplicate" { + group = google_cloud_identity_group.group.id + + preferred_member_key { + id = google_cloud_identity_group.child-group.group_key[0].id + } + + roles { + name = "MEMBER" + } + + create_ignore_already_exists = true +} +`, context) +} From 72a5ee17393652fdbdbef27c6f1c63f41048c255 Mon Sep 17 00:00:00 2001 From: panerorenn9541 <36008213+panerorenn9541@users.noreply.github.com> Date: Mon, 23 Jun 2025 12:20:47 -0700 Subject: [PATCH 405/884] Spanner MR CMEK Backup Schedule Integration (#14307) --- mmv1/products/spanner/BackupSchedule.yaml | 11 ++ .../resource_spanner_schedule_backup_test.go | 173 ++++++++++++++++++ 2 files changed, 184 insertions(+) diff --git a/mmv1/products/spanner/BackupSchedule.yaml b/mmv1/products/spanner/BackupSchedule.yaml index 67d5eb3112ee..e220711ac218 100644 --- a/mmv1/products/spanner/BackupSchedule.yaml +++ b/mmv1/products/spanner/BackupSchedule.yaml @@ -177,3 +177,14 @@ properties: description: | The resource name of the Cloud KMS key to use for encryption. Format: 'projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{cryptoKey}' + conflicts: + - encryption_config.0.kms_key_names + - name: 'kmsKeyNames' + type: Array + description: | + Fully qualified name of the KMS keys to use to encrypt this database. The keys must exist + in the same locations as the Spanner Database. 
+ item_type: + type: String + conflicts: + - encryption_config.0.kms_key_name diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go index 96310a48fc57..92f2c0b77e58 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go @@ -104,6 +104,76 @@ func TestAccSpannerBackupSchedule_CMEKFullBackup(t *testing.T) { }) } +func TestAccSpannerBackupSchedule_MRCMEKIncrementalBackup(t *testing.T) { + t.Parallel() + suffix := acctest.RandString(t, 10) + + kms1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-mr-cmek-test-key-us-central1") + kms2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-mr-cmek-test-key-us-east1") + kms3 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east4", "tf-mr-cmek-test-key-us-east4") + + context := map[string]interface{}{ + "random_suffix": suffix, + "key_ring1": kms1.KeyRing.Name, + "key_name1": kms1.CryptoKey.Name, + "key_ring2": kms2.KeyRing.Name, + "key_name2": kms2.CryptoKey.Name, + "key_ring3": kms3.KeyRing.Name, + "key_name3": kms3.CryptoKey.Name, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerBackupScheduleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerBackupSchedule_MRCMEKIncremental(context), + }, + { + ResourceName: "google_spanner_backup_schedule.backup_schedule", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerBackupSchedule_MRCMEKFullBackup(t *testing.T) { + t.Parallel() + suffix := acctest.RandString(t, 10) + + kms1 := 
acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-mr-cmek-test-key-us-central1") + kms2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-mr-cmek-test-key-us-east1") + kms3 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east4", "tf-mr-cmek-test-key-us-east4") + + context := map[string]interface{}{ + "random_suffix": suffix, + "key_ring1": kms1.KeyRing.Name, + "key_name1": kms1.CryptoKey.Name, + "key_ring2": kms2.KeyRing.Name, + "key_name2": kms2.CryptoKey.Name, + "key_ring3": kms3.KeyRing.Name, + "key_name3": kms3.CryptoKey.Name, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerBackupScheduleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerBackupSchedule_MRCMEKFull(context), + }, + { + ResourceName: "google_spanner_backup_schedule.backup_schedule", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccSpannerBackupSchedule_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_spanner_instance" "instance" { @@ -264,3 +334,106 @@ resource "google_spanner_backup_schedule" "backup_schedule" { } `, context) } + +func testAccSpannerBackupSchedule_MRCMEKIncremental(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_spanner_instance" "instance" { + name = "my-instance-%{random_suffix}" + config = "nam3" + display_name = "My Instance" + num_nodes = 1 + edition = "ENTERPRISE_PLUS" +} + +resource "google_spanner_database" "database" { + instance = google_spanner_instance.instance.name + name = "my-database-%{random_suffix}" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] + deletion_protection = false + + encryption_config { + kms_key_names = [ + "%{key_name1}", + 
"%{key_name2}", + "%{key_name3}", + ] + } +} + +resource "google_spanner_backup_schedule" "backup_schedule" { + instance = google_spanner_instance.instance.name + database = google_spanner_database.database.name + name = "my-backup-schedule-%{random_suffix}" + + retention_duration = "172800s" + + spec { + cron_spec { + text = "0 12 * * *" + } + } + + incremental_backup_spec {} + + encryption_config { + encryption_type = "GOOGLE_DEFAULT_ENCRYPTION" + } +} +`, context) +} + +func testAccSpannerBackupSchedule_MRCMEKFull(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_spanner_instance" "instance" { + name = "my-instance-%{random_suffix}" + config = "nam3" + display_name = "My Instance" + num_nodes = 1 + edition = "ENTERPRISE_PLUS" +} + +resource "google_spanner_database" "database" { + instance = google_spanner_instance.instance.name + name = "my-database-%{random_suffix}" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] + deletion_protection = false + + encryption_config { + kms_key_names = [ + "%{key_name1}", + "%{key_name2}", + "%{key_name3}", + ] + } +} + +resource "google_spanner_backup_schedule" "backup_schedule" { + instance = google_spanner_instance.instance.name + database = google_spanner_database.database.name + name = "my-backup-schedule-%{random_suffix}" + + retention_duration = "172800s" + + spec { + cron_spec { + text = "0 12 * * *" + } + } + + full_backup_spec {} + + encryption_config { + encryption_type = "CUSTOMER_MANAGED_ENCRYPTION" + kms_key_names = [ + "%{key_name1}", + "%{key_name2}", + "%{key_name3}", + ] + } +} +`, context) +} From e7fb783d099d8f4b401621525b5e7fdba62e5bf5 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Mon, 23 Jun 2025 12:21:34 -0700 Subject: [PATCH 406/884] notebooks: fix test failures in `google_notebooks_instance` (#14329) --- .../terraform/examples/notebook_instance_basic.tf.tmpl | 2 +- 
.../terraform/examples/notebook_instance_basic_gpu.tf.tmpl | 2 +- .../examples/notebook_instance_basic_stopped.tf.tmpl | 2 +- .../terraform/examples/notebook_instance_full.tf.tmpl | 2 +- .../notebooks/resource_notebooks_instance_gpu_test.go.tmpl | 2 +- .../resource_notebooks_instance_state_test.go.tmpl | 4 ++-- .../services/notebooks/resource_notebooks_instance_test.go | 6 +++--- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl index c3a4e1e94acf..52ef097a32ec 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl @@ -4,6 +4,6 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { machine_type = "e2-medium" vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } } diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl index 00be07fddb1d..e6a90631481a 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl @@ -10,6 +10,6 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { } vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-gpu" + image_family = "pytorch-latest-cu124" } } diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl index 45b14362e670..b8187eddcb73 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl @@ -4,7 +4,7 @@ resource "google_notebooks_instance" 
"{{$.PrimaryResourceId}}" { machine_type = "e2-medium" vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } desired_state = "STOPPED" } diff --git a/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl index 930acc42a524..7c0f23128e54 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl @@ -5,7 +5,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } instance_owners = [ "{{index $.TestEnvVars "service_account"}}"] diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl index 74294cc7ed66..570f8122f460 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl @@ -45,7 +45,7 @@ resource "google_notebooks_instance" "test" { } vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-gpu" + image_family = "pytorch-latest-cu124" } install_gpu_driver = true accelerator_config { diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl index e50bf5b9d2c2..18167fb4b4b9 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl @@ -60,7 +60,7 @@ resource "google_notebooks_instance" "test" { machine_type 
= "e2-medium" vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } desired_state = "ACTIVE" } @@ -75,7 +75,7 @@ resource "google_notebooks_instance" "test" { machine_type = "e2-medium" vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } desired_state = "STOPPED" } diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go index b62361413ffd..fcac24848d0d 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go @@ -92,7 +92,7 @@ resource "google_notebooks_instance" "test" { vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } } `, name) @@ -107,7 +107,7 @@ resource "google_notebooks_instance" "instance" { vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } metadata = { @@ -133,7 +133,7 @@ resource "google_notebooks_instance" "instance" { vm_image { project = "deeplearning-platform-release" - image_family = "tf-latest-cpu" + image_family = "pytorch-latest-cu124" } metadata = { From fd94584767546dae798446f9aeebc6e340224f8f Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 23 Jun 2025 12:48:05 -0700 Subject: [PATCH 407/884] tgc-revival: replace PROJECT_ID with PROJECT_NUMBER in asset names (#14275) --- .../terraform/acctest/tgc_utils.go | 37 ++++++++++++++++--- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 0f2f20e60ac5..f1d0d4e1c92a 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ 
b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) type ResourceMetadata struct { @@ -33,11 +34,29 @@ type TgcMetadataPayload struct { PrimaryResource string `json:"primary_resource"` } -// Hardcode the Terraform resource name -> API service name mapping temporarily. -// TODO: [tgc] read the mapping from the resource metadata files. -var ApiServiceNames = map[string]string{ - "google_compute_instance": "compute.googleapis.com", - "google_project": "cloudresourcemanager.googleapis.com", +// PROJECT_NUMBER instead of PROJECT_ID is in the CAI asset names for the resources in those services +// https://cloud.google.com/asset-inventory/docs/asset-names +var serviceWithProjectNumber = map[string]struct{}{ + "apikeys": {}, // DCL + "binaryauthorization": {}, + "cloudtasks": {}, + "cloudbuild": {}, + "colab": {}, + "containerattached": {}, + "containeraws": {}, + "containerazure": {}, + "dialogflowcx": {}, + "discoveryengine": {}, + "documentai": {}, + "healthcare": {}, + "iap": {}, + "identityplatform": {}, + "logging": {}, + "monitoring": {}, + "osconfig": {}, + "secretmanager": {}, + "secretmanagerregional": {}, + "vpcaccess": {}, } // encodeToBase64JSON converts a struct to base64-encoded JSON @@ -53,6 +72,9 @@ func encodeToBase64JSON(data interface{}) (string, error) { // CollectAllTgcMetadata collects metadata for all resources in a test step func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc { return func(s *terraform.State) error { + projectId := envvar.GetTestProjectFromEnv() + projectNumber := envvar.GetTestProjectNumberFromEnv() + // Process each resource to get CAI asset names and resolve auto IDs for address, metadata := range tgcPayload.ResourceMetadata { // If there is import metadata update our primary resource @@ 
-79,6 +101,11 @@ func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc default: rName = rState.Primary.ID } + + if _, ok := serviceWithProjectNumber[metadata.Service]; ok { + rName = strings.Replace(rName, projectId, projectNumber, 1) + } + metadata.CaiAssetNames = []string{fmt.Sprintf("//%s/%s", apiServiceName, rName)} } From 37e5a55ae5f0b13c80e5f48377d08daca5cff50a Mon Sep 17 00:00:00 2001 From: Keith Jordy <6444028+kjordy@users.noreply.github.com> Date: Mon, 23 Jun 2025 15:55:06 -0400 Subject: [PATCH 408/884] Make description and displayName optional and mutable (#14328) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Cameron Thornton --- .../products/monitoring/MetricDescriptor.yaml | 5 --- ...ource_monitoring_metric_descriptor_test.go | 42 ++++++++++++++++--- 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/mmv1/products/monitoring/MetricDescriptor.yaml b/mmv1/products/monitoring/MetricDescriptor.yaml index 1fd268827b59..7294d5b177b5 100644 --- a/mmv1/products/monitoring/MetricDescriptor.yaml +++ b/mmv1/products/monitoring/MetricDescriptor.yaml @@ -156,20 +156,15 @@ properties: The supported units are a subset of The Unified Code for Units of Measure standard. More info can be found in the API documentation (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors). - immutable: true - name: 'description' type: String description: A detailed description of the metric, which can be used in documentation. - required: true - immutable: true - name: 'displayName' type: String description: A concise name for the metric, which can be displayed in user interfaces. Use sentence case without an ending period, for example "Request count". - required: true - immutable: true - name: 'metadata' type: NestedObject description: Metadata which can be used to guide usage of the metric. 
diff --git a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go index 530ceab09f12..55f0f94b5c0b 100644 --- a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go +++ b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go @@ -17,7 +17,7 @@ func TestAccMonitoringMetricDescriptor_update(t *testing.T) { CheckDestroy: testAccCheckMonitoringMetricDescriptorDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccMonitoringMetricDescriptor_update("30s", "30s"), + Config: testAccMonitoringMetricDescriptor_update("initial description", "initial display name", "30s", "30s"), }, { ResourceName: "google_monitoring_metric_descriptor.basic", @@ -26,7 +26,7 @@ func TestAccMonitoringMetricDescriptor_update(t *testing.T) { ImportStateVerifyIgnore: []string{"metadata", "launch_stage"}, }, { - Config: testAccMonitoringMetricDescriptor_update("60s", "60s"), + Config: testAccMonitoringMetricDescriptor_update("updated description", "updated display name", "60s", "60s"), }, { ResourceName: "google_monitoring_metric_descriptor.basic", @@ -34,15 +34,24 @@ func TestAccMonitoringMetricDescriptor_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"metadata", "launch_stage"}, }, + { + Config: testAccMonitoringMetricDescriptor_omittedFields(), + }, + { + ResourceName: "google_monitoring_metric_descriptor.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"metadata", "launch_stage", "description", "display_name"}, + }, }, }) } -func testAccMonitoringMetricDescriptor_update(samplePeriod, ingestDelay string) string { +func testAccMonitoringMetricDescriptor_update(description, displayName, samplePeriod, ingestDelay string) string { return fmt.Sprintf(` resource "google_monitoring_metric_descriptor" 
"basic" { - description = "Daily sales records from all branch stores." - display_name = "daily sales" + description = "%s" + display_name = "%s" type = "custom.googleapis.com/stores/daily_sales" metric_kind = "GAUGE" value_type = "DOUBLE" @@ -58,6 +67,27 @@ resource "google_monitoring_metric_descriptor" "basic" { ingest_delay = "%s" } } -`, samplePeriod, ingestDelay, +`, description, displayName, samplePeriod, ingestDelay, ) } + +func testAccMonitoringMetricDescriptor_omittedFields() string { + return ` +resource "google_monitoring_metric_descriptor" "basic" { + type = "custom.googleapis.com/stores/daily_sales" + metric_kind = "GAUGE" + value_type = "DOUBLE" + unit = "{USD}" + labels { + key = "key" + value_type = "STRING" + description = "description" + } + launch_stage = "BETA" + metadata { + sample_period = "30s" + ingest_delay = "30s" + } +} +` +} From 4646bd4c578b2b861da8475e83ab0c264a834004 Mon Sep 17 00:00:00 2001 From: Yanwei Guo Date: Mon, 23 Jun 2025 13:52:54 -0700 Subject: [PATCH 409/884] Add support for GPU fields in Cloud Run v2 Job (#14305) --- mmv1/products/cloudrunv2/Job.yaml | 20 ++++- .../examples/cloudrunv2_job_gpu.tf.tmpl | 17 ++++ .../resource_cloud_run_v2_job_test.go.tmpl | 86 +++++++++++++++++-- 3 files changed, 114 insertions(+), 9 deletions(-) create mode 100644 mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index 1aa2bcaa3a7a..c86b38b70eb2 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -125,6 +125,14 @@ examples: cloud_run_job_name: 'cloudrun-job' ignore_read_extra: - 'deletion_protection' + - name: 'cloudrunv2_job_gpu' + min_version: 'beta' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-job%s", context["random_suffix"])' + vars: + cloud_run_job_name: 'cloudrun-job' + ignore_read_extra: + - 'deletion_protection' virtual_fields: - name: 'deletion_protection' 
description: | @@ -397,7 +405,7 @@ properties: - name: 'limits' type: KeyValuePairs description: |- - Only memory and CPU are supported. Use key `cpu` for CPU limit and `memory` for memory limit. Note: The only supported values for CPU are '1', '2', '4', and '8'. Setting 4 CPU requires at least 2Gi of memory. The values of the map is string form of the 'quantity' k8s type: https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go + Only memory, CPU, and nvidia.com/gpu are supported. Use key `cpu` for CPU limit, `memory` for memory limit, `nvidia.com/gpu` for gpu limit. Note: The only supported values for CPU are '1', '2', '4', and '8'. Setting 4 CPU requires at least 2Gi of memory. The values of the map is string form of the 'quantity' k8s type: https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go default_from_api: true - name: 'ports' type: Array @@ -765,6 +773,16 @@ properties: Number of retries allowed per Task, before marking this Task failed. Defaults to 3. Minimum value is 0. send_empty_value: true default_value: 3 + - name: 'nodeSelector' + min_version: beta + type: NestedObject + description: Node Selector describes the hardware requirements of the resources. + properties: + - name: 'accelerator' + type: String + description: + The GPU to attach to an instance. See https://cloud.google.com/run/docs/configuring/jobs/gpu for configuring GPU. 
+ required: true - name: 'observedGeneration' type: String description: | diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl new file mode 100644 index 000000000000..4961d934d95b --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_cloud_run_v2_job" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "cloud_run_job_name"}}" + location = "us-central1" + deletion_protection = false + launch_stage = "BETA" + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/job" + } + node_selector { + accelerator = "nvidia-l4" + } + } + } +} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index 7fcf28a25b9e..0cdd6861aa33 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -211,7 +211,7 @@ resource "google_compute_network" "custom_test" { func TestAccCloudRunV2Job_cloudrunv2JobWithDirectVPCUpdate(t *testing.T) { t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) context := map[string]interface{}{ "job_name": jobName, "project": envvar.GetTestProjectFromEnv(), @@ -312,7 +312,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGcsUpdate(t *testing.T) { acctest.SkipIfVcr(t) t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) context := map[string]interface{}{ "job_name": jobName, } @@ -323,7 +323,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGcsUpdate(t *testing.T) { 
CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccCloudRunV2Job_cloudrunv2JobWithNoVolume(context), + Config: testAccCloudRunV2Job_cloudrunv2BasicJob(context), }, { ResourceName: "google_cloud_run_v2_job.default", @@ -344,7 +344,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGcsUpdate(t *testing.T) { }) } -func testAccCloudRunV2Job_cloudrunv2JobWithNoVolume(context map[string]interface{}) string { +func testAccCloudRunV2Job_cloudrunv2BasicJob(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_cloud_run_v2_job" "default" { name = "%{job_name}" @@ -407,7 +407,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithNfsUpdate(t *testing.T) { acctest.SkipIfVcr(t) t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) context := map[string]interface{}{ "job_name": jobName, } @@ -418,7 +418,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithNfsUpdate(t *testing.T) { CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccCloudRunV2Job_cloudrunv2JobWithNoVolume(context), + Config: testAccCloudRunV2Job_cloudrunv2BasicJob(context), }, { ResourceName: "google_cloud_run_v2_job.default", @@ -886,7 +886,7 @@ resource "google_cloud_run_v2_job" "default" { func TestAccCloudRunV2Job_cloudrunv2JobWithStartExecutionTokenUpdate(t *testing.T) { t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) context1 := map[string]interface{}{ "job_name": jobName, "token": "token1", @@ -944,7 +944,7 @@ func testAccCloudRunV2Job_cloudrunv2JobWithStartExecutionToken(context map[strin func TestAccCloudRunV2Job_cloudrunv2JobWithRunExecutionTokenUpdate(t *testing.T) { t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-service%s", 
acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) context1 := map[string]interface{}{ "job_name": jobName, "token": "token1", @@ -998,4 +998,74 @@ func testAccCloudRunV2Job_cloudrunv2JobWithRunExecutionToken(context map[string] } `, context) } + +func TestAccCloudRunV2Job_cloudrunv2JobWithGpuUpdate(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + jobName := fmt.Sprintf("tf-test-cloudrun-job-gpu%s", acctest.RandString(t, 10)) + context := map[string]interface{}{ + "job_name": jobName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Job_cloudrunv2BasicJob(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "launch_stage", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Job_cloudrunv2JobWithGpu(context), + }, + { + ResourceName: "google_cloud_run_v2_job.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2Job_cloudrunv2JobWithGpu(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_cloud_run_v2_job" "default" { + provider = google-beta + name = "%{job_name}" + location = "us-central1" + launch_stage = "BETA" + deletion_protection = false + template { + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/job" + resources { + limits = { + "cpu" = "4" + "memory" = "16Gi" + "nvidia.com/gpu" = "1" + } + } + } + node_selector { + accelerator = "nvidia-l4" + } + } + } + lifecycle { + ignore_changes = [ + launch_stage, + ] + } + } +`, context) +} {{- end }} From 
3cc602c04473a48312f73fb197fad9f534491a7b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 23 Jun 2025 14:37:18 -0700 Subject: [PATCH 410/884] tgc-revival: support compute tgc autoscaler (#14330) --- mmv1/api/type.go | 14 +++++++++++ mmv1/products/compute/Autoscaler.yaml | 4 +++ .../cai2hcl/resource_converter.go.tmpl | 2 ++ .../compute_auto_scaler_zone.go.tmpl | 22 ++++++++++++++++ .../custom_expand/compute_full_url.go.tmpl | 25 +++++++++++++++++++ .../compute_auto_scaler.go.tmpl | 3 +++ .../tfplan2cai/resource_converter.go.tmpl | 4 +++ .../tgc_next/test/assert_test_files.go | 2 +- 8 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl create mode 100644 mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl create mode 100644 mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 66ac90255b17..20377ebe56c3 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -296,6 +296,20 @@ type Type struct { // The field is not present in CAI asset IsMissingInCai bool `yaml:"is_missing_in_cai,omitempty"` + + // A custom expander replaces the default expander for an attribute. + // It is called as part of tfplan2cai conversion if + // object.input is false. It can return an object of any type, + // so the function header *is* part of the custom code template. + // As with flatten, `property` and `prefix` are available. + CustomTgcExpand string `yaml:"custom_tgc_expand,omitempty"` + + // A custom flattener replaces the default flattener for an attribute. + // It is called as part of cai2hcl conversion. It can return an object of any type, + // so the function header *is* a part of the custom code template. To help with + // creating the function header, `property` and `prefix` are available, + // just as they are in the standard flattener template. 
+ CustomTgcFlatten string `yaml:"custom_tgc_flatten,omitempty"` } const MAX_NAME = 20 diff --git a/mmv1/products/compute/Autoscaler.yaml b/mmv1/products/compute/Autoscaler.yaml index c96c55e04624..de86488d98d7 100644 --- a/mmv1/products/compute/Autoscaler.yaml +++ b/mmv1/products/compute/Autoscaler.yaml @@ -42,6 +42,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: sweeper: url_substitutions: @@ -83,6 +84,8 @@ parameters: ignore_read: true default_from_api: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + custom_tgc_flatten: 'templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl' + custom_tgc_expand: 'templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl' resource: 'Zone' imports: 'name' properties: @@ -423,5 +426,6 @@ properties: URL of the managed instance group that this autoscaler will scale. required: true custom_expand: 'templates/terraform/custom_expand/compute_full_url.tmpl' + custom_tgc_expand: 'templates/tgc_next/custom_expand/compute_full_url.go.tmpl' resource: 'InstanceGroupManager' imports: 'selfLink' diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index fe909e8b6064..49e7ed3a9729 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -130,6 +130,8 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return utils.RemoveTerraformAttributionLabel(v) } + {{ else if $prop.CustomTgcFlatten }} + {{- $prop.CustomTemplate $prop.CustomTgcFlatten false -}} {{ else }} {{ template "flattenPropertyMethod" $prop -}} {{- end }} diff --git 
a/mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl b/mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl new file mode 100644 index 000000000000..49c55d8245ae --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl @@ -0,0 +1,22 @@ +{{/* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + f, err := tpgresource.ParseProjectFieldValue("{{$.ResourceType}}", v.(string), "project", d, config, true) + if err != nil { + return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) + } + + url := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) + + return url, nil +} diff --git a/mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl b/mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl new file mode 100644 index 000000000000..428878caa29b --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl @@ -0,0 +1,25 @@ +{{/* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + if v == nil || v.(string) == "" { + return "", nil + } + f, err := {{ template "expandResourceRef" dict "VarName" "v.(string)" "ResourceRef" $.ResourceRef "ResourceType" $.ResourceType}} + if err != nil { + return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) + } + + url := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) + + return url, nil +} diff --git a/mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl new file mode 100644 index 000000000000..cec3af0e9ccb --- /dev/null +++ b/mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl @@ -0,0 +1,3 @@ +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index e8c2565b275d..fed089050f72 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -162,5 +162,9 @@ func resource{{ $.ResourceName -}}TgcEncoder(d tpgresource.TerraformResourceData {{- end}} {{ range $prop := $.SettableProperties }} + {{ if $prop.CustomTgcExpand }} + {{- $prop.CustomTemplate $prop.CustomTgcExpand false -}} + {{ else }} 
{{- template "expandPropertyMethod" $prop -}} + {{ end }} {{- end}} \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 770e2be6b0e2..f2e81cda2928 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -117,7 +117,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData // exportTfFile := fmt.Sprintf("%s_export.tf", t.Name()) // err = os.WriteFile(exportTfFile, exportConfigData, 0644) // if err != nil { - // return fmt.Errorf("error writing file", exportTfFile) + // return fmt.Errorf("error writing file %s", exportTfFile) // } // defer os.Remove(exportTfFile) From 5a117c3fb9c4c3d595b9f9e5e8f4061c1105f7fe Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Mon, 23 Jun 2025 15:31:24 -0700 Subject: [PATCH 411/884] Adding effective_reserved_ip_range and fix for null value in redis datasource (#14289) --- mmv1/products/redis/Instance.yaml | 10 ++++++++++ .../redis/data_source_redis_instance.go | 17 +++++++++++++++++ .../redis/data_source_redis_instance_test.go | 1 + 3 files changed, 28 insertions(+) diff --git a/mmv1/products/redis/Instance.yaml b/mmv1/products/redis/Instance.yaml index 419ae6c60bb7..59166df38db8 100644 --- a/mmv1/products/redis/Instance.yaml +++ b/mmv1/products/redis/Instance.yaml @@ -445,6 +445,16 @@ properties: # but will be a subset of the range. ignore_read: true default_from_api: true + - name: 'effectiveReservedIpRange' + type: String + description: | + The CIDR range of internal addresses that are reserved for this + instance. If not provided, the service will choose an unused /29 + block, for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be + unique and non-overlapping with existing subnets in an authorized + network. 
+ output: true + api_name: reservedIpRange - name: 'tier' type: Enum description: | diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go b/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go index 1d16a177a860..cd96ec0f141d 100644 --- a/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go @@ -39,9 +39,26 @@ func dataSourceGoogleRedisInstanceRead(d *schema.ResourceData, meta interface{}) if err := tpgresource.SetDataSourceLabels(d); err != nil { return err } + // added to resolve a null value for reserved_ip_range. This was not getting populated due to the addtion of ignore_read + if err := SetDataSourceReservedIpRange(d); err != nil { + return err + } if d.Id() == "" { return fmt.Errorf("%s not found", id) } return nil } + +func SetDataSourceReservedIpRange(d *schema.ResourceData) error { + effectiveReservedIpRange := d.Get("effective_reserved_ip_range") + if effectiveReservedIpRange == nil { + return nil + } + + if err := d.Set("reserved_ip_range", effectiveReservedIpRange); err != nil { + return fmt.Errorf("Error setting reserved_ip_range in data source: %s", err) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go b/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go index e1486ada8327..e50eed765216 100644 --- a/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go @@ -19,6 +19,7 @@ func TestAccRedisInstanceDatasource_basic(t *testing.T) { Config: testAccRedisInstanceDatasourceConfig(acctest.RandString(t, 10)), Check: resource.ComposeTestCheckFunc( acctest.CheckDataSourceStateMatchesResourceState("data.google_redis_instance.redis", "google_redis_instance.redis"), + 
resource.TestCheckResourceAttrSet("data.google_redis_instance.redis", "reserved_ip_range"), ), }, }, From 859325913904352450ed79a49269c1fb8dd8e9ba Mon Sep 17 00:00:00 2001 From: coder-221 <185867912+coder-221@users.noreply.github.com> Date: Mon, 23 Jun 2025 16:23:07 -0700 Subject: [PATCH 412/884] Update Access Context Manager Test for user types (#14311) --- ..._context_manager_access_level_test.go.tmpl | 41 ++++++++++++++----- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl index f2d5dd34bec7..5a086b792c93 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl @@ -46,7 +46,13 @@ func testAccAccessContextManagerAccessLevel_basicTest(t *testing.T) { } func testAccAccessContextManagerAccessLevel_fullTest(t *testing.T) { - org := envvar.GetTestOrgFromEnv(t) + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + "policy_title": "my policy", + "level_title_name": "level", + } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -54,7 +60,7 @@ func testAccAccessContextManagerAccessLevel_fullTest(t *testing.T) { CheckDestroy: testAccCheckAccessContextManagerAccessLevelDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccAccessContextManagerAccessLevel_full(org, "my policy", "level"), + Config: testAccAccessContextManagerAccessLevel_full(context), }, { ResourceName: "google_access_context_manager_access_level.test-access", @@ -202,23 +208,38 @@ resource 
"google_access_context_manager_access_level" "test-access" { `, org, policyTitle, levelTitleName, levelTitleName) } -func testAccAccessContextManagerAccessLevel_full(org, policyTitle, levelTitleName string) string { - return fmt.Sprintf(` +func testAccAccessContextManagerAccessLevel_full(context map[string]interface{}) string { + + return acctest.Nprintf(` resource "google_access_context_manager_access_policy" "test-access" { - parent = "organizations/%s" - title = "%s" + parent = "organizations/%{org_id}" + title = "%{policy_title}" +} + +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_service_account" "test-account" { + account_id = "tf-test-account%{random_suffix}" + display_name = "Test Service Account" + project = google_project.project.project_id } resource "google_access_context_manager_access_level" "test-access" { parent = "accessPolicies/${google_access_context_manager_access_policy.test-access.name}" - name = "accessPolicies/${google_access_context_manager_access_policy.test-access.name}/accessLevels/%s" - title = "%s" + name = "accessPolicies/${google_access_context_manager_access_policy.test-access.name}/accessLevels/%{level_title_name}" + title = "%{level_title_name}" description = "hello" basic { combining_function = "AND" conditions { ip_subnetworks = ["192.0.4.0/24"] - members = ["user:test@google.com", "user:test2@google.com"] + members = ["serviceAccount:${google_service_account.test-account.email}"] negate = false device_policy { require_screen_lock = false @@ -236,5 +257,5 @@ resource "google_access_context_manager_access_level" "test-access" { } } } -`, org, policyTitle, levelTitleName, levelTitleName) +`, context) } From 5eb146af40feb1b9446018e7158aeaf7d237d535 Mon Sep 17 00:00:00 2001 From: Naga Bodepudi Date: Mon, 23 Jun 2025 19:39:47 -0400 Subject: [PATCH 
413/884] Fix dns_record_set with failover example (#14310) --- .../terraform/website/docs/r/dns_record_set.html.markdown | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown b/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown index 0f3a73923593..25d84e9f5523 100644 --- a/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown @@ -218,6 +218,7 @@ resource "google_dns_record_set" "a" { resource "google_dns_managed_zone" "prod" { name = "prod-zone" dns_name = "prod.mydomain.com." + visibility = "private" } resource "google_compute_forwarding_rule" "prod" { From 6a43e888929ae354606c87088bac0a6584c95923 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 24 Jun 2025 10:57:04 -0700 Subject: [PATCH 414/884] Modify the primary resource name for disk_async test (#14347) --- mmv1/products/compute/Disk.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index ffc2ae3a61fb..9a622697675b 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -73,7 +73,7 @@ examples: vars: disk_name: 'test-disk' - name: 'disk_async' - primary_resource_id: 'primary' + primary_resource_id: 'secondary' primary_resource_name: 'fmt.Sprintf("tf-test-test-disk%s", context["random_suffix"])' vars: disk_name: 'async-test-disk' From dbbc22cce76ad82b9eb5f734c1ed7a00a6afee34 Mon Sep 17 00:00:00 2001 From: Yuval Brik Date: Tue, 24 Jun 2025 21:07:09 +0300 Subject: [PATCH 415/884] Add Network Firewall Policy "Policy Type" field (#14249) --- .../compute/NetworkFirewallPolicy.yaml | 11 ++++++++++ .../NetworkFirewallPolicyWithRules.yaml | 11 ++++++++++ .../compute/RegionNetworkFirewallPolicy.yaml | 17 ++++++++++++++ .../RegionNetworkFirewallPolicyWithRules.yaml | 17 ++++++++++++++ 
...rk_firewall_policy_with_rules_roce.tf.tmpl | 22 +++++++++++++++++++ ...egion_network_firewall_policy_roce.tf.tmpl | 6 +++++ 6 files changed, 84 insertions(+) create mode 100644 mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_roce.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/region_network_firewall_policy_roce.tf.tmpl diff --git a/mmv1/products/compute/NetworkFirewallPolicy.yaml b/mmv1/products/compute/NetworkFirewallPolicy.yaml index e003f212f534..7d31a4affc07 100644 --- a/mmv1/products/compute/NetworkFirewallPolicy.yaml +++ b/mmv1/products/compute/NetworkFirewallPolicy.yaml @@ -57,6 +57,17 @@ properties: - name: 'description' type: String description: An optional description of this resource. Provide this property when you create the resource. + - name: 'policyType' + type: Enum + immutable: true + description: | + Policy type is used to determine which resources (networks) the policy can be associated with. + A policy can be associated with a network only if the network has the matching policyType in its network profile. + Different policy types may support some of the Firewall Rules features. + min_version: 'beta' + default_from_api: true + enum_values: + - 'VPC_POLICY' - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. diff --git a/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml b/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml index 48f935b4d065..cc36572932c0 100644 --- a/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml @@ -76,6 +76,17 @@ properties: - name: 'description' type: String description: An optional description of this resource. + - name: 'policyType' + type: Enum + immutable: true + description: | + Policy type is used to determine which resources (networks) the policy can be associated with. 
+ A policy can be associated with a network only if the network has the matching policyType in its network profile. + Different policy types may support some of the Firewall Rules features. + default_from_api: true + min_version: 'beta' + enum_values: + - 'VPC_POLICY' - name: 'rule' type: Array description: A list of firewall policy rules. diff --git a/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml b/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml index 92531c95af69..cc5143b725a2 100644 --- a/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml +++ b/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml @@ -38,6 +38,11 @@ examples: primary_resource_id: 'policy' vars: policy_name: 'tf-test-policy' + - name: 'region_network_firewall_policy_roce' + primary_resource_id: 'policy' + min_version: beta + vars: + policy_name: 'rnf-policy' parameters: - name: 'region' type: String @@ -63,6 +68,18 @@ properties: - name: 'description' type: String description: An optional description of this resource. Provide this property when you create the resource. + - name: 'policyType' + type: Enum + immutable: true + description: | + Policy type is used to determine which resources (networks) the policy can be associated with. + A policy can be associated with a network only if the network has the matching policyType in its network profile. + Different policy types may support some of the Firewall Rules features. + min_version: 'beta' + default_from_api: true + enum_values: + - 'VPC_POLICY' + - 'RDMA_ROCE_POLICY' - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. 
diff --git a/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml b/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml index 7681f5262886..3d9bd318ac32 100644 --- a/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml @@ -52,6 +52,11 @@ examples: tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' + - name: 'compute_region_network_firewall_policy_with_rules_roce' + primary_resource_id: 'policy' + min_version: beta + vars: + policy_name: 'rnf-policy' parameters: - name: 'region' type: String @@ -83,6 +88,18 @@ properties: - name: 'description' type: String description: An optional description of this resource. + - name: 'policyType' + type: Enum + immutable: true + description: | + Policy type is used to determine which resources (networks) the policy can be associated with. + A policy can be associated with a network only if the network has the matching policyType in its network profile. + Different policy types may support some of the Firewall Rules features. + default_from_api: true + min_version: 'beta' + enum_values: + - 'VPC_POLICY' + - 'RDMA_ROCE_POLICY' - name: 'rule' type: Array description: A list of firewall policy rules. 
diff --git a/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_roce.tf.tmpl b/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_roce.tf.tmpl new file mode 100644 index 000000000000..d9e3fb8cc5a1 --- /dev/null +++ b/mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_roce.tf.tmpl @@ -0,0 +1,22 @@ +resource "google_compute_region_network_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "policy_name"}}" + description = "Terraform test" + policy_type = "RDMA_ROCE_POLICY" + + rule { + description = "deny all rule" + priority = 1000 + enable_logging = true + action = "deny" + direction = "INGRESS" + + match { + src_ip_ranges = ["0.0.0.0/0"] + + layer4_config { + ip_protocol = "all" + } + } + } +} diff --git a/mmv1/templates/terraform/examples/region_network_firewall_policy_roce.tf.tmpl b/mmv1/templates/terraform/examples/region_network_firewall_policy_roce.tf.tmpl new file mode 100644 index 000000000000..ea644f9e7654 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_network_firewall_policy_roce.tf.tmpl @@ -0,0 +1,6 @@ +resource "google_compute_region_network_firewall_policy" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "policy_name"}}" + description = "Terraform test" + policy_type = "RDMA_ROCE_POLICY" +} From 9c9b349ac2ad9b1b32bbcb4fa332cde058022484 Mon Sep 17 00:00:00 2001 From: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Date: Tue, 24 Jun 2025 11:40:27 -0700 Subject: [PATCH 416/884] Promote launch stage to BETA (#14294) --- .../cloudrunv2_worker_pool_basic.tf.tmpl | 2 +- ...drunv2_worker_pool_custom_audiences.tf.tmpl | 2 +- .../cloudrunv2_worker_pool_directvpc.tf.tmpl | 2 +- .../cloudrunv2_worker_pool_limits.tf.tmpl | 2 +- .../cloudrunv2_worker_pool_mount_gcs.tf.tmpl | 2 +- .../cloudrunv2_worker_pool_mount_nfs.tf.tmpl | 2 +- 
...oudrunv2_worker_pool_multicontainer.tf.tmpl | 3 +-- .../cloudrunv2_worker_pool_secret.tf.tmpl | 2 +- .../cloudrunv2_worker_pool_sql.tf.tmpl | 2 +- ...rce_google_cloud_run_v2_worker_pool_test.go | 2 +- ...ource_cloud_run_v2_worker_pool_test.go.tmpl | 18 +++++++++--------- 11 files changed, 19 insertions(+), 20 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl index 18183202d8ba..cd4fd00e0c30 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl @@ -2,7 +2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl index 6c27c3837755..f81cd898e482 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl @@ -2,7 +2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" custom_audiences = ["aud1"] template { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl index 722108cda5b0..a54cf24360f2 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl @@ -2,7 
+2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl index 0c23cab900ea..e8a74f73a755 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl @@ -2,7 +2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl index 626028614c1a..0ce8f0eee80b 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl @@ -3,7 +3,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl index 18642072cd13..aca5b4dbeb26 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl @@ -3,7 +3,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + 
launch_stage = "BETA" template { containers { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl index 1d8baaf3d142..049170b11fa8 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl @@ -2,13 +2,12 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { name = "hello-1" image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - depends_on = ["hello-2"] volume_mounts { name = "empty-dir-volume" mount_path = "/mnt" diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl index 0c3e925f0631..17e04d5155e8 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl @@ -2,7 +2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { volumes { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl index ffdd851e93d4..957b83b7e2b3 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl @@ -2,7 +2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" 
deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { diff --git a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go index a8f0faad67d7..d7d716b59556 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go +++ b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go @@ -40,7 +40,7 @@ resource "google_cloud_run_v2_worker_pool" "hello" { name = "%s" location = "%s" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl index 1f57cfc70c5c..9c65929807b8 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl @@ -50,7 +50,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { name = "tf-test-cloudrun-worker-pool%{random_suffix}" description = "description creating" location = "us-central1" - launch_stage = "ALPHA" + launch_stage = "BETA" annotations = { generated-by = "magic-modules" } @@ -102,7 +102,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { description = "description updating" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" annotations = { generated-by = "magic-modules-files" @@ -194,7 +194,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { description = "description creating" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" annotations = { @@ -298,7 +298,7 @@ 
resource "google_cloud_run_v2_worker_pool" "default" { name = "%{service_name}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { @@ -321,7 +321,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { name = "%{service_name}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" binary_authorization { policy = "projects/%{project}/platforms/cloudRun/policies/my-policy" @@ -389,7 +389,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { name = "%s" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { @@ -407,7 +407,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { location = "us-central1" deletion_protection = false custom_audiences = ["%s"] - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { @@ -478,7 +478,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { name = "tf-test-cloudrun-worker-pool%{random_suffix}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" labels = { @@ -505,7 +505,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { name = "tf-test-cloudrun-worker-pool%{random_suffix}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" labels = { From 11209ac3e5d2c8a1ef3e5c0f4414c3a1e48b62af Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Wed, 25 Jun 2025 00:36:06 +0530 Subject: [PATCH 417/884] Removed Immutability for log_linked_dataset_query_user_email (#14316) --- mmv1/products/bigqueryanalyticshub/DataExchange.yaml | 3 +-- mmv1/products/bigqueryanalyticshub/Listing.yaml | 3 +-- .../resource_bigquery_analytics_hub_listing_test.go | 5 +++++ 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml 
index 66aef2bf7760..9617fc835bb1 100644 --- a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml +++ b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml @@ -149,5 +149,4 @@ properties: - name: 'logLinkedDatasetQueryUserEmail' type: Boolean description: - If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. - immutable: true + If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. Once enabled, this setting cannot be turned off. diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index 5a29b6597626..3c06cce7a17e 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -240,5 +240,4 @@ properties: - name: 'logLinkedDatasetQueryUserEmail' type: Boolean description: - If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. - immutable: true + If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. Once enabled, this setting cannot be turned off. 
diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go index 7f2a686fd797..89eb9d2cffc0 100644 --- a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go @@ -29,6 +29,10 @@ func TestAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingUpdate(t *tes }, { Config: testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingUpdate(context), + Check: resource.ComposeTestCheckFunc( + // Verify log_linked_dataset_query_user_email has been set to true (at top level) + resource.TestCheckResourceAttr("google_bigquery_analytics_hub_listing.listing", "log_linked_dataset_query_user_email", "true"), + ), }, { ResourceName: "google_bigquery_analytics_hub_listing.listing", @@ -69,6 +73,7 @@ resource "google_bigquery_analytics_hub_listing" "listing" { listing_id = "tf_test_my_listing%{random_suffix}" display_name = "tf_test_my_listing%{random_suffix}" description = "example data exchange update%{random_suffix}" + log_linked_dataset_query_user_email = true bigquery_dataset { dataset = google_bigquery_dataset.listing.id From c42b1889a8038f7019a94f82b6b513ff29ac5f7f Mon Sep 17 00:00:00 2001 From: coder-221 <185867912+coder-221@users.noreply.github.com> Date: Tue, 24 Jun 2025 12:19:55 -0700 Subject: [PATCH 418/884] Update service perimeter test for user types (#14325) --- ...ss_context_manager_access_level_condition_test.go | 4 ++-- ...ss_context_manager_service_perimeter_test.go.tmpl | 12 ++++++------ ...ccess_context_manager_services_perimeters_test.go | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go 
b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go index 9c9ec415ebb2..e53b1ae3328a 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go @@ -25,7 +25,7 @@ func testAccAccessContextManagerAccessLevelCondition_basicTest(t *testing.T) { vpcName := fmt.Sprintf("test-vpc-%s", acctest.RandString(t, 10)) expected := map[string]interface{}{ - "members": []interface{}{"user:test@google.com", "user:test2@google.com", fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", serviceAccountName, project)}, + "members": []interface{}{fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", serviceAccountName, project)}, "devicePolicy": map[string]interface{}{ "requireCorpOwned": true, "osConstraints": []interface{}{ @@ -164,7 +164,7 @@ resource "google_compute_network" "vpc_network" { resource "google_access_context_manager_access_level_condition" "access-level-condition" { access_level = google_access_context_manager_access_level.test-access.name - members = ["user:test@google.com", "user:test2@google.com", "serviceAccount:${google_service_account.created-later.email}"] + members = ["serviceAccount:${google_service_account.created-later.email}"] negate = false device_policy { require_screen_lock = false diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl index 08f854d44b90..e7b2f7cfccdc 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl @@ -258,7 +258,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { ingress_policies { title = "ingress policy 2" ingress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } ingress_to { resources = ["*"] @@ -267,7 +267,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { ingress_policies { title = "ingress policy 3" ingress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } ingress_to { resources = ["*"] @@ -302,7 +302,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { egress_policies { title = "egress policy 2" egress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } egress_to { resources = ["*"] @@ -311,7 +311,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { egress_policies { title = "egress policy 3" egress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } egress_to { resources = ["*"] @@ -368,7 +368,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { ingress_policies { title = "ingress policy 2" ingress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } ingress_to { resources = ["*"] @@ -403,7 +403,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { egress_policies { title = "egress policy 2" egress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } egress_to { resources = ["*"] diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go index 
90e47fc8fc83..afa80fb2a486 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go @@ -260,7 +260,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { ingress_policies { title = "ingress policy title 2" ingress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } ingress_to { resources = ["*"] @@ -286,7 +286,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { egress_policies { title = "egress policy title 2" egress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } egress_to { resources = ["*"] @@ -350,7 +350,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { ingress_policies { title = "ingress policy title 2" ingress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } ingress_to { resources = ["*"] @@ -376,7 +376,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { egress_policies { title = "egress policy title 2" egress_from { - identities = ["user:test@google.com"] + identities = ["group:test@google.com"] } egress_to { resources = ["*"] From 7d597a7796f5ff0aa776cbb31dc2c257e303f7aa Mon Sep 17 00:00:00 2001 From: dixuswe <152918466+dixuswe@users.noreply.github.com> Date: Tue, 24 Jun 2025 14:28:47 -0700 Subject: [PATCH 419/884] fix invalid build_config.worker_pool in cloudrunv2 example and test (#14291) --- .../terraform/examples/cloudrunv2_service_function.tf.tmpl | 1 - .../cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl | 2 -- 2 files changed, 3 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudrunv2_service_function.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_service_function.tf.tmpl index 
2792a67004a4..4df0e9b4500b 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_service_function.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_service_function.tf.tmpl @@ -16,7 +16,6 @@ resource "google_cloud_run_v2_service" "{{$.PrimaryResourceId}}" { image_uri = "us-docker.pkg.dev/cloudrun/container/hello" base_image = "us-central1-docker.pkg.dev/serverless-runtimes/google-22-full/runtimes/nodejs22" enable_automatic_updates = true - worker_pool = "worker-pool" environment_variables = { FOO_KEY = "FOO_VALUE" BAR_KEY = "BAR_VALUE" diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index 2291c5b7e5cb..637a63830ff6 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -1446,7 +1446,6 @@ resource "google_cloud_run_v2_service" "default" { image_uri = "us-docker.pkg.dev/cloudrun/container/hello" base_image = "us-central1-docker.pkg.dev/serverless-runtimes/google-22-full/runtimes/nodejs22" enable_automatic_updates = true - worker_pool = "worker-pool" environment_variables = { FOO_KEY = "FOO_VALUE" BAR_KEY = "BAR_VALUE" @@ -1512,7 +1511,6 @@ resource "google_cloud_run_v2_service" "default" { image_uri = "gcr.io/cloudrun/hello:latest" base_image = "us-central1-docker.pkg.dev/serverless-runtimes/google-22-full/runtimes/nodejs20" enable_automatic_updates = false - worker_pool = "worker-pool-2" environment_variables = { FOO_KEY_FOO = "FOO_VALUE_FOO" BAR_KEY_BAR = "BAR_VALUE_BAR" From f4bd057e02ceff5aa727aaaa9aac87e19e90c536 Mon Sep 17 00:00:00 2001 From: DavinaRen Date: Tue, 24 Jun 2025 17:32:16 -0400 Subject: [PATCH 420/884] Add fine-grained resource GenerativeSettings to DialogflowCX (#14169) Co-authored-by: Nick Elliot --- .../dialogflowcx/GenerativeSettings.yaml | 170 
++++++++++++++++++ .../dialogflowcx_generative_settings.go.tmpl | 18 ++ ...logflowcx_generative_settings_full.tf.tmpl | 44 +++++ ...e_dialogflowcx_generative_settings_test.go | 125 +++++++++++++ 4 files changed, 357 insertions(+) create mode 100644 mmv1/products/dialogflowcx/GenerativeSettings.yaml create mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_generative_settings_full.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_generative_settings_test.go diff --git a/mmv1/products/dialogflowcx/GenerativeSettings.yaml b/mmv1/products/dialogflowcx/GenerativeSettings.yaml new file mode 100644 index 000000000000..16f029dbe8a6 --- /dev/null +++ b/mmv1/products/dialogflowcx/GenerativeSettings.yaml @@ -0,0 +1,170 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'GenerativeSettings' +description: | + Settings for Generative AI. 
+references: + guides: + 'Official Documentation': 'https://cloud.google.com/dialogflow/cx/docs' + api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents/getGenerativeSettings' +docs: +base_url: '{{parent}}/generativeSettings' +self_link: '{{parent}}/generativeSettings?languageCode={{language_code}}' +create_url: '{{parent}}/generativeSettings' +update_url: '{{parent}}/generativeSettings' +create_verb: 'PATCH' +update_verb: 'PATCH' +update_mask: true +exclude_delete: true +import_format: + - '{{parent}}/generativeSettings' +timeouts: + insert_minutes: 40 + update_minutes: 40 + delete_minutes: 20 +custom_code: + pre_create: 'templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl' + pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/dialogflowcx_set_location_skip_default_obj.go.tmpl' + custom_import: 'templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl' +exclude_sweeper: true +examples: + - name: 'dialogflowcx_generative_settings_full' + primary_resource_id: 'full_generative_settings' + vars: + agent_name: 'dialogflowcx-agent' +parameters: + - name: 'parent' + type: String + description: | + The agent to create a flow for. + Format: projects//locations//agents/. + url_param_only: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The unique identifier of the generativeSettings. + Format: projects//locations//agents//generativeSettings. + output: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + - name: 'fallbackSettings' + type: NestedObject + description: | + Settings for Generative Fallback. + properties: + - name: 'selectedPrompt' + type: String + description: | + Display name of the selected prompt. 
+ - name: 'promptTemplates' + type: Array + # ignore reading results for fallback_settings.prompt_templates because it includes data that is auto-generated on the server side, + # e.g., prompt_templates with display names "Default" and "Example". + ignore_read: true + description: | + Stored prompts that can be selected, for example default templates like "conservative" or "chatty", or user defined ones. + item_type: + type: NestedObject + properties: + - name: 'displayName' + type: String + description: | + Prompt name. + - name: 'promptText' + type: String + description: | + Prompt text that is sent to a LLM on no-match default, placeholders are filled downstream. For example: "Here is a conversation $conversation, a response is: " + - name: 'frozen' + type: Boolean + description: | + If the flag is true, the prompt is frozen and cannot be modified by users. + - name: 'generativeSafetySettings' + type: NestedObject + description: | + Settings for Generative Safety. + properties: + - name: 'defaultBannedPhraseMatchStrategy' + type: String + description: | + Optional. Default phrase match strategy for banned phrases. + See [PhraseMatchStrategy](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/GenerativeSettings#phrasematchstrategy) for valid values. + - name: 'bannedPhrases' + type: Array + description: | + Banned phrases for generated text. + item_type: + type: NestedObject + properties: + - name: 'text' + type: String + description: | + Text input which can be used for prompt or banned phrases. + required: true + - name: 'languageCode' + type: String + description: | + Language code of the phrase. + required: true + - name: 'knowledgeConnectorSettings' + type: NestedObject + description: | + Settings for knowledge connector. + properties: + - name: 'business' + type: String + description: | + Name of the company, organization or other entity that the agent represents. Used for knowledge connector LLM prompt and for knowledge search. 
+ - name: 'agent' + type: String + description: | + Name of the virtual agent. Used for LLM prompt. Can be left empty. + - name: 'agentIdentity' + type: String + description: | + Identity of the agent, e.g. "virtual agent", "AI assistant". + - name: 'businessDescription' + type: String + description: | + Company description, used for LLM prompt, e.g. "a family company selling freshly roasted coffee beans". + - name: 'agentScope' + type: String + description: | + Agent scope, e.g. "Example company website", "internal Example company website for employees", "manual of car owner". + - name: 'disableDataStoreFallback' + type: Boolean + description: | + Whether to disable fallback to Data Store search results (in case the LLM couldn't pick a proper answer). Per default the feature is enabled. + - name: 'languageCode' + type: String + description: | + Language for these settings. + required: true + - name: 'llmModelSettings' + type: NestedObject + description: | + LLM model settings. + properties: + - name: 'model' + type: String + description: | + The selected LLM model. + - name: 'promptText' + type: String + description: | + The custom prompt to use. 
diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl new file mode 100644 index 000000000000..ffc597df1387 --- /dev/null +++ b/mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl @@ -0,0 +1,18 @@ +config := meta.(*transport_tpg.Config) + +// current import_formats can't import fields with forward slashes in their value and parent contains slashes +if err := tpgresource.ParseImportId([]string{ + "(?P.+)/generativeSettings\\?languageCode=(?P[a-zA-Z-]+)", + "(?P.+)/generativeSettings", +}, d, config); err != nil { + return nil, err +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/generativeSettings?languageCode={{"{{"}}language_code{{"}}"}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/dialogflowcx_generative_settings_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_generative_settings_full.tf.tmpl new file mode 100644 index 000000000000..b8ac8065612f --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_generative_settings_full.tf.tmpl @@ -0,0 +1,44 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." 
+} + +resource "google_dialogflow_cx_generative_settings" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + + fallback_settings { + selected_prompt = "example prompt" + prompt_templates { + display_name = "example prompt" + prompt_text = "example prompt text" + frozen = false + } + } + + generative_safety_settings { + default_banned_phrase_match_strategy = "PARTIAL_MATCH" + banned_phrases { + text = "example text" + language_code = "en" + } + } + + knowledge_connector_settings { + business = "example business" + agent = "example agent" + agent_identity = "virtual agent" + business_description = "a family company selling freshly roasted coffee beans" + agent_scope = "Example company website" + disable_data_store_fallback = false + } + + language_code = "en" + + llm_model_settings { + model = "gemini-2.0-flash-001" + prompt_text = "example prompt text" + } +} diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_generative_settings_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_generative_settings_test.go new file mode 100644 index 000000000000..01b9a3bfb57b --- /dev/null +++ b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_generative_settings_test.go @@ -0,0 +1,125 @@ +package dialogflowcx_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDialogflowCXGenerativeSettings_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: 
[]resource.TestStep{ + { + Config: testAccDialogflowCXGenerativeSettings_full(context), + }, + { + ResourceName: "google_dialogflow_cx_generative_settings.my_generative_settings", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"fallback_settings.0.prompt_templates"}, + }, + { + Config: testAccDialogflowCXGenerativeSettings_update(context), + }, + { + ResourceName: "google_dialogflow_cx_generative_settings.my_generative_settings", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"fallback_settings.0.prompt_templates"}, + }, + }, + }) +} + +func testAccDialogflowCXGenerativeSettings_full(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dialogflow_cx_agent" "agent" { + display_name = "tf-test-%{random_suffix}update" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." + } + + resource "google_dialogflow_cx_generative_settings" "my_generative_settings" { + parent = google_dialogflow_cx_agent.agent.id + + fallback_settings { + selected_prompt = "example prompt" + prompt_templates { + display_name = "example prompt" + prompt_text = "example prompt text" + frozen = false + } + } + + generative_safety_settings { + default_banned_phrase_match_strategy = "PARTIAL_MATCH" + banned_phrases { + text = "example text" + language_code = "en" + } + } + + knowledge_connector_settings { + business = "example business" + agent = "example agent" + agent_identity = "virtual agent" + business_description = "a family company selling freshly roasted coffee beans" + agent_scope = "Example company website" + disable_data_store_fallback = false + } + + language_code = "en" + + llm_model_settings { + model = "gemini-2.0-flash-001" + prompt_text = "example prompt text" + } + } +`, context) +} + +func testAccDialogflowCXGenerativeSettings_update(context map[string]interface{}) string { + return acctest.Nprintf(` + 
resource "google_dialogflow_cx_agent" "agent" { + display_name = "tf-test-%{random_suffix}update" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." + } + + resource "google_dialogflow_cx_generative_settings" "my_generative_settings" { + parent = google_dialogflow_cx_agent.agent.id + + knowledge_connector_settings { + business = "updated business" + agent = "updated agent" + } + + fallback_settings { + selected_prompt = "example prompt" + prompt_templates { + display_name = "example prompt" + prompt_text = "example prompt text" + frozen = false + } + } + + language_code = "en" + } +`, context) +} From 62cdc6d3d0e607610af6c5272014e125b9422a55 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 24 Jun 2025 14:39:45 -0700 Subject: [PATCH 421/884] Added modelarmor api to main.tf (#14337) --- .ci/infra/terraform/main.tf | 1 + 1 file changed, 1 insertion(+) diff --git a/.ci/infra/terraform/main.tf b/.ci/infra/terraform/main.tf index 3e5c829db476..6d0943a6ea07 100644 --- a/.ci/infra/terraform/main.tf +++ b/.ci/infra/terraform/main.tf @@ -306,6 +306,7 @@ module "project-services" { "migrationcenter.googleapis.com", "ml.googleapis.com", "mobilecrashreporting.googleapis.com", + "modelarmor.googleapis.com", "monitoring.googleapis.com", "multiclustermetering.googleapis.com", "netapp.googleapis.com", From 5d3b0757f0c43b5de60d8c3cf7d4ff68279ebb64 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Tue, 24 Jun 2025 15:46:28 -0700 Subject: [PATCH 422/884] Add CMEK support for Memorystore Instance (#14338) --- mmv1/products/memorystore/Instance.yaml | 12 +++++++++++- .../examples/memorystore_instance_full.tf.tmpl | 1 + 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 2661ef91fd4c..242824d3321f 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ 
b/mmv1/products/memorystore/Instance.yaml @@ -56,14 +56,21 @@ examples: 'prevent_destroy': 'false' - name: 'memorystore_instance_full' primary_resource_id: 'instance-full' + bootstrap_iam: + - member: "serviceAccount:service-{project_number}@gcp-sa-memorystore.iam.gserviceaccount.com" + role: "roles/cloudkms.cryptoKeyEncrypterDecrypter" vars: instance_name: 'full-instance' policy_name: 'my-policy' subnet_name: 'my-subnet' network_name: 'my-network' prevent_destroy: 'true' + kms_key_name: "my-key" test_vars_overrides: 'prevent_destroy': 'false' + 'kms_key_name': 'acctest.BootstrapKMSKeyInLocation(t, "us-central1").CryptoKey.Name' + ignore_read_extra: + - 'update_time' - name: 'memorystore_instance_persistence_aof' primary_resource_id: 'instance-persistence-aof' vars: @@ -126,7 +133,6 @@ virtual_fields: description: "Immutable. User inputs for the auto-created endpoints connections. " type: Array - # is_set: true immutable: true conflicts: - desiredPscAutoConnections @@ -802,3 +808,7 @@ properties: The backup collection full resource name. 
Example: projects/{project}/locations/{location}/backupCollections/{collection} output: true + - name: kmsKey + type: String + description: The KMS key used to encrypt the at-rest data of the cluster + immutable: true diff --git a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl index 5ec779d74e91..d004377f3d58 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl @@ -10,6 +10,7 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { node_type = "SHARED_CORE_NANO" transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" authorization_mode = "AUTH_DISABLED" + kms_key = "{{index $.Vars "kms_key_name"}}" engine_configs = { maxmemory-policy = "volatile-ttl" } From c28331449b42ece61f8421f86fb2bc82a54d8e06 Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Wed, 25 Jun 2025 20:34:24 +0530 Subject: [PATCH 423/884] Added deletion Protection support for RegionalSecret (#14323) --- .../secretmanagerregional/RegionalSecret.yaml | 11 ++++ .../regional_secret_config_basic.tf.tmpl | 1 + .../pre_delete/regional_secret.go.tmpl | 3 + ...rce_secret_manager_regional_secret_test.go | 63 +++++++++++++++++++ 4 files changed, 78 insertions(+) create mode 100644 mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl diff --git a/mmv1/products/secretmanagerregional/RegionalSecret.yaml b/mmv1/products/secretmanagerregional/RegionalSecret.yaml index c767e53ec7df..40caeaac73d1 100644 --- a/mmv1/products/secretmanagerregional/RegionalSecret.yaml +++ b/mmv1/products/secretmanagerregional/RegionalSecret.yaml @@ -43,12 +43,15 @@ iam_policy: - '{{secret_id}}' custom_code: pre_update: 'templates/terraform/pre_update/secret_manager_regional_secret.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/regional_secret.go.tmpl' examples: - name: 'regional_secret_config_basic' primary_resource_id: 
'regional-secret-basic' primary_resource_name: 'fmt.Sprintf("tf-test-tf-reg-secret%s", context["random_suffix"])' vars: secret_id: 'tf-reg-secret' + ignore_read_extra: + - 'deletion_protection' - name: 'regional_secret_with_cmek' primary_resource_id: 'regional-secret-with-cmek' vars: @@ -221,3 +224,11 @@ properties: For secret with versionDestroyTtl>0, version destruction doesn't happen immediately on calling destroy instead the version goes to a disabled state and the actual destruction happens after this TTL expires. It must be atleast 24h. +virtual_fields: + - name: 'deletion_protection' + description: | + Whether Terraform will be prevented from destroying the regional secret. Defaults to false. + When the field is set to true in Terraform state, a `terraform apply` + or `terraform destroy` that would delete the federation will fail. + type: Boolean + default_value: false diff --git a/mmv1/templates/terraform/examples/regional_secret_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/regional_secret_config_basic.tf.tmpl index 9b6f09711d16..3b2a9ace9e0b 100644 --- a/mmv1/templates/terraform/examples/regional_secret_config_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/regional_secret_config_basic.tf.tmpl @@ -11,4 +11,5 @@ resource "google_secret_manager_regional_secret" "{{$.PrimaryResourceId}}" { key2 = "value2", key3 = "value3" } + deletion_protection = false } diff --git a/mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl b/mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl new file mode 100644 index 000000000000..0ac2035f9173 --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl @@ -0,0 +1,3 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy secretmanager regional secret without setting deletion_protection=false and running `terraform apply`") +} diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go 
b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go index 062f005b581d..c925d26a357b 100644 --- a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go @@ -1,6 +1,7 @@ package secretmanagerregional_test import ( + "regexp" "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" @@ -551,6 +552,38 @@ func TestAccSecretManagerRegionalRegionalSecret_versionAliasesUpdate(t *testing. }) } +func TestAccSecretManagerRegionalRegionalSecret_deletionprotection(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerRegionalRegionalSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerRegionalSecretDeletionProtectionL1(context), + }, + { + ResourceName: "google_secret_manager_regional_secret.regional-secret-deletion-protection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels", "deletion_protection"}, + }, + { + Config: testAccSecretManagerRegionalSecretDeletionProtectionL2(context), + ExpectError: regexp.MustCompile("deletion_protection"), + }, + { + Config: testAccSecretManagerRegionalSecretDeletionProtectionFalse(context), + }, + }, + }) +} + func testAccSecretManagerRegionalSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_regional_secret" "regional-secret-basic" { @@ -1307,3 +1340,33 @@ resource "google_secret_manager_regional_secret_version" "reg-secret-version-4" } `, context) } + 
+func testAccSecretManagerRegionalSecretDeletionProtectionL1(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_regional_secret" "regional-secret-deletion-protection" { + secret_id = "tf-test-reg-secret%{random_suffix}" + location = "us-central1" + deletion_protection = true +} +`, context) +} + +func testAccSecretManagerRegionalSecretDeletionProtectionL2(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_regional_secret" "regional-secret-deletion-protection" { + secret_id = "tf-test-reg-secret%{random_suffix}" + location = "us-west2" + deletion_protection = true +} +`, context) +} + +func testAccSecretManagerRegionalSecretDeletionProtectionFalse(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_regional_secret" "regional-secret-deletion-protection" { + secret_id = "tf-test-reg-secret%{random_suffix}" + location = "us-central1" + deletion_protection = false +} +`, context) +} From 352348ab272056856bf52bc3a72d449a15c4592e Mon Sep 17 00:00:00 2001 From: Andrew Peabody Date: Wed, 25 Jun 2025 10:16:13 -0700 Subject: [PATCH 424/884] doc: correct reservation_affinity level (#14357) --- .../terraform/website/docs/r/container_node_pool.html.markdown | 3 --- 1 file changed, 3 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown index e4048200a472..01ad1ed5eee3 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown @@ -173,9 +173,6 @@ cluster. * `queued_provisioning` - (Optional) Specifies node pool-level settings of queued provisioning. Structure is [documented below](#nested_queued_provisioning). 
-* `reservation_affinity` (Optional) The configuration of the desired reservation which instances could take capacity from. - Structure is [documented below](#nested_reservation_affinity). - The `autoscaling` block supports (either total or per zone limits are required): * `min_node_count` - (Optional) Minimum number of nodes per zone in the NodePool. From 69c70aa6b5991d5262605455e0b508ba6a632db7 Mon Sep 17 00:00:00 2001 From: stevenyang72 Date: Wed, 25 Jun 2025 10:51:58 -0700 Subject: [PATCH 425/884] Add attestation rule to workload identity pool managed identity. (#14223) --- .../WorkloadIdentityPoolManagedIdentity.yaml | 23 +++++++++++++ ...dentity_pool_managed_identity_full.tf.tmpl | 6 ++++ ...oad_identity_pool_managed_identity.go.tmpl | 32 +++++++++++++++++++ ...oad_identity_pool_managed_identity.go.tmpl | 18 +++++++++++ ...oad_identity_pool_managed_identity.go.tmpl | 7 ++++ ...dentity_pool_managed_identity_test.go.tmpl | 8 +++++ 6 files changed, 94 insertions(+) create mode 100644 mmv1/templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl create mode 100644 mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl create mode 100644 mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml index e3299cb08da4..09b36fded96c 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml @@ -32,6 +32,9 @@ autogen_async: true custom_code: constants: 'templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl' decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' + post_create: 'templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl' + post_read: 
'templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl' + pre_create: 'templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl' test_check_destroy: 'templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl' examples: - name: 'iam_workload_identity_pool_managed_identity_basic' @@ -46,6 +49,8 @@ examples: workload_identity_pool_id: 'example-pool' workload_identity_pool_namespace_id: 'example-namespace' workload_identity_pool_managed_identity_id: 'example-managed-identity' + test_env_vars: + project: 'PROJECT_NUMBER' parameters: - name: 'workload_identity_pool_id' type: String @@ -115,3 +120,21 @@ properties: description: | Whether the managed identity is disabled. If disabled, credentials may no longer be issued for the identity, however existing credentials will still be accepted until they expire. + - name: 'attestationRules' + type: Array + description: | + Defines which workloads can receive an identity within a pool. When an AttestationRule is + defined under a managed identity, matching workloads may receive that identity. A maximum of + 50 AttestationRules can be set. + update_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities/{{workload_identity_pool_managed_identity_id}}:setAttestationRules' + update_verb: 'POST' + is_set: true + item_type: + type: NestedObject + properties: + - name: 'googleCloudResource' + type: String + description: | + A single workload operating on Google Cloud. For example: + `//compute.googleapis.com/projects/123/uid/zones/us-central1-a/instances/12345678`. 
+ required: true diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl index 2074a1566556..4943111b3592 100644 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl @@ -20,4 +20,10 @@ resource "google_iam_workload_identity_pool_managed_identity" "{{$.PrimaryResour workload_identity_pool_managed_identity_id = "{{index $.Vars "workload_identity_pool_managed_identity_id"}}" description = "Example Managed Identity in a Workload Identity Pool Namespace" disabled = true + attestation_rules { + google_cloud_resource = "//compute.googleapis.com/projects/{{index $.TestEnvVars "project"}}/uid/zones/us-central1-a/instances/12345678" + } + attestation_rules { + google_cloud_resource = "//run.googleapis.com/projects/{{index $.TestEnvVars "project"}}/name/locations/us-east1/services/my-service" + } } \ No newline at end of file diff --git a/mmv1/templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl new file mode 100644 index 000000000000..f14ff96f957b --- /dev/null +++ b/mmv1/templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl @@ -0,0 +1,32 @@ + // create attestation_rules + if hasRule { + qIdx := strings.Index(url, "?") + var basePath string + if qIdx != -1 { + basePath = url[:qIdx] + } else { + basePath = url + } + ruleUrl := basePath + "/" + d.Get("workload_identity_pool_managed_identity_id").(string) + ":setAttestationRules" + + ruleRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: ruleUrl, + UserAgent: userAgent, + Body: ruleObj, + Timeout: 
d.Timeout(schema.TimeoutCreate), + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error creating WorkloadIdentityPoolManagedIdentity %q: %s", d.Id(), err) + } + + err = IAMBetaOperationWaitTime( + config, ruleRes, project, "Creating WorkloadIdentityPoolManagedIdentity", userAgent, + d.Timeout(schema.TimeoutCreate)) + if err != nil { + return fmt.Errorf("Error waiting to create WorkloadIdentityPoolManagedIdentity: %s", err) + } + } \ No newline at end of file diff --git a/mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl new file mode 100644 index 000000000000..ec5cf603b518 --- /dev/null +++ b/mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl @@ -0,0 +1,18 @@ + // list attestation_rules + ruleUrl := url + ":listAttestationRules" + + ruleRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: ruleUrl, + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("IAMBetaWorkloadIdentityPoolManagedIdentity %q", d.Id())) + } + + for k, v := range ruleRes { + res[k] = v + } \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl new file mode 100644 index 000000000000..8f11cde4c552 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl @@ -0,0 +1,7 @@ + // see if we need to create attestation_rules + _, hasRule := d.GetOk("attestation_rules") + ruleObj := make(map[string]interface{}) + if hasRule { + ruleObj["attestationRules"] = attestationRulesProp + delete(obj, "attestationRules") + } \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl index ccf0d267ac1c..35f20fa9041b 100644 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(t *testing.T) { @@ -53,6 +54,7 @@ func TestAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(t *testing.T) { t.Parallel() context := map[string]interface{}{ + "project": envvar.GetTestProjectNumberFromEnv(), "random_suffix": acctest.RandString(t, 10), } @@ -138,6 +140,12 @@ resource "google_iam_workload_identity_pool_managed_identity" "example" { workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" description = "Example Managed Identity in a Workload Identity Pool Namespace" disabled = true + attestation_rules { + google_cloud_resource = "//compute.googleapis.com/projects/%{project}/uid/zones/us-central1-a/instances/12345678" + } + attestation_rules { + google_cloud_resource = "//run.googleapis.com/projects/%{project}/name/locations/us-east1/services/my-service" + } } `, context) } From 28b326f06ce8ea51054089765348acc2ba922466 Mon Sep 17 00:00:00 2001 From: "Bob \"Wombat\" Hogg" Date: Wed, 25 Jun 2025 15:03:03 -0400 Subject: [PATCH 426/884] firestore_database: Remove the deprecation message for deletion_policy (#14365) --- mmv1/products/firestore/Database.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/mmv1/products/firestore/Database.yaml b/mmv1/products/firestore/Database.yaml index 2a757fe8f2a8..64ef2120724e 100644 --- a/mmv1/products/firestore/Database.yaml +++ b/mmv1/products/firestore/Database.yaml @@ -145,7 +145,9 @@ virtual_fields: See also `delete_protection`. type: String default_value: "ABANDON" - deprecation_message: '`deletion_policy` is deprecated and will be removed in a future major release. Use `delete_protection_state` instead.' + # `deletion_policy` is deprecated and will be removed in a future major release. + # Once that release happens, you should use `delete_protection_state` instead. + # For now though, setting this field is necessary if you wish for your Firestore databases to be deleted upon `terraform destroy`. parameters: properties: - name: 'name' From e712589d87d81cdfe8ab303caf4cf72b7ba7ea94 Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Wed, 25 Jun 2025 20:05:33 +0000 Subject: [PATCH 427/884] Adds new IP Filter access fields (#14315) --- .../storage/resource_storage_bucket.go.tmpl | 17 +++++- .../storage/resource_storage_bucket_test.go | 61 +++++++++++++++++++ .../docs/r/storage_bucket.html.markdown | 6 +- 3 files changed, 82 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index 69232098a6ff..761a06ffb0ad 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -631,6 +631,17 @@ func ResourceStorageBucket() *schema.Resource { }, }, }, + "allow_cross_org_vpcs" : { + Type: schema.TypeBool, + Optional: true, + Description: `Whether to allow cross-org VPCs in the bucket's IP filter configuration.`, + RequiredWith: []string{"ip_filter.0.vpc_network_sources"}, + }, + "allow_all_service_agent_access" : { + Type: schema.TypeBool, + 
Optional: true, + Description: `Whether to allow all service agents to access the bucket regardless of the IP filter configuration.`, + }, }, }, DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { @@ -2036,6 +2047,8 @@ func flattenBucketIpFilter(ipFilter *storage.BucketIpFilter) []map[string]interf filterItem := map[string]interface{}{ "mode": ipFilter.Mode, + "allow_cross_org_vpcs": ipFilter.AllowCrossOrgVpcs, + "allow_all_service_agent_access": ipFilter.AllowAllServiceAgentAccess, } if publicSrc := flattenBucketIpFilterPublicNetworkSource(ipFilter.PublicNetworkSource); publicSrc != nil { @@ -2087,7 +2100,9 @@ func expandBucketIpFilter(v interface{}) (*storage.BucketIpFilter) { Mode: ipFilter["mode"].(string), PublicNetworkSource: expandBucketIpFilterPublicNetworkSource(ipFilter["public_network_source"]), VpcNetworkSources: expandBucketIpFilterVpcNetworkSources(ipFilter["vpc_network_sources"]), - ForceSendFields: []string{"PublicNetworkSource", "VpcNetworkSources"}, + AllowCrossOrgVpcs: ipFilter["allow_cross_org_vpcs"].(bool), + AllowAllServiceAgentAccess: ipFilter["allow_all_service_agent_access"].(bool), + ForceSendFields: []string{"PublicNetworkSource", "VpcNetworkSources", "AllowCrossOrgVpcs", "AllowAllServiceAgentAccess"}, } } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go index 92e0a6d209a3..b7d31bac9b85 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go @@ -1595,6 +1595,21 @@ func TestAccStorageBucket_IPFilter(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"force_destroy"}, }, + { + Config: testAccStorageBucket_IPFilter_update( + bucketName, nwSuffix, project, serviceAccount, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, 
"google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, { Config: testAccStorageBucket_IPFilter_disable(bucketName, nwSuffix, project, serviceAccount), Check: resource.ComposeTestCheckFunc( @@ -2820,6 +2835,52 @@ resource "google_storage_bucket" "bucket" { network = google_compute_network.vpc_gcs_ipfilter1.id allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] } + allow_all_service_agent_access = true + } +} +`, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) +} + +func testAccStorageBucket_IPFilter_update(bucketName string, nwSuffix string, project string, serviceAccount string) string { + return fmt.Sprintf(` +resource "google_compute_network" "vpc_gcs_ipfilter1" { + name = "tf-test-storage-ipfilter1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "ipfilter_1" { + name = "tf-test-storage-ipfilter1-%s" + ip_cidr_range = "10.201.0.0/16" + region = "us-central1" + network = google_compute_network.vpc_gcs_ipfilter1.id +} + +resource "google_project_iam_custom_role" "ipfilter_exempt_role" { + role_id = "_%s" + title = "IP Filter Exempt Role" + description = "A custom role to bypass IP Filtering on GCS bucket." 
+ permissions = ["storage.buckets.exemptFromIpFilter"] +} + +resource "google_project_iam_member" "primary" { + project = "%s" + role = "projects/%s/roles/${google_project_iam_custom_role.ipfilter_exempt_role.role_id}" + member = "serviceAccount:%s" +} + +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "us-central1" + uniform_bucket_level_access = true + force_destroy = true + ip_filter { + mode = "Enabled" + vpc_network_sources { + network = google_compute_network.vpc_gcs_ipfilter1.id + allowed_ip_cidr_ranges = ["0.0.0.0/0"] + } + allow_cross_org_vpcs = false + allow_all_service_agent_access = false } } `, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index da7cee5efbc2..9450a3ea945d 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -297,7 +297,11 @@ The following arguments are supported: The `ip_filter` block supports: -* `mode` - (Required) The state of the IP filter configuration. Valid values are `Enabled` and `Disabled`. When set to `Enabled`, IP filtering rules are applied to a bucket and all incoming requests to the bucket are evaluated against these rules. When set to `Disabled`, IP filtering rules are not applied to a bucket. +* `mode` - (Required) The state of the IP filter configuration. Valid values are `Enabled` and `Disabled`. When set to `Enabled`, IP filtering rules are applied to a bucket and all incoming requests to the bucket are evaluated against these rules. When set to `Disabled`, IP filtering rules are not applied to a bucket. **Note**: `allow_all_service_agent_access` must be supplied when `mode` is set to `Enabled`, it can be ommited for other values. 
+ +* `allow_cross_org_vpcs` - (Optional) While set `true`, allows cross-org VPCs in the bucket's IP filter configuration. + +* `allow_all_service_agent_access` (Optional) While set `true`, allows all service agents to access the bucket regardless of the IP filter configuration. * `public_network_source` - (Optional) The public network IP address ranges that can access the bucket and its data. Structure is [documented below](#nested_public_network_source). From 3a8aa406dfd9f31ac2789662d086cd1cc20e0e9b Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Wed, 25 Jun 2025 13:07:40 -0700 Subject: [PATCH 428/884] Allow setting `serial-port-logging-enable` in google_workbench_instance metadata (#14243) --- mmv1/products/workbench/Instance.yaml | 2 + .../constants/workbench_instance.go.tmpl | 40 ++++++++++++++++++- .../examples/workbench_instance_full.tf.tmpl | 3 +- .../resource_workbench_instance_test.go | 19 ++++++++- 4 files changed, 60 insertions(+), 4 deletions(-) diff --git a/mmv1/products/workbench/Instance.yaml b/mmv1/products/workbench/Instance.yaml index 0f4693a69381..6ae1ddb53636 100644 --- a/mmv1/products/workbench/Instance.yaml +++ b/mmv1/products/workbench/Instance.yaml @@ -56,6 +56,8 @@ sweeper: url_substitutions: - region: "us-central1-a" - region: "us-west1-a" +custom_diff: + - 'workbenchMetadataCustomizeDiff' examples: - name: 'workbench_instance_basic' primary_resource_id: 'instance' diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index 41c2c3438a78..c04436ed140a 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -23,6 +23,10 @@ func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceD } +var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ + "serial-port-logging-enable", +} + var 
WorkbenchInstanceProvidedMetadata = []string{ "agent-health-check-interval-seconds", "agent-health-check-path", @@ -69,7 +73,6 @@ var WorkbenchInstanceProvidedMetadata = []string{ "report-system-status", "resource-url", "restriction", - "serial-port-logging-enable", "service-account-mode", "shutdown-script", "title", @@ -94,6 +97,12 @@ func WorkbenchInstanceMetadataDiffSuppress(k, old, new string, d *schema.Resourc } } + for _, metadata := range WorkbenchInstanceSettableUnmodifiableDefaultMetadata { + if strings.Contains(k, metadata) && new == "" { + return true + } + } + // Let diff be determined by metadata if strings.Contains(k, "gce_setup.0.metadata.%") { return true @@ -266,3 +275,32 @@ func mergeMaps(oldMap, newMap map[string]interface{}) map[string]string { return modifiedMap } {{- end }} + + +func workbenchMetadataCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, meta interface{}) error { + if diff.HasChange("gce_setup.0.metadata") { + o, n := diff.GetChange("gce_setup.0.metadata") + oldMetadata := o.(map[string]interface{}) + newMetadata := n.(map[string]interface{}) + + for _, key := range WorkbenchInstanceSettableUnmodifiableDefaultMetadata { + oldValue, oldOk := oldMetadata[key] + newValue, newOk := newMetadata[key] + + // Condition to force new: + // 1. The key exists in both old and new metadata AND their values differ. + // 2. The key exists in new but not in old (meaning it was added). + // + // The key exists in old but not in new (meaning it was removed) is ignored. + if (oldOk && newOk && oldValue != newValue) || + (!oldOk && newOk) { + // If a change is detected for this specific key, force a new resource and stop checking. 
+ if err := diff.ForceNew("gce_setup.0.metadata"); err != nil { + return err + } + return nil // Return nil immediately after forcing new + } + } + } + return nil +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl index 66d02634531f..07df662cf2e0 100644 --- a/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl @@ -69,7 +69,8 @@ resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { } metadata = { - terraform = "true" + terraform = "true", + serial-port-logging-enable = "false" } enable_ip_forwarding = true diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go index 325324b42bc3..1ad111258b41 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go @@ -87,7 +87,8 @@ resource "google_workbench_instance" "instance" { } metadata = { - terraform = "true" + terraform = "true", + "serial-port-logging-enable" = "false", } } @@ -352,6 +353,19 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, }, + { + Config: testAccWorkbenchInstance_update(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", 
"instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + }, { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( @@ -379,6 +393,7 @@ resource "google_workbench_instance" "instance" { metadata = { terraform = "true" "resource-url" = "new-fake-value", + "serial-port-logging-enable" = "true", } } @@ -401,7 +416,7 @@ resource "google_workbench_instance" "instance" { terraform = "true", "idle-timeout-seconds" = "10800", "image-url" = "fake-value", - "container-custom-params" = "test-params", + "container-custom-params" = "test-params", } } From 371bc196c35672fb8aa5a3f9f8330778bb052741 Mon Sep 17 00:00:00 2001 From: Balanagu Harsha Vardhan Date: Thu, 26 Jun 2025 02:33:03 +0530 Subject: [PATCH 429/884] Add Plugin resource of API hub (#14279) --- mmv1/products/apihub/Plugin.yaml | 261 ++++++++++++++++++ .../examples/apihub_plugin_full.tf.tmpl | 65 +++++ 2 files changed, 326 insertions(+) create mode 100644 mmv1/products/apihub/Plugin.yaml create mode 100644 mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl diff --git a/mmv1/products/apihub/Plugin.yaml b/mmv1/products/apihub/Plugin.yaml new file mode 100644 index 000000000000..e6b7e9bc1245 --- /dev/null +++ b/mmv1/products/apihub/Plugin.yaml @@ -0,0 +1,261 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Plugin +description: A plugin resource in the API hub. 
+base_url: projects/{{project}}/locations/{{location}}/plugins +immutable: true +self_link: projects/{{project}}/locations/{{location}}/plugins/{{plugin_id}} +create_url: projects/{{project}}/locations/{{location}}/plugins?pluginId={{plugin_id}} +id_format: projects/{{project}}/locations/{{location}}/plugins/{{plugin_id}} +import_format: + - projects/{{project}}/locations/{{location}}/plugins/{{plugin_id}} +examples: + - name: apihub_plugin_full + primary_resource_id: apihub_plugin_full + vars: + plugin_id: 'plugin-full' + # API hub instance needs to be created before this, and end to end creation of that resource using Terraform is not yet supported. + exclude_test: true + external_providers: ["time"] +autogen_async: true +autogen_status: UGx1Z2lu +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: pluginId + type: String + description: |- + The ID to use for the Plugin resource, which will become the final + component of the Plugin's resource name. This field is optional. + + * If provided, the same will be used. The service will throw an error if + the specified id is already used by another Plugin resource in the API hub + instance. + * If not provided, a system generated id will be used. + + This value should be 4-63 characters, overall resource name which will be + of format + `projects/{project}/locations/{location}/plugins/{plugin}`, + its length is limited to 1000 characters and valid characters are + /a-z[0-9]-_/. + immutable: true + url_param_only: true + required: true +properties: + - name: description + type: String + description: |- + The plugin description. Max length is 2000 characters (Unicode code + points). + - name: state + type: String + description: |- + Represents the state of the plugin. 
+ Note this field will not be set for plugins developed via plugin + framework as the state will be managed at plugin instance level. + Possible values: + STATE_UNSPECIFIED + ENABLED + DISABLED + output: true + - name: ownershipType + type: String + description: |- + The type of the plugin, indicating whether it is 'SYSTEM_OWNED' or + 'USER_OWNED'. + Possible values: + OWNERSHIP_TYPE_UNSPECIFIED + SYSTEM_OWNED + USER_OWNED + output: true + - name: actionsConfig + type: Array + description: The configuration of actions supported by the plugin. + item_type: + type: NestedObject + properties: + - name: id + type: String + description: The id of the action. + required: true + - name: displayName + type: String + description: The display name of the action. + required: true + - name: description + type: String + description: The description of the operation performed by the action. + required: true + - name: triggerMode + type: String + description: |- + The trigger mode supported by the action. + Possible values: + TRIGGER_MODE_UNSPECIFIED + API_HUB_ON_DEMAND_TRIGGER + API_HUB_SCHEDULE_TRIGGER + NON_API_HUB_MANAGED + required: true + - name: documentation + type: NestedObject + description: Documentation details. + properties: + - name: externalUri + type: String + description: The uri of the externally hosted documentation. + - name: pluginCategory + type: String + description: |2- + + Possible values: + PLUGIN_CATEGORY_UNSPECIFIED + API_GATEWAY + API_PRODUCER + - name: configTemplate + type: NestedObject + description: ConfigTemplate represents the configuration template for a plugin. + properties: + - name: authConfigTemplate + type: NestedObject + description: AuthConfigTemplate represents the authentication template for a plugin. + properties: + - name: supportedAuthTypes + type: Array + description: The list of authentication types supported by the plugin. 
+ required: true + item_type: + type: String + - name: serviceAccount + type: NestedObject + description: Config for Google service account authentication. + properties: + - name: serviceAccount + type: String + description: |- + The service account to be used for authenticating request. + + The `iam.serviceAccounts.getAccessToken` permission should be granted on + this service account to the impersonator service account. + required: true + - name: additionalConfigTemplate + type: Array + description: |- + The list of additional configuration variables for the plugin's + configuration. + item_type: + type: NestedObject + properties: + - name: required + type: Boolean + description: |- + Flag represents that this `ConfigVariable` must be provided for a + PluginInstance. + - name: enumOptions + type: Array + description: Enum options. To be populated if `ValueType` is `ENUM`. + item_type: + type: NestedObject + properties: + - name: id + type: String + description: Id of the option. + required: true + - name: displayName + type: String + description: Display name of the option. + required: true + - name: description + type: String + description: Description of the option. + - name: multiSelectOptions + type: Array + description: Multi select options. To be populated if `ValueType` is `MULTI_SELECT`. + item_type: + type: NestedObject + properties: + - name: id + type: String + description: Id of the option. + required: true + - name: displayName + type: String + description: Display name of the option. + required: true + - name: description + type: String + description: Description of the option. + - name: id + type: String + description: ID of the config variable. Must be unique within the configuration. + required: true + - name: valueType + type: String + description: |- + Type of the parameter: string, int, bool etc. 
+ Possible values: + VALUE_TYPE_UNSPECIFIED + STRING + INT + BOOL + SECRET + ENUM + MULTI_SELECT + MULTI_STRING + MULTI_INT + required: true + - name: description + type: String + description: Description. + - name: validationRegex + type: String + description: |- + Regular expression in RE2 syntax used for validating the `value` of a + `ConfigVariable`. + - name: name + type: String + description: |- + Identifier. The name of the plugin. + Format: `projects/{project}/locations/{location}/plugins/{plugin}` + output: true + - name: displayName + type: String + description: |- + The display name of the plugin. Max length is 50 characters (Unicode code + points). + required: true + - name: hostingService + type: NestedObject + description: |- + The information related to the service implemented by the plugin + developer, used to invoke the plugin's functionality. + properties: + - name: serviceUri + type: String + description: |- + The URI of the service implemented by the plugin developer, used to + invoke the plugin's functionality. This information is only required for + user defined plugins. + - name: createTime + type: String + description: Timestamp indicating when the plugin was created. + output: true + - name: updateTime + type: String + description: Timestamp indicating when the plugin was last updated. + output: true diff --git a/mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl b/mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl new file mode 100644 index 000000000000..6cf228865d10 --- /dev/null +++ b/mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl @@ -0,0 +1,65 @@ +resource "google_apihub_plugin" "{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "Test Plugin" + description="Test description" + plugin_id = "{{index $.Vars "plugin_id"}}" + plugin_category = "API_GATEWAY" + actions_config { + id = "sync-metadata" + display_name = "Sync Metadata" + description = "Syncs API metadata." 
+ trigger_mode = "API_HUB_SCHEDULE_TRIGGER" + } + documentation { + external_uri = "https://example.com/plugin-documentation" + } + hosting_service { + service_uri = "https://your-plugin-service.example.com/api" + } + config_template { + auth_config_template { + supported_auth_types = ["NO_AUTH", "USER_PASSWORD"] + service_account { + service_account = "test@developer.gserviceaccount.com" + } + } + additional_config_template { + id = "string-val" + description = "API key for the service." + value_type = "STRING" + required = false + validation_regex = "^[a-zA-Z0-9]{5,20}$" + } + additional_config_template { + id = "integer-val" + description = "API key for the service." + value_type = "INT" + required = true + validation_regex = "" + } + additional_config_template { + id = "bool-val" + description = "API key for the service." + value_type = "BOOL" + required = false + validation_regex = "" + } + additional_config_template { + id = "enum-val" + description = "API key for the service." + value_type = "ENUM" + enum_options { + id = "Option1" + display_name = "Option1" + description = "Description for Option1" + } + enum_options { + id = "Option2" + display_name = "Option2" + description = "Description for Option2" + } + required = false + validation_regex = "" + } + } +} \ No newline at end of file From cc45804cbc1ee1c1a888b6cd88bb5ff6d77b02d3 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal Date: Thu, 26 Jun 2025 02:33:18 +0530 Subject: [PATCH 430/884] Add WireGroup Resource (#14271) --- mmv1/products/compute/WireGroup.yaml | 212 ++++++++++++++++++ .../examples/compute_wire_group_basic.tf.tmpl | 27 +++ .../resource_compute_wire_group_test.go.tmpl | 117 ++++++++++ 3 files changed, 356 insertions(+) create mode 100644 mmv1/products/compute/WireGroup.yaml create mode 100644 mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl diff --git 
a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml new file mode 100644 index 000000000000..b81bc5975952 --- /dev/null +++ b/mmv1/products/compute/WireGroup.yaml @@ -0,0 +1,212 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WireGroup' +kind: 'compute#wireGroup' +description: | + The WireGroup resource represents a group of redundant wires between interconnects in two different metros. Each WireGroup belongs to a CrossSiteNetwork. A wire group defines endpoints and the wires which exist between them. 
+ +references: + guides: + 'Create a WireGroup': 'https://cloud.google.com/network-connectivity/docs/interconnect/how-to/cross-site/modify-network#add-wire-group' + api: 'https://cloud.google.com/compute/docs/reference/rest/beta/wireGroups' +min_version: beta +docs: +id_format: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' +base_url: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups' +self_link: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false +examples: + - name: 'compute_wire_group_basic' + primary_resource_id: 'example-test-wire-group' + vars: + name: 'test-wire-group' + description: 'Example Wire Group' + cross_site_network: 'test-cross-site-network' + min_version: 'beta' + test_env_vars: + project: 'PROJECT_NAME' +parameters: + - name: 'crossSiteNetwork' + type: ResourceRef + description: Required cross site network to which wire group belongs. + required: true + immutable: true + url_param_only: true + resource: 'CrossSiteNetwork' + imports: 'name' + diff_suppress_func: 'tpgresource.CompareResourceNames' + custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + min_version: beta +properties: + - name: 'description' + type: String + description: | + An optional description of this resource. Provide this property when you create the resource. + - name: 'creationTimestamp' + type: Time + description: | + Creation timestamp in RFC3339 text format. + output: true + - name: 'name' + type: String + description: | + Name of the resource. 
Provided by the client when the resource is created. The name must be + 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters + long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + character must be a lowercase letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, which cannot be a dash. + required: true + validation: + regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' + - name: endpoints + type: Map + description: | + Endpoints grouped by location, each mapping to interconnect configurations. + key_name: 'endpoint' + key_description: | + The name of the endpoint, which is a city name. + value_type: + type: NestedObject + properties: + - name: interconnects + type: Map + key_name: interconnect_name + key_description: | + The name of the interconnect. + value_type: + type: NestedObject + description: | + Map of interconnect details. + properties: + - name: interconnect + type: string + - name: vlan_tags + type: Array + description: | + VLAN tags for the interconnect. + item_type: + type: Integer + - name: adminEnabled + type: Boolean + description: | + Indicates whether the wire group is administratively enabled. + default_value: true + - name: wireGroupProperties + type: NestedObject + description: | + Properties specific to the wire group. + properties: + - name: type + type: enum + description: | + Type of wire group (enum). + WIRE: a single pseudowire over two Interconnect connections with no redundancy. + REDUNDANT: two pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. + BOX_AND_CROSS: four pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. 
+ enum_values: + - 'WIRE' + - 'REDUNDANT' + - 'BOX_AND_CROSS' + - name: wireProperties + type: NestedObject + description: | + Default properties for wires within the group. + properties: + - name: bandwidthUnmetered + type: Integer + description: | + The unmetered bandwidth setting. + - name: faultResponse + type: enum + description: | + Response when a fault is detected in a pseudowire: + NONE: default. + DISABLE_PORT: set the port line protocol down when inline probes detect a fault. This setting is only permitted on port mode pseudowires. + enum_values: + - 'NONE' + - 'DISABLE_PORT' + - name: wires + type: Array + description: | + The single/redundant wire(s) managed by the wire group. + output: true + item_type: + type: NestedObject + properties: + - name: label + type: string + output: true + - name: endpoints + type: Array + output: true + description: | + 'Wire endpoints are specific Interconnect connections.' + item_type: + type: NestedObject + properties: + - name: interconnect + type: string + output: true + - name: vlanTag + type: Integer + output: true + - name: wireProperties + type: NestedObject + output: true + properties: + - name: bandwidthUnmetered + type: Integer + output: true + - name: faultResponse + type: enum + output: true + enum_values: + - 'NONE' + - 'DISABLE_PORT' + - name: adminEnabled + type: Boolean + output: true + - name: topology + type: NestedObject + description: | + Topology details for the wire group configuration. 
+ output: true + properties: + - name: endpoints + type: Array + output: true + item_type: + type: NestedObject + properties: + - name: label + type: string + output: true + - name: city + type: string + output: true diff --git a/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl new file mode 100644 index 000000000000..0519310d3fa8 --- /dev/null +++ b/mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl @@ -0,0 +1,27 @@ +data "google_project" "project" { +provider = google-beta +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "{{index $.Vars "cross_site_network"}}" + description = "Example cross site network" + provider = google-beta +} + +resource "google_compute_wire_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "name"}}" + description = "{{index $.Vars "description"}}" + cross_site_network = "{{index $.Vars "cross_site_network"}}" + provider = google-beta + depends_on = [ + google_compute_cross_site_network.example-cross-site-network + ] + wire_properties { + bandwidth_unmetered = 10 + fault_response = "NONE" + } + wire_group_properties { + type = "WIRE" + } + admin_enabled = true +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl new file mode 100644 index 000000000000..716a5b383d8b --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl @@ -0,0 +1,117 @@ +package compute_test +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + 
+func TestAccComputeWireGroup_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeWireGroupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeWireGroup_basic(context), + }, + { + ResourceName: "google_compute_wire_group.example-test-wire-group", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cross_site_network"}, + }, + { + Config: testAccComputeWireGroup_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_wire_group.example-test-wire-group", plancheck.ResourceActionUpdate), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), + ), + }, + { + ResourceName: "google_compute_wire_group.example-test-wire-group", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cross_site_network"}, + }, + }, + }) +} + +func testAccComputeWireGroup_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { +provider = google-beta +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-cross-site-network%{random_suffix}" + description = "Example cross site network" + provider = google-beta +} + +resource "google_compute_wire_group" "example-test-wire-group" { + name = "tf-test-test-wire-group%{random_suffix}" + description = "Example Wire Group%{random_suffix}" + cross_site_network = 
google_compute_cross_site_network.example-cross-site-network.name + provider = google-beta + depends_on = [ + google_compute_cross_site_network.example-cross-site-network + ] + wire_properties { + bandwidth_unmetered = 1000 + } + wire_group_properties { + type = "REDUNDANT" + } + admin_enabled = true +} +`, context) +} + +func testAccComputeWireGroup_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { +provider = google-beta +} + +resource "google_compute_cross_site_network" "example-cross-site-network" { + name = "tf-test-cross-site-network%{random_suffix}" + description = "Example cross site network" + provider = google-beta +} + +resource "google_compute_wire_group" "example-test-wire-group" { + name = "tf-test-test-wire-group%{random_suffix}" + description = "Example Wire Group Updated%{random_suffix}" + cross_site_network = google_compute_cross_site_network.example-cross-site-network.name + provider = google-beta + depends_on = [ + google_compute_cross_site_network.example-cross-site-network + ] + wire_properties { + bandwidth_unmetered = 1000 + } + wire_group_properties { + type = "REDUNDANT" + } + admin_enabled = true +} +`, context) +} +{{- end }} \ No newline at end of file From d5c0c5937afa2c0ce5db3475ae65dca9d0390406 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 25 Jun 2025 14:20:48 -0700 Subject: [PATCH 431/884] Add access mode to compute region disk (#14358) --- mmv1/products/compute/Disk.yaml | 9 +++++---- mmv1/products/compute/RegionDisk.yaml | 19 +++++++++++++++++++ ...k_hyperdisk_balanced_ha_write_many.tf.tmpl | 7 +++++++ 3 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 mmv1/templates/terraform/examples/region_disk_hyperdisk_balanced_ha_write_many.tf.tmpl diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index 9a622697675b..e9f2a2a96bf7 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -555,11 
+555,12 @@ properties: - name: 'accessMode' type: String description: | - The accessMode of the disk. + The access mode of the disk. For example: - * READ_WRITE_SINGLE - * READ_WRITE_MANY - * READ_ONLY_SINGLE + * READ_WRITE_SINGLE: The default AccessMode, means the disk can be attached to single instance in RW mode. + * READ_WRITE_MANY: The AccessMode means the disk can be attached to multiple instances in RW mode. + * READ_ONLY_SINGLE: The AccessMode means the disk can be attached to multiple instances in RO mode. + The AccessMode is only valid for Hyperdisk disk types. required: false immutable: false default_from_api: true diff --git a/mmv1/products/compute/RegionDisk.yaml b/mmv1/products/compute/RegionDisk.yaml index df6fa60f6092..1955f37b3e09 100644 --- a/mmv1/products/compute/RegionDisk.yaml +++ b/mmv1/products/compute/RegionDisk.yaml @@ -84,6 +84,11 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-region-disk%s", context["random_suffix"])' vars: region_disk_name: 'my-region-features-disk' + - name: 'region_disk_hyperdisk_balanced_ha_write_many' + primary_resource_id: 'primary' + primary_resource_name: 'fmt.Sprintf("tf-test-my-region-disk%s", context["random_suffix"])' + vars: + region_disk_name: 'my-region-hyperdisk' parameters: - name: 'region' type: ResourceRef @@ -376,6 +381,20 @@ properties: description: 'An applicable license URI' resource: 'License' imports: 'selfLink' + - name: 'accessMode' + type: String + description: | + The access mode of the disk. + For example: + * READ_WRITE_SINGLE: The default AccessMode, means the disk can be attached to single instance in RW mode. + * READ_WRITE_MANY: The AccessMode means the disk can be attached to multiple instances in RW mode. + * READ_ONLY_SINGLE: The AccessMode means the disk can be attached to multiple instances in RO mode. + The AccessMode is only valid for Hyperdisk disk types. 
+ required: false + immutable: false + default_from_api: true + update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=accessMode' + update_verb: 'PATCH' virtual_fields: - name: 'create_snapshot_before_destroy' type: Boolean diff --git a/mmv1/templates/terraform/examples/region_disk_hyperdisk_balanced_ha_write_many.tf.tmpl b/mmv1/templates/terraform/examples/region_disk_hyperdisk_balanced_ha_write_many.tf.tmpl new file mode 100644 index 000000000000..f18b782819de --- /dev/null +++ b/mmv1/templates/terraform/examples/region_disk_hyperdisk_balanced_ha_write_many.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_compute_region_disk" "primary" { + name = "{{index $.Vars "region_disk_name"}}" + type = "hyperdisk-balanced-high-availability" + region = "us-central1" + replica_zones = ["us-central1-a", "us-central1-f"] + access_mode = "READ_WRITE_MANY" +} From 04ef399378e5ddbb624b68c6d05cf148f53bcb82 Mon Sep 17 00:00:00 2001 From: G-jackyli Date: Wed, 25 Jun 2025 14:32:01 -0700 Subject: [PATCH 432/884] Introduce confidential_instance_type into confidential_nodes config (#14190) --- .../services/container/node_config.go.tmpl | 10 +++ .../resource_container_cluster.go.tmpl | 16 ++++ .../resource_container_cluster_meta.yaml.tmpl | 4 + ...source_container_cluster_migratev1.go.tmpl | 8 ++ .../resource_container_cluster_test.go.tmpl | 80 +++++++++---------- ...esource_container_node_pool_meta.yaml.tmpl | 2 + .../resource_container_node_pool_test.go.tmpl | 51 +++++++----- .../docs/r/container_cluster.html.markdown | 6 ++ 8 files changed, 115 insertions(+), 62 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 3a969bc4829d..3a2f2a03b429 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -838,6 +838,14 @@ func schemaNodeConfig() *schema.Schema { Required: 
true, Description: `Whether Confidential Nodes feature is enabled for all nodes in this pool.`, }, + "confidential_instance_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + DiffSuppressFunc: suppressDiffForConfidentialNodes, + Description: `Defines the type of technology used by the confidential node.`, + ValidateFunc: validation.StringInSlice([]string{"SEV", "SEV_SNP", "TDX"}, false), + }, }, }, }, @@ -1633,6 +1641,7 @@ func expandConfidentialNodes(configured interface{}) *container.ConfidentialNode config := l[0].(map[string]interface{}) return &container.ConfidentialNodes{ Enabled: config["enabled"].(bool), + ConfidentialInstanceType: config["confidential_instance_type"].(string), } } @@ -2149,6 +2158,7 @@ func flattenConfidentialNodes(c *container.ConfidentialNodes) []map[string]inter if c != nil { result = append(result, map[string]interface{}{ "enabled": c.Enabled, + "confidential_instance_type": c.ConfidentialInstanceType, }) } return result diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index b5e37af2d212..751b4ce29792 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -143,6 +143,14 @@ var ( } return false }) + + suppressDiffForConfidentialNodes = schema.SchemaDiffSuppressFunc(func(k, oldValue, newValue string, d *schema.ResourceData) bool { + k = strings.Replace(k, "confidential_instance_type", "enabled", 1) + if v, _ := d.Get(k).(bool); v { + return oldValue == "SEV" && newValue == "" + } + return false + }) ) // Defines default nodel pool settings for the entire cluster. 
These settings are @@ -1410,6 +1418,14 @@ func ResourceContainerCluster() *schema.Resource { ForceNew: true, Description: `Whether Confidential Nodes feature is enabled for all nodes in this cluster.`, }, + "confidential_instance_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + DiffSuppressFunc: suppressDiffForConfidentialNodes, + Description: `Defines the type of technology used by the confidential node.`, + ValidateFunc: validation.StringInSlice([]string{"SEV", "SEV_SNP", "TDX"}, false), + }, }, }, }, diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl index 22079db2965f..17e3eba15d21 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl @@ -99,6 +99,7 @@ fields: - field: 'cluster_telemetry.type' {{- end }} - field: 'confidential_nodes.enabled' + - field: 'confidential_nodes.confidential_instance_type' - field: 'control_plane_endpoints_config.dns_endpoint_config.allow_external_traffic' - field: 'control_plane_endpoints_config.dns_endpoint_config.endpoint' - field: 'cost_management_config.enabled' @@ -219,6 +220,7 @@ fields: - field: 'node_config.advanced_machine_features.threads_per_core' - field: 'node_config.boot_disk_kms_key' - field: 'node_config.confidential_nodes.enabled' + - field: 'node_config.confidential_nodes.confidential_instance_type' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.gcp_secret_manager_certificate_config.secret_uri' - field: 'node_config.containerd_config.private_registry_access_config.enabled' @@ -364,6 +366,8 @@ fields: api_field: 
'node_pools.config.boot_disk_kms_key' - field: 'node_pool.node_config.confidential_nodes.enabled' api_field: 'node_pools.config.confidential_nodes.enabled' + - field: 'node_pool.node_config.confidential_nodes.confidential_instance_type' + api_field: 'node_pools.config.confidential_nodes.confidential_instance_type' - field: 'node_pool.node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' api_field: 'node_pools.config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' - field: 'node_pool.node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.gcp_secret_manager_certificate_config.secret_uri' diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl index a1af95014872..ad5b0a014dd1 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl @@ -1086,6 +1086,14 @@ func resourceContainerClusterResourceV1() *schema.Resource { ForceNew: true, Description: `Whether Confidential Nodes feature is enabled for all nodes in this cluster.`, }, + "confidential_instance_type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + DiffSuppressFunc: suppressDiffForConfidentialNodes, + Description: `Defines the type of technology used by the confidential node.`, + ValidateFunc: validation.StringInSlice([]string{"SEV", "SEV_SNP", "TDX"}, false), + }, }, }, }, diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 7877ef3a89cd..c7a2363fe240 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -403,7 +403,7 @@ func TestAccContainerCluster_withConfidentialNodes(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName), + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "", "n2d-standard-2"), }, { ResourceName: "google_container_cluster.confidential_nodes", @@ -412,7 +412,7 @@ func TestAccContainerCluster_withConfidentialNodes(t *testing.T) { ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_disableConfidentialNodes(clusterName, npName, networkName, subnetworkName), + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, true, "", "n2d-standard-2"), }, { ResourceName: "google_container_cluster.confidential_nodes", @@ -421,14 +421,32 @@ func TestAccContainerCluster_withConfidentialNodes(t *testing.T) { ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName), - }, + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "SEV", "n2d-standard-2"), + }, + { + ResourceName: "google_container_cluster.confidential_nodes", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, { - ResourceName: "google_container_cluster.confidential_nodes", - ImportState: true, - ImportStateVerify: true, + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "SEV_SNP", "n2d-standard-2"), + }, + { + ResourceName: 
"google_container_cluster.confidential_nodes", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, + }, + { + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "TDX", "c3-standard-4"), + }, + { + ResourceName: "google_container_cluster.confidential_nodes", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, }, }) } @@ -7109,60 +7127,36 @@ resource "google_container_cluster" "filtered_notification_config" { `, topic, topic, clusterName, topic, networkName, subnetworkName) } -func testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "confidential_nodes" { - name = "%s" - location = "us-central1-a" - release_channel { - channel = "RAPID" - } - - node_pool { - name = "%s" - initial_node_count = 1 - node_config { - machine_type = "n2d-standard-2" // can't be e2 because Confidential Nodes require AMD CPUs - } - } - - confidential_nodes { - enabled = true - } - network = "%s" - subnetwork = "%s" - - deletion_protection = false -} -`, clusterName, npName, networkName, subnetworkName) -} +func testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName string, enable bool, confidentialInstanceType, machineType string) string { + confInsTypeString := "" + if confidentialInstanceType != "" { + confInsTypeString = fmt.Sprintf(`confidential_instance_type = "%s"`, confidentialInstanceType) + } -func testAccContainerCluster_disableConfidentialNodes(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` resource "google_container_cluster" "confidential_nodes" { name = "%s" location = "us-central1-a" - release_channel { - channel = "RAPID" - } node_pool { name = "%s" initial_node_count = 1 node_config { - machine_type = 
"n2d-standard-2" + machine_type = "%s" } } confidential_nodes { - enabled = false + enabled = %t + %s } + network = "%s" subnetwork = "%s" deletion_protection = false } -`, clusterName, npName, networkName, subnetworkName) +`, clusterName, npName, machineType, enable, confInsTypeString, networkName, subnetworkName) } func testAccContainerCluster_withLocalSsdEncryptionMode(clusterName, npName, networkName, subnetworkName, mode string) string { @@ -13982,4 +13976,4 @@ resource "google_container_cluster" "primary" { } } `, name, networkName, subnetworkName, config) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl index 21974f355452..c5462f2aff53 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl @@ -47,6 +47,8 @@ fields: api_field: 'config.boot_disk_kms_key' - field: 'node_config.confidential_nodes.enabled' api_field: 'config.confidential_nodes.enabled' + - field: 'node_config.confidential_nodes.confidential_instance_type' + api_field: 'config.confidential_nodes.confidential_instance_type' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' api_field: 'config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.gcp_secret_manager_certificate_config.secret_uri' diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index ba3ba81b1728..0cd39c7a7b88 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -4516,7 +4516,7 @@ func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, true), + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "", "n2d-standard-2"), }, { ResourceName: "google_container_node_pool.np", @@ -4524,7 +4524,7 @@ func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false), + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, true, "", "n2d-standard-2"), }, { ResourceName: "google_container_node_pool.np", @@ -4532,29 +4532,41 @@ func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, true), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "SEV", "n2d-standard-2"), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + },{ + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "SEV_SNP", "n2d-standard-2"), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + },{ + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, 
networkName, subnetworkName, false, "TDX", "c3-standard-4"), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, }, }) } -func testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName string, confidential bool) string { +func testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName string, enable bool, confidentialInstanceType, machineType string) string { + confInsTypeString := "" + if confidentialInstanceType != "" { + confInsTypeString = fmt.Sprintf(`confidential_instance_type = "%s"`, confidentialInstanceType) + } return fmt.Sprintf(` resource "google_container_cluster" "cluster" { name = "%s" location = "us-central1-a" initial_node_count = 1 - node_config { - confidential_nodes { - enabled = false - } - machine_type = "n2-standard-2" - } deletion_protection = false network = "%s" subnetwork = "%s" @@ -4566,13 +4578,14 @@ resource "google_container_node_pool" "np" { cluster = google_container_cluster.cluster.name initial_node_count = 1 node_config { - machine_type = "n2d-standard-2" // can't be e2 because Confidential Nodes require AMD CPUs + machine_type = "%s" confidential_nodes { - enabled = "%t" + enabled = %t + %s } } } -`, clusterName, networkName, subnetworkName, np, confidential) +`, clusterName, networkName, subnetworkName, np, machineType, enable, confInsTypeString) } func TestAccContainerNodePool_withLocalSsdEncryptionMode(t *testing.T) { diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 931a758b1a17..5339da1358b8 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1052,6 +1052,9 @@ sole_tenant_config { * `enabled` (Required) - Enable Confidential GKE Nodes for this node pool, to enforce 
encryption of data in-use. +* `confidential_instance_type` (Optional) - Defines the type of technology used + by the confidential node. + The `node_affinity` block supports: * `key` (Required) - The default or custom node affinity label key name. @@ -1213,6 +1216,9 @@ notification_config { * `enabled` (Required) - Enable Confidential GKE Nodes for this cluster, to enforce encryption of data in-use. +* `confidential_instance_type` (Optional) - Defines the type of technology used + by the confidential node. + The `pod_security_policy_config` block supports: * `enabled` (Required) - Enable the PodSecurityPolicy controller for this cluster. From a2e12df95d1d5ebd4aff1d5f6b5534184046528b Mon Sep 17 00:00:00 2001 From: Simran Kaur <92652149+itssimrank@users.noreply.github.com> Date: Wed, 25 Jun 2025 15:27:00 -0700 Subject: [PATCH 433/884] Add support for accelerated upgrade feature flags (#14265) Co-authored-by: Simran Kaur --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 4 +- .../resource_container_cluster.go.tmpl | 80 +++++++++++++++++++ .../resource_container_cluster_test.go.tmpl | 43 ++++++++++ .../docs/r/container_cluster.html.markdown | 10 +++ 5 files changed, 136 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index d4778ece3f3f..a4caeeaaeaf8 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -34,7 +34,7 @@ require ( golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 golang.org/x/net v0.41.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.237.0 + google.golang.org/api v0.238.0 google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 google.golang.org/grpc v1.73.0 google.golang.org/protobuf v1.36.6 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index c3842716dd87..f71f3717050b 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ 
-381,8 +381,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.237.0 h1:MP7XVsGZesOsx3Q8WVa4sUdbrsTvDSOERd3Vh4xj/wc= -google.golang.org/api v0.237.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= +google.golang.org/api v0.238.0 h1:+EldkglWIg/pWjkq97sd+XxH7PxakNYoe/rkSTbnvOs= +google.golang.org/api v0.238.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 751b4ce29792..ec017eccc527 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2107,6 +2107,24 @@ func ResourceContainerCluster() *schema.Resource { }, }, + "gke_auto_upgrade_config": { + Type: schema.TypeList, + Optional: true, + Computed: true, + Description: `Configuration options for the auto-upgrade patch type feature, which provide more control over the speed of automatic upgrades of your GKE clusters.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "patch_mode": { + Type: schema.TypeString, + Required: true, + Description: `The selected auto-upgrade patch type. 
Accepted values are: +* ACCELERATED: Upgrades to the latest available patch version in a given minor and release channel.`, + }, + }, + }, + }, + "tpu_ipv4_cidr_block": { Computed: true, Type: schema.TypeString, @@ -2613,6 +2631,7 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er ForceSendFields: []string{"Enabled"}, }, ReleaseChannel: expandReleaseChannel(d.Get("release_channel")), + GkeAutoUpgradeConfig: expandGkeAutoUpgradeConfig(d.Get("gke_auto_upgrade_config")), {{- if ne $.TargetVersionName "ga" }} ClusterTelemetry: expandClusterTelemetry(d.Get("cluster_telemetry")), {{- end }} @@ -3173,6 +3192,9 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("release_channel", flattenReleaseChannel(cluster.ReleaseChannel)); err != nil { return err } + if err := d.Set("gke_auto_upgrade_config", flattenGkeAutoUpgradeConfig(cluster.GkeAutoUpgradeConfig)); err != nil { + return err + } if err := d.Set("notification_config", flattenNotificationConfig(cluster.NotificationConfig)); err != nil { return err } @@ -3608,6 +3630,38 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s Release Channel has been updated to %#v", d.Id(), req.Update.DesiredReleaseChannel) } + if d.HasChange("gke_auto_upgrade_config") { + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + GkeAutoUpgradeConfig: expandGkeAutoUpgradeConfig(d.Get("gke_auto_upgrade_config")), + }, + } + updateF := func() error { + log.Println("[DEBUG] updating gke_auto_upgrade_config") + name := containerClusterFullName(project, location, clusterName) + clusterUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.Update(name, req) + if config.UserProjectOverride { + clusterUpdateCall.Header().Add("X-Goog-User-Project", project) + } + op, err := clusterUpdateCall.Do() + if err != nil { + return err + } + + // Wait until it's updated 
+ err = ContainerOperationWait(config, op, project, location, "updating GKE Auto Upgrade Config", userAgent, d.Timeout(schema.TimeoutUpdate)) + log.Println("[DEBUG] done updating gke_auto_upgrade_config") + return err + } + + // Call update serially. + if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s GKE Auto Upgrade Config has been updated to %#v", d.Id(), req.Update.GkeAutoUpgradeConfig) + } + if d.HasChange("enable_intranode_visibility") { enabled := d.Get("enable_intranode_visibility").(bool) req := &container.UpdateClusterRequest{ @@ -5930,6 +5984,17 @@ func expandReleaseChannel(configured interface{}) *container.ReleaseChannel { } } +func expandGkeAutoUpgradeConfig(configured interface{}) *container.GkeAutoUpgradeConfig { + l := configured.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil + } + config := l[0].(map[string]interface{}) + return &container.GkeAutoUpgradeConfig{ + PatchMode: config["patch_mode"].(string), + } +} + {{ if ne $.TargetVersionName `ga` -}} func expandClusterTelemetry(configured interface{}) *container.ClusterTelemetry { l := configured.([]interface{}) @@ -6681,6 +6746,21 @@ func flattenReleaseChannel(c *container.ReleaseChannel) []map[string]interface{} return result } +func flattenGkeAutoUpgradeConfig(c *container.GkeAutoUpgradeConfig) []map[string]interface{} { + if c == nil { + return nil + } + + result := []map[string]interface{}{} + if c.PatchMode != "" { + result = append(result, map[string]interface{}{ + "patch_mode": c.PatchMode, + }) + } + + return result +} + {{ if ne $.TargetVersionName `ga` -}} func flattenClusterTelemetry(c *container.ClusterTelemetry) []map[string]interface{} { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index c7a2363fe240..04f2fe0c5f9b 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -1316,6 +1316,31 @@ func TestAccContainerCluster_withInvalidReleaseChannel(t *testing.T) { }) } +func TestAccContainerCluster_withAcceleratedGkeAutoUpgradeConfig(t *testing.T) { + t.Parallel() + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withGkeAutoUpgradeConfig(clusterName, "ACCELERATED", networkName, subnetworkName), + }, + { + ResourceName: "google_container_cluster.with_gke_auto_upgrade_config", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + }, + }, + }) +} + {{ if ne $.TargetVersionName `ga` -}} func TestAccContainerCluster_withTelemetryEnabled(t *testing.T) { t.Parallel() @@ -7471,6 +7496,24 @@ resource "google_container_cluster" "with_release_channel" { `, clusterName, channel, networkName, subnetworkName) } +func testAccContainerCluster_withGkeAutoUpgradeConfig(clusterName, patchMode, networkName, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_gke_auto_upgrade_config" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + + gke_auto_upgrade_config { + patch_mode = "%s" + } + network = "%s" + subnetwork = "%s" + + deletion_protection = false +} +`, clusterName, patchMode, networkName, subnetworkName) +} + {{ if ne 
$.TargetVersionName `ga` -}} func testAccContainerCluster_withTelemetryEnabled(clusterName, telemetryType, networkName, subnetworkName string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 5339da1358b8..7fd4edca2e97 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -330,6 +330,10 @@ field from your config will cause Terraform to stop managing your cluster's release channel, but will not unenroll it. Instead, use the `"UNSPECIFIED"` channel. Structure is [documented below](#nested_release_channel). +* `gke_auto_upgrade_config` - (Optional) +Configuration options for the auto-upgrade patch type feature, which provide more control over the speed of automatic upgrades of your GKE clusters. +Structure is [documented below](#nested_gke_auto_upgrade_config). + * `remove_default_node_pool` - (Optional) If `true`, deletes the default node pool upon cluster creation. If you're using `google_container_node_pool` resources with no default node pool, this should be set to `true`, alongside @@ -1335,6 +1339,12 @@ not. * STABLE: Every few months upgrade cadence; Production users who need stability above all else, and for whom frequent upgrades are too risky. * EXTENDED: GKE provides extended support for Kubernetes minor versions through the Extended channel. With this channel, you can stay on a minor version for up to 24 months. +The `gke_auto_upgrade_config` block supports: + +* `patch_mode` - (Required) The selected patch mode. + Accepted values are: + * ACCELERATED: Upgrades to the latest available patch version in a given minor and release channel. 
+ The `cost_management_config` block supports: * `enabled` (Optional) - Whether to enable the [cost allocation](https://cloud.google.com/kubernetes-engine/docs/how-to/cost-allocations) feature. From 5d0add6a0f06d3f9c04bf049f33f801bc6069e5a Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 25 Jun 2025 15:56:25 -0700 Subject: [PATCH 434/884] Make reassign-reviewer regex less strict (#14359) --- .github/workflows/reassign-reviewer.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/reassign-reviewer.yml b/.github/workflows/reassign-reviewer.yml index 5581c8519865..8d20c6ba380a 100644 --- a/.github/workflows/reassign-reviewer.yml +++ b/.github/workflows/reassign-reviewer.yml @@ -22,7 +22,7 @@ jobs: uses: actions-ecosystem/action-regex-match@d50fd2e7a37d0e617aea3d7ada663bd56862b9cc # v2.0.2 with: text: ${{ github.event.comment.body }} - regex: '(?:^|\n|\r)@modular-magician reassign-reviewer ?@?([a-zA-Z0-9-]+)?(?:$|\n|\r)' + regex: '.*@modular-magician reassign[- ]+review[^@\\n\\r]*@?([a-zA-Z0-9-_]*).*' - name: Checkout Repository if: steps.read-comment.outputs.match != '' uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 @@ -33,8 +33,6 @@ jobs: uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: go-version: '^1.21' - # Disable caching for now due to issues with large provider dependency caches - cache: false - name: Build magician if: steps.read-comment.outputs.match != '' run: | From 415eb7bdbdce010e87914663d8dad6fc50d259e0 Mon Sep 17 00:00:00 2001 From: Somesh Mohan <68313271+somesh-m@users.noreply.github.com> Date: Thu, 26 Jun 2025 09:04:27 +0530 Subject: [PATCH 435/884] fix for scaling down to zero replica in Valkey (#14219) --- mmv1/products/memorystore/Instance.yaml | 1 + .../resource_memorystore_instance_test.go | 13 +++++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml 
b/mmv1/products/memorystore/Instance.yaml index 242824d3321f..5236e08234d0 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -265,6 +265,7 @@ properties: "Optional. Number of replica nodes per shard. If omitted the default is 0 replicas. " default_from_api: true + send_empty_value: true - name: 'authorizationMode' type: String description: diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go index 0e02d4c4b5af..d766851991e8 100644 --- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go @@ -14,7 +14,7 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -// Validate that replica count is updated for the instance +// Validate that replica count is updated for the instance: 1->2->0 func TestAccMemorystoreInstance_updateReplicaCount(t *testing.T) { t.Parallel() @@ -43,9 +43,18 @@ func TestAccMemorystoreInstance_updateReplicaCount(t *testing.T) { ImportState: true, ImportStateVerify: true, }, + { + // update the replica count to 0 + Config: createOrUpdateMemorystoreInstance(&InstanceParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: true, zoneDistributionMode: "MULTI_ZONE", deletionProtectionEnabled: false, maintenanceDay: "MONDAY", maintenanceHours: 1, maintenanceMinutes: 0, maintenanceSeconds: 0, maintenanceNanos: 0}), + }, + { + ResourceName: "google_memorystore_instance.test", + ImportState: true, + ImportStateVerify: true, + }, { // clean up the resource - Config: createOrUpdateMemorystoreInstance(&InstanceParams{name: name, replicaCount: 2, shardCount: 3, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE", deletionProtectionEnabled: false, maintenanceDay: "MONDAY", maintenanceHours: 1, 
maintenanceMinutes: 0, maintenanceSeconds: 0, maintenanceNanos: 0}), + Config: createOrUpdateMemorystoreInstance(&InstanceParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE", deletionProtectionEnabled: false, maintenanceDay: "MONDAY", maintenanceHours: 1, maintenanceMinutes: 0, maintenanceSeconds: 0, maintenanceNanos: 0}), }, }, }) From e98be7249e853345ec49a563440e9ca0c339f17c Mon Sep 17 00:00:00 2001 From: sahil-mahajan-google Date: Thu, 26 Jun 2025 20:04:59 +0530 Subject: [PATCH 436/884] Add delay b/w storage pool creation & volume creation in backup test (#14312) --- .../services/netapp/resource_netapp_backup_test.go | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index 210adf935100..b71381591517 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -20,6 +20,9 @@ func TestAccNetappBackup_NetappBackupFull_update(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, Steps: []resource.TestStep{ { Config: testAccNetappBackup_NetappBackupFromVolumeSnapshot(context), @@ -57,6 +60,11 @@ resource "google_netapp_storage_pool" "default" { network = data.google_compute_network.default.id } +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} + resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location @@ -116,6 +124,11 @@ resource "google_netapp_storage_pool" "default" { network 
= data.google_compute_network.default.id } +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} + resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location From e65aad71f8b50540d57dae21af69e3ada69b4400 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 26 Jun 2025 10:22:27 -0700 Subject: [PATCH 437/884] tgc-revival: support compute disk (#14375) Co-authored-by: Thomas Rodgers --- mmv1/api/type.go | 3 ++ mmv1/products/compute/Disk.yaml | 2 + mmv1/provider/template_data.go | 2 + .../cai2hcl/resource_converter.go.tmpl | 9 ++-- ..._array_resourceref_with_validation.go.tmpl | 41 +++++++++++++++++++ .../tfplan2cai/resource_converter.go.tmpl | 12 +++--- .../services/compute/compute_instance.go | 5 +-- 7 files changed, 62 insertions(+), 12 deletions(-) create mode 100644 mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 20377ebe56c3..07894d9a1c22 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -310,6 +310,9 @@ type Type struct { // creating the function header, `property` and `prefix` are available, // just as they are in the standard flattener template. CustomTgcFlatten string `yaml:"custom_tgc_flatten,omitempty"` + + // If true, we will include the empty value of this attribute in CAI asset. 
+ IncludeEmptyValueInCai bool `yaml:"include_empty_value_in_cai,omitempty"` } const MAX_NAME = 20 diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index e9f2a2a96bf7..3de58de78310 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -50,6 +50,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true iam_policy: parent_resource_attribute: 'name' base_url: 'projects/{{project}}/zones/{{zone}}/disks/{{name}}' @@ -458,6 +459,7 @@ properties: Note: Only supported on hyperdisk skus, disk_encryption_key is required when setting to true required: false default_from_api: true + include_empty_value_in_cai: true - name: 'multiWriter' type: Boolean description: | diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 2adcaa281edf..208790894be0 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -194,9 +194,11 @@ func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, r templates := []string{ templatePath, "templates/terraform/expand_property_method.go.tmpl", + "templates/terraform/expand_resource_ref.tmpl", "templates/terraform/schema_property.go.tmpl", "templates/terraform/schema_subresource.go.tmpl", "templates/terraform/flatten_property_method.go.tmpl", + "templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) 
} diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 49e7ed3a9729..f104b9ffeeb7 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -45,8 +45,8 @@ import ( {{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} {{- $apiVersion := $.CaiApiVersion $productBackendName $caiProductBaseUrl}} -{{if $.CustomCode.Constants -}} - {{- $.CustomTemplate $.CustomCode.Constants true -}} +{{- range $prop := $.AllUserProperties }} +{{template "SchemaSubResource" $prop}} {{- end}} const {{ $.ResourceName -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" @@ -82,6 +82,7 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse return nil, fmt.Errorf("asset resource data is nil") } + var err error res := asset.Resource.Data config := utils.NewConfig() d := &schema.ResourceData{} @@ -89,14 +90,14 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse hclData := make(map[string]interface{}) {{ if $.CustomCode.TgcDecoder -}} - res, err = resource{{ $.ResourceName -}}TgcDecoder(d, meta, res) + res, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res) if err != nil { return nil, err } {{ end}} {{ if $.CustomCode.Decoder -}} - res, err = resource{{ $.ResourceName -}}Decoder(d, meta, res) + res, err = resource{{ $.ResourceName -}}Decoder(d, config, res) if err != nil { return nil, err } diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl new file mode 100644 index 000000000000..e491e591ec05 --- /dev/null +++ b/mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl @@ -0,0 +1,41 @@ +{{/* + The license inside this block applies to this file + Copyright 
2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +{{- define "expandArrayResourcerefWithValidation" }} +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { +{{- if $.IsSet }} + vSet, ok := v.(*schema.Set) + if !ok || vSet == nil { + return nil, fmt.Errorf("invalid type for v: %T, expected *schema.Set", v) + } + v = vSet.List() +{{- end }} + vSlice, ok := v.([]interface{}) + if !ok || vSlice == nil { + return nil, fmt.Errorf("invalid type for v: %T, expected []interface{}", v) + } + req := make([]interface{}, 0, len(vSlice)) + for _, raw := range vSlice { + if raw == nil { + return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: nil") + } + f, err := {{ template "expandResourceRef" dict "VarName" "raw.(string)" "ResourceRef" $.ItemType.ResourceRef "ResourceType" $.ItemType.ResourceType}} + if err != nil { + return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) + } + + fullUrl := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) + req = append(req, fullUrl) + } + return req, nil +} +{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index fed089050f72..93f5ff14600b 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ 
b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -89,16 +89,16 @@ func Resource{{ $.ResourceName -}}() *schema.Resource { func ResourceConverter{{ $.ResourceName -}}() cai.ResourceConverter { return cai.ResourceConverter{ - Convert: Get{{ $.ResourceName -}}CaiObject, + Convert: Get{{ $.ResourceName -}}CaiAssets, } } -func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { +func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { name, err := cai.AssetName(d, config, "{{ $.CaiAssetNameTemplate $productBackendName }}") if err != nil { return []caiasset.Asset{}, err } - if obj, err := Get{{ $.ResourceName -}}ApiObject(d, config); err == nil { + if obj, err := Get{{ $.ResourceName -}}CaiObject(d, config); err == nil { location, _ := tpgresource.GetLocation(d, config) return []caiasset.Asset{{"{{"}} Name: name, @@ -116,7 +116,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf } } -func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { +func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} {{- if $prop.FlattenObject }} @@ -126,7 +126,7 @@ func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, conf {{- end}} if err != nil { return nil, err -{{- if not $prop.SendEmptyValue }} +{{- if and (not $prop.SendEmptyValue) (not $prop.IncludeEmptyValueInCai) }} } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { {{- else }} } else if v, ok := d.GetOkExists("{{underscore 
$prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { @@ -164,6 +164,8 @@ func resource{{ $.ResourceName -}}TgcEncoder(d tpgresource.TerraformResourceData {{ range $prop := $.SettableProperties }} {{ if $prop.CustomTgcExpand }} {{- $prop.CustomTemplate $prop.CustomTgcExpand false -}} + {{ else if and ($prop.IsA "Array") ($prop.ItemType.IsA "ResourceRef")}} + {{- template "expandArrayResourcerefWithValidation" $prop -}} {{ else }} {{- template "expandPropertyMethod" $prop -}} {{ end }} diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go index 3d1e4f46f70c..3eea13261684 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go @@ -20,7 +20,6 @@ import ( ) const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" -const ComputeDiskAssetType string = "compute.googleapis.com/Disk" var ( advancedMachineFeaturesKeys = []string{ @@ -1440,7 +1439,7 @@ func ResourceConverterComputeInstance() cai.ResourceConverter { func GetComputeInstanceAndDisksCaiObjects(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { if instanceAsset, err := GetComputeInstanceCaiObject(d, config); err == nil { assets := []caiasset.Asset{instanceAsset} - if diskAsset, err := GetComputeDiskCaiObject(d, config); err == nil { + if diskAsset, err := GetComputeInstanceDiskCaiObject(d, config); err == nil { assets = append(assets, diskAsset) return assets, nil } else { @@ -1829,7 +1828,7 @@ func expandStoragePool(v interface{}, d tpgresource.TerraformResourceData, confi return nil, nil } -func GetComputeDiskCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { +func GetComputeInstanceDiskCaiObject(d 
tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { name, err := cai.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/disks/{{name}}") if err != nil { return caiasset.Asset{}, err From 2f0f98c0800243b0684858bd04229c89503914d8 Mon Sep 17 00:00:00 2001 From: Luca Prete Date: Thu, 26 Jun 2025 19:29:03 +0200 Subject: [PATCH 438/884] Add support for secure tags to (hierarchical) FirewallPolicyRule (#14021) Co-authored-by: Luca Prete --- mmv1/products/compute/FirewallPolicyRule.yaml | 57 +++++++ .../examples/firewall_policy_rule.tf.tmpl | 17 ++ .../firewall_policy_rule_secure_tags.tf.tmpl | 60 +++++++ ...ource_compute_firewall_policy_rule_test.go | 152 ++++++++++++++++++ 4 files changed, 286 insertions(+) create mode 100644 mmv1/templates/terraform/examples/firewall_policy_rule_secure_tags.tf.tmpl diff --git a/mmv1/products/compute/FirewallPolicyRule.yaml b/mmv1/products/compute/FirewallPolicyRule.yaml index 5196c1d57f63..230625d28de7 100644 --- a/mmv1/products/compute/FirewallPolicyRule.yaml +++ b/mmv1/products/compute/FirewallPolicyRule.yaml @@ -50,6 +50,8 @@ examples: address_group: 'address-group' folder: 'folder' fw_policy: 'fw-policy' + tag_key: 'tag-key' + tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' service_acct: 'SERVICE_ACCT' @@ -59,9 +61,20 @@ examples: folder: 'folder' fw_policy: 'fw-policy' network: 'network' + tag_key: 'tag-key' + tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' min_version: beta + - name: 'firewall_policy_rule_secure_tags' + primary_resource_id: 'primary' + vars: + folder: 'folder' + fw_policy: 'fw-policy' + tag_key: 'tag-key' + tag_value: 'tag-value' + test_env_vars: + org_id: 'ORG_ID' parameters: - name: 'firewallPolicy' type: ResourceRef @@ -221,6 +234,27 @@ properties: Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. 
item_type: type: String + - name: 'srcSecureTags' + type: Array + send_empty_value: true + description: | + List of secure tag values, which should be matched at the source of the traffic. For INGRESS rule, if all the srcSecureTag are INEFFECTIVE, and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the secure tag, created with TagManager's TagValue API. + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + - name: 'state' + type: Enum + description: | + State of the secure tag, either EFFECTIVE or INEFFECTIVE. A secure tag is INEFFECTIVE when it is deleted or its network is deleted. + output: true + enum_values: + - 'EFFECTIVE' + - 'INEFFECTIVE' - name: 'action' type: String description: 'The Action to perform when the client connection triggers the rule. Valid actions are "allow", "deny", "goto_next" and "apply_security_profile_group".' @@ -274,6 +308,29 @@ properties: A list of service accounts indicating the sets of instances that are applied with this rule. item_type: type: String + - name: 'targetSecureTags' + type: Array + send_empty_value: true + description: | + A list of secure tags that controls which instances the firewall rule applies to. + If targetSecureTag are specified, then the firewall rule applies only to instances in the VPC network that have one of those EFFECTIVE secure tags, if all the targetSecureTag are in INEFFECTIVE state, then this rule will be ignored. + targetSecureTag may not be set at the same time as targetServiceAccounts. If neither targetServiceAccounts nor targetSecureTag are specified, the firewall rule applies to all instances on the specified network. Maximum number of target secure tags allowed is 256. 
+ item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the secure tag, created with TagManager's TagValue API. + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + - name: 'state' + type: Enum + description: | + State of the secure tag, either EFFECTIVE or INEFFECTIVE. A secure tag is INEFFECTIVE when it is deleted or its network is deleted. + output: true + enum_values: + - 'EFFECTIVE' + - 'INEFFECTIVE' - name: 'disabled' type: Boolean send_empty_value: true diff --git a/mmv1/templates/terraform/examples/firewall_policy_rule.tf.tmpl b/mmv1/templates/terraform/examples/firewall_policy_rule.tf.tmpl index 05332291a1fd..2a49c5291bb5 100644 --- a/mmv1/templates/terraform/examples/firewall_policy_rule.tf.tmpl +++ b/mmv1/templates/terraform/examples/firewall_policy_rule.tf.tmpl @@ -49,3 +49,20 @@ resource "google_compute_firewall_policy_rule" "{{$.PrimaryResourceId}}" { } } } + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + purpose = "GCE_FIREWALL" + short_name = "{{index $.Vars "tag_key"}}" + + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." 
+ parent = google_tags_tag_key.basic_key.id + short_name = "{{index $.Vars "tag_value"}}" +} diff --git a/mmv1/templates/terraform/examples/firewall_policy_rule_secure_tags.tf.tmpl b/mmv1/templates/terraform/examples/firewall_policy_rule_secure_tags.tf.tmpl new file mode 100644 index 000000000000..8e5736762c18 --- /dev/null +++ b/mmv1/templates/terraform/examples/firewall_policy_rule_secure_tags.tf.tmpl @@ -0,0 +1,60 @@ +resource "google_folder" "folder" { + display_name = "{{index $.Vars "folder"}}" + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + deletion_protection = false +} + +resource "google_compute_firewall_policy" "default" { + parent = google_folder.folder.id + short_name = "{{index $.Vars "fw_policy"}}" + description = "Resource created for Terraform acceptance testing" +} + +resource "google_compute_firewall_policy_rule" "{{$.PrimaryResourceId}}" { + firewall_policy = google_compute_firewall_policy.default.name + description = "Resource created for Terraform acceptance testing" + priority = 9000 + enable_logging = true + action = "allow" + direction = "INGRESS" + disabled = false + + target_secure_tags { + name = google_tags_tag_value.basic_value.id + } + + match { + src_ip_ranges = ["11.100.0.1/32"] + + src_secure_tags { + name = google_tags_tag_value.basic_value.id + } + + layer4_configs { + ip_protocol = "tcp" + ports = [8080] + } + + layer4_configs { + ip_protocol = "udp" + ports = [22] + } + } +} + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + purpose = "GCE_FIREWALL" + short_name = "{{index $.Vars "tag_key"}}" + + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." 
+ parent = google_tags_tag_key.basic_key.id + short_name = "{{index $.Vars "tag_value"}}" +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go index 0f825d51d481..baf97eb84e78 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go @@ -210,6 +210,45 @@ func TestAccComputeFirewallPolicyRule_disabled_enabled(t *testing.T) { }) } +func TestAccComputeFirewallPolicyRule_secureTags(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + "project_name": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeFirewallPolicyRuleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFirewallPolicyRule_secureTags(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, + }, + { + Config: testAccComputeFirewallPolicyRule_secureTagsUpdate(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, + }, + }, + }) +} + func testAccComputeFirewallPolicyRule_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource 
"google_folder" "folder" { @@ -851,3 +890,116 @@ resource "google_compute_firewall_policy_rule" "default" { } `, context) } + +func testAccComputeFirewallPolicyRule_secureTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_folder" "folder" { + display_name = "tf-test-folder-%{random_suffix}" + parent = "%{org_name}" + deletion_protection = false +} + +resource "google_compute_firewall_policy" "fw_policy" { + parent = google_folder.folder.name + short_name = "tf-test-policy-%{random_suffix}" + description = "Resource created for Terraform acceptance testing" +} + +resource "google_compute_firewall_policy_rule" "fw_policy_rule" { + firewall_policy = google_compute_firewall_policy.fw_policy.id + description = "Resource created for Terraform acceptance testing" + priority = 9000 + enable_logging = true + action = "allow" + direction = "INGRESS" + disabled = false + tls_inspect = false + + match { + src_ip_ranges = ["11.100.0.1/32"] + + src_secure_tags { + name = google_tags_tag_value.basic_value.id + } + + layer4_configs { + ip_protocol = "tcp" + ports = [8080] + } + } +} + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." + parent = "organizations/%{org_id}" + purpose = "GCE_FIREWALL" + short_name = "tf-test-tagkey-%{random_suffix}" + + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." 
+ parent = google_tags_tag_key.basic_key.id + short_name = "tf-test-tagvalue-%{random_suffix}" +} +`, context) +} + +func testAccComputeFirewallPolicyRule_secureTagsUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_folder" "folder" { + display_name = "tf-test-folder-%{random_suffix}" + parent = "%{org_name}" + deletion_protection = false +} + +resource "google_compute_firewall_policy" "fw_policy" { + parent = google_folder.folder.name + short_name = "tf-test-policy-%{random_suffix}" + description = "Resource created for Terraform acceptance testing" +} + +resource "google_compute_firewall_policy_rule" "fw_policy_rule" { + firewall_policy = google_compute_firewall_policy.fw_policy.id + description = "Resource created for Terraform acceptance testing" + priority = 9000 + enable_logging = true + action = "allow" + direction = "INGRESS" + disabled = false + tls_inspect = false + + target_secure_tags { + name = google_tags_tag_value.basic_value.id + } + + match { + src_ip_ranges = ["11.100.0.1/32"] + + layer4_configs { + ip_protocol = "tcp" + ports = [8080] + } + } +} + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." + parent = "organizations/%{org_id}" + purpose = "GCE_FIREWALL" + short_name = "tf-test-tagkey-%{random_suffix}" + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." 
+ parent = google_tags_tag_key.basic_key.id + short_name = "tf-test-tagvalue-%{random_suffix}" +} +`, context) +} From 7443b5e046e31b21c3e4c17792ee1e90cf3878a6 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 26 Jun 2025 20:46:01 +0200 Subject: [PATCH 439/884] alloydb: `machine_type` in the `machine_config` block is not yet GA in the API (#14344) --- mmv1/products/alloydb/Instance.yaml | 1 + ...nstance_test.go => resource_alloydb_instance_test.go.tmpl} | 4 ++++ 2 files changed, 5 insertions(+) rename mmv1/third_party/terraform/services/alloydb/{resource_alloydb_instance_test.go => resource_alloydb_instance_test.go.tmpl} (99%) diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index 069500936ff0..b791d5aeb06f 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -312,6 +312,7 @@ properties: E.g. "n2-highmem-4", "n2-highmem-8", "c4a-highmem-4-lssd". `cpu_count` must match the number of vCPUs in the machine type. 
default_from_api: true + min_version: 'beta' - name: 'clientConnectionConfig' type: NestedObject description: | diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go rename to mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go.tmpl index 72cec88fa08c..89dcdb83bb0f 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go.tmpl @@ -81,7 +81,9 @@ resource "google_alloydb_instance" "default" { machine_config { cpu_count = 4 + {{- if ne $.TargetVersionName "ga" }} machine_type = "n2-highmem-4" + {{ end }} } labels = { @@ -954,7 +956,9 @@ resource "google_alloydb_instance" "default" { instance_type = "PRIMARY" machine_config { cpu_count = 2 + {{- if ne $.TargetVersionName "ga" }} machine_type = "n2-highmem-2" + {{ end }} } psc_instance_config { allowed_consumer_projects = ["${data.google_project.project.number}"] From dcaa49edcf00661c47c667eb5cc9a0986da8510a Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 26 Jun 2025 13:02:48 -0700 Subject: [PATCH 440/884] Remove write-only resource validation warnings (#14385) --- mmv1/products/bigquerydatatransfer/Config.yaml | 1 - mmv1/products/secretmanager/SecretVersion.yaml | 1 - .../validation/bigquery_data_transfer_config.go.tmpl | 1 - mmv1/templates/terraform/validation/secret_version.go.tmpl | 1 - mmv1/third_party/terraform/services/sql/resource_sql_user.go | 4 ---- 5 files changed, 8 deletions(-) delete mode 100644 mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl delete mode 100644 mmv1/templates/terraform/validation/secret_version.go.tmpl diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml 
b/mmv1/products/bigquerydatatransfer/Config.yaml index 8206309eee45..df88222fc7b7 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -40,7 +40,6 @@ custom_code: decoder: 'templates/terraform/decoders/bigquery_data_transfer.go.tmpl' pre_update: 'templates/terraform/pre_update/bigquerydatatransfer_config.tmpl' custom_import: 'templates/terraform/custom_import/bigquery_data_transfer_self_link_as_name_set_location.go.tmpl' - raw_resource_config_validation: 'templates/terraform/validation/bigquery_data_transfer_config.go.tmpl' custom_diff: - 'sensitiveParamCustomizeDiff' - 'paramsCustomizeDiff' diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index ced84c3c1fda..d3e0335ee2bd 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -49,7 +49,6 @@ custom_code: custom_update: 'templates/terraform/custom_update/secret_version.go.tmpl' pre_delete: 'templates/terraform/pre_delete/secret_version_deletion_policy.go.tmpl' custom_import: 'templates/terraform/custom_import/secret_version.go.tmpl' - raw_resource_config_validation: 'templates/terraform/validation/secret_version.go.tmpl' constants: 'templates/terraform/constants/secret_version.go.tmpl' # Sweeper skipped as this resource has customized deletion. 
exclude_sweeper: true diff --git a/mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl b/mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl deleted file mode 100644 index 0f5b6a589974..000000000000 --- a/mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl +++ /dev/null @@ -1 +0,0 @@ -validation.PreferWriteOnlyAttribute(cty.GetAttrPath("sensitive_params").IndexInt(0).GetAttr("secret_access_key"),cty.GetAttrPath("sensitive_params").IndexInt(0).GetAttr("secret_access_key_wo")) diff --git a/mmv1/templates/terraform/validation/secret_version.go.tmpl b/mmv1/templates/terraform/validation/secret_version.go.tmpl deleted file mode 100644 index 95ec5f7ba858..000000000000 --- a/mmv1/templates/terraform/validation/secret_version.go.tmpl +++ /dev/null @@ -1 +0,0 @@ -validation.PreferWriteOnlyAttribute(cty.GetAttrPath("secret_data"),cty.GetAttrPath("secret_data_wo")) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user.go b/mmv1/third_party/terraform/services/sql/resource_sql_user.go index ccac2a5f082d..5fec5c13ceb2 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user.go @@ -62,10 +62,6 @@ func ResourceSqlUser() *schema.Resource { tpgresource.DefaultProviderProject, ), - ValidateRawResourceConfigFuncs: []schema.ValidateRawResourceConfigFunc{ - validation.PreferWriteOnlyAttribute(cty.GetAttrPath("password"), cty.GetAttrPath("password_wo")), - }, - SchemaVersion: 1, MigrateState: resourceSqlUserMigrateState, From 88082dabc5d8a36cd5879300e14bba8987fa2868 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 26 Jun 2025 13:08:56 -0700 Subject: [PATCH 441/884] fix regex for reassign-reviewer (#14386) --- .github/workflows/reassign-reviewer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reassign-reviewer.yml b/.github/workflows/reassign-reviewer.yml index 
8d20c6ba380a..c7d2a2384468 100644 --- a/.github/workflows/reassign-reviewer.yml +++ b/.github/workflows/reassign-reviewer.yml @@ -22,7 +22,7 @@ jobs: uses: actions-ecosystem/action-regex-match@d50fd2e7a37d0e617aea3d7ada663bd56862b9cc # v2.0.2 with: text: ${{ github.event.comment.body }} - regex: '.*@modular-magician reassign[- ]+review[^@\\n\\r]*@?([a-zA-Z0-9-_]*).*' + regex: '.*@modular-magician reassign[- ]+review[^@\n\r]*@?([a-zA-Z0-9-_]*).*' - name: Checkout Repository if: steps.read-comment.outputs.match != '' uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 From 5e1acf32fcfa1845452c5bdf37a83ba5aa6c1f54 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Thu, 26 Jun 2025 13:57:12 -0700 Subject: [PATCH 442/884] Lustre Datasource Docs fix (#14384) --- .../terraform/website/docs/d/lustre_instance.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown b/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown index 4c91149e60c7..95c0139243fe 100644 --- a/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown @@ -13,7 +13,7 @@ Use this data source to get information about a Lustre instance. 
For more inform ```hcl data "google_lustre_instance" "instance" { name = "my-instance" - location = "us-central1" + zone = "us-central1-a" } ``` From dc69c53950557e414468e010038831e9c6d05ef3 Mon Sep 17 00:00:00 2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Thu, 26 Jun 2025 23:14:35 +0200 Subject: [PATCH 443/884] Feature Gap: Implement `resourcePolicies.workloadPolicy` for MIG (#14255) Signed-off-by: Cezary Sobczak --- ...rce_compute_instance_group_manager.go.tmpl | 56 +++ ...ompute_instance_group_manager_test.go.tmpl | 333 ++++++++++++++++++ ...mpute_instance_group_manager.html.markdown | 64 ++++ 3 files changed, 453 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl index a442c66ce425..85ac3d6b44f1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl @@ -1,6 +1,7 @@ package compute import ( + "context" "fmt" "log" "strings" @@ -38,6 +39,7 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { CustomizeDiff: customdiff.All( tpgresource.DefaultProviderProject, tpgresource.DefaultProviderZone, + customdiff.ForceNewIfChange("resource_policies.0.workload_policy", ForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty), ), Schema: map[string]*schema.Schema{ "base_instance_name": { @@ -583,11 +585,31 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { }, }, }, + "resource_policies": { + Type: schema.TypeList, + Optional: true, + Description: `Resource policies for this managed instance group.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "workload_policy": { + Type: schema.TypeString, + Optional: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, + Description: 
`The URL of the workload policy that is specified for this managed instance group. It can be a full or partial URL.`, + }, + }, + }, + }, }, UseJSONNumber: true, } } +func ForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty(_ context.Context, old, new, _ interface{}) bool { + return (old.(string) != "") && (new.(string) == "") +} + func parseUniqueId(s string) (string, string) { splits:= strings.SplitN(s, "?uniqueId=", 2) if len(splits) == 2 { @@ -676,6 +698,7 @@ func resourceComputeInstanceGroupManagerCreate(d *schema.ResourceData, meta inte InstanceLifecyclePolicy: expandInstanceLifecyclePolicy(d.Get("instance_lifecycle_policy").([]interface{})), AllInstancesConfig: expandAllInstancesConfig(nil, d.Get("all_instances_config").([]interface{})), StatefulPolicy: expandStatefulPolicy(d), + ResourcePolicies: expandResourcePolicies(d.Get("resource_policies").([]interface{})), {{- if ne $.TargetVersionName "ga" }} Params: expandInstanceGroupManagerParams(d), {{- end }} @@ -923,6 +946,9 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf if err = d.Set("status", flattenStatus(manager.Status)); err != nil { return fmt.Errorf("Error setting status in state: %s", err.Error()) } + if err = d.Set("resource_policies", flattenResourcePolicies(manager.ResourcePolicies)); err != nil { + return fmt.Errorf("Error setting resource_policies in state: %s", err.Error()) + } // If unset in state set to default value if d.Get("wait_for_instances_status").(string) == "" { @@ -1027,6 +1053,11 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte change = true } + if d.HasChange("resource_policies") { + updatedManager.ResourcePolicies = expandResourcePolicies(d.Get("resource_policies").([]interface{})) + change = true + } + if change { op, err := config.NewComputeClient(userAgent).InstanceGroupManagers.Patch(project, zone, d.Get("name").(string), updatedManager).Do() if err != nil { @@ -1293,6 +1324,20 @@ func 
expandVersions(configured []interface{}) []*compute.InstanceGroupManagerVer return versions } +func expandResourcePolicies(configured []interface{}) *compute.InstanceGroupManagerResourcePolicies { + resourcePolicies := &compute.InstanceGroupManagerResourcePolicies{} + + if len(configured) > 0 { + data := configured[0].(map[string]interface{}) + resourcePolicies.WorkloadPolicy = data["workload_policy"].(string) + resourcePolicies.ForceSendFields = []string{"WorkloadPolicy"} + } else { + resourcePolicies.NullFields = []string{"WorkloadPolicy"} + } + + return resourcePolicies +} + func expandFixedOrPercent(configured []interface{}) *compute.FixedOrPercent { fixedOrPercent := &compute.FixedOrPercent{} @@ -1634,6 +1679,17 @@ func flattenStatusAllInstancesConfig(allInstancesConfig *compute.InstanceGroupMa return results } +func flattenResourcePolicies(resourcePolicies *compute.InstanceGroupManagerResourcePolicies) []map[string]interface{} { + results := []map[string]interface{}{} + if resourcePolicies != nil { + data := map[string]interface{}{ + "workload_policy": resourcePolicies.WorkloadPolicy, + } + results = append(results, data) + } + return results +} + func resourceInstanceGroupManagerStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { if err := d.Set("wait_for_instances", false); err != nil { return nil, fmt.Errorf("Error setting wait_for_instances: %s", err) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl index ad8a8daa10d0..d292fa273c84 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl @@ -1,7 +1,9 @@ package compute_test import ( + "context" "fmt" + "regexp" "testing" 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -10,8 +12,55 @@ import ( {{- if ne $.TargetVersionName "ga" }} "github.com/hashicorp/terraform-provider-google/google/envvar" {{- end }} + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" ) +func TestForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty(t *testing.T) { + cases := map[string]struct { + Old, New interface{} + ExpectForceNew bool + }{ + "empty to empty": { + Old: "", + New: "", + ExpectForceNew: false, + }, + "empty to non-empty": { + Old: "", + New: "some-policy", + ExpectForceNew: false, + }, + "non-empty to non-empty": { + Old: "old-policy", + New: "new-policy", + ExpectForceNew: false, + }, + "non-empty to empty (should force new)": { + Old: "existing-policy", + New: "", + ExpectForceNew: true, + }, + "whitespace to empty (should force new)": { + Old: " ", + New: "", + ExpectForceNew: true, + }, + "policy name to empty (should force new)": { + Old: "projects/my-project/regions/us-central1/resourcePolicies/my-policy", + New: "", + ExpectForceNew: true, + }, + } + + for tn, tc := range cases { + result := tpgcompute.ForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty(context.Background(), tc.Old, tc.New, nil) + if result != tc.ExpectForceNew { + t.Errorf("bad: %s, %q => %q expect ForceNew to return %t but got %t", tn, tc.Old, tc.New, tc.ExpectForceNew, result) + } + } +} + + func TestAccInstanceGroupManager_basic(t *testing.T) { t.Parallel() @@ -472,6 +521,76 @@ func TestAccInstanceGroupManager_waitForStatus(t *testing.T) { }) } +func TestAccInstanceGroupManager_addingResourcePoliciesToMigWithNonZeroTargetSize_fails(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckInstanceGroupManagerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccInstanceGroupManager_withTargetSize(suffix), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-workload-policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"status"}, + }, + { + Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdateError(suffix), + ExpectError: regexp.MustCompile("Workload policy update is not allowed when the MIG has instances."), + }, + }, + }) +} + +func TestAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + workloadPolicyResourceName := "workload_policy" + workloadPolicyResourceUpdate := "workload_policy_2" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckInstanceGroupManagerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(suffix, workloadPolicyResourceName), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-workload-policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"status"}, + }, + { + Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(suffix, workloadPolicyResourceUpdate), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-workload-policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"status"}, + }, + { + Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate2(suffix), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-workload-policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"status"}, + }, + }, + }) +} + {{ if ne $.TargetVersionName `ga` -}} func TestAccInstanceGroupManager_resourceManagerTags(t *testing.T) { t.Parallel() @@ -1994,6 +2113,220 @@ 
resource "google_compute_per_instance_config" "per-instance" { `, template, target, igm, perInstanceConfig) } +func testAccInstanceGroupManager_withTargetSize(suffix string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "igm-basic" { + name = "igm-instance-template-%s" + machine_type = "a4-highgpu-8g" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + disk_type = "hyperdisk-balanced" + } + + network_interface { + network = "default" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } +} + +resource "google_compute_instance_group_manager" "igm-workload-policy" { + description = "Terraform test instance group manager" + name = "igm-basic-workload-policy-%s" + + version { + name = "prod" + instance_template = google_compute_instance_template.igm-basic.self_link + } + + base_instance_name = "tf-test-igm-no-tp" + zone = "us-central1-b" + target_size = 2 +} +`, suffix, suffix) +} + +func testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdateError(suffix string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_resource_policy" "workload_policy_error" { + name = "tf-test-gce-policy-%s" + region = "us-central1" + workload_policy { + type = "HIGH_THROUGHPUT" + } +} + +resource "google_compute_instance_template" "igm-basic" { + name = "igm-instance-template-%s" + machine_type = "a4-highgpu-8g" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + disk_type = "hyperdisk-balanced" + } + + network_interface { + network = "default" + } + + service_account { + scopes = ["userinfo-email", 
"compute-ro", "storage-ro"] + } +} + +resource "google_compute_instance_group_manager" "igm-workload-policy" { + description = "Terraform test instance group manager" + name = "igm-basic-workload-policy-%s" + + version { + name = "prod" + instance_template = google_compute_instance_template.igm-basic.self_link + } + + base_instance_name = "tf-test-igm-no-tp" + zone = "us-central1-b" + target_size = 2 + + resource_policies { + workload_policy = google_compute_resource_policy.workload_policy_error.self_link + } +} +`, suffix, suffix, suffix) +} + +func testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(suffix, workloadPolicy string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_resource_policy" "workload_policy" { + name = "tf-test-gce-policy-%s" + region = "us-central1" + workload_policy { + type = "HIGH_THROUGHPUT" + } +} + +resource "google_compute_resource_policy" "workload_policy_2" { + name = "tf-test-gce-policy-%s-2" + region = "us-central1" + workload_policy { + type = "HIGH_THROUGHPUT" + } +} + + +resource "google_compute_instance_template" "igm-basic" { + name = "igm-instance-template-%s" + machine_type = "a4-highgpu-8g" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + disk_type = "hyperdisk-balanced" + } + + network_interface { + network = "default" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } +} + +resource "google_compute_instance_group_manager" "igm-workload-policy" { + description = "Terraform test instance group manager" + name = "igm-basic-workload-policy-%s" + + version { + name = "prod" + instance_template = google_compute_instance_template.igm-basic.self_link + } + + base_instance_name = "tf-test-igm-no-tp" + zone = "us-central1-b" + target_size = 0 + + resource_policies { + 
workload_policy = google_compute_resource_policy.%s.self_link + } +} +`, suffix, suffix, suffix, suffix, workloadPolicy) +} + +func testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate2(suffix string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "igm-basic" { + name = "igm-instance-template-%s" + machine_type = "a4-highgpu-8g" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + disk_type = "hyperdisk-balanced" + } + + network_interface { + network = "default" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } +} + +resource "google_compute_instance_group_manager" "igm-workload-policy" { + description = "Terraform test instance group manager" + name = "igm-basic-workload-policy-%s" + + version { + name = "prod" + instance_template = google_compute_instance_template.igm-basic.self_link + } + + base_instance_name = "tf-test-igm-no-tp" + zone = "us-central1-b" + target_size = 0 +} +`, suffix, suffix) +} + + {{ if ne $.TargetVersionName `ga` -}} func testAccInstanceGroupManager_resourceManagerTags(template_name, tag_name, igm_name, project_id string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown index 004d9b05bf45..46577fa5b71a 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown @@ -113,6 +113,62 @@ resource "google_compute_instance_group_manager" "igm-sr" { } ``` +## Example Usage with resource policies (`google` provider) +```hcl +data "google_compute_image" "my_image" { + family 
= "debian-11" + project = "debian-cloud" +} + +resource "google_compute_resource_policy" "workload_policy" { + name = "tf-test-gce-policy" + region = "us-central1" + workload_policy { + type = "HIGH_THROUGHPUT" + } +} + +resource "google_compute_instance_template" "igm-basic" { + name = "igm-instance-template" + machine_type = "a4-highgpu-8g" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + disk_type = "hyperdisk-balanced" + } + + network_interface { + network = "default" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } +} + +resource "google_compute_instance_group_manager" "igm-workload-policy" { + description = "Terraform test instance group manager" + name = "igm-basic-workload-policy" + + version { + name = "prod" + instance_template = google_compute_instance_template.igm-basic.self_link + } + + base_instance_name = "tf-test-igm-no-tp" + zone = "us-central1-b" + target_size = 0 + + resource_policies { + workload_policy = google_compute_resource_policy.workload_policy.self_link + } +} +``` + ## Argument Reference The following arguments are supported: @@ -198,6 +254,8 @@ group. You can specify only one value. Structure is [documented below](#nested_a * `params` - (Optional [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Input only additional params for instance group manager creation. Structure is [documented below](#nested_params). For more information, see [API](https://cloud.google.com/compute/docs/reference/rest/beta/instanceGroupManagers/insert). +* `resource_policies` - (Optional) Resource policies for this managed instance group. Structure is [documented below](#nested_resource_policies). + - - - The `standby_policy` block supports: @@ -359,6 +417,12 @@ params{ * `resource_manager_tags` - (Optional) Resource manager tags to bind to the managed instance group. 
The tags are key-value pairs. Keys must be in the format tagKeys/123 and values in the format tagValues/456. For more information, see [Manage tags for resources](https://cloud.google.com/compute/docs/tag-resources) +- - - + +The `resource_policies` block supports: + +* `workload_policy` - (Optional) The URL of the workload policy that is specified for this managed instance group. It can be a full or partial URL. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are From c0648e2655b6baec9893eb3cde45a6189035c38b Mon Sep 17 00:00:00 2001 From: Veronika Herasymenko Date: Fri, 27 Jun 2025 01:45:08 +0200 Subject: [PATCH 444/884] Add resource_manager_tags support to Network api (#14119) --- mmv1/products/compute/Network.yaml | 15 ++++++ .../acctest/bootstrap_test_utils.go.tmpl | 31 +++++++----- .../resource_compute_network_test.go.tmpl | 48 +++++++++++++++++++ 3 files changed, 81 insertions(+), 13 deletions(-) diff --git a/mmv1/products/compute/Network.yaml b/mmv1/products/compute/Network.yaml index e94567ee10da..5a9a17bb44ef 100644 --- a/mmv1/products/compute/Network.yaml +++ b/mmv1/products/compute/Network.yaml @@ -245,3 +245,18 @@ properties: * https://www.googleapis.com/compute/v1/projects/{projectId}/global/networkProfiles/{network_profile_name} * projects/{projectId}/global/networkProfiles/{network_profile_name} diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the network. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. 
+ api_name: resourceManagerTags + ignore_read: true diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index e1bdc3feb674..8814ba24db4f 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -2063,16 +2063,16 @@ const sharedTagKeyParentErr = "Parent %q is not valid. Should be in format: 'org func BootstrapSharedTestProjectTagKey(t *testing.T, testId string, obj map[string]interface{}) string { pid := envvar.GetTestProjectFromEnv() - return bootstrapSharedTestTagKey(t, testId, "projects/"+pid, obj) + return BootstrapSharedTestTagKeyDetails(t, testId, "projects/"+pid, obj)["shared_tag_key"] } func BootstrapSharedTestOrganizationTagKey(t *testing.T, testId string, obj map[string]interface{}) string { org := envvar.GetTestOrgFromEnv(t) - return bootstrapSharedTestTagKey(t, testId, "organizations/"+org, obj) + return BootstrapSharedTestTagKeyDetails(t, testId, "organizations/"+org, obj)["shared_tag_key"] } // parent should be in format: {"organization" OR "projects"}/{id} -func bootstrapSharedTestTagKey(t *testing.T, testId, parent string, obj map[string]interface{}) string { +func BootstrapSharedTestTagKeyDetails(t *testing.T, testId string, parent string, obj map[string]interface{}) map[string]string { sharedTagKey := fmt.Sprintf("%s-%s", sharedTagKeyPrefix, testId) parentSplit := strings.Split(parent, "/") @@ -2086,7 +2086,7 @@ func bootstrapSharedTestTagKey(t *testing.T, testId, parent string, obj map[stri config := BootstrapConfig(t) if config == nil { - return "" + return make(map[string]string) } log.Printf("[DEBUG] Getting shared test tag key %q", sharedTagKey) @@ -2135,7 +2135,7 @@ func bootstrapSharedTestTagKey(t *testing.T, testId, parent string, obj map[stri } } - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + getTagKeyResponse, err := 
transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "GET", Project: config.Project, @@ -2148,29 +2148,32 @@ func bootstrapSharedTestTagKey(t *testing.T, testId, parent string, obj map[stri t.Fatalf("Error getting shared tag key %q: %s", sharedTagKey, err) } - return sharedTagKey + return map[string]string{ + "name": getTagKeyResponse["name"].(string), + "shared_tag_key": sharedTagKey, + } } const sharedTagValuePrefix = "tf-bootstrap-tagvalue" func BootstrapSharedTestProjectTagValue(t *testing.T, testId string, tagKey string) string { pid := envvar.GetTestProjectFromEnv() - return BootstrapSharedTestTagValue(t, testId, tagKey, pid) + return BootstrapSharedTestTagValueDetails(t, testId, tagKey, pid)["shared_tag_value"] } func BootstrapSharedTestOrganizationTagValue(t *testing.T, testId string, tagKey string) string { org := envvar.GetTestOrgFromEnv(t) - return BootstrapSharedTestTagValue(t, testId, tagKey, org) + return BootstrapSharedTestTagValueDetails(t, testId, tagKey, org)["shared_tag_value"] } -func BootstrapSharedTestTagValue(t *testing.T, testId string, tagKey, parentId string) string { +func BootstrapSharedTestTagValueDetails(t *testing.T, testId string, tagKey, parentId string) map[string]string { sharedTagValue := fmt.Sprintf("%s-%s", sharedTagValuePrefix, testId) tagKeyName := fmt.Sprintf("%s/%s", parentId, tagKey) tagValueName := fmt.Sprintf("%s/%s", tagKeyName, sharedTagValue) config := BootstrapConfig(t) if config == nil { - return "" + return make(map[string]string) } log.Printf("[DEBUG] Getting shared test tag value %q", sharedTagValue) @@ -2229,8 +2232,7 @@ func BootstrapSharedTestTagValue(t *testing.T, testId string, tagKey, parentId s t.Fatalf("Error waiting to create TagValue: %s", err) } } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + getTagValueResponse, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "GET", Project: config.Project, @@ 
-2243,7 +2245,10 @@ func BootstrapSharedTestTagValue(t *testing.T, testId string, tagKey, parentId s t.Fatalf("Error getting shared tag value %q: %s", sharedTagValue, err) } - return sharedTagValue + return map[string]string{ + "name": getTagValueResponse["name"].(string), + "shared_tag_value": sharedTagValue, + } } type BootstrapClient struct { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl index 736e11bcfc30..45779d6b14e4 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl @@ -509,6 +509,40 @@ func TestAccComputeNetwork_networkFirewallPolicyEnforcementOrderAndUpdate(t *tes }) } +func TestAccComputeNetwork_resourceManagerTags(t *testing.T) { + + t.Parallel() + + var network compute.Network + org := envvar.GetTestOrgFromEnv(t) + + suffixName := acctest.RandString(t, 10) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-networks-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-networks-tagvalue", sharedTagkey, org) + networkName := fmt.Sprintf("tf-test-network-resource-manager-tags-%s", suffixName) + context := map[string]interface{}{ + "network_name": networkName, + "tag_key_id": tagKeyResult["name"], + "tag_value_id": tagValueResult["name"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeNetworkDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetwork_resourceManagerTags(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeNetworkExists( + t, 
"google_compute_network.acc_network_with_resource_manager_tags", &network), + ), + }, + }, + }) +} + func testAccCheckComputeNetworkExists(t *testing.T, n string, network *compute.Network) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] @@ -838,3 +872,17 @@ resource "google_compute_network" "acc_network_firewall_policy_enforcement_order } `, networkName, order) } + +func testAccComputeNetwork_resourceManagerTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "acc_network_with_resource_manager_tags" { + name = "%{network_name}" + auto_create_subnetworks = false + params { + resource_manager_tags = { + "%{tag_key_id}" = "%{tag_value_id}" + } + } +} +`, context) +} \ No newline at end of file From e12b729eaa66265aeaebd1af7e029135d2312e86 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 26 Jun 2025 17:18:58 -0700 Subject: [PATCH 445/884] vacation (#14389) --- .ci/magician/github/membership_data.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 82a48c0753ed..bd30cc831126 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -81,7 +81,12 @@ var ( vacations: []Vacation{}, }, "melinath": { - vacations: []Vacation{}, + vacations: []Vacation{ + { + startDate: newDate(2025, 6, 26), + endDate: newDate(2025, 7, 7), + }, + }, }, "NickElliot": { vacations: []Vacation{}, From 4bba9147eeb72f7942b2d92bfb8c12177cd75f0f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 27 Jun 2025 08:01:42 -0700 Subject: [PATCH 446/884] Skip unspecified values in autogen generation of enum fields (#14390) --- mmv1/openapi_generate/parser.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/openapi_generate/parser.go b/mmv1/openapi_generate/parser.go index 2ff5339da761..c014dfe50ad4 100644 --- 
a/mmv1/openapi_generate/parser.go +++ b/mmv1/openapi_generate/parser.go @@ -357,6 +357,9 @@ func writeObject(name string, obj *openapi3.SchemaRef, objType openapi3.Types, u if len(obj.Value.Enum) > 0 { var enums []string for _, enum := range obj.Value.Enum { + if strings.HasSuffix(fmt.Sprintf("%v", enum), "_UNSPECIFIED") { + continue + } enums = append(enums, fmt.Sprintf("%v", enum)) } additionalDescription = fmt.Sprintf("\n Possible values:\n %s", strings.Join(enums, "\n")) From 524f17910271c89d09ef5e0a367356e2bfc0dcfd Mon Sep 17 00:00:00 2001 From: Anjali Soni Date: Fri, 27 Jun 2025 15:12:10 +0000 Subject: [PATCH 447/884] Remove validation from authConfigs in integration connector's connection resource. (#14392) --- .../integrationconnectors/Connection.yaml | 30 ------------------- 1 file changed, 30 deletions(-) diff --git a/mmv1/products/integrationconnectors/Connection.yaml b/mmv1/products/integrationconnectors/Connection.yaml index 8e97b7bed018..6b8efd70e2be 100644 --- a/mmv1/products/integrationconnectors/Connection.yaml +++ b/mmv1/products/integrationconnectors/Connection.yaml @@ -306,12 +306,6 @@ properties: type: NestedObject description: | User password for Authentication. - exactly_one_of: - - 'auth_config.0.user_password' - - 'auth_config.0.oauth2_jwt_bearer' - - 'auth_config.0.oauth2_client_credentials' - - 'auth_config.0.ssh_public_key' - - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'username' type: String @@ -333,12 +327,6 @@ properties: type: NestedObject description: | OAuth2 JWT Bearer for Authentication. - exactly_one_of: - - 'auth_config.0.user_password' - - 'auth_config.0.oauth2_jwt_bearer' - - 'auth_config.0.oauth2_client_credentials' - - 'auth_config.0.ssh_public_key' - - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'clientKey' type: NestedObject @@ -374,12 +362,6 @@ properties: type: NestedObject description: | OAuth3 Client Credentials for Authentication. 
- exactly_one_of: - - 'auth_config.0.user_password' - - 'auth_config.0.oauth2_jwt_bearer' - - 'auth_config.0.oauth2_client_credentials' - - 'auth_config.0.ssh_public_key' - - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'clientId' type: String @@ -401,12 +383,6 @@ properties: type: NestedObject description: | SSH Public Key for Authentication. - exactly_one_of: - - 'auth_config.0.user_password' - - 'auth_config.0.oauth2_jwt_bearer' - - 'auth_config.0.oauth2_client_credentials' - - 'auth_config.0.ssh_public_key' - - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'username' type: String @@ -443,12 +419,6 @@ properties: type: NestedObject description: | Parameters to support Oauth 2.0 Auth Code Grant Authentication. - exactly_one_of: - - 'auth_config.0.user_password' - - 'auth_config.0.oauth2_jwt_bearer' - - 'auth_config.0.oauth2_client_credentials' - - 'auth_config.0.ssh_public_key' - - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'clientId' type: String From 2a01bab760d35ee0f1fc93c679017c4d1b4beab5 Mon Sep 17 00:00:00 2001 From: Calvin Liu Date: Fri, 27 Jun 2025 08:20:51 -0700 Subject: [PATCH 448/884] Add support for `managed_server_ca` to Memorystore instance (#14352) --- mmv1/products/memorystore/Instance.yaml | 18 ++++ .../decoders/memorystore_instance.go.tmpl | 44 ++++++++++ .../resource_memorystore_instance_test.go | 82 +++++++++++++++++++ 3 files changed, 144 insertions(+) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 5236e08234d0..7cc491f6eaea 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -813,3 +813,21 @@ properties: type: String description: The KMS key used to encrypt the at-rest data of the cluster immutable: true + - name: 'managedServerCa' + type: NestedObject + output: true + description: Instance's Certificate Authority. 
This field will only be populated if instance's transit_encryption_mode is SERVER_AUTHENTICATION + properties: + - name: 'caCerts' + type: Array + output: true + description: The PEM encoded CA certificate chains for managed server authentication + item_type: + type: NestedObject + properties: + - name: 'certificates' + type: Array + output: true + description: The certificates that form the CA chain, from leaf to root order + item_type: + type: String diff --git a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl index fcc04d550eaa..ee96e78e4c3c 100644 --- a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl @@ -94,4 +94,48 @@ } + // Such custom code is necessary as the instance's certificate authority has to be retrieved via a dedicated + // getCertificateAuthority API. + // See https://cloud.google.com/memorystore/docs/valkey/reference/rest/v1/projects.locations.instances/getCertificateAuthority + // for details about this API. 
+ config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return nil, err + } + + // Only instances with SERVER_AUTHENTICATION mode have certificate authority set + if v, ok := res["transitEncryptionMode"].(string); ok && v=="SERVER_AUTHENTICATION" { + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}MemorystoreBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}region{{"}}"}}/instances/{{"{{"}}instance_id{{"}}"}}/certificateAuthority") + if err != nil { + return nil, err + } + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return nil, fmt.Errorf("Error fetching project for instance: %s", err) + } + + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + certificateAuthority, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return nil, fmt.Errorf("Error reading certificateAuthority: %s", err) + } + + res["managedServerCa"] = certificateAuthority["managedServerCa"] + } return res, nil \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go index d766851991e8..b664074aeded 100644 --- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go @@ -1453,3 +1453,85 @@ data "google_project" "project" { } `, params.name, params.replicaCount, params.shardCount, params.nodeType, params.deletionProtectionEnabled, params.engineVersion, strBuilder.String(), 
zoneDistributionConfigBlock, maintenancePolicyBlock, persistenceBlock, lifecycleBlock, secondaryInstanceBlock, params.name, params.name, params.name) } + +func TestAccMemorystoreInstance_memorystoreInstanceTlsEnabled(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckMemorystoreInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccMemorystoreInstance_memorystoreInstanceTlsEnabled(context), + Check: resource.TestCheckResourceAttrSet("google_memorystore_instance.instance-tls", "managed_server_ca.0.ca_certs.0.certificates.0"), + }, + { + ResourceName: "google_memorystore_instance.instance-tls", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"gcs_source", "instance_id", "labels", "location", "managed_backup_source", "terraform_labels"}, + }, + }, + }) +} + +func testAccMemorystoreInstance_memorystoreInstanceTlsEnabled(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-tls" { + instance_id = "tf-test-tls-instance%{random_suffix}" + shard_count = 1 + desired_auto_created_endpoints { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "us-central1" + deletion_protection_enabled = false + maintenance_policy { + weekly_maintenance_window { + day = "MONDAY" + start_time { + hours = 1 + minutes = 0 + seconds = 0 + nanos = 0 + } + } + } + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] + transit_encryption_mode = "SERVER_AUTHENTICATION" +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "us-central1" + 
service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "us-central1" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} From 55fedca9b81f39ac1648a597935129e8e50f7c02 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 27 Jun 2025 09:13:34 -0700 Subject: [PATCH 449/884] Update enrolled_teams.yml (#14399) --- tools/issue-labeler/labeler/enrolled_teams.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index 8966af1d63cf..c89eaa8cfd42 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -184,6 +184,8 @@ service/compute-instances: service/compute-interconnect: resources: - google_compute_interconnect.* + - google_compute_wire_group + - google_compute_cross_site_network service/compute-ipam: resources: - google_compute_address @@ -326,6 +328,7 @@ service/dataproc: - google_dataproc_job.* - google_dataproc_workflow_template - google_dataproc_batch + - google_dataproc_session_template service/dataprocgdc: resources: - google_dataproc_gdc_.* @@ -489,6 +492,9 @@ service/metastore: service/migrationcenter: resources: - google_migration_center_.* +service/modelarmor: + resources: + - google_model_armor_.* service/monitoring-alerting: resources: - google_monitoring_notification_channel From 2065f5ee76d0a55d8f0e23f36d90409b1cbf4e96 Mon Sep 17 00:00:00 2001 From: Deepraj K Pednekar Date: Fri, 
27 Jun 2025 16:15:28 +0000 Subject: [PATCH 450/884] Deprecated google_iap_client and google_iap_brand (#14345) Co-authored-by: Scott Suarez --- mmv1/products/iap/Brand.yaml | 2 ++ mmv1/products/iap/Client.yaml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/mmv1/products/iap/Brand.yaml b/mmv1/products/iap/Brand.yaml index 70631bc128c0..6f070c3b4b27 100644 --- a/mmv1/products/iap/Brand.yaml +++ b/mmv1/products/iap/Brand.yaml @@ -13,6 +13,8 @@ --- name: 'Brand' +deprecation_message: >- + after July 2025, the `google_iap_brand` Terraform resource will no longer function as intended due to the deprecation of the IAP OAuth Admin API description: | OAuth brand data. Only "Organization Internal" brands can be created programmatically via API. To convert it into an external brands diff --git a/mmv1/products/iap/Client.yaml b/mmv1/products/iap/Client.yaml index bb24408b25c5..41ae2acea2dd 100644 --- a/mmv1/products/iap/Client.yaml +++ b/mmv1/products/iap/Client.yaml @@ -14,6 +14,8 @@ --- name: 'Client' api_resource_type_kind: IdentityAwareProxyClient +deprecation_message: >- + After July 2025, the `google_iap_client` Terraform resource will no longer function as intended due to the deprecation of the IAP OAuth Admin API description: | Contains the data that describes an Identity Aware Proxy owned client. 
From 05f009add6d2e868febc1ec978f618e1ff367058 Mon Sep 17 00:00:00 2001 From: srichaitanyab Date: Sat, 28 Jun 2025 01:17:15 +0530 Subject: [PATCH 451/884] Adding Terraform support for Model Armor (#14349) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/modelarmor/Template.yaml | 264 ++++++++++++++++++ mmv1/products/modelarmor/product.yaml | 22 ++ ...r_template_multilanguage_detection.go.tmpl | 19 ++ .../modelarmor_template_basic.tf.tmpl | 12 + .../modelarmor_template_filter_config.tf.tmpl | 30 ++ .../modelarmor_template_label.tf.tmpl | 28 ++ ...elarmor_template_template_metadata.tf.tmpl | 26 ++ .../components/inputs/services_beta.kt | 5 + .../components/inputs/services_ga.kt | 5 + .../resource_model_armor_template_test.go | 207 ++++++++++++++ 10 files changed, 618 insertions(+) create mode 100644 mmv1/products/modelarmor/Template.yaml create mode 100644 mmv1/products/modelarmor/product.yaml create mode 100644 mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go diff --git a/mmv1/products/modelarmor/Template.yaml b/mmv1/products/modelarmor/Template.yaml new file mode 100644 index 000000000000..2bb004efba26 --- /dev/null +++ b/mmv1/products/modelarmor/Template.yaml @@ -0,0 +1,264 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Template +description: | + A `Template` is a resource of Model Armor that lets you configure how Model Armor screens prompts and responses. + It functions as sets of customized filters and thresholds for different safety and security confidence levels, allowing control over what content is flagged. +base_url: projects/{{project}}/locations/{{location}}/templates +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/templates/{{template_id}} +create_url: projects/{{project}}/locations/{{location}}/templates?templateId={{template_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/templates/{{template_id}} +import_format: + - projects/{{project}}/locations/{{location}}/templates/{{template_id}} +examples: + - name: 'modelarmor_template_basic' + primary_resource_id: 'template-basic' + test_vars_overrides: + templateId: '"modelarmor1"' + location: '"us-central1"' + - name: 'modelarmor_template_filter_config' + primary_resource_id: 'template-filter-config' + test_vars_overrides: + templateId: '"modelarmor2"' + location: '"us-central1"' + filter_config_rai_settings_rai_filters_0_filter_type: '"HATE_SPEECH"' + filter_config_rai_settings_rai_filters_0_confidence_level: '"HIGH"' + sdp_settings_config_type: '"basic_config"' + filter_config_sdp_settings_basic_config_filter_enforcement: '"ENABLED"' + filter_config_pi_and_jailbreak_filter_settings_filter_enforcement: '"ENABLED"' + filter_config_pi_and_jailbreak_filter_settings_confidence_level: '"MEDIUM_AND_ABOVE"' + 
filter_config_malicious_uri_filter_settings_filter_enforcement: '"ENABLED"' + template_metadata_multi_language_detection_enable_multi_language_detection: false + - name: 'modelarmor_template_template_metadata' + primary_resource_id: 'template-template-metadata' + test_vars_overrides: + templateId: '"modelarmor3"' + location: '"us-central1"' + filter_config_rai_settings_rai_filters_0_filter_type: '"HARASSMENT"' + filter_config_rai_settings_rai_filters_0_confidence_level: '"MEDIUM_AND_ABOVE"' + template_metadata_log_template_operations: true + template_metadata_log_sanitize_operations: false + template_metadata_multi_language_detection_enable_multi_language_detection: true + template_metadata_ignore_partial_invocation_failures: false + template_metadata_custom_llm_response_safety_error_message: '"This is a custom error message for LLM response"' + template_metadata_custom_prompt_safety_error_code: 400 + template_metadata_custom_prompt_safety_error_message: '"This is a custom error message for prompt"' + template_metadata_custom_llm_response_safety_error_code: 401 + template_metadata_enforcement_type: '"INSPECT_ONLY"' + - name: 'modelarmor_template_label' + primary_resource_id: 'template-label-advanced-config' + test_vars_overrides: + templateId: '"modelarmor4"' + location: '"us-central1"' + label_test_label: '"template-test-label"' + filter_config_rai_settings_rai_filters_0_filter_type: '"DANGEROUS"' + filter_config_rai_settings_rai_filters_0_confidence_level: '"MEDIUM_AND_ABOVE"' + sdp_settings_config_type: '"advanced_config"' + filter_config_sdp_settings_advanced_config_inspect_template: '"projects/llm-firewall-demo/locations/us-central1/inspectTemplates/t3"' + filter_config_sdp_settings_advanced_config_deidentify_template: '"projects/llm-firewall-demo/locations/us-central1/deidentifyTemplates/t2"' + filter_config_sdp_settings_basic_config_filter_enforcement: '"ENABLED"' + template_metadata_multi_language_detection_enable_multi_language_detection: false 
+autogen_status: VGVtcGxhdGU= +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: templateId + type: String + description: |- + Id of the requesting object + If auto-generating Id server-side, remove this field and + template_id from the method_signature of Create RPC + immutable: true + url_param_only: true + required: true +properties: + - name: name + type: String + description: Identifier. name of resource + output: true + - name: createTime + type: String + description: 'Create time stamp' + output: true + - name: updateTime + type: String + description: 'Update time stamp' + output: true + - name: labels + type: KeyValueLabels + description: Labels as key value pairs + - name: filterConfig + type: NestedObject + description: Filters configuration. + required: true + send_empty_value: true + allow_empty_object: true + properties: + - name: maliciousUriFilterSettings + type: NestedObject + description: Malicious URI filter settings. + properties: + - name: filterEnforcement + type: String + description: |- + Tells whether the Malicious URI filter is enabled or disabled. + Possible values: + ENABLED + DISABLED + - name: raiSettings + type: NestedObject + description: Responsible AI Filter settings. + properties: + - name: raiFilters + type: Array + description: List of Responsible AI filters enabled for template. + required: true + item_type: + type: NestedObject + properties: + - name: filterType + type: String + description: |- + Possible values: + SEXUALLY_EXPLICIT + HATE_SPEECH + HARASSMENT + DANGEROUS + required: true + - name: confidenceLevel + type: String + description: |- + Possible values: + LOW_AND_ABOVE + MEDIUM_AND_ABOVE + HIGH + - name: sdpSettings + type: NestedObject + description: Sensitive Data Protection settings. 
+ properties: + - name: advancedConfig + type: NestedObject + conflicts: [basicConfig] + description: Sensitive Data Protection Advanced configuration. + properties: + - name: inspectTemplate + type: String + description: |- + Sensitive Data Protection inspect template resource name + If only inspect template is provided (de-identify template not provided), + then Sensitive Data Protection InspectContent action is performed during + Sanitization. All Sensitive Data Protection findings identified during + inspection will be returned as SdpFinding in SdpInsepctionResult. + e.g:- + `projects/{project}/locations/{location}/inspectTemplates/{inspect_template}` + - name: deidentifyTemplate + type: String + description: |- + Optional Sensitive Data Protection Deidentify template resource name. + If provided then DeidentifyContent action is performed during Sanitization + using this template and inspect template. The De-identified data will + be returned in SdpDeidentifyResult. + Note that all info-types present in the deidentify template must be present + in inspect template. + e.g. + `projects/{project}/locations/{location}/deidentifyTemplates/{deidentify_template}` + - name: basicConfig + type: NestedObject + conflicts: [advancedConfig] + description: Sensitive Data Protection basic configuration. + properties: + - name: filterEnforcement + type: String + description: |- + Tells whether the Sensitive Data Protection basic config is enabled or + disabled. + Possible values: + ENABLED + DISABLED + - name: piAndJailbreakFilterSettings + type: NestedObject + description: Prompt injection and Jailbreak Filter settings. + properties: + - name: filterEnforcement + type: String + description: |- + Tells whether Prompt injection and Jailbreak filter is enabled or + disabled. 
+ Possible values: + ENABLED + DISABLED + - name: confidenceLevel + type: String + description: |- + Possible values: + LOW_AND_ABOVE + MEDIUM_AND_ABOVE + HIGH + - name: templateMetadata + type: NestedObject + description: Message describing TemplateMetadata + allow_empty_object: true + properties: + - name: logTemplateOperations + type: Boolean + description: If true, log template crud operations. + - name: logSanitizeOperations + type: Boolean + description: If true, log sanitize operations. + - name: multiLanguageDetection + type: NestedObject + description: Metadata to enable multi language detection via template. + custom_flatten: 'templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl' + properties: + - name: enableMultiLanguageDetection + type: Boolean + description: If true, multi language detection will be enabled. + required: true + send_empty_value: true + - name: ignorePartialInvocationFailures + type: Boolean + description: If true, partial detector failures should be ignored. + - name: customPromptSafetyErrorCode + type: Integer + description: |- + Indicates the custom error code set by the user to be returned to the end + user by the service extension if the prompt trips Model Armor filters. + - name: customPromptSafetyErrorMessage + type: String + description: |- + Indicates the custom error message set by the user to be returned to the + end user if the prompt trips Model Armor filters. + - name: customLlmResponseSafetyErrorCode + type: Integer + description: |- + Indicates the custom error code set by the user to be returned to the end + user if the LLM response trips Model Armor filters. + - name: customLlmResponseSafetyErrorMessage + type: String + description: |- + Indicates the custom error message set by the user to be returned to the + end user if the LLM response trips Model Armor filters. 
+ - name: enforcementType + type: String + description: |- + Possible values: + INSPECT_ONLY + INSPECT_AND_BLOCK diff --git a/mmv1/products/modelarmor/product.yaml b/mmv1/products/modelarmor/product.yaml new file mode 100644 index 000000000000..394654191cb6 --- /dev/null +++ b/mmv1/products/modelarmor/product.yaml @@ -0,0 +1,22 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: ModelArmor +display_name: Model Armor +scopes: + - https://www.googleapis.com/auth/cloud-platform +versions: + - name: ga + base_url: 'https://modelarmor.{{location}}.rep.googleapis.com/v1/' + cai_base_url: "https://modelarmor.googleapis.com/v1/" diff --git a/mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl b/mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl new file mode 100644 index 000000000000..85abb1979e7a --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl @@ -0,0 +1,19 @@ +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil // The whole multi_language_detection block is absent + } + original, ok := v.(map[string]interface{}) + if !ok { + return nil // Should not happen if API is consistent + } + // Populating the field even if the returned block is empty. 
+ transformed := make(map[string]interface{}) + + if val, ok := original["enableMultiLanguageDetection"]; ok { + transformed["enable_multi_language_detection"] = val + } else { + // Since the field is REQUIRED in the schema and the block exists, default to false if the key is missing from the API response. + transformed["enable_multi_language_detection"] = false + } + return []interface{}{transformed} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl new file mode 100644 index 000000000000..24e22a5df56d --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_model_armor_template" "template-basic" { + location = "{{.Vars.location}}" + template_id = "{{.Vars.templateId}}" + + filter_config { + + } + + template_metadata { + + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl new file mode 100644 index 000000000000..5e3940918a6f --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl @@ -0,0 +1,30 @@ +resource "google_model_armor_template" "template-filter-config" { + location = "{{.Vars.location}}" + template_id = "{{.Vars.templateId}}" + + filter_config { + rai_settings { + rai_filters { + filter_type = "{{.Vars.filter_config_rai_settings_rai_filters_0_filter_type}}" + confidence_level = "{{.Vars.filter_config_rai_settings_rai_filters_0_confidence_level}}" + } + } + sdp_settings { + basic_config { + filter_enforcement = "{{.Vars.filter_config_sdp_settings_basic_config_filter_enforcement}}" + } + } + pi_and_jailbreak_filter_settings { + filter_enforcement = "{{.Vars.filter_config_pi_and_jailbreak_filter_settings_filter_enforcement}}" + confidence_level = 
"{{.Vars.filter_config_pi_and_jailbreak_filter_settings_confidence_level}}" + } + malicious_uri_filter_settings { + filter_enforcement = "{{.Vars.filter_config_malicious_uri_filter_settings_filter_enforcement}}" + } + } + template_metadata { + multi_language_detection { + enable_multi_language_detection = {{.Vars.template_metadata_multi_language_detection_enable_multi_language_detection}} + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl new file mode 100644 index 000000000000..a826014251bb --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_model_armor_template" "template-label-advanced-config" { + location = "{{.Vars.location}}" + template_id = "{{.Vars.templateId}}" + + labels = { + "test-label" = "{{.Vars.label_test_label}}" + } + + filter_config { + rai_settings { + rai_filters { + filter_type = "{{.Vars.filter_config_rai_settings_rai_filters_0_filter_type}}" + confidence_level = "{{.Vars.filter_config_rai_settings_rai_filters_0_confidence_level}}" + } + } + sdp_settings { + advanced_config { + inspect_template = "{{.Vars.filter_config_sdp_settings_advanced_config_inspect_template}}" + deidentify_template = "{{.Vars.filter_config_sdp_settings_advanced_config_deidentify_template}}" + } + } + } + template_metadata { + multi_language_detection { + enable_multi_language_detection = {{.Vars.template_metadata_multi_language_detection_enable_multi_language_detection}} + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl new file mode 100644 index 000000000000..4942290448d6 --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl @@ -0,0 +1,26 @@ +resource 
"google_model_armor_template" "template-template-metadata" { + location = "{{.Vars.location}}" + template_id = "{{.Vars.templateId}}" + + filter_config { + rai_settings { + rai_filters { + filter_type = "{{.Vars.filter_config_rai_settings_rai_filters_0_filter_type}}" + confidence_level = "{{.Vars.filter_config_rai_settings_rai_filters_0_confidence_level}}" + } + } + } + template_metadata { + custom_llm_response_safety_error_message = "{{.Vars.template_metadata_custom_llm_response_safety_error_message}}" + log_sanitize_operations = {{.Vars.template_metadata_log_sanitize_operations}} + log_template_operations = {{.Vars.template_metadata_log_template_operations}} + multi_language_detection { + enable_multi_language_detection = {{.Vars.template_metadata_multi_language_detection_enable_multi_language_detection}} + } + ignore_partial_invocation_failures = {{.Vars.template_metadata_ignore_partial_invocation_failures}} + custom_prompt_safety_error_code = {{.Vars.template_metadata_custom_prompt_safety_error_code}} + custom_prompt_safety_error_message = "{{.Vars.template_metadata_custom_prompt_safety_error_message}}" + custom_llm_response_safety_error_code = {{.Vars.template_metadata_custom_llm_response_safety_error_code}} + enforcement_type = "{{.Vars.template_metadata_enforcement_type}}" + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index a3572562a7dc..940fabcf48c7 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -561,6 +561,11 @@ var ServicesListBeta = mapOf( "displayName" to "Mlengine", "path" to "./google-beta/services/mlengine" ), + "modelarmor" to mapOf( + "name" to "modelarmor", + "displayName" to "ModelArmor", + "path" to "./google-beta/services/modelarmor" + ), "monitoring" to mapOf( "name" to 
"monitoring", "displayName" to "Monitoring", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index c9a6fbf7ca8a..a499dfed57c3 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -556,6 +556,11 @@ var ServicesListGa = mapOf( "displayName" to "Mlengine", "path" to "./google/services/mlengine" ), + "modelarmor" to mapOf( + "name" to "modelarmor", + "displayName" to "ModelArmor", + "path" to "./googleservices/modelarmor" + ), "monitoring" to mapOf( "name" to "monitoring", "displayName" to "Monitoring", diff --git a/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go b/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go new file mode 100644 index 000000000000..f181162f363f --- /dev/null +++ b/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go @@ -0,0 +1,207 @@ +package modelarmor_test + +import ( + "bytes" + "fmt" + "testing" + "text/template" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +// Helper function to expand a template +func expandTemplate(tmplStr string, data map[string]interface{}) (string, error) { + tmpl, err := template.New("config").Parse(tmplStr) + if err != nil { + return "", err + } + var buf bytes.Buffer + err = tmpl.Execute(&buf, data) + if err != nil { + return "", err + } + return buf.String(), nil +} + +func TestAccModelArmorTemplate_basic(t *testing.T) { + t.Parallel() + + templateId := "modelarmor-test-basic-" + acctest.RandString(t, 10) + + basicContext := map[string]interface{}{ + "location": "us-central1", + "templateId": templateId, + } + + acctest.VcrTest(t, resource.TestCase{ 
+ PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckModelArmorTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: func() string { + cfg, err := testAccModelArmorTemplate_basic_config(basicContext) + if err != nil { + t.Fatalf("Failed to expand basic config template: %v", err) + } + return cfg + }(), + }, + { + ResourceName: "google_model_armor_template.template-basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccModelArmorTemplate_basic_config(context map[string]interface{}) (string, error) { + const basic_template = ` +resource "google_model_armor_template" "template-basic" { + location = "{{.location}}" + template_id = "{{.templateId}}" + filter_config { + + } + template_metadata { + + } +}` + return expandTemplate(basic_template, context) +} + +func TestAccModelArmorTemplate_update(t *testing.T) { + t.Parallel() + + templateId := fmt.Sprintf("modelarmor-test-update-%s", acctest.RandString(t, 5)) + + context := map[string]interface{}{ + "templateId": templateId, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckModelArmorTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccModelArmorTemplate_initial(context), + }, + { + ResourceName: "google_model_armor_template.test-resource", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "template_id", "terraform_labels"}, + }, + { + Config: testAccModelArmorTemplate_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_model_armor_template.test-resource", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_model_armor_template.test-resource", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "template_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccModelArmorTemplate_initial(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_model_armor_template" "test-resource" { + location = "us-central1" + template_id = "%{templateId}" + labels = { + "test-label" = "env-testing-initial" + } + filter_config { + rai_settings { + rai_filters { + filter_type = "HATE_SPEECH" + confidence_level = "MEDIUM_AND_ABOVE" + } + } + sdp_settings { + advanced_config { + inspect_template = "projects/llm-firewall-demo/locations/us-central1/inspectTemplates/t2" + deidentify_template = "projects/llm-firewall-demo/locations/us-central1/deidentifyTemplates/t3" + } + } + pi_and_jailbreak_filter_settings { + filter_enforcement = "ENABLED" + confidence_level = "HIGH" + } + malicious_uri_filter_settings { + filter_enforcement = "ENABLED" + } + } + template_metadata { + custom_llm_response_safety_error_message = "This is a custom error message for LLM response" + log_template_operations = true + log_sanitize_operations = true + multi_language_detection { + enable_multi_language_detection = true + } + ignore_partial_invocation_failures = true + custom_prompt_safety_error_code = 400 + custom_prompt_safety_error_message = "This is a custom error message for prompt" + custom_llm_response_safety_error_code = 401 + enforcement_type = "INSPECT_ONLY" + } + } + `, context) +} + +func testAccModelArmorTemplate_update(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_model_armor_template" "test-resource" { + location = "us-central1" + template_id = "%{templateId}" + labels = { + "test-label" = "env-testing-updated" + } + filter_config { + rai_settings { + rai_filters { + filter_type = "DANGEROUS" + confidence_level = "LOW_AND_ABOVE" + } + } + sdp_settings { + basic_config{ + filter_enforcement = "ENABLED" + } + } + 
pi_and_jailbreak_filter_settings { + filter_enforcement = "DISABLED" + confidence_level = "MEDIUM_AND_ABOVE" + } + malicious_uri_filter_settings { + filter_enforcement = "DISABLED" + } + } + template_metadata { + custom_llm_response_safety_error_message = "Updated LLM error message" + log_template_operations = false + log_sanitize_operations = false + multi_language_detection { + enable_multi_language_detection = false + } + ignore_partial_invocation_failures = false + custom_prompt_safety_error_code = 404 + custom_prompt_safety_error_message = "Updated prompt error message" + custom_llm_response_safety_error_code = 500 + enforcement_type = "INSPECT_AND_BLOCK" + } + } + `, context) +} From beb69cd2e067f1d7edb4bbbf4c82a3468e95c724 Mon Sep 17 00:00:00 2001 From: Justin Scofield <47263509+scawful@users.noreply.github.com> Date: Fri, 27 Jun 2025 21:48:59 +0000 Subject: [PATCH 452/884] Add AnalysisRule resource for contactcenterinsights (#13430) --- .../contactcenterinsights/AnalysisRule.yaml | 177 ++++++++++++++++++ ...enter_insights_analysis_rule_basic.tf.tmpl | 4 + ...center_insights_analysis_rule_full.tf.tmpl | 28 +++ ...ter_insights_analysis_rule_profile.tf.tmpl | 28 +++ ...tact_center_insights_analysis_rule_test.go | 138 ++++++++++++++ 5 files changed, 375 insertions(+) create mode 100644 mmv1/products/contactcenterinsights/AnalysisRule.yaml create mode 100644 mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_profile.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go diff --git a/mmv1/products/contactcenterinsights/AnalysisRule.yaml b/mmv1/products/contactcenterinsights/AnalysisRule.yaml new file mode 100644 index 000000000000..8b378d9add16 --- 
/dev/null +++ b/mmv1/products/contactcenterinsights/AnalysisRule.yaml @@ -0,0 +1,177 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: AnalysisRule +description: | + The CCAI Insights project wide analysis rule. + This rule will be applied to all conversations that match the filter defined in the rule. + For a conversation matches the filter, the annotators specified in the rule will be run. + If a conversation matches multiple rules, a union of all the annotators will be run. + One project can have multiple analysis rules. 
+references: + guides: + 'Configure analysis rules using the API': 'https://cloud.google.com/contact-center/insights/docs/analysis-rule' + api: 'https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations.analysisRules' +base_url: projects/{{project}}/locations/{{location}}/analysisRules +self_link: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} +create_url: projects/{{project}}/locations/{{location}}/analysisRules +delete_url: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} +update_verb: 'PATCH' +update_mask: true +id_format: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} +import_format: + - projects/{{project}}/locations/{{location}}/analysisRules/{{name}} +examples: + - name: 'contact_center_insights_analysis_rule_basic' + primary_resource_id: 'analysis_rule_basic' + - name: 'contact_center_insights_analysis_rule_full' + primary_resource_id: 'analysis_rule_full' + test_env_vars: + project_number: 'PROJECT_NUMBER' + - name: 'contact_center_insights_analysis_rule_profile' + primary_resource_id: 'analysis_rule_profile' + test_env_vars: + project_number: 'PROJECT_NUMBER' +parameters: + - name: location + type: String + description: Location of the resource. + immutable: true + url_param_only: true + required: true +properties: + - name: name + type: String + description: |- + The resource name of the analysis rule. Randomly generated by Insights. + output: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + - name: createTime + type: String + description: Output only. The time at which this analysis rule was created. + output: true + - name: updateTime + type: String + description: Output only. The most recent time at which this analysis rule was updated. + output: true + - name: displayName + type: String + description: Display Name of the analysis rule. 
+ - name: conversationFilter + type: String + description: |- + Filter for the conversations that should apply this analysis + rule. An empty filter means this analysis rule applies to all + conversations. + Refer to https://cloud.google.com/contact-center/insights/docs/filtering + for details. + - name: annotatorSelector + type: NestedObject + description: Selector of all available annotators and phrase matchers to run. + properties: + - name: runInterruptionAnnotator + type: Boolean + description: Whether to run the interruption annotator. + - name: phraseMatchers + type: Array + description: |- + The list of phrase matchers to run. If not provided, all active phrase + matchers will be used. If inactive phrase matchers are provided, they will + not be used. Phrase matchers will be run only if + run_phrase_matcher_annotator is set to true. Format: + projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher} + item_type: + type: String + - name: runEntityAnnotator + type: Boolean + description: Whether to run the entity annotator. + - name: issueModels + type: Array + description: |- + The issue model to run. If not provided, the most recently deployed topic + model will be used. The provided issue model will only be used for + inference if the issue model is deployed and if run_issue_model_annotator + is set to true. If more than one issue model is provided, only the first + provided issue model will be used for inference. + item_type: + type: String + - name: runQaAnnotator + type: Boolean + description: Whether to run the QA annotator. + - name: runSilenceAnnotator + type: Boolean + description: Whether to run the silence annotator. + - name: runPhraseMatcherAnnotator + type: Boolean + description: Whether to run the active phrase matcher annotator(s). + - name: runSentimentAnnotator + type: Boolean + description: Whether to run the sentiment annotator. + - name: runIntentAnnotator + type: Boolean + description: Whether to run the intent annotator. 
+ - name: runIssueModelAnnotator + type: Boolean + description: |- + Whether to run the issue model annotator. A model should have already been + deployed for this to take effect. + - name: runSummarizationAnnotator + type: Boolean + description: Whether to run the summarization annotator. + - name: summarizationConfig + type: NestedObject + description: Configuration for summarization. + properties: + - name: conversationProfile + type: String + description: |- + Resource name of the Dialogflow conversation profile. + Format: + projects/{project}/locations/{location}/conversationProfiles/{conversation_profile} + - name: summarizationModel + type: Enum + description: |- + Default summarization model to be used. + Possible values: + SUMMARIZATION_MODEL_UNSPECIFIED + BASELINE_MODEL + BASELINE_MODEL_V2_0 + enum_values: + - 'BASELINE_MODEL' + - 'BASELINE_MODEL_V2_0' + - name: qaConfig + type: NestedObject + description: Configuration for the QA feature. + properties: + - name: scorecardList + type: NestedObject + description: Container for a list of scorecards. + properties: + - name: qaScorecardRevisions + type: Array + description: List of QaScorecardRevisions. + item_type: + type: String + - name: analysisPercentage + type: Double + description: |- + Percentage of conversations that we should apply this analysis setting + automatically, between [0, 1]. For example, 0.1 means 10%. Conversations + are sampled in a determenestic way. The original runtime_percentage & + upload percentage will be replaced by defining filters on the conversation. + - name: active + type: Boolean + description: |- + If true, apply this rule to conversations. Otherwise, this rule is + inactive and saved as a draft. 
diff --git a/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_basic.tf.tmpl b/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_basic.tf.tmpl new file mode 100644 index 000000000000..00a65c16acc6 --- /dev/null +++ b/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_basic.tf.tmpl @@ -0,0 +1,4 @@ +resource "google_contact_center_insights_analysis_rule" "{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "analysis-rule-display-name" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_full.tf.tmpl b/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_full.tf.tmpl new file mode 100644 index 000000000000..9fc8717f9ab9 --- /dev/null +++ b/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_full.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_contact_center_insights_analysis_rule" "{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "analysis-rule-display-name" + conversation_filter = "agent_id = \"1\"" + annotator_selector { + run_interruption_annotator = false + issue_models = ["projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/issueModels/some_issue_model_id"] + phrase_matchers = ["projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/phraseMatchers/123"] + qa_config { + scorecard_list { + qa_scorecard_revisions = ["projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/qaScorecards/*/revisions/some_scorecard_revision_id"] + } + } + run_entity_annotator = false + run_intent_annotator = false + run_issue_model_annotator = false + run_phrase_matcher_annotator = false + run_qa_annotator = false + run_sentiment_annotator = false + run_silence_annotator = true + run_summarization_annotator = false + summarization_config { + summarization_model = "BASELINE_MODEL" + } + } + analysis_percentage = 0.5 + active = true +} \ 
No newline at end of file diff --git a/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_profile.tf.tmpl b/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_profile.tf.tmpl new file mode 100644 index 000000000000..5598ef184fd3 --- /dev/null +++ b/mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_profile.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_contact_center_insights_analysis_rule" "{{$.PrimaryResourceId}}" { + location = "us-central1" + display_name = "analysis-rule-display-name" + conversation_filter = "agent_id = \"1\"" + annotator_selector { + run_interruption_annotator = false + issue_models = ["projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/issueModels/some_issue_model_id"] + phrase_matchers = ["projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/phraseMatchers/123"] + qa_config { + scorecard_list { + qa_scorecard_revisions = ["projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/qaScorecards/*/revisions/some_scorecard_revision_id"] + } + } + run_entity_annotator = false + run_intent_annotator = false + run_issue_model_annotator = false + run_phrase_matcher_annotator = false + run_qa_annotator = false + run_sentiment_annotator = false + run_silence_annotator = true + run_summarization_annotator = false + summarization_config { + conversation_profile = "projects/{{index $.TestEnvVars "project_number"}}/locations/us-central1/conversationProfiles/some_conversation_profile" + } + } + analysis_percentage = 0.5 + active = true +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go b/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go new file mode 100644 index 000000000000..08d87a18f658 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go @@ -0,0 +1,138 @@ +package contactcenterinsights_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccContactCenterInsightsAnalysisRule_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project_number": envvar.GetTestProjectNumberFromEnv(), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccInsightsAnalysisRule(context), + }, + { + ResourceName: "google_contact_center_insights_analysis_rule.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContactCenterInsightsAnalysisRule_full(context), + }, + { + ResourceName: "google_contact_center_insights_analysis_rule.basic_analysis_rule", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"analysis_rule_id", "location"}, + }, + { + Config: testAccContactCenterInsightsAnalysisRule_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_contact_center_insights_analysis_rule.basic_analysis_rule", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_contact_center_insights_analysis_rule.basic_analysis_rule", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"analysis_rule_id", "location"}, + }, + }, + }) +} + +func testAccInsightsAnalysisRule(context map[string]interface{}) string { + return acctest.Nprintf(` + resource 
"google_contact_center_insights_analysis_rule" "default" { + display_name = "default-analysis-rule-display-name-%{random_suffix}" + location = "us-central1" + conversation_filter = "agent_id = \"1\"" + analysis_percentage = 0.5 + annotator_selector { + run_silence_annotator = true + } + active = true + } + `, context) +} + +func testAccContactCenterInsightsAnalysisRule_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_contact_center_insights_analysis_rule" "basic_analysis_rule" { + display_name = "analysis-rule-display-name-%{random_suffix}" + location = "us-central1" + conversation_filter = "agent_id = \"1\"" + annotator_selector { + run_interruption_annotator = false + issue_models = ["projects/%{project_number}/locations/us-central1/issueModels/some_issue_model_id"] + phrase_matchers = ["projects/%{project_number}/locations/us-central1/phraseMatchers/123"] + qa_config { + scorecard_list { + qa_scorecard_revisions = ["projects/%{project_number}/locations/us-central1/qaScorecards/*/revisions/some_scorecard_revision_id"] + } + } + run_entity_annotator = false + run_intent_annotator = false + run_issue_model_annotator = false + run_phrase_matcher_annotator = false + run_qa_annotator = false + run_sentiment_annotator = false + run_silence_annotator = true + run_summarization_annotator = false + summarization_config { + summarization_model = "BASELINE_MODEL" + } + } + analysis_percentage = 0.5 + active = true +} +`, context) +} + +func testAccContactCenterInsightsAnalysisRule_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_contact_center_insights_analysis_rule" "basic_analysis_rule" { + display_name = "analysis-rule-display-name-%{random_suffix}-updated" + location = "us-central1" + conversation_filter = "agent_id = \"1\"" + annotator_selector { + run_interruption_annotator = true + issue_models = ["projects/%{project_number}/locations/us-central1/issueModels/alt_issue_model_id"] + 
phrase_matchers = ["projects/%{project_number}/locations/us-central1/phraseMatchers/123"] + qa_config { + scorecard_list { + qa_scorecard_revisions = ["projects/%{project_number}/locations/us-central1/qaScorecards/*/revisions/alt_scorecard_revision_id"] + } + } + run_entity_annotator = true + run_intent_annotator = true + run_issue_model_annotator = false + run_phrase_matcher_annotator = true + run_qa_annotator = true + run_sentiment_annotator = true + run_silence_annotator = true + run_summarization_annotator = true + summarization_config { + summarization_model = "BASELINE_MODEL_V2_0" + } + } + analysis_percentage = 0.0 + active = false +} +`, context) +} From 8adc7b2623b87c8ebe270a7c2eda2efe683929bd Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Fri, 27 Jun 2025 15:23:21 -0700 Subject: [PATCH 453/884] Add datasource for google_redis_cluster (#14340) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../redis/data_source_redis_cluster.go | 48 +++++++++++++++++++ .../redis/data_source_redis_cluster_test.go | 46 ++++++++++++++++++ .../docs/d/redis_cluster.html.markdown | 38 +++++++++++++++ 4 files changed, 133 insertions(+) create mode 100644 mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go create mode 100644 mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 0f159c666b36..f4156d1e1017 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -270,6 +270,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_memorystore_instance": memorystore.DataSourceMemorystoreInstance(), "google_memcache_instance": 
memcache.DataSourceMemcacheInstance(), "google_redis_instance": redis.DataSourceGoogleRedisInstance(), + "google_redis_cluster": redis.DataSourceRedisCluster(), "google_vertex_ai_index": vertexai.DataSourceVertexAIIndex(), "google_vmwareengine_cluster": vmwareengine.DataSourceVmwareengineCluster(), "google_vmwareengine_external_access_rule": vmwareengine.DataSourceVmwareengineExternalAccessRule(), diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go b/mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go new file mode 100644 index 000000000000..2054bb8aacf7 --- /dev/null +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go @@ -0,0 +1,48 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 +package redis + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceRedisCluster() *schema.Resource { + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceRedisCluster().Schema) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + // Set 'Optional' schema elements + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project", "region") + + return &schema.Resource{ + Read: dataSourceRedisClusterRead, + Schema: dsSchema, + } +} + +func dataSourceRedisClusterRead(d *schema.ResourceData, meta interface{}) error { + id, err := tpgresource.ReplaceVars(d, meta.(*transport_tpg.Config), "projects/{{project}}/locations/{{region}}/clusters/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + err = resourceRedisClusterRead(d, meta) + if err != nil { + return err + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + 
+ if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go b/mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go new file mode 100644 index 000000000000..96a6843297a0 --- /dev/null +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go @@ -0,0 +1,46 @@ +package redis_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccRedisClusterDatasource(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccRedisClusterDatasourceConfig(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_redis_cluster.default", "google_redis_cluster.cluster"), + ), + }, + }, + }) +} + +func testAccRedisClusterDatasourceConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_redis_cluster" "cluster" { + name = "tf-test-redis-cluster-%{random_suffix}" + shard_count = 1 + region = "us-central1" + deletion_protection_enabled = false + +} + +data "google_redis_cluster" "default" { + name = google_redis_cluster.cluster.name + region = "us-central1" +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown new file mode 100644 index 000000000000..4c6a51769664 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown @@ -0,0 +1,38 @@ +--- +subcategory: "Memorystore (Redis)" +description: |- + Fetches the 
details of a Redis Cluster. +--- + +# google_redis_cluster + +Use this data source to get information about a Redis Cluster. For more details, see the [API documentation](https://cloud.google.com/memorystore/docs/cluster/reference/rest/v1/projects.locations.clusters). + +## Example Usage + +```hcl +data "google_redis_cluster" "default" { + name = "my-redis-cluster" + region = "us-central1" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - + (Required) + The name of the Redis cluster. + +* `region` - + (Required) + The region of the Redis cluster. + +* `project` - + (optional) + The ID of the project in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +See [google_redis_cluster](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/redis_cluster) resource for details of all the available attributes. From 73468d1d982c5dd60800e0f1d1de3831cbaca603 Mon Sep 17 00:00:00 2001 From: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Date: Mon, 30 Jun 2025 18:13:53 +0530 Subject: [PATCH 454/884] 22603: add update_strategy to google_compute_network_peering (#13848) Co-authored-by: Shrishty Chandra --- .../resource_compute_network_peering.go.tmpl | 63 ++++++++++++--- ...urce_compute_network_peering_test.go.tmpl} | 79 +++++++++++++++++++ .../r/compute_network_peering.html.markdown | 3 + 3 files changed, 134 insertions(+), 11 deletions(-) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_peering_test.go => resource_compute_network_peering_test.go.tmpl} (77%) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl index 5540d7f4fb4c..1afbaac3e4f0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl @@ -113,6 +113,16 @@ func ResourceComputeNetworkPeering() *schema.Resource { Description: `Which IP version(s) of traffic and routes are allowed to be imported or exported between peer networks. The default value is IPV4_ONLY. Possible values: ["IPV4_ONLY", "IPV4_IPV6"]`, Default: "IPV4_ONLY", }, + + {{ if ne $.TargetVersionName `ga` }} + "update_strategy": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: verify.ValidateEnum([]string{"INDEPENDENT", "CONSENSUS"}), + Description: `The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. Possible values: ["INDEPENDENT", "CONSENSUS"]`, + Default: "INDEPENDENT", + }, + {{- end }} }, UseJSONNumber: true, } @@ -212,6 +222,12 @@ func resourceComputeNetworkPeeringRead(d *schema.ResourceData, meta interface{}) if err := d.Set("stack_type", flattenNetworkPeeringStackType(peering.StackType, d, config)); err != nil { return fmt.Errorf("Error setting stack_type: %s", err) } + + {{ if ne $.TargetVersionName `ga` }} + if err := d.Set("update_strategy", flattenNetworkPeeringUpdateStrategy(peering.UpdateStrategy, d, config)); err != nil { + return fmt.Errorf("Error setting update_strategy: %s", err) + } + {{- end }} return nil } @@ -312,17 +328,33 @@ func findPeeringFromNetwork(network *compute.Network, peeringName string) *compu return nil } func expandNetworkPeering(d *schema.ResourceData) *compute.NetworkPeering { - return &compute.NetworkPeering{ - ExchangeSubnetRoutes: true, - Name: d.Get("name").(string), - Network: d.Get("peer_network").(string), - ExportCustomRoutes: d.Get("export_custom_routes").(bool), - ImportCustomRoutes: d.Get("import_custom_routes").(bool), - ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), - ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), - 
StackType: d.Get("stack_type").(string), - ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, - } + {{ if eq $.TargetVersionName `ga` }} + return &compute.NetworkPeering{ + ExchangeSubnetRoutes: true, + Name: d.Get("name").(string), + Network: d.Get("peer_network").(string), + ExportCustomRoutes: d.Get("export_custom_routes").(bool), + ImportCustomRoutes: d.Get("import_custom_routes").(bool), + ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), + ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), + StackType: d.Get("stack_type").(string), + ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, + } + {{- else }} + return &compute.NetworkPeering{ + ExchangeSubnetRoutes: true, + Name: d.Get("name").(string), + Network: d.Get("peer_network").(string), + ExportCustomRoutes: d.Get("export_custom_routes").(bool), + ImportCustomRoutes: d.Get("import_custom_routes").(bool), + ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), + ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), + StackType: d.Get("stack_type").(string), + UpdateStrategy: d.Get("update_strategy").(string), + ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, + } + {{- end }} + } func flattenNetworkPeeringStackType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { @@ -334,6 +366,15 @@ func flattenNetworkPeeringStackType(v interface{}, d *schema.ResourceData, confi return v } +func flattenNetworkPeeringUpdateStrategy(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + // To prevent the perma-diff caused by the absence of `update_strategy` in API responses for older resource. 
+ if v == nil || tpgresource.IsEmptyValue(reflect.ValueOf(v)) { + return "INDEPENDENT" + } + + return v +} + func sortedNetworkPeeringMutexKeys(networkName, peerNetworkName *tpgresource.GlobalFieldValue) []string { // Whether you delete the peering from network A to B or the one from B to A, they // cannot happen at the same time. diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl similarity index 77% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl index 928d3566a228..17d2a20537d9 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl @@ -141,6 +141,44 @@ func TestAccComputeNetworkPeering_stackType(t *testing.T) { } +{{ if ne $.TargetVersionName `ga` }} +func TestAccComputeNetworkPeering_updateStrategy(t *testing.T) { + t.Parallel() + + primaryNetworkName := fmt.Sprintf("tf-test-network-1-%d", acctest.RandInt(t)) + peeringNetworkName := fmt.Sprintf("tf-test-network-2-%d", acctest.RandInt(t)) + peeringName := fmt.Sprintf("tf-test-peering-%d", acctest.RandInt(t)) + importId := fmt.Sprintf("%s/%s/%s", envvar.GetTestProjectFromEnv(), primaryNetworkName, peeringName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComputeNetworkPeeringDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkPeering_updateStrategyDefault(primaryNetworkName, peeringNetworkName, peeringName), + }, + { + ResourceName: "google_compute_network_peering.foo", + ImportState: true, + ImportStateVerify: true, + 
ImportStateId: importId, + }, + { + Config: testAccComputeNetworkPeering_updateStrategyUpdate(primaryNetworkName, peeringNetworkName, peeringName), + }, + { + ResourceName: "google_compute_network_peering.foo", + ImportState: true, + ImportStateVerify: true, + ImportStateId: importId, + }, + }, + }) + +} +{{- end }} + func testAccComputeNetworkPeeringDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -277,3 +315,44 @@ resource "google_compute_network_peering" "foo" { } `, primaryNetworkName, peeringNetworkName, peeringName) } + +func testAccComputeNetworkPeering_updateStrategyDefault(primaryNetworkName, peeringNetworkName, peeringName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "network1" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network2" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_network_peering" "foo" { + name = "%s" + network = google_compute_network.network1.self_link + peer_network = google_compute_network.network2.self_link +} +`, primaryNetworkName, peeringNetworkName, peeringName) +} + +func testAccComputeNetworkPeering_updateStrategyUpdate(primaryNetworkName, peeringNetworkName, peeringName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "network1" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network2" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_network_peering" "foo" { + name = "%s" + network = google_compute_network.network1.self_link + peer_network = google_compute_network.network2.self_link + update_strategy = "CONSENSUS" +} +`, primaryNetworkName, peeringNetworkName, peeringName) +} diff --git a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown 
b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown index 73a3014c7e6f..410448fb8766 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown @@ -68,6 +68,9 @@ Whether subnet routes with public IP range are imported. The default value is fa * `stack_type` - (Optional) Which IP version(s) of traffic and routes are allowed to be imported or exported between peer networks. The default value is IPV4_ONLY. Possible values: ["IPV4_ONLY", "IPV4_IPV6"]. +* `update_strategy` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) +The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. Possible values: ["INDEPENDENT", "CONSENSUS"] + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are From ca3661f6bb55067346f1eb402cf4f08e263393ce Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 30 Jun 2025 07:35:35 -0700 Subject: [PATCH 455/884] tgc-revival: support google_compute_region_autoscaler (#14383) --- mmv1/api/resource.go | 12 ++++++ mmv1/products/compute/Autoscaler.yaml | 3 -- mmv1/products/compute/RegionAutoscaler.yaml | 1 + mmv1/provider/template_data.go | 1 + mmv1/provider/terraform_tgc_next.go | 40 ++++++++++++++++--- .../cai2hcl/resource_converter.go.tmpl | 18 +++++++-- .../cai2hcl/resource_converters.go.tmpl | 20 ++++++---- .../custom_expand/compute_full_url.go.tmpl | 25 ------------ .../compute_auto_scaler.go.tmpl | 3 -- ...xpand_resourceref_with_validation.go.tmpl} | 7 ++-- .../tfplan2cai/resource_converter.go.tmpl | 10 +++-- .../cai2hcl/converters/convert_resource.go | 23 ++++++++++- 12 files changed, 107 insertions(+), 56 deletions(-) delete mode 100644 mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl delete mode 100644 
mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl rename mmv1/templates/tgc_next/{custom_expand/compute_auto_scaler_zone.go.tmpl => tfplan2cai/expand_resourceref_with_validation.go.tmpl} (86%) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index fbd229866694..3af4f4067e6f 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -2007,3 +2007,15 @@ func (r Resource) ReadPropertiesForTgc() []*Type { return v.Output }) } + +// The API resource type of the resource. Normally, it is the resource name. +// Rarely, it is the API "resource type kind". +// For example, the API resource type of "google_compute_autoscaler" is "ComputeAutoscalerAssetType". +// The API resource type of "google_compute_region_autoscaler" is also "ComputeAutoscalerAssetType". +func (r Resource) ApiResourceType() string { + if r.ApiResourceTypeKind != "" { + return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.ApiResourceTypeKind) + } + + return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.Name) +} diff --git a/mmv1/products/compute/Autoscaler.yaml b/mmv1/products/compute/Autoscaler.yaml index de86488d98d7..39a7aded9fa8 100644 --- a/mmv1/products/compute/Autoscaler.yaml +++ b/mmv1/products/compute/Autoscaler.yaml @@ -84,8 +84,6 @@ parameters: ignore_read: true default_from_api: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' - custom_tgc_flatten: 'templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl' - custom_tgc_expand: 'templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl' resource: 'Zone' imports: 'name' properties: @@ -426,6 +424,5 @@ properties: URL of the managed instance group that this autoscaler will scale. 
required: true custom_expand: 'templates/terraform/custom_expand/compute_full_url.tmpl' - custom_tgc_expand: 'templates/tgc_next/custom_expand/compute_full_url.go.tmpl' resource: 'InstanceGroupManager' imports: 'selfLink' diff --git a/mmv1/products/compute/RegionAutoscaler.yaml b/mmv1/products/compute/RegionAutoscaler.yaml index c84ac30c0341..b475020433d7 100644 --- a/mmv1/products/compute/RegionAutoscaler.yaml +++ b/mmv1/products/compute/RegionAutoscaler.yaml @@ -43,6 +43,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'region_autoscaler_basic' diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 208790894be0..fd05e0aa0042 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -199,6 +199,7 @@ func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, r "templates/terraform/schema_subresource.go.tmpl", "templates/terraform/flatten_property_method.go.tmpl", "templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl", + "templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) } diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 3dbf1f8384eb..ef2fe225cbb6 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -39,6 +39,10 @@ type TerraformGoogleConversionNext struct { ResourcesForVersion []ResourceIdentifier + // Multiple Terraform resources can share the same API resource type. 
+ // For example, "google_compute_region_autoscaler" and "google_region_autoscaler" + ResourcesGroupedByApiResourceType map[string][]ResourceIdentifier + TargetVersionName string Version product.Version @@ -52,14 +56,16 @@ type ResourceIdentifier struct { ServiceName string TerraformName string ResourceName string + AliasName string // It can be "Default" or the same with ResourceName } func NewTerraformGoogleConversionNext(product *api.Product, versionName string, startTime time.Time) TerraformGoogleConversionNext { t := TerraformGoogleConversionNext{ - Product: product, - TargetVersionName: versionName, - Version: *product.VersionObjOrClosest(versionName), - StartTime: startTime, + Product: product, + TargetVersionName: versionName, + Version: *product.VersionObjOrClosest(versionName), + StartTime: startTime, + ResourcesGroupedByApiResourceType: make(map[string][]ResourceIdentifier), } t.Product.SetPropertiesBasedOnVersion(&t.Version) @@ -309,6 +315,8 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target // The variable resources_for_version is used to generate resources in file // mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl func (tgc *TerraformGoogleConversionNext) generateResourcesForVersion(products []*api.Product) { + resourcesGroupedByApiResourceType := make(map[string][]ResourceIdentifier) + for _, productDefinition := range products { service := strings.ToLower(productDefinition.Name) for _, object := range productDefinition.Objects { @@ -322,11 +330,31 @@ func (tgc *TerraformGoogleConversionNext) generateResourcesForVersion(products [ tgc.ResourceCount++ - tgc.ResourcesForVersion = append(tgc.ResourcesForVersion, ResourceIdentifier{ + resourceIdentifier := ResourceIdentifier{ ServiceName: service, TerraformName: object.TerraformName(), ResourceName: object.ResourceName(), - }) + AliasName: object.ResourceName(), + } + tgc.ResourcesForVersion = append(tgc.ResourcesForVersion, resourceIdentifier) + + 
apiResourceType := fmt.Sprintf("%s.%s", service, object.ApiResourceType()) + if _, ok := resourcesGroupedByApiResourceType[apiResourceType]; !ok { + resourcesGroupedByApiResourceType[apiResourceType] = make([]ResourceIdentifier, 0) + } + resourcesGroupedByApiResourceType[apiResourceType] = append(resourcesGroupedByApiResourceType[apiResourceType], resourceIdentifier) + } + } + + for apiResourceType, resources := range resourcesGroupedByApiResourceType { + // If no other Terraform resources share the API resource type, override the alias name as "Default" + if len(resources) == 1 { + for _, resourceIdentifier := range resources { + resourceIdentifier.AliasName = "Default" + tgc.ResourcesGroupedByApiResourceType[apiResourceType] = []ResourceIdentifier{resourceIdentifier} + } + } else { + tgc.ResourcesGroupedByApiResourceType[apiResourceType] = resources } } } diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index f104b9ffeeb7..0ace34134629 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -49,7 +49,10 @@ import ( {{template "SchemaSubResource" $prop}} {{- end}} -const {{ $.ResourceName -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +{{- if not $.ApiResourceTypeKind }} +const {{ $.ApiResourceType -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +{{- end }} + const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" type {{ $.ResourceName -}}Converter struct { @@ -127,12 +130,19 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse } {{- range $prop := $.ReadPropertiesForTgc }} - {{ if $prop.IsA "KeyValueLabels" }} + {{ if $prop.CustomTgcFlatten }} + {{- $prop.CustomTemplate $prop.CustomTgcFlatten false -}} + {{ else if $prop.IsA "KeyValueLabels" }} func 
flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return utils.RemoveTerraformAttributionLabel(v) } - {{ else if $prop.CustomTgcFlatten }} - {{- $prop.CustomTemplate $prop.CustomTgcFlatten false -}} + {{ else if or (and (eq $prop.Name "zone") $.HasZone) (and (eq $prop.Name "region") $.HasRegion) -}} +func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return tpgresource.GetResourceNameFromSelfLink(v.(string)) +} {{ else }} {{ template "flattenPropertyMethod" $prop -}} {{- end }} diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index bfbcb0b80abf..6a42cd417c55 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -41,15 +41,21 @@ import ( var provider *schema.Provider = tpg_provider.Provider() // ConverterMap is a collection of converters instances, indexed by cai asset type. 
-var ConverterMap = map[string]models.Converter{ +var ConverterMap = map[string]map[string]models.Converter{ // ####### START handwritten resources ########### - resourcemanager.ProjectAssetType: resourcemanager.NewProjectConverter(provider), - compute.ComputeInstanceAssetType: compute.NewComputeInstanceConverter(provider), + resourcemanager.ProjectAssetType: { + "Default": resourcemanager.NewProjectConverter(provider), + }, + compute.ComputeInstanceAssetType: { + "Default": compute.NewComputeInstanceConverter(provider), + }, // ####### END handwritten resources ########### - {{- range $object := $.ResourcesForVersion }} - {{- if $object.ResourceName }} - {{ $object.ServiceName }}.{{ $object.ResourceName }}AssetType: {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Converter(provider), - {{- end }} + {{- range $resourceType, $resources := $.ResourcesGroupedByApiResourceType }} + {{ $resourceType }}AssetType: { + {{- range $object := $resources }} + "{{ $object.AliasName }}": {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Converter(provider), + {{- end }} + }, {{- end }} } diff --git a/mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl b/mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl deleted file mode 100644 index 428878caa29b..000000000000 --- a/mmv1/templates/tgc_next/custom_expand/compute_full_url.go.tmpl +++ /dev/null @@ -1,25 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. -*/ -}} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - if v == nil || v.(string) == "" { - return "", nil - } - f, err := {{ template "expandResourceRef" dict "VarName" "v.(string)" "ResourceRef" $.ResourceRef "ResourceType" $.ResourceType}} - if err != nil { - return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) - } - - url := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) - - return url, nil -} diff --git a/mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl deleted file mode 100644 index cec3af0e9ccb..000000000000 --- a/mmv1/templates/tgc_next/custom_flatten/compute_auto_scaler.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl similarity index 86% rename from mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl rename to mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl index 49c55d8245ae..11f824f203cf 100644 --- a/mmv1/templates/tgc_next/custom_expand/compute_auto_scaler_zone.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl @@ -10,13 +10,14 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} +{{- define "expandResourcerefWithValidation" }} func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { f, err := tpgresource.ParseProjectFieldValue("{{$.ResourceType}}", v.(string), "project", d, config, true) if err != nil { return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) } - - url := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) - return url, nil + fullUrl := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) + return fullUrl, nil } +{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 93f5ff14600b..4e0f3ec528d7 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -50,7 +50,9 @@ import ( {{- $.CustomTemplate $.CustomCode.Constants true -}} {{- end}} -const {{ $.ResourceName -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +{{- if not $.ApiResourceTypeKind }} +const {{ $.ApiResourceType -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +{{- end }} func Resource{{ $.ResourceName -}}() *schema.Resource { return &schema.Resource{ @@ -102,11 +104,11 @@ func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, conf location, _ := tpgresource.GetLocation(d, config) return []caiasset.Asset{{"{{"}} Name: name, - Type: {{ $.ResourceName -}}AssetType, + Type: {{ $.ApiResourceType -}}AssetType, Resource: &caiasset.AssetResource{ Version: "{{ $apiVersion }}", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", - DiscoveryName: "{{ $.Name }}", + DiscoveryName: "{{ or $.ApiResourceTypeKind $.Name }}", Data: obj, Location: location, }, @@ -166,6 +168,8 @@ func 
resource{{ $.ResourceName -}}TgcEncoder(d tpgresource.TerraformResourceData {{- $prop.CustomTemplate $prop.CustomTgcExpand false -}} {{ else if and ($prop.IsA "Array") ($prop.ItemType.IsA "ResourceRef")}} {{- template "expandArrayResourcerefWithValidation" $prop -}} + {{ else if ($prop.IsA "ResourceRef") }} + {{- template "expandResourcerefWithValidation" $prop -}} {{ else }} {{- template "expandPropertyMethod" $prop -}} {{ end }} diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go index 3d79dc9fb385..7e18280a6d5a 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go @@ -1,14 +1,33 @@ package converters import ( + "strings" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" ) func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { - converter, ok := ConverterMap[asset.Type] - if !ok { + converters, ok := ConverterMap[asset.Type] + if !ok || len(converters) == 0 { return nil, nil } + + var converter models.Converter + // Normally, one asset type has only one converter. 
+ if len(converters) == 1 { + for _, converter = range converters { + return converter.Convert(asset) + } + } + + // Edge cases + if asset.Type == "compute.googleapis.com/Autoscaler" { + if strings.Contains(asset.Name, "/zones/") { + converter = ConverterMap[asset.Type]["ComputeAutoscaler"] + } else { + converter = ConverterMap[asset.Type]["ComputeRegionAutoscaler"] + } + } return converter.Convert(asset) } From 5a4adeaf354a2d9d21fa63a214a487daff7c9a9d Mon Sep 17 00:00:00 2001 From: Ryo Ueda <56352572+uedar@users.noreply.github.com> Date: Tue, 1 Jul 2025 02:14:46 +0900 Subject: [PATCH 456/884] Add suspended field to dataplex data scan rules (#14167) --- mmv1/products/dataplex/Datascan.yaml | 5 +++++ .../services/dataplex/resource_dataplex_datascan_test.go | 1 + 2 files changed, 6 insertions(+) diff --git a/mmv1/products/dataplex/Datascan.yaml b/mmv1/products/dataplex/Datascan.yaml index 00f3c8ace7ed..0875890a9c99 100644 --- a/mmv1/products/dataplex/Datascan.yaml +++ b/mmv1/products/dataplex/Datascan.yaml @@ -365,6 +365,11 @@ properties: The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter. + - name: 'suspended' + type: Boolean + description: | + Whether the Rule is active or suspended. Default = false. 
+ default_value: false - name: 'description' type: String description: | diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go index d76a6dbca40c..d42015c291c3 100644 --- a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go +++ b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go @@ -222,6 +222,7 @@ resource "google_dataplex_datascan" "full_quality" { strict_min_enabled = true strict_max_enabled = false } + suspended = true } } From d03b400dde6ac421779112b9647faa7caf4dfc2d Mon Sep 17 00:00:00 2001 From: Harshita1603 <45848384+Harshita1603@users.noreply.github.com> Date: Mon, 30 Jun 2025 22:55:21 +0530 Subject: [PATCH 457/884] Main (#14393) --- .../redis/resource_redis_instance_test.go | 54 ------------------- 1 file changed, 54 deletions(-) diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go b/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go index 43f53e4ce0e2..b0ef69bf5be0 100644 --- a/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go +++ b/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go @@ -255,40 +255,6 @@ func TestAccRedisInstance_redisInstanceAuthEnabled(t *testing.T) { }) } -func TestAccRedisInstance_selfServiceUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckRedisInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccRedisInstance_selfServiceUpdate20240411_00_00(context), - }, - { - ResourceName: "google_redis_instance.cache", - ImportState: true, - ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"region"}, - }, - { - Config: testAccRedisInstance_selfServiceUpdate20240503_00_00(context), - }, - { - ResourceName: "google_redis_instance.cache", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region"}, - }, - }, - }) -} - func TestAccRedisInstance_downgradeRedisVersion(t *testing.T) { t.Parallel() @@ -408,26 +374,6 @@ resource "google_redis_instance" "cache" { `, context) } -func testAccRedisInstance_selfServiceUpdate20240411_00_00(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_redis_instance" "cache" { - name = "tf-test-memory-cache%{random_suffix}" - memory_size_gb = 1 - maintenance_version = "20240411_00_00" -} -`, context) -} - -func testAccRedisInstance_selfServiceUpdate20240503_00_00(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_redis_instance" "cache" { - name = "tf-test-memory-cache%{random_suffix}" - memory_size_gb = 1 - maintenance_version = "20240503_00_00" -} -`, context) -} - func testAccRedisInstance_redis5(name string) string { return fmt.Sprintf(` resource "google_redis_instance" "test" { From dc3394c8b038c85baad8dba6e3e16dfc6fce9686 Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Mon, 30 Jun 2025 19:21:09 +0000 Subject: [PATCH 458/884] Fixes failing test: TestAccStorageBucketIamPolicy_destroy (#14404) --- .../services/storage/iam_storage_bucket_test.go | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go index 26299b8cc6c0..25017a6ffdf2 100644 --- a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go @@ -330,12 +330,16 @@ func TestAccStorageBucket_iamPolicyGeneratedWithCondition(t 
*testing.T) { func TestAccStorageBucketIamPolicy_destroy(t *testing.T) { t.Parallel() + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccStorageBucketIamPolicy_destroy(), + Config: testAccStorageBucketIamPolicy_destroy(context), }, }, }) @@ -601,14 +605,14 @@ resource "google_storage_bucket_iam_policy" "foo" { `, context) } -func testAccStorageBucketIamPolicy_destroy() string { - return fmt.Sprintf(` +func testAccStorageBucketIamPolicy_destroy(context map[string]interface{}) string { + return acctest.Nprintf(` resource "google_service_account" "accessor" { account_id = "pub-sub-test-service-account" } resource "google_storage_bucket" "test_bucket" { - name = "sd-pubsub-test-bucket" + name = "tf-test-my-bucket%{random_suffix}" location = "US" storage_class = "STANDARD" @@ -658,5 +662,5 @@ resource "google_pubsub_topic_iam_policy" "topic_policy" { topic = google_pubsub_topic.topic.name policy_data = data.google_iam_policy.topic_policy_data.policy_data } -`) +`, context) } From 04ed9db5716febe83855c3383bb340b9f41e70ed Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Mon, 30 Jun 2025 14:06:33 -0700 Subject: [PATCH 459/884] Add API references for gkeonprem resources (#14415) --- mmv1/products/gkeonprem/BareMetalAdminCluster.yaml | 2 ++ mmv1/products/gkeonprem/BareMetalCluster.yaml | 2 ++ mmv1/products/gkeonprem/BareMetalNodePool.yaml | 2 ++ mmv1/products/gkeonprem/VmwareAdminCluster.yaml | 4 +++- mmv1/products/gkeonprem/VmwareCluster.yaml | 2 ++ mmv1/products/gkeonprem/VmwareNodePool.yaml | 2 ++ 6 files changed, 13 insertions(+), 1 deletion(-) diff --git a/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml b/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml index 517a96b4b48c..a4e19e2f0564 100644 --- 
a/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml +++ b/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml @@ -14,6 +14,8 @@ --- name: 'BareMetalAdminCluster' description: "A Google Bare Metal Admin Cluster." +references: + api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.bareMetalAdminClusters' docs: id_format: 'projects/{{project}}/locations/{{location}}/bareMetalAdminClusters/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/bareMetalAdminClusters' diff --git a/mmv1/products/gkeonprem/BareMetalCluster.yaml b/mmv1/products/gkeonprem/BareMetalCluster.yaml index ba188bc3a769..79d4d3fac1ba 100644 --- a/mmv1/products/gkeonprem/BareMetalCluster.yaml +++ b/mmv1/products/gkeonprem/BareMetalCluster.yaml @@ -14,6 +14,8 @@ --- name: 'BareMetalCluster' description: "A Google Bare Metal User Cluster." +references: + api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.bareMetalClusters' docs: id_format: 'projects/{{project}}/locations/{{location}}/bareMetalClusters/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/bareMetalClusters' diff --git a/mmv1/products/gkeonprem/BareMetalNodePool.yaml b/mmv1/products/gkeonprem/BareMetalNodePool.yaml index 1b0f9d92d8a4..5e5cbe38ef4a 100644 --- a/mmv1/products/gkeonprem/BareMetalNodePool.yaml +++ b/mmv1/products/gkeonprem/BareMetalNodePool.yaml @@ -14,6 +14,8 @@ --- name: 'BareMetalNodePool' description: 'A Google Bare Metal Node Pool.' 
+references: + api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.bareMetalClusters.bareMetalNodePools' docs: id_format: 'projects/{{project}}/locations/{{location}}/bareMetalClusters/{{bare_metal_cluster}}/bareMetalNodePools/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/bareMetalClusters/{{bare_metal_cluster}}/bareMetalNodePools' diff --git a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml index 0963b1011da1..a7e7c45413f2 100644 --- a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml @@ -13,6 +13,9 @@ --- name: "VmwareAdminCluster" +description: "A Google VMware Admin Cluster." +references: + api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareAdminClusters' min_version: beta base_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters" create_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters?vmware_admin_cluster_id={{name}}" @@ -20,7 +23,6 @@ update_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{n self_link: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}" update_verb: "PATCH" update_mask: true -description: "A Google VMware Admin Cluster." exclude_delete: true id_format: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}" import_format: ["projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}"] diff --git a/mmv1/products/gkeonprem/VmwareCluster.yaml b/mmv1/products/gkeonprem/VmwareCluster.yaml index 65117f190f89..2be6a7d95568 100644 --- a/mmv1/products/gkeonprem/VmwareCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareCluster.yaml @@ -14,6 +14,8 @@ --- name: 'VmwareCluster' description: 'A Google VMware User Cluster.' 
+references: + api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareClusters' docs: id_format: 'projects/{{project}}/locations/{{location}}/vmwareClusters/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/vmwareClusters' diff --git a/mmv1/products/gkeonprem/VmwareNodePool.yaml b/mmv1/products/gkeonprem/VmwareNodePool.yaml index 6207952441b8..05f84052babd 100644 --- a/mmv1/products/gkeonprem/VmwareNodePool.yaml +++ b/mmv1/products/gkeonprem/VmwareNodePool.yaml @@ -14,6 +14,8 @@ --- name: 'VmwareNodePool' description: "A Google Vmware Node Pool." +references: + api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareClusters.vmwareNodePools' docs: id_format: 'projects/{{project}}/locations/{{location}}/vmwareClusters/{{vmware_cluster}}/vmwareNodePools/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/vmwareClusters/{{vmware_cluster}}/vmwareNodePools' From 638a26c136b7c4ca786ec378fe0b7d2913b0e574 Mon Sep 17 00:00:00 2001 From: Ingmar Stein <490610+IngmarStein@users.noreply.github.com> Date: Tue, 1 Jul 2025 17:35:30 +0200 Subject: [PATCH 460/884] Make openapi-generate output more stable (#14426) --- mmv1/openapi_generate/parser.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mmv1/openapi_generate/parser.go b/mmv1/openapi_generate/parser.go index c014dfe50ad4..b604d5503de4 100644 --- a/mmv1/openapi_generate/parser.go +++ b/mmv1/openapi_generate/parser.go @@ -19,6 +19,7 @@ import ( "context" "encoding/base64" "fmt" + "maps" "os" "path" "path/filepath" @@ -442,7 +443,8 @@ func writeObject(name string, obj *openapi3.SchemaRef, objType openapi3.Types, u func buildProperties(props openapi3.Schemas, required []string) []*api.Type { properties := []*api.Type{} - for k, prop := range props { + for _, k := range slices.Sorted(maps.Keys(props)) { + prop := props[k] propObj := writeObject(k, 
prop, propType(prop), false) if slices.Contains(required, k) { propObj.Required = true From 6894b2739eab66f604b7fe9964556cc9e930a3f0 Mon Sep 17 00:00:00 2001 From: jialei-chen <147877028+jialei-chen@users.noreply.github.com> Date: Tue, 1 Jul 2025 09:00:44 -0700 Subject: [PATCH 461/884] Add kmskeyname to datastore (#14161) --- mmv1/products/discoveryengine/DataStore.yaml | 18 ++++++++++++++++++ ...coveryengine_datastore_kms_key_name.tf.tmpl | 11 +++++++++++ 2 files changed, 29 insertions(+) create mode 100644 mmv1/templates/terraform/examples/discoveryengine_datastore_kms_key_name.tf.tmpl diff --git a/mmv1/products/discoveryengine/DataStore.yaml b/mmv1/products/discoveryengine/DataStore.yaml index 63c2b5839720..43be191b4e83 100644 --- a/mmv1/products/discoveryengine/DataStore.yaml +++ b/mmv1/products/discoveryengine/DataStore.yaml @@ -57,6 +57,13 @@ examples: primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' vars: data_store_id: 'data-store-id' + - name: 'discoveryengine_datastore_kms_key_name' + primary_resource_id: 'kms_key_name' + vars: + data_store_id: 'data-store-id' + kms_key_name: 'kms-key' + test_vars_overrides: + kms_key_name: 'acctest.BootstrapKMSKeyInLocation(t, "us").CryptoKey.Name' - name: 'discoveryengine_datastore_document_processing_config' primary_resource_id: 'document_processing_config' primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' @@ -185,6 +192,17 @@ properties: type: Boolean description: If set true, automatic refresh is disabled for the DataStore. required: false + - name: 'kmsKeyName' + type: String + description: | + KMS key resource name which will be used to encrypt resources: + `/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{keyId}` + The KMS key to be used to protect this DataStore at creation time. Must be + set for requests that need to comply with CMEK Org Policy protections. 
+ If this field is set and processed successfully, the DataStore will be + protected by the KMS key, as indicated in the cmek_config field. + required: false + ignore_read: true - name: 'documentProcessingConfig' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/discoveryengine_datastore_kms_key_name.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_datastore_kms_key_name.tf.tmpl new file mode 100644 index 000000000000..bfb600663705 --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_datastore_kms_key_name.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_discovery_engine_data_store" "kms_key_name" { + location = "us" + data_store_id = "{{index $.Vars "data_store_id"}}" + display_name = "tf-test-structured-datastore" + industry_vertical = "GENERIC" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_SEARCH"] + kms_key_name = "{{index $.Vars "kms_key_name"}}" + create_advanced_site_search = false + skip_default_schema_creation = false +} \ No newline at end of file From 578564a258b697a8b9969431679904aa29f689ed Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Tue, 1 Jul 2025 12:03:39 -0400 Subject: [PATCH 462/884] Add roaks3 vacation (#14431) --- .ci/magician/github/membership_data.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index bd30cc831126..25581541116e 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -100,7 +100,12 @@ var ( }, }, "roaks3": { - vacations: []Vacation{}, + vacations: []Vacation{ + { + startDate: newDate(2025, 7, 1), + endDate: newDate(2025, 7, 14), + }, + }, }, "ScottSuarez": { vacations: []Vacation{}, From 8658b10c23c851976b167f6639c921b05ba9e31c Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 1 Jul 2025 18:36:16 +0200 Subject: [PATCH 463/884] secretmanager: added `fetch_secret_data` to 
`google_secret_manager_secret_version` to be able to skip fetching the secret data (#14313) --- ...ta_source_secret_manager_secret_version.go | 56 ++++++++++--------- ...urce_secret_manager_secret_version_test.go | 46 +++++++++++++++ ...ecret_manager_secret_version.html.markdown | 5 +- 3 files changed, 81 insertions(+), 26 deletions(-) diff --git a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go index d656eae35b07..12b21bf5ac70 100644 --- a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go +++ b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go @@ -57,6 +57,11 @@ func DataSourceSecretManagerSecretVersion() *schema.Resource { Optional: true, Default: false, }, + "fetch_secret_data": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, }, } } @@ -137,16 +142,32 @@ func dataSourceSecretManagerSecretVersionRead(d *schema.ResourceData, meta inter return fmt.Errorf("error setting version: %s", err) } - url = fmt.Sprintf("%s:access", url) - resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return fmt.Errorf("error retrieving available secret manager secret version access: %s", err.Error()) + if d.Get("fetch_secret_data").(bool) { + url = fmt.Sprintf("%s:access", url) + resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return fmt.Errorf("error retrieving available secret manager secret version access: %s", err.Error()) + } + data := resp["payload"].(map[string]interface{}) + var secretData string + if d.Get("is_secret_data_base64").(bool) { + 
secretData = data["data"].(string) + } else { + payloadData, err := base64.StdEncoding.DecodeString(data["data"].(string)) + if err != nil { + return fmt.Errorf("error decoding secret manager secret version data: %s", err.Error()) + } + secretData = string(payloadData) + } + if err := d.Set("secret_data", secretData); err != nil { + return fmt.Errorf("error setting secret_data: %s", err) + } } if err := d.Set("create_time", version["createTime"].(string)); err != nil { @@ -164,21 +185,6 @@ func dataSourceSecretManagerSecretVersionRead(d *schema.ResourceData, meta inter return fmt.Errorf("error setting enabled: %s", err) } - data := resp["payload"].(map[string]interface{}) - var secretData string - if d.Get("is_secret_data_base64").(bool) { - secretData = data["data"].(string) - } else { - payloadData, err := base64.StdEncoding.DecodeString(data["data"].(string)) - if err != nil { - return fmt.Errorf("error decoding secret manager secret version data: %s", err.Error()) - } - secretData = string(payloadData) - } - if err := d.Set("secret_data", secretData); err != nil { - return fmt.Errorf("error setting secret_data: %s", err) - } - d.SetId(nameValue.(string)) return nil } diff --git a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go index 68890cf857d2..47427d1e4397 100644 --- a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go @@ -32,6 +32,27 @@ func TestAccDatasourceSecretManagerSecretVersion_basic(t *testing.T) { }) } +func TestAccDatasourceSecretManagerSecretVersion_fetchSecretDataFalse(t *testing.T) { + t.Parallel() + + randomString := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretVersionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDatasourceSecretManagerSecretVersion_fetchSecretDataFalse(randomString), + Check: resource.ComposeTestCheckFunc( + testAccCheckDatasourceSecretManagerSecretVersion("data.google_secret_manager_secret_version.basic", "1"), + resource.TestCheckNoResourceAttr("data.google_secret_manager_secret_version.basic", "secret_data"), + ), + }, + }, + }) +} + func TestAccDatasourceSecretManagerSecretVersion_latest(t *testing.T) { t.Parallel() @@ -189,6 +210,31 @@ data "google_secret_manager_secret_version" "basic" { `, randomString, randomString) } +func testAccDatasourceSecretManagerSecretVersion_fetchSecretDataFalse(randomString string) string { + return fmt.Sprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-version-%s" + labels = { + label = "my-label" + } + replication { + auto {} + } +} + +resource "google_secret_manager_secret_version" "secret-version-basic" { + secret = google_secret_manager_secret.secret-basic.name + secret_data = "my-tf-test-secret-%s" +} + +data "google_secret_manager_secret_version" "basic" { + secret = google_secret_manager_secret_version.secret-version-basic.secret + version = 1 + fetch_secret_data = false +} +`, randomString, randomString) +} + func testAccDatasourceSecretManagerSecretVersion_withBase64SecretData(randomString, data string) string { return fmt.Sprintf(` resource "google_secret_manager_secret" "secret-basic-base64" { diff --git a/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown b/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown index a12499283f5d..4a509c3b1996 100644 --- a/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown @@ -28,9 +28,12 @@ The following arguments are supported: * `version` - (Optional) The version of the secret to get. If it is not provided, the latest version is retrieved. -* `is_secret_data_base64` - (Optional) If set to 'true', the secret data is +* `is_secret_data_base64` - (Optional) If set to `true`, the secret data is expected to be base64-encoded string. +* `fetch_secret_data` - (Optional) If set to `false`, the `secret_data` + will not be fetched. Default is `true`. + ## Attributes Reference The following attributes are exported: From f339dbb510530b3192f47f965a496fc553d3c2e0 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Tue, 1 Jul 2025 09:40:26 -0700 Subject: [PATCH 464/884] Fix manual_assign type in google_gkeonprem_bare_metal_cluster (#14417) --- mmv1/products/gkeonprem/BareMetalCluster.yaml | 2 +- .../gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/mmv1/products/gkeonprem/BareMetalCluster.yaml b/mmv1/products/gkeonprem/BareMetalCluster.yaml index 79d4d3fac1ba..7bc573baf0d5 100644 --- a/mmv1/products/gkeonprem/BareMetalCluster.yaml +++ b/mmv1/products/gkeonprem/BareMetalCluster.yaml @@ -499,7 +499,7 @@ properties: If true, avoid using IPs ending in .0 or .255. This avoids buggy consumer devices mistakenly dropping IPv4 traffic for those special IP addresses. - name: 'manualAssign' - type: String + type: Boolean description: | If true, prevent IP addresses from being automatically assigned. 
- name: 'loadBalancerNodePoolConfig' diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go index e5d8fdf33ee4..692a58875fd1 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go @@ -453,6 +453,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context m "10.200.0.14/32", "fd00:1::12/128" ] + manual_assign = true } load_balancer_node_pool_config { node_pool_config { From 32db52bfbd58c222f5fd04849ff746625bbe34e9 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Tue, 1 Jul 2025 10:08:18 -0700 Subject: [PATCH 465/884] Mark reviewer unavailability (#14432) --- .ci/magician/github/membership_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 25581541116e..2e502f1eec40 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -64,8 +64,8 @@ var ( "BBBmau": { vacations: []Vacation{ { - startDate: newDate(2025, 4, 7), - endDate: newDate(2025, 4, 11), + startDate: newDate(2025, 7, 1), + endDate: newDate(2025, 7, 17), }, }, }, From cafe91ca9dc20fd7abafff04fdb3afc4aba825a4 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 1 Jul 2025 19:54:38 +0200 Subject: [PATCH 466/884] billing: added `currency_code` to `google_billing_account` data source (#14301) --- .../services/billing/data_source_google_billing_account.go | 7 +++++++ .../billing/data_source_google_billing_account_test.go | 1 + .../terraform/website/docs/d/billing_account.html.markdown | 1 + 3 files changed, 9 insertions(+) diff --git a/mmv1/third_party/terraform/services/billing/data_source_google_billing_account.go 
b/mmv1/third_party/terraform/services/billing/data_source_google_billing_account.go index 7c47a741c13c..03cacb990813 100644 --- a/mmv1/third_party/terraform/services/billing/data_source_google_billing_account.go +++ b/mmv1/third_party/terraform/services/billing/data_source_google_billing_account.go @@ -47,6 +47,10 @@ func DataSourceGoogleBillingAccount() *schema.Resource { Optional: true, Default: true, }, + "currency_code": { + Type: schema.TypeString, + Computed: true, + }, }, } } @@ -125,6 +129,9 @@ func dataSourceBillingAccountRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("open", billingAccount.Open); err != nil { return fmt.Errorf("Error setting open: %s", err) } + if err := d.Set("currency_code", billingAccount.CurrencyCode); err != nil { + return fmt.Errorf("Error setting currency_code: %s", err) + } return nil } diff --git a/mmv1/third_party/terraform/services/billing/data_source_google_billing_account_test.go b/mmv1/third_party/terraform/services/billing/data_source_google_billing_account_test.go index dc355f9f7e3b..23381a323fe6 100644 --- a/mmv1/third_party/terraform/services/billing/data_source_google_billing_account_test.go +++ b/mmv1/third_party/terraform/services/billing/data_source_google_billing_account_test.go @@ -44,6 +44,7 @@ func TestAccDataSourceGoogleBillingAccount_byShortName(t *testing.T) { resource.TestCheckResourceAttr("data.google_billing_account.acct", "id", billingId), resource.TestCheckResourceAttr("data.google_billing_account.acct", "name", name), resource.TestCheckResourceAttr("data.google_billing_account.acct", "open", "true"), + resource.TestCheckResourceAttrSet("data.google_billing_account.acct", "currency_code"), ), }, }, diff --git a/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown b/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown index f4d303d27147..7cc208247d8f 100644 --- a/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown @@ -45,3 +45,4 @@ The following additional attributes are exported: * `name` - The resource name of the billing account in the form `billingAccounts/{billing_account_id}`. * `project_ids` - The IDs of any projects associated with the billing account. `lookup_projects` must not be false for this to be populated. +* `currency_code` - The currency code of the billing account, e.g. `USD`. From b7b7d9ae2fbcd5693dddfe0fd0aa535bb893552c Mon Sep 17 00:00:00 2001 From: Akshat Jindal <67505646+akshat-jindal-nit@users.noreply.github.com> Date: Wed, 2 Jul 2025 00:54:28 +0530 Subject: [PATCH 467/884] Fix for unable to set `bgp_always_compare_med` in `google_compute_network` from `true` to `false` (#14424) --- .../update_encoder/compute_network.go.tmpl | 14 ++++++++++++-- .../compute/resource_compute_network_test.go.tmpl | 8 ++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl b/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl index 084107f2772e..7b7362ade39a 100644 --- a/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl @@ -1,2 +1,12 @@ -delete(obj, "numeric_id") // Field doesn't exist in the API -return obj, nil + // BGP always-compare-med + if d.HasChange("bgp_always_compare_med") { + if _, ok := obj["routingConfig"]; !ok { + obj["routingConfig"] = make(map[string]interface{}) + } + obj["routingConfig"].(map[string]interface{})["bgpAlwaysCompareMed"] = d.Get("bgp_always_compare_med").(bool) + } + + // now clean up the rest + delete(obj, "numeric_id") + return obj, nil + diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl index 45779d6b14e4..7d39c62d604d 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl @@ -211,6 +211,14 @@ func TestAccComputeNetwork_bgpAlwaysCompareMedAndUpdate(t *testing.T) { t, "google_compute_network.acc_network_bgp_always_compare_med", &network), resource.TestCheckResourceAttr("google_compute_network.acc_network_bgp_always_compare_med", "bgp_always_compare_med", "true"), ), + }, + { + Config: testAccComputeNetwork_bgp_always_compare_med(networkName, false), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeNetworkExists( + t, "google_compute_network.acc_network_bgp_always_compare_med", &network), + resource.TestCheckResourceAttr("google_compute_network.acc_network_bgp_always_compare_med", "bgp_always_compare_med", "false"), + ), }, }, }) From b7e1be6aeda183852cf109f985e716302481e14b Mon Sep 17 00:00:00 2001 From: Aleksandr Averbukh Date: Tue, 1 Jul 2025 22:00:17 +0200 Subject: [PATCH 468/884] Add discoveryengine_datastore layout_parsing_config nested fields. 
(#14425) --- mmv1/products/discoveryengine/DataStore.yaml | 84 ++++++++++++++++++- ...ment_processing_config_layout_full.tf.tmpl | 27 ++++++ 2 files changed, 109 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/examples/discoveryengine_datastore_document_processing_config_layout_full.tf.tmpl diff --git a/mmv1/products/discoveryengine/DataStore.yaml b/mmv1/products/discoveryengine/DataStore.yaml index 43be191b4e83..83503bd187da 100644 --- a/mmv1/products/discoveryengine/DataStore.yaml +++ b/mmv1/products/discoveryengine/DataStore.yaml @@ -81,6 +81,12 @@ examples: vars: data_store_id: 'data-store-id' exclude_docs: true + - name: 'discoveryengine_datastore_document_processing_config_layout_full' + primary_resource_id: 'document_processing_config_layout_full' + primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' + vars: + data_store_id: 'data-store-id' + exclude_docs: true - name: 'discoveryengine_datastore_advanced_site_search_config' primary_resource_id: 'advanced_site_search_config' primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' @@ -292,7 +298,44 @@ properties: - 'default_parsing_config.0.ocr_parsing_config' - 'default_parsing_config.0.layout_parsing_config' properties: - [] + - name: 'enableTableAnnotation' + type: Boolean + description: | + If true, the LLM based annotation is added to the table during parsing. + required: false + - name: 'enableImageAnnotation' + type: Boolean + description: | + If true, the LLM based annotation is added to the image during parsing. + required: false + - name: 'structuredContentTypes' + type: Array + description: | + Contains the required structure types to extract from the document. Supported values: `shareholder-structure`. + required: false + item_type: + type: String + - name: 'excludeHtmlElements' + type: Array + description: | + List of HTML elements to exclude from the parsed content. 
+ required: false + item_type: + type: String + - name: 'excludeHtmlClasses' + type: Array + description: | + List of HTML classes to exclude from the parsed content. + required: false + item_type: + type: String + - name: 'excludeHtmlIds' + type: Array + description: | + List of HTML ids to exclude from the parsed content. + required: false + item_type: + type: String - name: 'parsingConfigOverrides' type: Map description: | @@ -345,7 +388,44 @@ properties: - 'default_parsing_config.0.ocr_parsing_config' - 'default_parsing_config.0.layout_parsing_config' properties: - [] + - name: 'enableTableAnnotation' + type: Boolean + description: | + If true, the LLM based annotation is added to the table during parsing. + required: false + - name: 'enableImageAnnotation' + type: Boolean + description: | + If true, the LLM based annotation is added to the image during parsing. + required: false + - name: 'structuredContentTypes' + type: Array + description: | + Contains the required structure types to extract from the document. Supported values: `shareholder-structure`. + required: false + item_type: + type: String + - name: 'excludeHtmlElements' + type: Array + description: | + List of HTML elements to exclude from the parsed content. + required: false + item_type: + type: String + - name: 'excludeHtmlClasses' + type: Array + description: | + List of HTML classes to exclude from the parsed content. + required: false + item_type: + type: String + - name: 'excludeHtmlIds' + type: Array + description: | + List of HTML ids to exclude from the parsed content. 
+ required: false + item_type: + type: String - name: 'createTime' type: Time description: | diff --git a/mmv1/templates/terraform/examples/discoveryengine_datastore_document_processing_config_layout_full.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_datastore_document_processing_config_layout_full.tf.tmpl new file mode 100644 index 000000000000..65e77418d2fd --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_datastore_document_processing_config_layout_full.tf.tmpl @@ -0,0 +1,27 @@ +resource "google_discovery_engine_data_store" "document_processing_config_layout_full" { + location = "global" + data_store_id = "{{index $.Vars "data_store_id"}}" + display_name = "tf-test-structured-datastore" + industry_vertical = "GENERIC" + content_config = "CONTENT_REQUIRED" + solution_types = ["SOLUTION_TYPE_SEARCH"] + create_advanced_site_search = false + document_processing_config { + default_parsing_config { + layout_parsing_config { + enable_table_annotation = true + enable_image_annotation = true + structured_content_types = ["shareholder-structure"] + exclude_html_elements = ["nav", "footer"] + exclude_html_classes = ["overlay", "screenreader"] + exclude_html_ids = ["cookie-banner"] + } + } + chunking_config { + layout_based_chunking_config { + chunk_size = 500 + include_ancestor_headings = true + } + } + } +} From e3047eb7e4d23402edccf517ce1c501e4a09e800 Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Wed, 2 Jul 2025 01:54:44 +0530 Subject: [PATCH 469/884] Add deletion_protection field to Secret Manager Secret (#14394) --- mmv1/products/secretmanager/Secret.yaml | 11 +++ .../examples/secret_config_basic.tf.tmpl | 1 + .../pre_delete/secret_manager_secret.go.tmpl | 4 + .../resource_secret_manager_secret_test.go | 81 +++++++++++++++++++ 4 files changed, 97 insertions(+) create mode 100644 mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl diff --git a/mmv1/products/secretmanager/Secret.yaml 
b/mmv1/products/secretmanager/Secret.yaml index e1742248fd81..b284fd8d668c 100644 --- a/mmv1/products/secretmanager/Secret.yaml +++ b/mmv1/products/secretmanager/Secret.yaml @@ -42,6 +42,7 @@ iam_policy: custom_code: constants: 'templates/terraform/constants/secret_manager_secret.go.tmpl' pre_update: 'templates/terraform/pre_update/secret_manager_secret.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/secret_manager_secret.go.tmpl' custom_diff: - 'secretManagerSecretAutoCustomizeDiff' examples: @@ -50,6 +51,8 @@ examples: primary_resource_name: 'fmt.Sprintf("secret%s", context["random_suffix"])' vars: secret_id: 'secret' + ignore_read_extra: + - 'deletion_protection' - name: 'secret_with_annotations' primary_resource_id: 'secret-with-annotations' vars: @@ -250,3 +253,11 @@ properties: description: | The Duration between rotation notifications. Must be in seconds and at least 3600s (1h) and at most 3153600000s (100 years). If rotationPeriod is set, `next_rotation_time` must be set. `next_rotation_time` will be advanced by this period when the service automatically sends rotation notifications. +virtual_fields: + - name: 'deletion_protection' + description: | + Whether Terraform will be prevented from destroying the secret. Defaults to false. + When the field is set to true in Terraform state, a `terraform apply` + or `terraform destroy` that would delete the secret will fail. 
+ type: Boolean + default_value: false diff --git a/mmv1/templates/terraform/examples/secret_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/secret_config_basic.tf.tmpl index ed2ffe869280..e209cc4c1424 100644 --- a/mmv1/templates/terraform/examples/secret_config_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/secret_config_basic.tf.tmpl @@ -15,4 +15,5 @@ resource "google_secret_manager_secret" "{{$.PrimaryResourceId}}" { } } } + deletion_protection = false } diff --git a/mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl b/mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl new file mode 100644 index 000000000000..0c588d6cd9eb --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl @@ -0,0 +1,4 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy secret manager secret without setting deletion_protection=false and running `terraform apply`") +} + diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go index 81ce574fe53a..f1132a348f3a 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go @@ -464,6 +464,33 @@ func TestAccSecretManagerSecret_updateBetweenTtlAndExpireTime(t *testing.T) { }) } +func TestAccSecretManagerSecret_DeletionProtection(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_deletionprotectionTrue(context), + }, + { + ResourceName: 
"google_secret_manager_secret.secret-deletionprotection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "deletion_protection"}, + }, + { + Config: testAccSecretManagerSecret_deletionprotectionFalse(context), + }, + }, + }) +} + func testAccSecretManagerSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_secret" "secret-basic" { @@ -1220,3 +1247,57 @@ resource "google_secret_manager_secret" "secret-basic" { } `, context) } + +func testAccSecretManagerSecret_deletionprotectionTrue(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-deletionprotection" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + + ttl = "3600s" + + deletion_protection = true +} +`, context) +} + +func testAccSecretManagerSecret_deletionprotectionFalse(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-deletionprotection" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + + ttl = "3600s" + + deletion_protection = false +} +`, context) +} From 94f217fff9768674cb88e00dfc0211fe0f27f5e2 Mon Sep 17 00:00:00 2001 From: Abhijeet Dargude <144316709+dargudear-google@users.noreply.github.com> Date: Wed, 2 Jul 2025 02:56:20 +0530 Subject: [PATCH 470/884] (Beta) support for SM GKE auto rotation (#14362) --- .../resource_container_cluster.go.tmpl | 68 ++++++++++++++-- .../resource_container_cluster_test.go.tmpl | 80 +++++++++++++++++++ .../docs/r/container_cluster.html.markdown | 6 ++ 3 files changed, 149 
insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index ec017eccc527..8855d23a5d92 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -1649,6 +1649,30 @@ func ResourceContainerCluster() *schema.Resource { Required: true, Description: `Enable the Secret manager csi component.`, }, + {{- if ne $.TargetVersionName "ga" }} + "rotation_config" : { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Description: `Configuration for Secret Manager auto rotation.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `Enable the Secret manager auto rotation.`, + }, + "rotation_interval": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The interval between two consecutive rotations. 
Default rotation interval is 2 minutes`, + }, + }, + }, + }, + {{- end }} }, }, }, @@ -6091,10 +6115,30 @@ func expandSecretManagerConfig(configured interface{}) *container.SecretManagerC } config := l[0].(map[string]interface{}) - return &container.SecretManagerConfig{ + sc := &container.SecretManagerConfig{ Enabled: config["enabled"].(bool), ForceSendFields: []string{"Enabled"}, } + {{- if ne $.TargetVersionName "ga" }} + if autoRotation, ok := config["rotation_config"]; ok { + if autoRotationList, ok := autoRotation.([]interface{}); ok { + if len(autoRotationList) > 0 { + autoRotationConfig := autoRotationList[0].(map[string]interface{}) + if rotationInterval, ok := autoRotationConfig["rotation_interval"].(string); ok && rotationInterval != "" { + sc.RotationConfig = &container.RotationConfig{ + Enabled: autoRotationConfig["enabled"].(bool), + RotationInterval: rotationInterval, + } + } else { + sc.RotationConfig = &container.RotationConfig{ + Enabled: autoRotationConfig["enabled"].(bool), + } + } + } + } + } + {{- end }} + return sc } func expandDefaultMaxPodsConstraint(v interface{}) *container.MaxPodsConstraint { @@ -7103,11 +7147,25 @@ func flattenSecretManagerConfig(c *container.SecretManagerConfig) []map[string]i }, } } - return []map[string]interface{}{ - { - "enabled": c.Enabled, - }, + + result := make(map[string]interface{}) + + result["enabled"] = c.Enabled + + {{- if ne $.TargetVersionName "ga" }} + rotationList := []map[string]interface{}{} + if c.RotationConfig != nil { + rotationConfigMap := map[string]interface{}{ + "enabled": c.RotationConfig.Enabled, + } + if c.RotationConfig.RotationInterval != "" { + rotationConfigMap["rotation_interval"] = c.RotationConfig.RotationInterval + } + rotationList = append(rotationList, rotationConfigMap) } + result["rotation_config"] = rotationList + {{- end }} + return []map[string]interface{}{result} } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl 
b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 04f2fe0c5f9b..bc032e00b2cc 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -4190,6 +4190,24 @@ func TestAccContainerCluster_withSecretManagerConfig(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, }, + { + Config: testAccContainerCluster_withSecretManagerRotationPeriodUpdated(pid, clusterName, networkName, subnetworkName), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_withSecretManagerConfigRotationDisabled(pid, clusterName, networkName, subnetworkName), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, { Config: testAccContainerCluster_withSecretManagerConfigUpdated(pid, clusterName, networkName, subnetworkName), }, @@ -11482,6 +11500,68 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 secret_manager_config { enabled = true +{{- if ne $.TargetVersionName "ga" }} + rotation_config { + enabled = true + rotation_interval = "300s" + } +{{- end }} + } + deletion_protection = false + network = "%s" + subnetwork = "%s" + workload_identity_config { + workload_pool = "${data.google_project.project.project_id}.svc.id.goog" + } +} +`, projectID, name, networkName, subnetworkName) +} + +func testAccContainerCluster_withSecretManagerRotationPeriodUpdated(projectID, name, networkName, subnetworkName string) string { + return fmt.Sprintf(` +data "google_project" "project" { + project_id = "%s" +} +resource "google_container_cluster" "primary" { + name = "%s" + location = 
"us-central1-a" + initial_node_count = 1 + secret_manager_config { + enabled = true +{{- if ne $.TargetVersionName "ga" }} + rotation_config { + enabled = true + rotation_interval = "120s" + } +{{- end }} + } + deletion_protection = false + network = "%s" + subnetwork = "%s" + workload_identity_config { + workload_pool = "${data.google_project.project.project_id}.svc.id.goog" + } +} +`, projectID, name, networkName, subnetworkName) +} + +func testAccContainerCluster_withSecretManagerConfigRotationDisabled(projectID, name, networkName, subnetworkName string) string { + return fmt.Sprintf(` +data "google_project" "project" { + project_id = "%s" +} +resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + secret_manager_config { + enabled = true +{{- if ne $.TargetVersionName "ga" }} + rotation_config { + enabled = false + rotation_interval = "120s" + } +{{- end }} } deletion_protection = false network = "%s" diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 7fd4edca2e97..bbfaeb989788 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1239,6 +1239,12 @@ notification_config { The `secret_manager_config` block supports: * `enabled` (Required) - Enable the Secret Manager add-on for this cluster. +* `rotation_config` (Optional, Beta) - config for secret manager auto rotation. Structure is [docuemented below](#rotation_config) + +The `rotation_config` block supports: + +* `enabled` (Optional) - Enable the roation in Secret Manager add-on for this cluster. +* `rotation_interval` (Optional) - The interval between two consecutive rotations. Default rotation interval is 2 minutes. 
The `user_managed_keys_config` block supports: From 783217f8939d1f8604d7ba84bf744c7b7f51455c Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 1 Jul 2025 14:41:26 -0700 Subject: [PATCH 471/884] Improve web doc field description order (#14422) --- .../terraform/resource.html.markdown.tmpl | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/mmv1/templates/terraform/resource.html.markdown.tmpl b/mmv1/templates/terraform/resource.html.markdown.tmpl index add27d89046c..ae1026a4b46f 100644 --- a/mmv1/templates/terraform/resource.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource.html.markdown.tmpl @@ -93,14 +93,6 @@ The following arguments are supported: {{- end }} {{- end }} {{ "" }} -{{- range $p := $.AllUserProperties }} - {{- if $p.Required }} -{{- trimTemplate "nested_property_documentation.html.markdown.tmpl" $p -}} - {{- end}} -{{- end }} -- - - -{{ "" }} -{{ "" }} {{- range $p := $.RootProperties }} {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnly) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} @@ -120,6 +112,12 @@ The following arguments are supported: {{- if $.Docs.OptionalProperties }} {{ $.Docs.OptionalProperties }} {{- end }} +{{ "" }} +{{- range $p := $.AllUserProperties }} + {{- if $p.Required }} +{{- trimTemplate "nested_property_documentation.html.markdown.tmpl" $p -}} + {{- end}} +{{- end }} {{- range $p := $.AllUserProperties }} {{- if and (not $p.Required) (not $p.Output) }} {{- trimTemplate "nested_property_documentation.html.markdown.tmpl" $p -}} From 71e18534c08a137f28caacf92e452a011ff1bcac Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 2 Jul 2025 08:16:06 -0700 Subject: [PATCH 472/884] tgc-revival: support google_compute_backend_bucket (#14420) Co-authored-by: Thomas Rodgers --- mmv1/api/resource/examples.go | 6 ++ mmv1/products/compute/BackendBucket.yaml | 2 + .../templates/tgc_next/test/test_file.go.tmpl | 5 ++ 
.../tfplan2cai/resource_converter.go.tmpl | 3 + .../tgc_next/test/assert_test_files.go | 88 +++++++++++++++++-- 5 files changed, 98 insertions(+), 6 deletions(-) diff --git a/mmv1/api/resource/examples.go b/mmv1/api/resource/examples.go index 884effd882e6..344985a146fc 100644 --- a/mmv1/api/resource/examples.go +++ b/mmv1/api/resource/examples.go @@ -183,6 +183,12 @@ type Examples struct { // These properties are present in Terraform resources schema, but not in CAI assets. // Virtual Fields and url parameters are already ignored by default and do not need to be duplicated here. TGCTestIgnoreExtra []string `yaml:"tgc_test_ignore_extra,omitempty"` + // The properties ignored in CAI assets. It is rarely used and only used + // when the nested field has sent_empty_value: true. + // But its parent field is C + O and not specified in raw_config. + // Example: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec']. + // "RESOURCE" means that the property is for resource data in CAI asset. + TGCTestIgnoreInAsset []string `yaml:"tgc_test_ignore_in_asset,omitempty"` } // Set default value for fields diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index 4f0601c6898c..04f155ebd773 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -41,6 +41,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true iam_policy: parent_resource_attribute: 'name' example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' @@ -59,6 +60,7 @@ examples: vars: backend_bucket_name: 'image-backend-bucket' bucket_name: 'image-store-bucket' + tgc_test_ignore_in_asset: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec'] - name: 'backend_bucket_full' primary_resource_id: 'image_backend_full' vars: diff --git a/mmv1/templates/tgc_next/test/test_file.go.tmpl b/mmv1/templates/tgc_next/test/test_file.go.tmpl index e9a08b298434..f6b86c09be08 100644 --- 
a/mmv1/templates/tgc_next/test/test_file.go.tmpl +++ b/mmv1/templates/tgc_next/test/test_file.go.tmpl @@ -29,6 +29,11 @@ func TestAcc{{ $e.TestSlug $.ProductMetadata.Name $.Name }}(t *testing.T) { []string{ {{- range $field := $.TGCTestIgnorePropertiesToStrings $e }} "{{ $field }}", +{{- end }} + }, + []string{ +{{- range $field := $e.TGCTestIgnoreInAsset }} + "{{ $field }}", {{- end }} }, ) diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 4e0f3ec528d7..21ccc46db20f 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -102,6 +102,9 @@ func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, conf } if obj, err := Get{{ $.ResourceName -}}CaiObject(d, config); err == nil { location, _ := tpgresource.GetLocation(d, config) + if location == "" && strings.Contains(name, "/global/") { + location = "global" + } return []caiasset.Asset{{"{{"}} Name: name, Type: {{ $.ApiResourceType -}}AssetType, diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index f2e81cda2928..3b7e12a95201 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -29,7 +29,7 @@ var ( tmpDir = os.TempDir() ) -func BidirectionalConversion(t *testing.T, ignoredFields []string) { +func BidirectionalConversion(t *testing.T, ignoredFields []string, ignoredAssetFields []string) { resourceTestData, primaryResource, err := prepareTestData(t.Name()) if err != nil { t.Fatal("Error preparing the input data:", err) @@ -52,13 +52,13 @@ func BidirectionalConversion(t *testing.T, ignoredFields []string) { // Otherwise, test all of the resources in the test. 
if primaryResource != "" { t.Logf("Test for the primary resource %s begins.", primaryResource) - err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, logger, true) + err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, ignoredAssetFields, logger, true) if err != nil { t.Fatal("Test fails:", err) } } else { for _, testData := range resourceTestData { - err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, logger, false) + err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, ignoredAssetFields, logger, false) if err != nil { t.Fatal("Test fails: ", err) } @@ -67,7 +67,7 @@ func BidirectionalConversion(t *testing.T, ignoredFields []string) { } // Tests a single resource -func testSingleResource(t *testing.T, testName string, testData ResourceTestData, tfDir string, ignoredFields []string, logger *zap.Logger, primaryResource bool) error { +func testSingleResource(t *testing.T, testName string, testData ResourceTestData, tfDir string, ignoredFields []string, ignoredAssetFields []string, logger *zap.Logger, primaryResource bool) error { resourceType := testData.ResourceType var tfplan2caiSupported, cai2hclSupported bool if _, tfplan2caiSupported = tfplan2caiconverters.ConverterMap[resourceType]; !tfplan2caiSupported { @@ -156,7 +156,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData // Convert the export config to roundtrip assets and then convert the roundtrip assets back to roundtrip config ancestryCache := getAncestryCache(assets) - roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger) + roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger, ignoredAssetFields) if err != nil { return fmt.Errorf("error when converting the round-trip config: %#v", err) } @@ -286,7 +286,7 @@ func compareHCLFields(map1, map2 
map[string]interface{}, path string, ignoredFie } // Converts a tfplan to CAI asset, and then converts the CAI asset into HCL -func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger) ([]caiasset.Asset, []byte, error) { +func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger, ignoredAssetFields []string) ([]caiasset.Asset, []byte, error) { fileName := fmt.Sprintf("%s_export", testName) // Run terraform init and terraform apply to generate tfplan.json files @@ -314,6 +314,8 @@ func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac return nil, nil, err } + deleteFieldsFromAssets(roundtripAssets, ignoredAssetFields) + // Uncomment these lines when debugging issues locally // roundtripAssetFile := fmt.Sprintf("%s_roundtrip.json", t.Name()) // writeJSONFile(roundtripAssetFile, roundtripAssets) @@ -328,6 +330,80 @@ func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac return roundtripAssets, roundtripConfig, nil } +// Example: +// +// data := map[string]interface{}{ +// "database": map[string]interface{}{ +// "host": "localhost", +// "user": "admin", +// }, +// } +// +// Path of "host" in "data" is ["database", "host"] +type Field struct { + Path []string +} + +// Deletes fields from the resource data of CAI assets +func deleteFieldsFromAssets(assets []caiasset.Asset, ignoredResourceDataFields []string) []caiasset.Asset { + // The key is the content type, such as "resource" + ignoredFieldsMap := make(map[string][]Field, 0) + for _, ignoredField := range ignoredResourceDataFields { + parts := strings.Split(ignoredField, ".") + if len(parts) <= 1 { + continue + } + if parts[0] == "RESOURCE" { + if _, ok := ignoredFieldsMap["RESOURCE"]; !ok { + ignoredFieldsMap["RESOURCE"] = make([]Field, 0) + } + f := Field{Path: parts[1:]} + ignoredFieldsMap["RESOURCE"] = append(ignoredFieldsMap["RESOURCE"], 
f) + } + } + + for _, asset := range assets { + if asset.Resource != nil && asset.Resource.Data != nil { + data := asset.Resource.Data + for _, ignoredField := range ignoredFieldsMap["RESOURCE"] { + path := ignoredField.Path + deleteMapFieldByPath(data, path) + } + } + } + return assets +} + +// Deletes a field from a map by its path. +// Example: +// +// data := map[string]interface{}{ +// "database": map[string]interface{}{ +// "host": "localhost", +// "user": "admin", +// }, +// } +// +// path := ["database", "host"] +func deleteMapFieldByPath(data map[string]interface{}, path []string) { + i := 0 + for i < len(path)-1 { + k := path[i] + if v, ok := data[k]; ok { + if data, ok = v.(map[string]interface{}); ok && data != nil { + i++ + } else { + break + } + } else { + break + } + } + if i == len(path)-1 { + delete(data, path[i]) + } +} + // Compares the asset name in export asset and roundtrip asset and ignores "null" in the name // Example: //cloudresourcemanager.googleapis.com/projects/123456 func compareAssetName(want, got string) error { From f5a65439a262529af647c82f5db7a29f0797f33f Mon Sep 17 00:00:00 2001 From: Veronika Herasymenko Date: Wed, 2 Jul 2025 17:51:49 +0200 Subject: [PATCH 473/884] Add resource_manager_tags support to Route api (#14395) Co-authored-by: Scott Suarez --- mmv1/products/compute/Route.yaml | 17 +++++++ .../compute/resource_compute_route_test.go | 50 +++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/mmv1/products/compute/Route.yaml b/mmv1/products/compute/Route.yaml index 02cb782b4671..e8164df51c12 100644 --- a/mmv1/products/compute/Route.yaml +++ b/mmv1/products/compute/Route.yaml @@ -366,3 +366,20 @@ properties: - 'ACTIVE' for an active route - 'INACTIVE' for an inactive route output: true + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' 
+ type: KeyValuePairs + description: | + Resource manager tags to be bound to the route. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. The field is ignored when empty. + The field is immutable and causes resource replacement when mutated. This field is only + set at create time and modifying this field after creation will trigger recreation. + To apply tags to an existing resource, see the google_tags_tag_binding resource. + ignore_read: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go index 2d69d740b63f..7edb2cf77fd1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeRoute_defaultInternetGateway(t *testing.T) { @@ -49,6 +50,55 @@ func TestAccComputeRoute_hopInstance(t *testing.T) { }) } +func TestAccComputeRoute_resourceManagerTags(t *testing.T) { + + org := envvar.GetTestOrgFromEnv(t) + + routeName := fmt.Sprintf("tf-test-route-resource-manager-tags-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-nroute-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey, _ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-route-tagvalue", sharedTagkey, org) + context := map[string]interface{}{ + "route_name": routeName, + "tag_key_id": tagKeyResult["name"], + "tag_value_id": tagValueResult["name"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() 
{ acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRouteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRoute_resourceManagerTags(context), + }, + { + ResourceName: "google_compute_route.acc_route_with_resource_manager_tags", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, // we don't read tags back. The whole params block is input only + }, + }, + }) +} + +func testAccComputeRoute_resourceManagerTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_route" "acc_route_with_resource_manager_tags" { + name = "%{route_name}" + dest_range = "0.0.0.0/0" + network = "default" + next_hop_gateway = "default-internet-gateway" + priority = 100 + params { + resource_manager_tags = { + "%{tag_key_id}" = "%{tag_value_id}" + } + } +} +`, context) +} + func testAccComputeRoute_defaultInternetGateway(suffix string) string { return fmt.Sprintf(` resource "google_compute_route" "foobar" { From 4a118c98de29826f4b9e07199807af58c405fc2f Mon Sep 17 00:00:00 2001 From: Iris Chen <10179943+iyabchen@users.noreply.github.com> Date: Wed, 2 Jul 2025 09:23:21 -0700 Subject: [PATCH 474/884] chore(ci): update frontmatter check to only cover website/docs folder (#14360) --- .ci/magician/cmd/generate_comment.go | 7 ++--- .ci/magician/cmd/generate_comment_test.go | 36 +++++++++++++++-------- 2 files changed, 26 insertions(+), 17 deletions(-) diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index ccdbd8bc28e9..5504e66aeeb0 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -685,6 +685,9 @@ func init() { func checkDocumentFrontmatter(repo source.Repo) []string { var errs []string for _, f := range repo.ChangedFiles { + if !strings.HasPrefix(f, "website/docs/r/") && !strings.HasPrefix(f, "website/docs/d/") { + continue 
+ } if !strings.HasSuffix(f, ".markdown") { continue } @@ -717,10 +720,6 @@ func checkDocumentFrontmatter(repo source.Repo) []string { } if err := data.Decode(&metadata); err != nil { errs = append(errs, fmt.Sprintf("Failed to decode frontmatter in file %s. This is usually due to an incorrect structure in the frontmatter.", f)) - continue - } - if metadata.Subcategory == "" { - errs = append(errs, fmt.Sprintf("Failed to detect subcategory in the frontmatter in file %s.", f)) } } return errs diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index f73683709ef8..bd51b87c60ff 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -570,44 +570,54 @@ random: Example Subcategory --- `, } + + folderPath := filepath.Join(tmpDir, "website", "docs", "r") + if err := os.MkdirAll(folderPath, 0755); err != nil { + t.Fatal(err) + } for name, content := range files { - fullPath := filepath.Join(tmpDir, name) + fullPath := filepath.Join(folderPath, name) err := os.WriteFile(fullPath, []byte(content), 0644) if err != nil { t.Fatalf("Failed to create file %s: %v", name, err) } } + // write a file in other folders + if err := os.WriteFile(filepath.Join(tmpDir, "abc.md"), []byte("random"), 0644); err != nil { + t.Fatalf("Failed to create file %s: %v", filepath.Join(tmpDir, "abc.md"), err) + } + tests := []struct { name string changedFiles []string wantErr bool }{ { - name: "No changed markdown files", - changedFiles: []string{"abc.txt"}, + name: "not in relevant doc folder", + changedFiles: []string{"abc.md"}, + wantErr: false, + }, + { + name: "not markdown files", + changedFiles: []string{"website/docs/r/abc.txt"}, wantErr: false, }, { name: "malformed markdown", - changedFiles: []string{"malformed.markdown"}, + changedFiles: []string{"website/docs/r/malformed.markdown"}, wantErr: true, }, { - name: "not exist markdown", - changedFiles: []string{"abc.markdown"}, + name: "markdown not 
exist", + changedFiles: []string{"website/docs/d/sample.markdown"}, wantErr: true, }, { - name: "Changed files with no frontmatter", - changedFiles: []string{"sample.markdown"}, + name: "correct format", + changedFiles: []string{"website/docs/r/sample.markdown"}, wantErr: false, }, - { - name: "Missing subcategory in frontmatter", - changedFiles: []string{"missingsubcategory.markdown"}, - wantErr: true, - }, } for _, tc := range tests { From 1f319aa0cd1399380d668c77d72b19b317191812 Mon Sep 17 00:00:00 2001 From: Nikhil Bhoyar Date: Wed, 2 Jul 2025 21:55:50 +0530 Subject: [PATCH 475/884] Promote autokey resources to GA (#14202) --- mmv1/products/kms/AutokeyConfig.yaml | 5 +---- mmv1/products/kms/KeyHandle.yaml | 6 +----- .../acctest/bootstrap_test_utils.go.tmpl | 16 ---------------- .../provider/provider_mmv1_resources.go.tmpl | 2 -- ...=> data_source_google_kms_auotokey_config.go} | 4 ---- ...ta_source_google_kms_auotokey_config_test.go} | 3 --- ...tmpl => data_source_google_kms_key_handle.go} | 10 +++------- ...=> data_source_google_kms_key_handle_test.go} | 3 --- ...> data_source_google_kms_key_handles_test.go} | 4 ---- .../docs/d/kms_autokey_config.html.markdown | 4 ---- .../website/docs/d/kms_key_handle.html.markdown | 3 --- .../website/docs/d/kms_key_handles.html.markdown | 3 --- 12 files changed, 5 insertions(+), 58 deletions(-) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_auotokey_config.go.tmpl => data_source_google_kms_auotokey_config.go} (94%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_auotokey_config_test.go.tmpl => data_source_google_kms_auotokey_config_test.go} (95%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_key_handle.go.tmpl => data_source_google_kms_key_handle.go} (88%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_key_handle_test.go.tmpl => data_source_google_kms_key_handle_test.go} (96%) rename 
mmv1/third_party/terraform/services/kms/{data_source_google_kms_key_handles_test.go.tmpl => data_source_google_kms_key_handles_test.go} (97%) diff --git a/mmv1/products/kms/AutokeyConfig.yaml b/mmv1/products/kms/AutokeyConfig.yaml index c50d9381d6cc..3f6b5bfdc208 100644 --- a/mmv1/products/kms/AutokeyConfig.yaml +++ b/mmv1/products/kms/AutokeyConfig.yaml @@ -22,7 +22,6 @@ description: | ~> **Note:** AutokeyConfigs cannot be deleted from Google Cloud Platform. Destroying a Terraform-managed AutokeyConfig will remove it from state but *will not delete the resource from the project.* -min_version: 'beta' references: guides: 'Cloud KMS with Autokey': 'https://cloud.google.com/kms/docs/kms-with-autokey' @@ -59,6 +58,7 @@ exclude_sweeper: true examples: - name: 'kms_autokey_config_all' primary_resource_id: 'example-autokeyconfig' + # test depends upon google_project_service_identity service which is still in beta, so we need to keep test limited to beta min_version: 'beta' vars: folder_name: 'my-folder' @@ -72,7 +72,6 @@ parameters: type: String description: | The folder for which to retrieve config. - min_version: 'beta' url_param_only: true required: true immutable: true @@ -84,9 +83,7 @@ properties: The target key project for a given folder where KMS Autokey will provision a CryptoKey for any new KeyHandle the Developer creates. Should have the form `projects/`. - min_version: 'beta' - name: 'etag' type: String description: 'The etag of the AutokeyConfig for optimistic concurrency control.' - min_version: 'beta' output: true diff --git a/mmv1/products/kms/KeyHandle.yaml b/mmv1/products/kms/KeyHandle.yaml index a65e9206c0b9..7d4e968958af 100644 --- a/mmv1/products/kms/KeyHandle.yaml +++ b/mmv1/products/kms/KeyHandle.yaml @@ -21,7 +21,6 @@ description: | ~> **Note:** KeyHandles cannot be deleted from Google Cloud Platform. 
Destroying a Terraform-managed KeyHandle will remove it from state but *will not delete the resource from the project.* -min_version: 'beta' references: guides: 'Cloud KMS with Autokey': 'https://cloud.google.com/kms/docs/kms-with-autokey' @@ -52,6 +51,7 @@ custom_code: examples: - name: 'kms_key_handle_basic' primary_resource_id: 'example-keyhandle' + # test depends upon google_project_service_identity service which is still in beta, so we need to keep test limited to beta min_version: 'beta' vars: folder_name: 'my-folder' @@ -67,7 +67,6 @@ parameters: description: | The location for the KeyHandle. A full list of valid locations can be found by running `gcloud kms locations list`. - min_version: 'beta' url_param_only: true required: true properties: @@ -75,7 +74,6 @@ properties: type: String description: | The resource name for the KeyHandle. - min_version: 'beta' required: true immutable: true - name: 'kmsKey' @@ -84,13 +82,11 @@ properties: A reference to a Cloud KMS CryptoKey that can be used for CMEK in the requested product/project/location, for example `projects/1/locations/us-east1/keyRings/foo/cryptoKeys/bar-ffffff` - min_version: 'beta' output: true - name: 'resourceTypeSelector' type: String description: | Selector of the resource type where we want to protect resources. For example, `storage.googleapis.com/Bucket`. 
- min_version: 'beta' required: true immutable: true diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index 8814ba24db4f..510fea090811 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -11,12 +11,9 @@ import ( "time" "net/http" -{{ if ne $.TargetVersionName `ga` -}} - // For beta tests only resourceManagerV3 "google.golang.org/api/cloudresourcemanager/v3" tpgservicusage "github.com/hashicorp/terraform-provider-google/google/services/serviceusage" "github.com/hashicorp/terraform-provider-google/google/services/kms" -{{- end }} "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-provider-google/google/services/tags" @@ -41,15 +38,10 @@ import ( var SharedKeyRing = "tftest-shared-keyring-1" -{{ if ne $.TargetVersionName `ga` -}} - var DefaultKeyHandleName = "eed58b7b-20ad-4da8-ad85-ba78a0d5ab87" var DefaultKeyHandleResourceType = "compute.googleapis.com/Disk" var CloudKmsSrviceName = "cloudkms.googleapis.com" -{{- end }} - - var SharedCryptoKey = map[string]string{ "ENCRYPT_DECRYPT": "tftest-shared-key-1", "ASYMMETRIC_SIGN": "tftest-shared-sign-key-1", @@ -91,8 +83,6 @@ func BootstrapKMSKeyWithPurposeInLocation(t *testing.T, purpose, locationID stri return BootstrapKMSKeyWithPurposeInLocationAndName(t, purpose, locationID, SharedCryptoKey[purpose]) } -{{ if ne $.TargetVersionName `ga` -}} - type BootstrappedKMSAutokey struct { *cloudkms.AutokeyConfig *cloudkms.KeyHandle @@ -327,8 +317,6 @@ func setPolicy(crmService *resourceManagerV3.Service, resourceType string, resou return nil } -{{- end }} - func BootstrapKMSKeyWithPurposeInLocationAndName(t *testing.T, purpose, locationID, keyShortName string) BootstrappedKMS { config := BootstrapConfig(t) if config == nil { @@ -883,8 +871,6 @@ func BootstrapServicePerimeterProjects(t *testing.T, 
desiredProjects int) []*clo return projects } -{{ if ne $.TargetVersionName `ga` -}} - // BootstrapFolder creates or get a folder having a input folderDisplayName within a TestOrgEnv func BootstrapFolder(t *testing.T, folderDisplayName string) *resourceManagerV3.Folder { config := BootstrapConfig(t) @@ -934,8 +920,6 @@ func BootstrapFolder(t *testing.T, folderDisplayName string) *resourceManagerV3. return folder } -{{- end }} - // BootstrapProject will create or get a project named // "" that will persist across test runs, // where projectIDSuffix is based off of getTestProjectFromEnv(). The reason diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index f4156d1e1017..3e292373bf63 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -154,11 +154,9 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_kms_crypto_key_versions": kms.DataSourceGoogleKmsCryptoKeyVersions(), "google_kms_key_ring": kms.DataSourceGoogleKmsKeyRing(), "google_kms_key_rings": kms.DataSourceGoogleKmsKeyRings(), - {{- if ne $.TargetVersionName "ga" }} "google_kms_key_handle": kms.DataSourceGoogleKmsKeyHandle(), "google_kms_autokey_config": kms.DataSourceGoogleKmsAutokeyConfig(), "google_kms_key_handles": kms.DataSourceGoogleKmsKeyHandles(), - {{- end }} "google_kms_secret": kms.DataSourceGoogleKmsSecret(), "google_kms_secret_ciphertext": kms.DataSourceGoogleKmsSecretCiphertext(), {{- if ne $.TargetVersionName "ga" }} diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go.tmpl b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go similarity index 94% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go.tmpl rename to 
mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go index 1a6a79a6c201..b500d313fd5f 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go.tmpl +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go @@ -1,7 +1,5 @@ package kms -{{ if ne $.TargetVersionName `ga` -}} - import ( "fmt" @@ -36,5 +34,3 @@ func dataSourceGoogleKmsAutokeyConfigRead(d *schema.ResourceData, meta interface } return nil } - -{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go.tmpl b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go similarity index 95% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go.tmpl rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go index ef770fd0a44d..5875c6953613 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go @@ -1,7 +1,5 @@ package kms_test -{{ if ne $.TargetVersionName `ga` -}} - import ( "fmt" "regexp" @@ -36,4 +34,3 @@ data "google_kms_autokey_config" "kms_autokey_config" { } `, folder) } -{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go.tmpl b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go similarity index 88% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go.tmpl rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go index 043d6d5c5039..72b7ff745873 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go.tmpl +++ 
b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go @@ -1,7 +1,5 @@ package kms -{{ if ne $.TargetVersionName `ga` -}} - import ( "fmt" @@ -30,9 +28,9 @@ func dataSourceGoogleKmsKeyHandleRead(d *schema.ResourceData, meta interface{}) return err } keyHandleId := KmsKeyHandleId{ - Name: d.Get("name").(string), - Location: d.Get("location").(string), - Project: project, + Name: d.Get("name").(string), + Location: d.Get("location").(string), + Project: project, } id := keyHandleId.KeyHandleId() d.SetId(id) @@ -46,5 +44,3 @@ func dataSourceGoogleKmsKeyHandleRead(d *schema.ResourceData, meta interface{}) } return nil } - -{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go.tmpl b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go similarity index 96% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go.tmpl rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go index 1fdeaafe082a..0a4fd012c720 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go.tmpl +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go @@ -1,7 +1,5 @@ package kms_test -{{ if ne $.TargetVersionName `ga` -}} - import ( "fmt" "regexp" @@ -41,4 +39,3 @@ data "google_kms_key_handle" "kms_key_handle" { } `, keyHandleName, location, project) } -{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go.tmpl b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go similarity index 97% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go.tmpl rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go index 7112a99194c5..a71e40839d14 100644 --- 
a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go.tmpl +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go @@ -2,8 +2,6 @@ // SPDX-License-Identifier: MPL-2.0 package kms_test -{{ if ne $.TargetVersionName `ga` -}} - import ( "errors" "fmt" @@ -72,5 +70,3 @@ data "google_kms_key_handles" "mykeyhandles" { `, project, location, filter) return str } - -{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown b/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown index 2654ec3016d5..580e27506fbe 100644 --- a/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown @@ -8,10 +8,6 @@ description: |- Provides access to Google Cloud Platform KMS AutokeyConfig. A AutokeyConfig is a Cloud KMS resource that helps you safely span the separation of duties to create new Cloud KMS keys for CMEK using Autokey. -~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. -See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. - - For more information see [the official documentation](https://cloud.google.com/kms/docs/reference/rest/v1/folders) and diff --git a/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown b/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown index 4f65356a27fe..78149971ee2a 100644 --- a/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown @@ -8,9 +8,6 @@ description: |- Provides access to Google Cloud Platform KMS KeyHandle. 
A key handle is a Cloud KMS resource that helps you safely span the separation of duties to create new Cloud KMS keys for CMEK using Autokey. -~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. -See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. - For more information see [the official documentation](https://cloud.google.com/kms/docs/resource-hierarchy#key_handles) and diff --git a/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown b/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown index 7cc9ac8ba4f5..93da252f63bd 100644 --- a/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown @@ -8,9 +8,6 @@ description: |- Provides access to Google Cloud Platform KMS KeyHandle. A key handle is a Cloud KMS resource that helps you safely span the separation of duties to create new Cloud KMS keys for CMEK using Autokey. -~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. -See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. 
- For more information see [the official documentation](https://cloud.google.com/kms/docs/resource-hierarchy#key_handles) and From e91684b90d0699e17b5928c271e951f8199e9a79 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Wed, 2 Jul 2025 09:27:23 -0700 Subject: [PATCH 476/884] TGC - Add new resource ExternalVpnGateway (#14437) --- mmv1/products/compute/ExternalVpnGateway.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/compute/ExternalVpnGateway.yaml b/mmv1/products/compute/ExternalVpnGateway.yaml index 373bc1d5e912..37b2a89a4210 100644 --- a/mmv1/products/compute/ExternalVpnGateway.yaml +++ b/mmv1/products/compute/ExternalVpnGateway.yaml @@ -35,6 +35,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'external_vpn_gateway' From fec17bc61ffc05a533ba9571fed6e6f023300c1c Mon Sep 17 00:00:00 2001 From: Iris Chen <10179943+iyabchen@users.noreply.github.com> Date: Wed, 2 Jul 2025 09:48:05 -0700 Subject: [PATCH 477/884] Refactor versionguard to wrap it with cobra library (#14297) --- .github/workflows/unit-test-mmv1.yml | 7 ++- tools/template-check/cmd/root.go | 43 ++++++++++++++++ tools/template-check/cmd/versionguard.go | 61 +++++++++++++++++++++++ tools/template-check/go.mod | 15 +++++- tools/template-check/go.sum | 15 ++++++ tools/template-check/main.go | 63 +----------------------- 6 files changed, 141 insertions(+), 63 deletions(-) create mode 100644 tools/template-check/cmd/root.go create mode 100644 tools/template-check/cmd/versionguard.go create mode 100644 tools/template-check/go.sum diff --git a/.github/workflows/unit-test-mmv1.yml b/.github/workflows/unit-test-mmv1.yml index 4fa97fcccb3f..5b26e7397436 100644 --- a/.github/workflows/unit-test-mmv1.yml +++ b/.github/workflows/unit-test-mmv1.yml @@ -36,7 +36,12 @@ jobs: - name: Check for invalid version guards run: | cd repo/tools/template-check - git 
diff --name-only --diff-filter=d origin/${{ github.base_ref }} ../../*.tmpl | sed 's=^=../../=g' | go run main.go + tmpls=$(git diff --name-only --diff-filter=d origin/${{ github.base_ref }} ../../*.tmpl | sed 's=^=../../=g') + tmpls=${tmpls//$'\n'/,} + echo $tmpls + if [[ -n "$tmpls" ]]; then + go run main.go version-guard --file-list $tmpls + fi lint-yaml: runs-on: ubuntu-22.04 steps: diff --git a/tools/template-check/cmd/root.go b/tools/template-check/cmd/root.go new file mode 100644 index 000000000000..90b71bdc71c5 --- /dev/null +++ b/tools/template-check/cmd/root.go @@ -0,0 +1,43 @@ +package cmd + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" +) + +const rootCmdDesc = "Utilities for template checks." + +type rootOptions struct { +} + +func newRootCmd() (*cobra.Command, *rootOptions, error) { + o := &rootOptions{} + cmd := &cobra.Command{ + Use: "template-check", + Short: rootCmdDesc, + Long: rootCmdDesc, + SilenceUsage: true, + SilenceErrors: true, + } + cmd.AddCommand(newversionGuardCmd(o)) + return cmd, o, nil +} + +// Execute is the entry-point for all commands. +// This lets us keep all new command functions private. 
+func Execute() { + rootCmd, _, err := newRootCmd() + if err != nil { + fmt.Printf("Error creating root logger: %s", err) + os.Exit(1) + } + err = rootCmd.Execute() + if err == nil { + os.Exit(0) + } else { + fmt.Println(err.Error()) + os.Exit(1) + } +} diff --git a/tools/template-check/cmd/versionguard.go b/tools/template-check/cmd/versionguard.go new file mode 100644 index 000000000000..f6885e3beefc --- /dev/null +++ b/tools/template-check/cmd/versionguard.go @@ -0,0 +1,61 @@ +package cmd + +import ( + "fmt" + + "io" + "os" + + "github.com/GoogleCloudPlatform/magic-modules/tools/template-check/gotemplate" + "github.com/spf13/cobra" +) + +const versionGuardDesc = `Check the files for version guards` + +type versionGuardOptions struct { + rootOptions *rootOptions + stdout io.Writer + fileList []string +} + +func newversionGuardCmd(rootOptions *rootOptions) *cobra.Command { + o := &versionGuardOptions{ + rootOptions: rootOptions, + stdout: os.Stdout, + } + command := &cobra.Command{ + Use: "version-guard", + Short: versionGuardDesc, + Long: versionGuardDesc, + RunE: func(c *cobra.Command, args []string) error { + return o.run() + }, + } + + command.Flags().StringSliceVar(&o.fileList, "file-list", []string{}, "file list to check") + return command + +} +func (o *versionGuardOptions) run() error { + if len(o.fileList) == 0 { + return nil + } + foundInvalidGuards := false + for _, fileName := range o.fileList { + results, err := gotemplate.CheckVersionGuardsForFile(fileName) + if err != nil { + return err + } + if len(results) > 0 { + fmt.Fprintf(os.Stderr, "%s:\n", fileName) + foundInvalidGuards = true + for _, result := range results { + fmt.Fprintf(os.Stderr, " %s\n", result) + } + } + } + if foundInvalidGuards { + return fmt.Errorf("found invalid version guards") + } + return nil +} diff --git a/tools/template-check/go.mod b/tools/template-check/go.mod index 9aac9498761d..a52e908e7994 100644 --- a/tools/template-check/go.mod +++ b/tools/template-check/go.mod @@ 
-1,3 +1,16 @@ module github.com/GoogleCloudPlatform/magic-modules/tools/template-check -go 1.23 +go 1.23.0 + +toolchain go1.23.1 + +require ( + github.com/google/go-cmp v0.7.0 + github.com/spf13/cobra v1.9.1 + gopkg.in/yaml.v2 v2.4.0 +) + +require ( + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/spf13/pflag v1.0.6 // indirect +) diff --git a/tools/template-check/go.sum b/tools/template-check/go.sum new file mode 100644 index 000000000000..69e02c70c16e --- /dev/null +++ b/tools/template-check/go.sum @@ -0,0 +1,15 @@ +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= +github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/tools/template-check/main.go b/tools/template-check/main.go index ac44d3b3a30a..f785150aed3d 100644 --- 
a/tools/template-check/main.go +++ b/tools/template-check/main.go @@ -1,66 +1,7 @@ package main -import ( - "bufio" - "flag" - "fmt" - "os" - - "github.com/GoogleCloudPlatform/magic-modules/tools/template-check/gotemplate" -) - -func isValidTemplate(filename string) (bool, error) { - results, err := gotemplate.CheckVersionGuardsForFile(filename) - if err != nil { - return false, err - } - - if len(results) > 0 { - fmt.Fprintf(os.Stderr, "error: invalid version checks found in %s:\n", filename) - for _, result := range results { - fmt.Fprintf(os.Stderr, " %s\n", result) - } - return false, nil - } - - return true, nil -} - -func checkTemplate(filename string) bool { - valid, err := isValidTemplate(filename) - if err != nil { - fmt.Fprintln(os.Stderr, err.Error()) - return false - } - return valid -} +import "github.com/GoogleCloudPlatform/magic-modules/tools/template-check/cmd" func main() { - flag.Usage = func() { - fmt.Fprintf(flag.CommandLine.Output(), "template-check - check that a template file is valid\n template-check [file]\n") - } - - flag.Parse() - - // Handle file as a positional argument - if flag.Arg(0) != "" { - if !checkTemplate(flag.Arg(0)) { - os.Exit(1) - } - os.Exit(0) - } - - // Handle files coming from a linux pipe - fileInfo, _ := os.Stdin.Stat() - if fileInfo.Mode()&os.ModeCharDevice == 0 { - exitStatus := 0 - scanner := bufio.NewScanner(bufio.NewReader(os.Stdin)) - for scanner.Scan() { - if !checkTemplate(scanner.Text()) { - exitStatus = 1 - } - } - - os.Exit(exitStatus) - } + cmd.Execute() } From b25580c8d072f13dadc9f0ce24994daecf2d19e5 Mon Sep 17 00:00:00 2001 From: dave-garred <117380993+dave-garred@users.noreply.github.com> Date: Wed, 2 Jul 2025 09:57:57 -0700 Subject: [PATCH 478/884] Add support for GKE anonymous authentication config (#14388) --- .../resource_container_cluster.go.tmpl | 69 +++++++++++++++++++ .../resource_container_cluster_test.go.tmpl | 56 +++++++++++++++ .../docs/r/container_cluster.html.markdown | 7 ++ 3 files 
changed, 132 insertions(+) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 8855d23a5d92..97550b1d5f17 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2507,6 +2507,26 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, + "anonymous_authentication_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Computed: true, + Description: `AnonymousAuthenticationConfig allows users to restrict or enable anonymous access to the cluster.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "mode": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"ENABLED", "LIMITED"}, false), + Description: `Setting this to LIMITED will restrict authentication of anonymous users to health check endpoints only. + Accepted values are: +* ENABLED: Authentication of anonymous users is enabled for all endpoints. +* LIMITED: Anonymous access is only allowed for health check endpoints.`, + }, + }, + }, + }, }, } } @@ -2836,6 +2856,10 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er cluster.EnterpriseConfig = expandEnterpriseConfig(v) } + if v, ok := d.GetOk("anonymous_authentication_config"); ok { + cluster.AnonymousAuthenticationConfig = expandAnonymousAuthenticationConfig(v) + } + needUpdateAfterCreate := false // For now PSC based cluster don't support `enable_private_endpoint` on `create`, but only on `update` API call. 
@@ -3416,6 +3440,10 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro return err } + if err := d.Set("anonymous_authentication_config", flattenAnonymousAuthenticationConfig(cluster.AnonymousAuthenticationConfig)); err != nil { + return err + } + return nil } @@ -4936,6 +4964,21 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s Enterprise Config has been updated to %#v", d.Id(), req.Update.DesiredSecurityPostureConfig) } + if d.HasChange("anonymous_authentication_config") { + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredAnonymousAuthenticationConfig: expandAnonymousAuthenticationConfig( + d.Get("anonymous_authentication_config"), + ), + }, + } + updateF := updateFunc(req, "updating anonymous authentication config") + // Call update serially. + if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + } + d.Partial(false) {{ if ne $.TargetVersionName `ga` -}} @@ -5701,6 +5744,15 @@ func flattenEnterpriseConfig(ec *container.EnterpriseConfig) []map[string]interf return []map[string]interface{}{result} } +func flattenAnonymousAuthenticationConfig(aac *container.AnonymousAuthenticationConfig) []map[string]interface{} { + if aac == nil { + return nil + } + result := make(map[string]interface{}) + result["mode"] = aac.Mode + return []map[string]interface{}{result} +} + func flattenAdditionalPodRangesConfig(ipAllocationPolicy *container.IPAllocationPolicy) []map[string]interface{} { if ipAllocationPolicy == nil { return nil @@ -5828,6 +5880,23 @@ func expandMasterAuthorizedNetworksConfig(d *schema.ResourceData) *container.Mas return result } +func expandAnonymousAuthenticationConfig(configured interface{}) *container.AnonymousAuthenticationConfig { + l, ok := configured.([]interface{}) + if len(l) == 0 || l[0] == nil || !ok { + return nil + } + + anonAuthConfig := l[0].(map[string]interface{}) + result := 
container.AnonymousAuthenticationConfig{} + + if v, ok := anonAuthConfig["mode"]; ok { + if mode, ok := v.(string); ok && mode != "" { + result.Mode = mode + } + } + return &result +} + func expandManCidrBlocks(configured interface{}) []*container.CidrBlock { config, ok := configured.(*schema.Set) if !ok { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index bc032e00b2cc..994dee9c2f2a 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -14100,3 +14100,59 @@ resource "google_container_cluster" "primary" { } `, name, networkName, subnetworkName, config) } + +func TestAccContainerCluster_withAnonymousAuthenticationConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withAnonymousAuthenticationConfig(clusterName, networkName, subnetworkName, "LIMITED"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "anonymous_authentication_config.0.mode", "LIMITED"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_withAnonymousAuthenticationConfig(clusterName, networkName, 
subnetworkName, "ENABLED"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "anonymous_authentication_config.0.mode", "ENABLED"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerCluster_withAnonymousAuthenticationConfig(name, networkName, subnetworkName string, mode string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + network = "%s" + subnetwork = "%s" + initial_node_count = 1 + deletion_protection = false + + anonymous_authentication_config { + mode = "%s" + } +} + `, name, networkName, subnetworkName, mode) +} diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index bbfaeb989788..ae6f9fedaf35 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -423,6 +423,9 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle * `enterprise_config` - (Optional) Configuration for [Enterprise edition].(https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config). +* `anonymous_authentication_config` - (Optional) + Configuration for [anonymous authentication restrictions](https://cloud.google.com/kubernetes-engine/docs/how-to/hardening-your-cluster#restrict-anon-access). Structure is [documented below](#anonymous_authentication_config). + The `default_snat_status` block supports @@ -1544,6 +1547,10 @@ linux_node_config { * `desired_tier` - (Optional) Sets the tier of the cluster. Available options include `STANDARD` and `ENTERPRISE`. 
+The `anonymous_authentication_config` block supports: + +* `mode` - (Optional) Sets or removes authentication restrictions. Available options include `LIMITED` and `ENABLED`. + ## Attributes Reference From 5139401a0b62239ed6e796f2d29cb4fd4ff6e17f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Taneli=20Lepp=C3=A4?= Date: Wed, 2 Jul 2025 19:16:59 +0200 Subject: [PATCH 479/884] Async PD: change no disk status to a retryable error (#14361) --- .../compute/resource_compute_disk_async_replication.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl index 1daf655331c6..930038af39ef 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl @@ -155,7 +155,7 @@ func resourceDiskAsyncReplicationCreate(d *schema.ResourceData, meta interface{} return retry.NonRetryableError(err) } if diskStatus.ResourceStatus == nil { - return retry.NonRetryableError(fmt.Errorf("no resource status for disk: %s", resourceId)) + return retry.RetryableError(fmt.Errorf("no resource status for disk: %s", resourceId)) } if secondaryState, ok := diskStatus.ResourceStatus.AsyncSecondaryDisks[secondaryDisk]; ok { if secondaryState.State != "ACTIVE" { From 703f47391611573b5d78b2b1fa97f5bf9b30e540 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 2 Jul 2025 10:26:31 -0700 Subject: [PATCH 480/884] tgc-revival: support resource `google_bigquery_dataset` (#14438) --- mmv1/products/bigquery/Dataset.yaml | 2 + .../cai2hcl/resource_converter.go.tmpl | 118 ++++++++++-------- .../tgc_next/test/assert_test_files.go | 32 ++--- mmv1/third_party/tgc_next/test/setup.go | 4 +- 4 files changed, 91 insertions(+), 65 deletions(-) diff --git 
a/mmv1/products/bigquery/Dataset.yaml b/mmv1/products/bigquery/Dataset.yaml index 55904db87680..0c8279302e6b 100644 --- a/mmv1/products/bigquery/Dataset.yaml +++ b/mmv1/products/bigquery/Dataset.yaml @@ -37,6 +37,7 @@ timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 +include_in_tgc_next_DO_NOT_USE: true custom_code: constants: 'templates/terraform/constants/bigquery_dataset.go.tmpl' pre_read: 'templates/terraform/pre_read/bigquery_dataset.go.tmpl' @@ -127,6 +128,7 @@ properties: 'An array of objects that define dataset access for one or more entities.' is_set: true default_from_api: true + is_missing_in_cai: true item_type: type: NestedObject properties: diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 0ace34134629..507334fa2e9d 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -19,26 +19,27 @@ package {{ lower $.ProductMetadata.Name }} import ( {{/* We list all the v2 imports here and unstable imports, because we run 'goimports' to guess the correct set of imports, which will never guess the major version correctly. 
*/ -}} - "github.com/apparentlymart/go-cidr/cidr" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" - "google.golang.org/api/bigtableadmin/v2" - "google.golang.org/api/googleapi" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" + "github.com/apparentlymart/go-cidr/cidr" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "google.golang.org/api/bigtableadmin/v2" + 
"google.golang.org/api/googleapi" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" ) {{- $caiProductBaseUrl := $.CaiProductBaseUrl }} @@ -90,33 +91,50 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse config := utils.NewConfig() d := &schema.ResourceData{} + assetNameParts := strings.Split(asset.Name, "/") + hclBlockName := assetNameParts[len(assetNameParts)-1] + hclData := make(map[string]interface{}) {{ if $.CustomCode.TgcDecoder -}} - res, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res) - if err != nil { - return nil, err - } + res, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res) + if err != nil { + return nil, err + } {{ end}} {{ if $.CustomCode.Decoder -}} - res, err = resource{{ $.ResourceName -}}Decoder(d, config, res) - if err != nil { - return nil, err - } - - if res == nil { - // Decoding the object has resulted in it being gone. It may be marked deleted. - return nil, nil - } + res, err = resource{{ $.ResourceName -}}Decoder(d, config, res) + if err != nil { + return nil, err + } + + if res == nil { + // Decoding the object has resulted in it being gone. It may be marked deleted. 
+ return nil, nil + } {{ end}} {{ range $prop := $.ReadPropertiesForTgc }} -{{ if $prop.FlattenObject -}} -{{/* TODO */}} -{{- else -}} - hclData["{{ underscore $prop.Name -}}"] = flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config) -{{- end}} + {{ if $prop.FlattenObject -}} + // Terraform must set the top level schema field, but since this object contains collapsed properties + // it's difficult to know what the top level should be. Instead we just loop over the map returned from flatten. + if flattenedProp := flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config); flattenedProp != nil { + flattenedPropSlice, ok := flattenedProp.([]interface{}) + if !ok || len(flattenedPropSlice) == 0 { + return nil, fmt.Errorf("unexpected type returned from flattener: %T", flattenedProp) + } + flattedPropMap, ok := flattenedPropSlice[0].(map[string]interface{}) + if !ok || len(flattedPropMap) == 0 { + return nil, fmt.Errorf("unexpected type returned from flattener: %T", flattenedPropSlice) + } + for k, v := range flattedPropMap { + hclData[k] = v + } + } + {{- else -}} + hclData["{{ underscore $prop.Name -}}"] = flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config) + {{- end}} {{- end}} ctyVal, err := utils.MapToCtyValWithSchema(hclData, c.schema) @@ -124,38 +142,38 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse return nil, err } return &models.TerraformResourceBlock{ - Labels: []string{c.name, res["name"].(string)}, + Labels: []string{c.name, hclBlockName}, Value: ctyVal, }, nil } -{{- range $prop := $.ReadPropertiesForTgc }} - {{ if $prop.CustomTgcFlatten }} - {{- $prop.CustomTemplate $prop.CustomTgcFlatten false -}} - {{ else if $prop.IsA "KeyValueLabels" }} +{{ range $prop := 
$.ReadPropertiesForTgc }} + {{- if $prop.CustomTgcFlatten }} +{{ $prop.CustomTemplate $prop.CustomTgcFlatten true -}} + {{- else if $prop.IsA "KeyValueLabels" }} func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return utils.RemoveTerraformAttributionLabel(v) + return utils.RemoveTerraformAttributionLabel(v) } - {{ else if or (and (eq $prop.Name "zone") $.HasZone) (and (eq $prop.Name "region") $.HasRegion) -}} + {{- else if or (and (eq $prop.Name "zone") $.HasZone) (and (eq $prop.Name "region") $.HasRegion) -}} func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { if v == nil { return v } return tpgresource.GetResourceNameFromSelfLink(v.(string)) } - {{ else }} - {{ template "flattenPropertyMethod" $prop -}} - {{- end }} + {{- else }} +{{ template "flattenPropertyMethod" $prop -}} + {{- end }} {{- end }} {{- if $.CustomCode.TgcDecoder }} func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { - {{ $.CustomTemplate $.CustomCode.TgcDecoder false -}} + {{ $.CustomTemplate $.CustomCode.TgcDecoder false -}} } {{- end }} {{- if $.CustomCode.Decoder }} func resource{{ $.ResourceName -}}Decoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { - {{ $.CustomTemplate $.CustomCode.Decoder false -}} + {{ $.CustomTemplate $.CustomCode.Decoder false -}} } {{- end }} diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 3b7e12a95201..2510f3e56897 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -99,9 +99,10 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData return 
fmt.Errorf("resource %s is supported in either tfplan2cai or cai2hcl within tgc, but not in both", resourceType) } - // Uncomment these lines when debugging issues locally - // assetFile := fmt.Sprintf("%s.json", t.Name()) - // writeJSONFile(assetFile, assets) + if os.Getenv("WRITE_FILES") != "" { + assetFile := fmt.Sprintf("%s.json", t.Name()) + writeJSONFile(assetFile, assets) + } // Step 1: Use cai2hcl to convert export assets into a Terraform configuration (export config). // Compare all of the fields in raw config are in export config. @@ -113,13 +114,13 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData return fmt.Errorf("error when converting the export assets into export config: %#v", err) } - // Uncomment these lines when debugging issues locally - // exportTfFile := fmt.Sprintf("%s_export.tf", t.Name()) - // err = os.WriteFile(exportTfFile, exportConfigData, 0644) - // if err != nil { - // return fmt.Errorf("error writing file %s", exportTfFile) - // } - // defer os.Remove(exportTfFile) + if os.Getenv("WRITE_FILES") != "" { + exportTfFile := fmt.Sprintf("%s_export.tf", t.Name()) + err = os.WriteFile(exportTfFile, exportConfigData, 0644) + if err != nil { + return fmt.Errorf("error writing file %s", exportTfFile) + } + } exportTfFilePath := fmt.Sprintf("%s/%s_export.tf", tfDir, t.Name()) err = os.WriteFile(exportTfFilePath, exportConfigData, 0644) @@ -166,7 +167,9 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if err != nil { return fmt.Errorf("error when writing the file %s", roundtripTfFilePath) } - defer os.Remove(roundtripTfFilePath) + if os.Getenv("WRITE_FILES") == "" { + defer os.Remove(roundtripTfFilePath) + } if diff := cmp.Diff(string(roundtripConfigData), string(exportConfigData)); diff != "" { log.Printf("Roundtrip config is different from the export config.\nroundtrip config:\n%s\nexport config:\n%s", string(roundtripConfigData), string(exportConfigData)) @@ -316,9 +319,10 @@ 
func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac deleteFieldsFromAssets(roundtripAssets, ignoredAssetFields) - // Uncomment these lines when debugging issues locally - // roundtripAssetFile := fmt.Sprintf("%s_roundtrip.json", t.Name()) - // writeJSONFile(roundtripAssetFile, roundtripAssets) + if os.Getenv("WRITE_FILES") != "" { + roundtripAssetFile := fmt.Sprintf("%s_roundtrip.json", t.Name()) + writeJSONFile(roundtripAssetFile, roundtripAssets) + } roundtripConfig, err := cai2hcl.Convert(roundtripAssets, &cai2hcl.Options{ ErrorLogger: logger, diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index 24b0daccef7a..e6050de5ef3c 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -114,7 +114,9 @@ func prepareTestData(testName string) (map[string]ResourceTestData, string, erro if err != nil { return nil, "", fmt.Errorf("error writing to file %s: %#v", rawTfFile, err) } - defer os.Remove(rawTfFile) + if os.Getenv("WRITE_FILES") == "" { + defer os.Remove(rawTfFile) + } rawResourceConfigs, err := parseResourceConfigs(rawTfFile) if err != nil { From 96efd2dfc5272974d6096436c753127e09fc8a82 Mon Sep 17 00:00:00 2001 From: harshithpatte-g Date: Thu, 3 Jul 2025 00:09:44 +0530 Subject: [PATCH 481/884] Fix: Make "match" field in Route Policy's terms as required (#14402) --- mmv1/products/compute/RouterRoutePolicy.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mmv1/products/compute/RouterRoutePolicy.yaml b/mmv1/products/compute/RouterRoutePolicy.yaml index 33cd16b941e9..44e9f8b8c266 100644 --- a/mmv1/products/compute/RouterRoutePolicy.yaml +++ b/mmv1/products/compute/RouterRoutePolicy.yaml @@ -110,7 +110,8 @@ properties: - name: 'match' type: NestedObject description: | - CEL expression evaluated against a route to determine if this term applies (see Policy Language). When not set, the term applies to all routes. 
+ CEL expression evaluated against a route to determine if this term applies (see Policy Language). + required: true properties: - name: 'expression' type: String From 880e9c240fa3939273b4e1587926b9f03a989b39 Mon Sep 17 00:00:00 2001 From: sachin purohit Date: Wed, 2 Jul 2025 11:49:09 -0700 Subject: [PATCH 482/884] =?UTF-8?q?impl(bigquery=5Ftable):=20implementatio?= =?UTF-8?q?n=20of=20ignore=5Fschema=5Fchanges=20virtual=E2=80=A6=20(#13576?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../bigquery/resource_bigquery_table.go.tmpl | 34 +++-- .../bigquery/resource_bigquery_table_test.go | 141 ++++++++++++++++++ .../docs/r/bigquery_table.html.markdown | 3 + 3 files changed, 169 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index ae61e7ea8032..150ab98fa966 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -11,6 +11,8 @@ import ( "sort" "strconv" "strings" + + "golang.org/x/exp/slices" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -50,7 +52,7 @@ func bigQueryTablecheckNameExists(jsonList []interface{}) error { // Compares two json's while optionally taking in a compareMapKeyVal function. 
// This function will override any comparison of a given map[string]interface{} // on a specific key value allowing for a separate equality in specific scenarios -func jsonCompareWithMapKeyOverride(key string, a, b interface{}, compareMapKeyVal func(key string, val1, val2 map[string]interface{}) bool) (bool, error) { +func jsonCompareWithMapKeyOverride(key string, a, b interface{}, compareMapKeyVal func(key string, val1, val2 map[string]interface{}, d *schema.ResourceData) bool, d *schema.ResourceData) (bool, error) { switch a.(type) { case []interface{}: arrayA := a.([]interface{}) @@ -73,7 +75,7 @@ func jsonCompareWithMapKeyOverride(key string, a, b interface{}, compareMapKeyVa bigQueryTableSortArrayByName(arrayB) } for i := range arrayA { - eq, err := jsonCompareWithMapKeyOverride(strconv.Itoa(i), arrayA[i], arrayB[i], compareMapKeyVal) + eq, err := jsonCompareWithMapKeyOverride(strconv.Itoa(i), arrayA[i], arrayB[i], compareMapKeyVal, d) if err != nil { return false, err } else if !eq { @@ -103,14 +105,14 @@ func jsonCompareWithMapKeyOverride(key string, a, b interface{}, compareMapKeyVa } for subKey := range unionOfKeys { - eq := compareMapKeyVal(subKey, objectA, objectB) + eq := compareMapKeyVal(subKey, objectA, objectB, d) if !eq { valA, ok1 := objectA[subKey] valB, ok2 := objectB[subKey] if !ok1 || !ok2 { return false, nil } - eq, err := jsonCompareWithMapKeyOverride(subKey, valA, valB, compareMapKeyVal) + eq, err := jsonCompareWithMapKeyOverride(subKey, valA, valB, compareMapKeyVal, d) if err != nil || !eq { return false, err } @@ -136,7 +138,7 @@ func valueIsInArray(value interface{}, array []interface{}) bool { return false } -func bigQueryTableMapKeyOverride(key string, objectA, objectB map[string]interface{}) bool { +func bigQueryTableMapKeyOverride(key string, objectA, objectB map[string]interface{}, d *schema.ResourceData) bool { // we rely on the fallback to nil if the object does not have the key valA := objectA[key] valB := objectB[key] @@ -156,6 
+158,14 @@ func bigQueryTableMapKeyOverride(key string, objectA, objectB map[string]interfa case "policyTags": eq := bigQueryTableNormalizePolicyTags(valA) == nil && bigQueryTableNormalizePolicyTags(valB) == nil return eq + case "dataPolicies": + if d == nil { + return false + } + // Access the ignore_schema_changes list from the Terraform configuration + ignoreSchemaChanges := d.Get("ignore_schema_changes").([]interface{}) + // Suppress diffs for the "dataPolicies" field if it was present in "ignore_schema_changes" + return slices.Contains(ignoreSchemaChanges, "dataPolicies") } // otherwise rely on default behavior @@ -163,7 +173,7 @@ func bigQueryTableMapKeyOverride(key string, objectA, objectB map[string]interfa } // Compare the JSON strings are equal -func bigQueryTableSchemaDiffSuppress(name, old, new string, _ *schema.ResourceData) bool { +func bigQueryTableSchemaDiffSuppress(name, old, new string, d *schema.ResourceData) bool { // The API can return an empty schema which gets encoded to "null" during read. if old == "null" { old = "[]" @@ -176,7 +186,7 @@ func bigQueryTableSchemaDiffSuppress(name, old, new string, _ *schema.ResourceDa log.Printf("[DEBUG] unable to unmarshal new json - %v", err) } - eq, err := jsonCompareWithMapKeyOverride(name, a, b, bigQueryTableMapKeyOverride) + eq, err := jsonCompareWithMapKeyOverride(name, a, b, bigQueryTableMapKeyOverride, d) if err != nil { log.Printf("[DEBUG] %v", err) log.Printf("[DEBUG] Error comparing JSON: %v, %v", old, new) @@ -1262,7 +1272,13 @@ func ResourceBigQueryTable() *schema.Resource { Computed: true, Description: `A hash of the resource.`, }, - + "ignore_schema_changes": { + Type: schema.TypeList, + Optional: true, + MaxItems: 10, + Description: `Mention which fields in schema are to be ignored`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, // LastModifiedTime: [Output-only] The time when this table was last // modified, in milliseconds since the epoch. 
"last_modified_time": { @@ -2054,7 +2070,7 @@ type TableReference struct { func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error { // If only client-side fields were modified, short-circuit the Update function to avoid sending an update API request. - clientSideFields := map[string]bool{"deletion_protection": true, "table_metadata_view": true} + clientSideFields := map[string]bool{"deletion_protection": true, "ignore_schema_changes": true, "table_metadata_view": true} clientSideOnly := true for field := range ResourceBigQueryTable().Schema { if d.HasChange(field) && !clientSideFields[field] { diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index 2160f6b3d066..12928ead405e 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -45,6 +45,51 @@ func TestAccBigQueryTable_Basic(t *testing.T) { }) } +func TestAccBigQueryTable_IgnoreSchemaDataPoliciesChanges(t *testing.T) { + t.Parallel() + + projectID := envvar.GetTestProjectFromEnv() + random_suffix := acctest.RandString(t, 10) + datasetID := fmt.Sprintf("tf_test_dataset_%s", random_suffix) + tableID := fmt.Sprintf("tf_test_table_%s", random_suffix) + dataPolicyID1 := fmt.Sprintf("tf_test_data_policy_%s", random_suffix) + dataPolicyName1 := fmt.Sprintf("projects/%s/locations/us-central1/dataPolicies/%s", projectID, dataPolicyID1) + dataPolicyID2 := fmt.Sprintf("tf_test_data_policy_%s", acctest.RandString(t, 10)) + dataPolicyName2 := fmt.Sprintf("projects/%s/locations/us-central1/dataPolicies/%s", projectID, dataPolicyID2) + dataCatTaxonomy := fmt.Sprintf("tf_test_taxonomy_%s", random_suffix) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + 
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName1), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_schema_changes"}, + }, + { + Config: testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName2), + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + { + Config: testAccBigQueryTableUpdated(datasetID, tableID), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_schema_changes"}, + }, + }, + }) +} + func TestAccBigQueryTable_TableMetadataView(t *testing.T) { t.Parallel() @@ -1981,6 +2026,102 @@ EOH `, datasetID, tableID) } +func testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "test" { + location = "us-central1" + dataset_id = "%s" +} + +resource "google_bigquery_datapolicy_data_policy" "data_policy1" { + location = "us-central1" + data_policy_id = "%s" + policy_tag = google_data_catalog_policy_tag.policy_tag.name + data_policy_type = "DATA_MASKING_POLICY" + data_masking_policy { + predefined_expression = "SHA256" + } +} + +resource "google_bigquery_datapolicy_data_policy" "data_policy2" { + location = "us-central1" + data_policy_id = "%s" + policy_tag = google_data_catalog_policy_tag.policy_tag.name + data_policy_type = "DATA_MASKING_POLICY" + data_masking_policy { + predefined_expression = "FIRST_FOUR_CHARACTERS" + } +} + +resource "google_data_catalog_policy_tag" "policy_tag" { + taxonomy = google_data_catalog_taxonomy.taxonomy.id + display_name = "Low security" + 
description = "A policy tag normally associated with low security items" +} + +resource "google_data_catalog_taxonomy" "taxonomy" { + region = "us-central1" + display_name = "%s" + description = "A collection of policy tags" + activated_policy_types = ["FINE_GRAINED_ACCESS_CONTROL"] +} + +resource "google_bigquery_table" "test" { + depends_on = [google_bigquery_datapolicy_data_policy.data_policy1, google_bigquery_datapolicy_data_policy.data_policy2] + deletion_protection = false + table_id = "%s" + dataset_id = google_bigquery_dataset.test.dataset_id + + ignore_schema_changes = [ + "dataPolicies" + ] + + schema = < Date: Wed, 2 Jul 2025 23:17:30 +0200 Subject: [PATCH 483/884] Add google_network_management_connectivity_test_run data source (#14364) Co-authored-by: Riley Karson --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...etwork_management_connectivity_test_run.go | 469 ++++++++++++++++++ ...k_management_connectivity_test_run_test.go | 113 +++++ ...gement_connectivity_test_run.html.markdown | 205 ++++++++ 4 files changed, 788 insertions(+) create mode 100644 mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go create mode 100644 mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 3e292373bf63..859406a75bdf 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -281,6 +281,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_vmwareengine_subnet": vmwareengine.DataSourceVmwareengineSubnet(), "google_vmwareengine_vcenter_credentials": 
vmwareengine.DataSourceVmwareengineVcenterCredentials(), "google_compute_region_backend_service": compute.DataSourceGoogleComputeRegionBackendService(), + "google_network_management_connectivity_test_run": networkmanagement.DataSourceGoogleNetworkManagementTestRun(), // ####### END handwritten datasources ########### } diff --git a/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go b/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go new file mode 100644 index 000000000000..26cca8b3a065 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go @@ -0,0 +1,469 @@ +package networkmanagement + +import ( + "fmt" + "log" + "net/http" + "reflect" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleNetworkManagementTestRun() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleNetworkManagementTestRun, + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Unique name for the connectivity test.`, + }, + "reachability_details": { + Type: schema.TypeList, + Computed: true, + Description: `Connectivity test reachability details.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "result": { + Type: schema.TypeString, + Computed: true, + Description: `Status of the connectivity test: RESULT_UNSPECIFIED, REACHABLE, UNREACHABLE, AMBIGUOUS or UNDETERMINED.`, + }, + "traces": { + Type: schema.TypeList, + Computed: true, + Description: `List of connectivity test traces.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "endpoint_info": { + Type: schema.TypeList, + Computed: true, + 
Description: `Derived from the source and destination endpoints definition specified by user request, and validated by the data plane model.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "destination_ip": { + Type: schema.TypeString, + Computed: true, + Description: `Destination IP address.`, + }, + "destination_network_uri": { + Type: schema.TypeString, + Computed: true, + Description: `URI of the network where this packet is sent to.`, + }, + "destination_port": { + Type: schema.TypeInt, + Computed: true, + Description: `Destination port. Only valid when protocol is TCP or UDP.`, + }, + "protocol": { + Type: schema.TypeString, + Computed: true, + Description: `IP protocol in string format, for example: "TCP", "UDP", "ICMP".`, + }, + "source_agent_uri": { + Type: schema.TypeString, + Computed: true, + Description: `URI of the source telemetry agent this packet originates from.`, + }, + "source_ip": { + Type: schema.TypeString, + Computed: true, + Description: `Source IP address.`, + }, + "source_network_uri": { + Type: schema.TypeString, + Computed: true, + Description: `URI of the network where this packet originates from.`, + }, + "source_port": { + Type: schema.TypeInt, + Computed: true, + Description: `Source port. 
Only valid when protocol is TCP or UDP.`, + }, + }, + }, + }, + "forward_trace_id": { + Type: schema.TypeInt, + Computed: true, + Description: `ID of the trace.`, + }, + "steps": { + Type: schema.TypeList, + Computed: true, + Description: `A trace of a test contains multiple steps from the initial state to the final state (delivered, dropped, forwarded, or aborted).`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "causes_drop": { + Type: schema.TypeBool, + Computed: true, + Description: `If this step leads to the final state Drop.`, + }, + "description": { + Type: schema.TypeString, + Computed: true, + Description: `Description of the connectivity test step.`, + }, + "project_id": { + Type: schema.TypeString, + Computed: true, + Description: `Project ID of the connectivity test step.`, + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: `State of the connectivity test step.`, + }, + }, + }, + }, + }, + }, + }, + "verify_time": { + Type: schema.TypeString, + Computed: true, + Description: `Time when reachability details were determined. An RFC3339 timestamp in UTC time. 
+This in the format of yyyy-MM-ddTHH:mm:ss.SSSZ.`, + }, + }, + }, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func dataSourceGoogleNetworkManagementTestRun(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + nameProp, err := expandNetworkManagementConnectivityTestRunName(d.Get("name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp + } + + url, err := tpgresource.ReplaceVars(d, config, "{{NetworkManagementBasePath}}projects/{{project}}/locations/global/connectivityTests/{{name}}:rerun") + if err != nil { + return err + } + + log.Printf("[DEBUG] Rerunning ConnectivityTestRun: %#v", obj) + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for ConnectivityTestRun: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutCreate), + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error rerunning ConnectivityTestRun: %s", err) + } + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/global/connectivityTests/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing 
id: %s", err) + } + d.SetId(id) + + err = NetworkManagementOperationWaitTime( + config, res, project, "Rerunning ConnectivityTestRun", userAgent, + d.Timeout(schema.TimeoutCreate)) + + if err != nil { + // The resource didn't actually create + d.SetId("") + return fmt.Errorf("Error waiting to rerun ConnectivityTestRun: %s", err) + } + + log.Printf("[DEBUG] Finished rerunning ConnectivityTestRun %q: %#v", d.Id(), res) + + return dataSourceGoogleNetworkManagementTestRunRead(d, meta) +} + +func dataSourceGoogleNetworkManagementTestRunRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{NetworkManagementBasePath}}projects/{{project}}/locations/global/connectivityTests/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for ConnectivityTestRun: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NetworkManagementConnectivityTestRun %q", d.Id())) + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading ConnectivityTestRun: %s", err) + } + + if err := d.Set("name", flattenNetworkManagementConnectivityTestRunName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading ConnectivityTestRun: %s", err) + } + if err := 
d.Set("reachability_details", flattenNetworkManagementConnectivityTestRunReachabilityDetails(res["reachabilityDetails"], d, config)); err != nil { + return fmt.Errorf("Error reading ConnectivityTestRun: %s", err) + } + + return nil +} + +func flattenNetworkManagementConnectivityTestRunName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return tpgresource.GetResourceNameFromSelfLink(v.(string)) +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetails(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["result"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsResult(original["result"], d, config) + transformed["verify_time"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsVerifyTime(original["verifyTime"], d, config) + transformed["traces"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTraces(original["traces"], d, config) + return []interface{}{transformed} +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsResult(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsVerifyTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTraces(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + 
continue + } + transformed = append(transformed, map[string]interface{}{ + "endpoint_info": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfo(original["endpointInfo"], d, config), + "steps": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesSteps(original["steps"], d, config), + "forward_trace_id": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesForwardTraceId(original["forwardTraceId"], d, config), + }) + } + return transformed +} +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["source_ip"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceIp(original["sourceIp"], d, config) + transformed["destination_ip"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationIp(original["destinationIp"], d, config) + transformed["protocol"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoProtocol(original["protocol"], d, config) + transformed["source_port"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourcePort(original["sourcePort"], d, config) + transformed["destination_port"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationPort(original["destinationPort"], d, config) + transformed["source_network_uri"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceNetworkUri(original["sourceNetworkUri"], d, config) + transformed["destination_network_uri"] = + 
flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationNetworkUri(original["destinationNetworkUri"], d, config) + transformed["source_agent_uri"] = + flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceAgentUri(original["sourceAgentUri"], d, config) + return []interface{}{transformed} +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceIp(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationIp(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoProtocol(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourcePort(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationPort(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let 
terraform core handle it otherwise +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceNetworkUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationNetworkUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceAgentUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesSteps(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "description": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsDescription(original["description"], d, config), + "state": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsState(original["state"], d, config), + "causes_drop": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsCausesDrop(original["causesDrop"], d, config), + "project_id": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsProjectId(original["projectId"], d, config), + }) + } + return transformed +} +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsState(v 
interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsCausesDrop(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsProjectId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesForwardTraceId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func expandNetworkManagementConnectivityTestRunName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + f, err := tpgresource.ParseGlobalFieldValue("tests", v.(string), "project", d, config, true) + if err != nil { + return nil, fmt.Errorf("Invalid value for zone: %s", err) + } + return f.RelativeLink(), nil +} diff --git a/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go b/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go new file mode 100644 index 000000000000..e289e7e35975 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go @@ -0,0 +1,113 @@ +package networkmanagement_test + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + 
"github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNetworkManagementConnectivityTestRun_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkManagementConnectivityTestDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkManagementConnectivityTestRun_instanceToInstance(context), + Check: resource.ComposeTestCheckFunc( + resource.TestMatchResourceAttr("data.google_network_management_connectivity_test_run.conn-test", + "reachability_details.0.result", regexp.MustCompile("REACHABLE")), + ), + }, + }, + }) +} + +func testAccNetworkManagementConnectivityTestRun_instanceToInstance(context map[string]interface{}) string { + connTestCfg := acctest.Nprintf(` +data "google_network_management_connectivity_test_run" "conn-test" { + name = google_network_management_connectivity_test.conn-test.name +} + +resource "google_network_management_connectivity_test" "conn-test" { + name = "tf-test-conntest%{random_suffix}" + source { + instance = google_compute_instance.vm1.id + } + + destination { + instance = google_compute_instance.vm2.id + } + + protocol = "TCP" +} +`, context) + return fmt.Sprintf("%s\n\n%s\n\n", connTestCfg, testAccNetworkManagementConnectivityTestRun_baseResources(context)) +} + +func testAccNetworkManagementConnectivityTestRun_baseResources(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_compute_address" "addr" { + name = "tf-test-addr%{random_suffix}" + subnetwork = google_compute_subnetwork.subnet.id + address_type = "INTERNAL" + address = "10.0.43.43" + region = "us-central1" +} + +resource "google_compute_instance" "vm1" { + name = "tf-test-src-vm%{random_suffix}" + machine_type = "e2-medium" + boot_disk 
{ + initialize_params { + image = data.google_compute_image.debian_11.id + } + } + network_interface { + network = google_compute_network.vpc.id + } +} + +resource "google_compute_instance" "vm2" { + name = "tf-test-vm-dest%{random_suffix}" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.debian_11.id + } + } + + network_interface { + network = google_compute_network.vpc.id + + } +} + +resource "google_compute_network" "vpc" { + name = "tf-test-connnet%{random_suffix}" +} + +resource "google_compute_subnetwork" "subnet" { + name = "tf-test-connet%{random_suffix}" + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + network = google_compute_network.vpc.id +} + +data "google_compute_image" "debian_11" { + family = "debian-11" + project = "debian-cloud" +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown b/mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown new file mode 100644 index 000000000000..1364b53c8710 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown @@ -0,0 +1,205 @@ +--- +subcategory: "Network Management" +description: |- + A connectivity test is a static analysis of your resource configurations + that enables you to evaluate connectivity to and from Google Cloud + resources in your Virtual Private Cloud (VPC) network. +--- + +# google_network_management_connectivity_test_run + + +!> This datasource triggers side effects on the target resource. It will take a long time to refresh (i.e. `terraform plan` will take much longer than usual) and may modify the state of the parent resource or other copies of the resource copying the same parent. 
+ +A connectivity test is a static analysis of your resource configurations +that enables you to evaluate connectivity to and from Google Cloud +resources in your Virtual Private Cloud (VPC) network. This data source allows +you to trigger a rerun operation on a connectivity test and return the results. + +To get more information about connectivity tests, see: + +* [API documentation](https://cloud.google.com/network-intelligence-center/docs/reference/networkmanagement/rest/v1/projects.locations.global.connectivityTests/rerun) +* How-to Guides + * [Official Documentation](https://cloud.google.com/network-intelligence-center/docs) + +## Example Usage - Network Management Connectivity Test Run Instances + +```hcl +data "google_network_management_connectivity_test_run" "instance-test-run" { + name = google_network_management_connectivity_test.instance-test.name +} + +resource "google_network_management_connectivity_test" "instance-test" { + name = "conn-test-instances" + source { + instance = google_compute_instance.source.id + } + + destination { + instance = google_compute_instance.destination.id + } + + protocol = "TCP" + labels = { + env = "test" + } +} + +resource "google_compute_instance" "source" { + name = "source-vm" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.debian_9.id + } + } + + network_interface { + network = google_compute_network.vpc.id + access_config { + } + } +} + +resource "google_compute_instance" "destination" { + name = "dest-vm" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.debian_9.id + } + } + + network_interface { + network = google_compute_network.vpc.id + access_config { + } + } +} + +resource "google_compute_network" "vpc" { + name = "conn-test-net" +} + +data "google_compute_image" "debian_9" { + family = "debian-11" + project = "debian-cloud" +} +``` + +## Argument Reference + +The following arguments are supported: + + 
+* `name` - + (Required) + Unique name for the connectivity test. + + +- - - + + +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are exported: + +* `id` - an identifier for the resource with format `projects/{{project}}/locations/global/connectivityTests/{{name}}` + +* `reachability_details` - + Connectivity test reachability details. + Structure is [documented below](#nested_reachability_details). + + +The `reachability_details` block contains: + +* `result` - + (Output) + Status of the connectivity test: RESULT_UNSPECIFIED, REACHABLE, UNREACHABLE, AMBIGUOUS or UNDETERMINED. + +* `verify_time` - + (Output) + Time when reachability details were determined. An RFC3339 timestamp in UTC time. + This in the format of yyyy-MM-ddTHH:mm:ss.SSSZ. + +* `traces` - + (Output) + List of connectivity test traces. + Structure is [documented below](#nested_reachability_details_traces). + + +The `traces` block contains: + +* `endpoint_info` - + (Output) + Derived from the source and destination endpoints definition specified by user request, and validated by the data plane model. + Structure is [documented below](#nested_reachability_details_traces_traces_endpoint_info). + +* `steps` - + (Output) + A trace of a test contains multiple steps from the initial state to the final state (delivered, dropped, forwarded, or aborted). + Structure is [documented below](#nested_reachability_details_traces_traces_steps). + +* `forward_trace_id` - + (Output) + ID of the trace. + + +The `endpoint_info` block contains: + +* `source_ip` - + (Output) + Source IP address. + +* `destination_ip` - + (Output) + Destination IP address. + +* `protocol` - + (Output) + IP protocol in string format, for example: "TCP", "UDP", "ICMP". + +* `source_port` - + (Output) + Source port. 
Only valid when protocol is TCP or UDP. + +* `destination_port` - + (Output) + Destination port. Only valid when protocol is TCP or UDP. + +* `source_network_uri` - + (Output) + URI of the network where this packet originates from. + +* `destination_network_uri` - + (Output) + URI of the network where this packet is sent to. + +* `source_agent_uri` - + (Output) + URI of the source telemetry agent this packet originates from. + +The `steps` block contains: + +* `description` - + (Output) + Description of the connectivity test step. + +* `state` - + (Output) + State of the connectivity test step. + +* `causes_drop` - + (Output) + If this step leads to the final state Drop. + +* `project_id` - + (Output) + Project ID of the connectivity test step. + From 463feb4084611a2e7331f8086fe9307db9bd4b93 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Wed, 2 Jul 2025 15:41:33 -0700 Subject: [PATCH 484/884] Bump teamcity execution timeout 12h -> 15h (#14450) --- mmv1/third_party/terraform/.teamcity/components/constants.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/constants.kt b/mmv1/third_party/terraform/.teamcity/components/constants.kt index f8ba7685ee2a..c2025eebab99 100644 --- a/mmv1/third_party/terraform/.teamcity/components/constants.kt +++ b/mmv1/third_party/terraform/.teamcity/components/constants.kt @@ -29,7 +29,7 @@ const val DefaultDaysOfMonth = "*" const val DefaultBranchName = "refs/heads/nightly-test" // Value used to make long-running builds fail due to a timeout -const val DefaultBuildTimeoutDuration = 60 * 12 // 12 hours in minutes +const val DefaultBuildTimeoutDuration = 60 * 15 // 15 hours in minutes // Values used to define and reference Shared Resource locks, used to avoid conflicting builds const val SharedResourceNameGa = "ci-test-project-nightly-ga Service Lock" From 2d2a4f08bb6723756b80aa5c4395ac0f5c3867f9 Mon Sep 17 00:00:00 2001 From: 
Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Date: Wed, 2 Jul 2025 20:35:41 -0300 Subject: [PATCH 485/884] GCS Backend Buckets Support For Cross-region Internal Application Load Balancer (#14178) --- mmv1/products/compute/BackendBucket.yaml | 17 ++++++++++ .../backend_bucket_global_ilb.tf.tmpl | 32 +++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index 04f155ebd773..d7892ff265e0 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -105,6 +105,15 @@ examples: backend_bucket_name: 'image-backend-bucket' bucket_name: 'image-store-bucket' exclude_docs: true + - name: 'backend_bucket_global_ilb' + primary_resource_id: 'global-ilb-backend' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + vars: + backend_bucket_name: 'global-ilb-backend-bucket' + bucket_name: 'global-ilb-bucket' + exclude_docs: true parameters: properties: - name: 'bucketName' @@ -278,3 +287,11 @@ properties: immutable: true validation: regex: '^(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)$' + - name: 'loadBalancingScheme' + type: Enum + description: | + The value can only be INTERNAL_MANAGED for cross-region internal layer 7 load balancer. + If loadBalancingScheme is not specified, the backend bucket can be used by classic global external load balancers, or global application external load balancers, or both. 
+ enum_values: + - 'INTERNAL_MANAGED' + send_empty_value: true diff --git a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl new file mode 100644 index 000000000000..2f2de2d13064 --- /dev/null +++ b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl @@ -0,0 +1,32 @@ +resource "google_project" "unarmored" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "project" { + project = google_project.unarmored.number + service = "compute.googleapis.com" + disable_on_destroy = true +} + +resource "google_compute_backend_bucket" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "backend_bucket_name"}}" + project = google_project.unarmored.number + bucket_name = google_storage_bucket.{{$.PrimaryResourceId}}.name + load_balancing_scheme = "INTERNAL_MANAGED" + + depends_on = [google_project_service.project] +} + +resource "google_storage_bucket" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "bucket_name"}}" + project = google_project.unarmored.number + location = "US-CENTRAL1" + force_destroy = true + uniform_bucket_level_access = true + + depends_on = [google_project_service.project] +} \ No newline at end of file From fc5fba09c0ead725b15a6de80a9bbc4350d867df Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Mon, 7 Jul 2025 16:43:43 +0200 Subject: [PATCH 486/884] apigee: add `access_logging_config` support for `google_apigee_instance` (#14410) --- mmv1/products/apigee/Instance.yaml | 18 ++++++++++++++++++ .../examples/apigee_instance_full_test.tf.tmpl | 5 +++++ .../resource_apigee_instance_update_test.go | 10 ++++++++++ 3 files changed, 33 insertions(+) diff --git a/mmv1/products/apigee/Instance.yaml b/mmv1/products/apigee/Instance.yaml index 
bc671891a21c..05939782e1dc 100644 --- a/mmv1/products/apigee/Instance.yaml +++ b/mmv1/products/apigee/Instance.yaml @@ -204,3 +204,21 @@ properties: the format: projects/*/regions/*/serviceAttachments/* Apigee customers can privately forward traffic to this service attachment using the PSC endpoints. output: true + - name: 'accessLoggingConfig' + type: NestedObject + description: | + Access logging configuration enables the access logging feature at the instance. + Apigee customers can enable access logging to ship the access logs to their own project's cloud logging. + properties: + - name: 'enabled' + type: Boolean + required: true + description: | + Boolean flag that specifies whether the customer access log feature is enabled. + - name: 'filter' + type: String + description: | + Ship the access log entries that match the statusCode defined in the filter. + The statusCode is the only expected/supported filter field. (Ex: statusCode) + The filter will parse it to the Common Expression Language semantics for expression + evaluation to build the filter condition. 
(Ex: "filter": statusCode >= 200 && statusCode < 300 ) diff --git a/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl index 8b83461f0d9f..0b6d6ee1e0fe 100644 --- a/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl @@ -135,4 +135,9 @@ resource "google_apigee_instance" "{{$.PrimaryResourceId}}" { display_name = "tf-test%{random_suffix}" org_id = google_apigee_organization.apigee_org.id disk_encryption_key_name = google_kms_crypto_key.apigee_key.id + + access_logging_config { + enabled = true, + filter = "status_code >= 200 && status_code < 300" + } } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go index 71f234952410..eb0aa901e152 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go @@ -120,6 +120,11 @@ resource "google_apigee_instance" "apigee_instance" { consumer_accept_list = [ google_project.project1.project_id, ] + + access_logging_config { + enabled = false, + filter = "status_code >= 0 && status_code < 600" + } } `, context) } @@ -204,6 +209,11 @@ resource "google_apigee_instance" "apigee_instance" { google_project.project1.project_id, google_project.project2.project_id, ] + + access_logging_config { + enabled = true, + filter = "status_code >= 200 && status_code < 300" + } } `, context) } From dfe735ef19b62261f8ac9afa82c98c0ac5c036b6 Mon Sep 17 00:00:00 2001 From: Veronika Herasymenko Date: Mon, 7 Jul 2025 18:45:59 +0200 Subject: [PATCH 487/884] Add resource_manager_tags support to Firewall api (#14397) Co-authored-by: Scott Suarez --- mmv1/products/compute/Firewall.yaml | 17 +++++ 
.../compute/resource_compute_firewall_test.go | 63 +++++++++++++++++++ 2 files changed, 80 insertions(+) diff --git a/mmv1/products/compute/Firewall.yaml b/mmv1/products/compute/Firewall.yaml index 0fe92be2c8d0..d379841f3c8c 100644 --- a/mmv1/products/compute/Firewall.yaml +++ b/mmv1/products/compute/Firewall.yaml @@ -331,3 +331,20 @@ properties: - target_service_accounts item_type: type: String + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the firewall. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. The field is ignored when empty. + The field is immutable and causes resource replacement when mutated. This field is only + set at create time and modifying this field after creation will trigger recreation. + To apply tags to an existing resource, see the google_tags_tag_binding resource. 
+ ignore_read: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go index ee3837129e55..1f1e6fb690b5 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeFirewall_update(t *testing.T) { @@ -313,6 +314,68 @@ func TestAccComputeFirewall_moduleOutput(t *testing.T) { }) } +func TestAccComputeFirewall_resourceManagerTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + networkName := fmt.Sprintf("tf-test-firewall-%s", acctest.RandString(t, 10)) + firewallName := fmt.Sprintf("tf-test-firewall-%s", acctest.RandString(t, 10)) + + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-firewall-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey, _ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-firewall-tagvalue", sharedTagkey, org) + + context := map[string]interface{}{ + "network_name": networkName, + "firewall_name": firewallName, + "tag_key_id": tagKeyResult["name"], + "tag_value_id": tagValueResult["name"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeFirewallDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFirewall_resourceManagerTags(context), + }, + { + ResourceName: "google_compute_firewall.acc_firewall_with_resource_manager_tags", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"params"}, // we don't read tags back. The whole params block is input only + }, + }, + }) +} + +func testAccComputeFirewall_resourceManagerTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "foobar" { + name = "%{network_name}" + auto_create_subnetworks = false +} + +resource "google_compute_firewall" "acc_firewall_with_resource_manager_tags" { + name = "%{firewall_name}" + description = "Resource created for Terraform acceptance testing" + network = google_compute_network.foobar.name + source_tags = ["foo"] + + allow { + protocol = "icmp" + } + params { + resource_manager_tags = { + "%{tag_key_id}" = "%{tag_value_id}" + } + } + } +`, context) +} + func testAccComputeFirewall_basic(network, firewall string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { From d1d20f31b8975be16c7fa1250c39cf7e9f5ab013 Mon Sep 17 00:00:00 2001 From: Lingkai Shen Date: Mon, 7 Jul 2025 12:56:57 -0400 Subject: [PATCH 488/884] Skip deleting default Firebase Hosting Site (#14453) --- mmv1/products/firebasehosting/Site.yaml | 11 +++++++++ .../firebasehosting_default_site.go.tmpl | 23 +++++++++++++++++++ .../firebasehosting_site_default.tf.tmpl | 5 ++++ .../pre_delete/firebasehosting_site.go.tmpl | 4 ++++ 4 files changed, 43 insertions(+) create mode 100644 mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl create mode 100644 mmv1/templates/terraform/examples/firebasehosting_site_default.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl diff --git a/mmv1/products/firebasehosting/Site.yaml b/mmv1/products/firebasehosting/Site.yaml index a32466eb8da2..de08fa279010 100644 --- a/mmv1/products/firebasehosting/Site.yaml +++ b/mmv1/products/firebasehosting/Site.yaml @@ -35,6 +35,8 @@ timeouts: delete_minutes: 20 custom_code: pre_create: 'templates/terraform/pre_create/firebasehosting_site.go.tmpl' + 
pre_delete: 'templates/terraform/pre_delete/firebasehosting_site.go.tmpl' + test_check_destroy: 'templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl' examples: - name: 'firebasehosting_site_basic' primary_resource_id: 'default' @@ -53,6 +55,11 @@ examples: project_id: 'PROJECT_NAME' test_vars_overrides: 'display_name': '"tf-test Test web app for Firebase Hosting"' + - name: 'firebasehosting_site_default' + primary_resource_id: 'default' + min_version: 'beta' + test_env_vars: + project_id: 'PROJECT_NAME' parameters: - name: 'site_id' type: String @@ -88,3 +95,7 @@ properties: The default URL for the site in the form of https://{name}.web.app min_version: 'beta' output: true + - name: 'type' + type: String + output: true + description: The type of Hosting site, either 'DEFAULT_SITE' or `USER_SITE` diff --git a/mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl new file mode 100644 index 000000000000..3f413ed9596e --- /dev/null +++ b/mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl @@ -0,0 +1,23 @@ +config := acctest.GoogleProviderConfig(t) + +url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}FirebaseHostingBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/sites/{{"{{"}}site_id{{"}}"}}") +if err != nil { + return err +} + +billingProject := "" + +if config.BillingProject != "" { + billingProject = config.BillingProject +} + +resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, +}) +if err == nil && resp["type"].(string) != "DEFAULT_SITE" { + return fmt.Errorf("Firebase Hosting Site still exists at %s", url) +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/firebasehosting_site_default.tf.tmpl 
b/mmv1/templates/terraform/examples/firebasehosting_site_default.tf.tmpl new file mode 100644 index 000000000000..b1b47d46b7dc --- /dev/null +++ b/mmv1/templates/terraform/examples/firebasehosting_site_default.tf.tmpl @@ -0,0 +1,5 @@ +resource "google_firebase_hosting_site" "default" { + provider = google-beta + project = "{{index $.TestEnvVars "project_id"}}" + site_id = "{{index $.TestEnvVars "project_id"}}" +} diff --git a/mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl b/mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl new file mode 100644 index 000000000000..f7e91ee2ceba --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl @@ -0,0 +1,4 @@ +if siteType := d.Get("type"); siteType == "DEFAULT_SITE" { + log.Printf("[WARN] Skip deleting default hosting side: %q", d.Get("name").(string)) + return nil +} From 72939789cf3d192d2509b07b97b56b518d29916f Mon Sep 17 00:00:00 2001 From: AJ Christensen Date: Tue, 8 Jul 2025 05:10:24 +1200 Subject: [PATCH 489/884] Fix JWKS JSON permadiff for workload identity pool provider (#14376) Signed-off-by: AJ Christensen --- mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml index 177fbf91c251..c3ce3eb2acb0 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml @@ -310,6 +310,7 @@ properties: } ``` required: false + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - name: 'saml' type: NestedObject description: From 6ec21de08dd5a17b12bd0befec00f114eb191957 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 7 Jul 2025 13:15:09 -0700 Subject: [PATCH 490/884] tgc-revival: move services folder to top level (#14445) --- mmv1/provider/terraform_tgc_next.go | 34 +- 
.../cai2hcl/resource_converter.go.tmpl | 26 +- .../cai2hcl/resource_converters.go.tmpl | 12 +- .../provider/provider_mmv1_resources.go.tmpl | 4 +- .../tgc_next/services/resource.go.tmpl | 75 ++ .../tfplan2cai/resource_converter.go.tmpl | 47 +- .../tfplan2cai/resource_converters.go.tmpl | 12 +- .../cai2hcl/converters/convert_resource.go | 2 +- .../compute/compute_instance_helpers_tgc.go | 194 ----- .../pkg/cai2hcl/converters/utils/utils.go | 24 - .../cai2hcl/converters/utils/utils_test.go | 21 +- .../tgc_next/pkg/cai2hcl/models/converter.go | 2 +- .../services/compute/compute_instance.go | 701 +++--------------- .../compute/compute_instance_cai2hcl.go} | 40 +- .../compute/compute_instance_tfplan2cai.go | 654 ++++++++++++++++ .../services/resourcemanager/project.go | 151 +--- .../resourcemanager/project_cai2hcl.go} | 21 +- .../resourcemanager/project_tfplan2cai.go | 149 ++++ .../converters/cai/resource_converter.go | 2 +- .../tgc_next/pkg/tgcresource/utils.go | 19 + .../tgc_next/pkg/transport/config_tgc.go | 5 + 21 files changed, 1106 insertions(+), 1089 deletions(-) create mode 100644 mmv1/templates/tgc_next/services/resource.go.tmpl delete mode 100644 mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go rename mmv1/third_party/tgc_next/pkg/{tfplan2cai/converters => }/services/compute/compute_instance.go (72%) rename mmv1/third_party/tgc_next/pkg/{cai2hcl/converters/services/compute/compute_instance.go => services/compute/compute_instance_cai2hcl.go} (85%) create mode 100644 mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go rename mmv1/third_party/tgc_next/pkg/{tfplan2cai/converters => }/services/resourcemanager/project.go (54%) rename mmv1/third_party/tgc_next/pkg/{cai2hcl/converters/services/resourcemanager/project.go => services/resourcemanager/project_cai2hcl.go} (69%) create mode 100644 mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go create mode 100644 
mmv1/third_party/tgc_next/pkg/transport/config_tgc.go diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index ef2fe225cbb6..8475124d31bf 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -99,21 +99,26 @@ func (tgc TerraformGoogleConversionNext) GenerateObject(object api.Resource, out templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) if !object.IsExcluded() { - tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "tfplan2cai") - tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs, "cai2hcl") + tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs) tgc.GenerateResourceTests(object, *templateData, outputFolder) } } -func (tgc TerraformGoogleConversionNext) GenerateResource(object api.Resource, templateData TemplateData, outputFolder string, generateCode, generateDocs bool, converter string) { +func (tgc TerraformGoogleConversionNext) GenerateResource(object api.Resource, templateData TemplateData, outputFolder string, generateCode, generateDocs bool) { productName := tgc.Product.ApiName - conveterFolder := fmt.Sprintf("pkg/%s/converters/services", converter) - targetFolder := path.Join(outputFolder, conveterFolder, productName) + targetFolder := path.Join(outputFolder, "pkg/services", productName) if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } - templatePath := fmt.Sprintf("templates/tgc_next/%s/resource_converter.go.tmpl", converter) + converters := []string{"tfplan2cai", "cai2hcl"} + for _, converter := range converters { + templatePath := fmt.Sprintf("templates/tgc_next/%s/resource_converter.go.tmpl", converter) + targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s_%s.go", productName, google.Underscore(object.Name), converter)) + 
templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) + } + + templatePath := "templates/tgc_next/services/resource.go.tmpl" targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s.go", productName, google.Underscore(object.Name))) templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) } @@ -156,14 +161,15 @@ func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go", "pkg/provider/provider_mmv1_resources.go": "templates/tgc_next/provider/provider_mmv1_resources.go.tmpl", + // services + "pkg/services/compute/compute_instance_helpers.go": "third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", + "pkg/services/compute/metadata.go": "third_party/terraform/services/compute/metadata.go.tmpl", + // tfplan2cai - "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", - "pkg/tfplan2cai/converters/services/compute/compute_instance_helpers.go": "third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", - "pkg/tfplan2cai/converters/services/compute/metadata.go": "third_party/terraform/services/compute/metadata.go.tmpl", + "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", // cai2hcl - "pkg/cai2hcl/converters/resource_converters.go": "templates/tgc_next/cai2hcl/resource_converters.go.tmpl", - "pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go": "third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", + "pkg/cai2hcl/converters/resource_converters.go": "templates/tgc_next/cai2hcl/resource_converters.go.tmpl", } templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) @@ -234,9 +240,9 @@ func (tgc TerraformGoogleConversionNext) CopyCommonFiles(outputFolder string, ge "pkg/verify/path_or_contents.go": 
"third_party/terraform/verify/path_or_contents.go", "pkg/version/version.go": "third_party/terraform/version/version.go", - // tfplan2cai - "pkg/tfplan2cai/converters/services/compute/image.go": "third_party/terraform/services/compute/image.go", - "pkg/tfplan2cai/converters/services/compute/disk_type.go": "third_party/terraform/services/compute/disk_type.go", + // services + "pkg/services/compute/image.go": "third_party/terraform/services/compute/image.go", + "pkg/services/compute/disk_type.go": "third_party/terraform/services/compute/disk_type.go", } tgc.CopyFileList(outputFolder, resourceConverters) } diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 507334fa2e9d..c671618eee58 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -33,7 +33,6 @@ import ( "google.golang.org/api/bigtableadmin/v2" "google.golang.org/api/googleapi" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" @@ -44,34 +43,23 @@ import ( {{- $caiProductBaseUrl := $.CaiProductBaseUrl }} {{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} -{{- $apiVersion := $.CaiApiVersion $productBackendName $caiProductBaseUrl}} -{{- range $prop := $.AllUserProperties }} -{{template "SchemaSubResource" $prop}} -{{- end}} - -{{- if not $.ApiResourceTypeKind }} -const {{ $.ApiResourceType -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" -{{- end }} - -const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" - -type {{ $.ResourceName -}}Converter struct { +type {{ $.ResourceName -}}Cai2hclConverter 
struct { name string schema map[string]*schema.Schema } -func New{{ $.ResourceName -}}Converter(provider *schema.Provider) models.Converter { +func New{{ $.ResourceName -}}Cai2hclConverter(provider *schema.Provider) models.Cai2hclConverter { schema := provider.ResourcesMap[{{ $.ResourceName -}}SchemaName].Schema - return &{{ $.ResourceName -}}Converter{ + return &{{ $.ResourceName -}}Cai2hclConverter{ name: {{ $.ResourceName -}}SchemaName, schema: schema, } } // Convert converts asset to HCL resource blocks. -func (c *{{ $.ResourceName -}}Converter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { +func (c *{{ $.ResourceName -}}Cai2hclConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { var blocks []*models.TerraformResourceBlock block, err := c.convertResourceData(asset) if err != nil { @@ -81,14 +69,14 @@ func (c *{{ $.ResourceName -}}Converter) Convert(asset caiasset.Asset) ([]*model return blocks, nil } -func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { +func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { if asset.Resource == nil || asset.Resource.Data == nil { return nil, fmt.Errorf("asset resource data is nil") } var err error res := asset.Resource.Data - config := utils.NewConfig() + config := transport.NewConfig() d := &schema.ResourceData{} assetNameParts := strings.Split(asset.Name, "/") @@ -152,7 +140,7 @@ func (c *{{ $.ResourceName -}}Converter) convertResourceData(asset caiasset.Asse {{ $prop.CustomTemplate $prop.CustomTgcFlatten true -}} {{- else if $prop.IsA "KeyValueLabels" }} func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return utils.RemoveTerraformAttributionLabel(v) + return tgcresource.RemoveTerraformAttributionLabel(v) } {{- else if or (and 
(eq $prop.Name "zone") $.HasZone) (and (eq $prop.Name "region") $.HasRegion) -}} func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index 6a42cd417c55..11a9ca891360 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -30,9 +30,9 @@ package converters import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" {{- range $service := $.Products }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/{{ lower $service.Name }}" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/{{ lower $service.Name }}" {{- end }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/resourcemanager" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" tpg_provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" @@ -41,20 +41,20 @@ import ( var provider *schema.Provider = tpg_provider.Provider() // ConverterMap is a collection of converters instances, indexed by cai asset type. 
-var ConverterMap = map[string]map[string]models.Converter{ +var ConverterMap = map[string]map[string]models.Cai2hclConverter{ // ####### START handwritten resources ########### resourcemanager.ProjectAssetType: { - "Default": resourcemanager.NewProjectConverter(provider), + "Default": resourcemanager.NewProjectCai2hclConverter(provider), }, compute.ComputeInstanceAssetType: { - "Default": compute.NewComputeInstanceConverter(provider), + "Default": compute.NewComputeInstanceCai2hclConverter(provider), }, // ####### END handwritten resources ########### {{- range $resourceType, $resources := $.ResourcesGroupedByApiResourceType }} {{ $resourceType }}AssetType: { {{- range $object := $resources }} - "{{ $object.AliasName }}": {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Converter(provider), + "{{ $object.AliasName }}": {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Cai2hclConverter(provider), {{- end }} }, {{- end }} diff --git a/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl index 967e3d3f7adf..4aeecef0a663 100644 --- a/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl @@ -3,8 +3,8 @@ package provider import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/compute" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/resourcemanager" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/compute" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" ) var handwrittenTfplan2caiResources = map[string]*schema.Resource{ diff --git a/mmv1/templates/tgc_next/services/resource.go.tmpl b/mmv1/templates/tgc_next/services/resource.go.tmpl new file mode 100644 index 
000000000000..f43c29904a24 --- /dev/null +++ b/mmv1/templates/tgc_next/services/resource.go.tmpl @@ -0,0 +1,75 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */ -}} +{{$.CodeHeader TemplatePath}} + +package {{ lower $.ProductMetadata.Name }} + +import ( + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" +) + +{{- $caiProductBaseUrl := $.CaiProductBaseUrl }} +{{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} + +{{- if not $.ApiResourceTypeKind }} +const {{ $.ApiResourceType -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +{{- end }} + +const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" + +{{if $.CustomCode.Constants -}} + {{- $.CustomTemplate $.CustomCode.Constants true -}} +{{- end}} + +func Resource{{ $.ResourceName -}}() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + {{- range $prop := $.OrderProperties $.AllUserProperties }} +{{template "SchemaFields" 
$prop -}} + {{- end }} + {{- range $prop := $.VirtualFields }} +{{template "SchemaFields" $prop -}} + {{- end }} +{{- if $.CustomCode.ExtraSchemaEntry }} + {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} +{{- end}} +{{ if $.HasProject -}} + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, +{{- end}} +{{- if $.HasSelfLink }} + "self_link": { + Type: schema.TypeString, + Computed: true, + }, +{{- end}} + }, + UseJSONNumber: true, + } +} + +{{- range $prop := $.AllUserProperties }} +{{template "SchemaSubResource" $prop}} +{{- end}} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 21ccc46db20f..f94588e6a36b 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -46,51 +46,8 @@ import ( {{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} {{- $apiVersion := $.CaiApiVersion $productBackendName $caiProductBaseUrl}} -{{if $.CustomCode.Constants -}} - {{- $.CustomTemplate $.CustomCode.Constants true -}} -{{- end}} - -{{- if not $.ApiResourceTypeKind }} -const {{ $.ApiResourceType -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" -{{- end }} - -func Resource{{ $.ResourceName -}}() *schema.Resource { - return &schema.Resource{ - Schema: map[string]*schema.Schema{ - {{- range $prop := $.OrderProperties $.AllUserProperties }} -{{template "SchemaFields" $prop -}} - {{- end }} - {{- range $prop := $.VirtualFields }} -{{template "SchemaFields" $prop -}} - {{- end }} -{{- if $.CustomCode.ExtraSchemaEntry }} - {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} -{{- end}} -{{ if $.HasProject -}} - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - }, -{{- end}} -{{- if $.HasSelfLink }} - "self_link": { - Type: 
schema.TypeString, - Computed: true, - }, -{{- end}} - }, - UseJSONNumber: true, - } -} - -{{- range $prop := $.AllUserProperties }} -{{template "SchemaSubResource" $prop}} -{{- end}} - -func ResourceConverter{{ $.ResourceName -}}() cai.ResourceConverter { - return cai.ResourceConverter{ +func {{ $.ResourceName -}}Tfplan2caiConverter() cai.Tfplan2caiConverter { + return cai.Tfplan2caiConverter{ Convert: Get{{ $.ResourceName -}}CaiAssets, } } diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl index 3245a97681f8..9778eeda65d9 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl @@ -30,20 +30,20 @@ package converters import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" {{- range $service := $.Products }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/{{ lower $service.Name }}" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/{{ lower $service.Name }}" {{- end }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/resourcemanager" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" ) -var ConverterMap = map[string]cai.ResourceConverter{ +var ConverterMap = map[string]cai.Tfplan2caiConverter{ // ####### START handwritten resources ########### - "google_project": resourcemanager.ResourceConverterProject(), - "google_compute_instance": compute.ResourceConverterComputeInstance(), + "google_project": resourcemanager.ProjectTfplan2caiConverter(), + "google_compute_instance": compute.ComputeInstanceTfplan2caiConverter(), // ####### END handwritten resources ########### {{- range $object := $.ResourcesForVersion }} {{- if $object.ResourceName }} - "{{ $object.TerraformName }}": 
{{ $object.ServiceName }}.ResourceConverter{{ $object.ResourceName -}}(), + "{{ $object.TerraformName }}": {{ $object.ServiceName }}.{{ $object.ResourceName -}}Tfplan2caiConverter(), {{- end }} {{- end }} } \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go index 7e18280a6d5a..daabb215054f 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go @@ -13,7 +13,7 @@ func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, er return nil, nil } - var converter models.Converter + var converter models.Cai2hclConverter // Normally, one asset type has only one converter. if len(converters) == 1 { for _, converter = range converters { diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go deleted file mode 100644 index 1e66d37cd40b..000000000000 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers_tgc.go +++ /dev/null @@ -1,194 +0,0 @@ -package compute - -import ( - "strings" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - - compute "google.golang.org/api/compute/v0.beta" -) - -func flattenAliasIpRangeTgc(ranges []*compute.AliasIpRange) []map[string]interface{} { - rangesSchema := make([]map[string]interface{}, 0, len(ranges)) - for _, ipRange := range ranges { - rangesSchema = append(rangesSchema, map[string]interface{}{ - "ip_cidr_range": ipRange.IpCidrRange, - "subnetwork_range_name": ipRange.SubnetworkRangeName, - }) - } - return rangesSchema -} - -func flattenSchedulingTgc(resp 
*compute.Scheduling) []map[string]interface{} { - schedulingMap := make(map[string]interface{}, 0) - - // gracefulShutdown is not in the cai asset, so graceful_shutdown is skipped. - - if resp.InstanceTerminationAction != "" { - schedulingMap["instance_termination_action"] = resp.InstanceTerminationAction - } - - if resp.MinNodeCpus != 0 { - schedulingMap["min_node_cpus"] = resp.MinNodeCpus - } - - schedulingMap["on_host_maintenance"] = resp.OnHostMaintenance - - if resp.AutomaticRestart != nil && !*resp.AutomaticRestart { - schedulingMap["automatic_restart"] = *resp.AutomaticRestart - } - - if resp.Preemptible { - schedulingMap["preemptible"] = resp.Preemptible - } - - if resp.NodeAffinities != nil && len(resp.NodeAffinities) > 0 { - nodeAffinities := []map[string]interface{}{} - for _, na := range resp.NodeAffinities { - nodeAffinities = append(nodeAffinities, map[string]interface{}{ - "key": na.Key, - "operator": na.Operator, - "values": tpgresource.ConvertStringArrToInterface(na.Values), - }) - } - schedulingMap["node_affinities"] = nodeAffinities - } - - schedulingMap["provisioning_model"] = resp.ProvisioningModel - - if resp.AvailabilityDomain != 0 { - schedulingMap["availability_domain"] = resp.AvailabilityDomain - } - - if resp.MaxRunDuration != nil { - schedulingMap["max_run_duration"] = flattenComputeMaxRunDuration(resp.MaxRunDuration) - } - - if resp.OnInstanceStopAction != nil { - schedulingMap["on_instance_stop_action"] = flattenOnInstanceStopAction(resp.OnInstanceStopAction) - } - - if resp.HostErrorTimeoutSeconds != 0 { - schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds - } - - if resp.MaintenanceInterval != "" { - schedulingMap["maintenance_interval"] = resp.MaintenanceInterval - } - - if resp.LocalSsdRecoveryTimeout != nil { - schedulingMap["local_ssd_recovery_timeout"] = flattenComputeLocalSsdRecoveryTimeout(resp.LocalSsdRecoveryTimeout) - } - - if len(schedulingMap) == 0 { - return nil - } - - return 
[]map[string]interface{}{schedulingMap} -} - -func flattenNetworkInterfacesTgc(networkInterfaces []*compute.NetworkInterface, project string) ([]map[string]interface{}, string, string, error) { - flattened := make([]map[string]interface{}, len(networkInterfaces)) - var internalIP, externalIP string - - for i, iface := range networkInterfaces { - var ac []map[string]interface{} - ac, externalIP = flattenAccessConfigs(iface.AccessConfigs) - - flattened[i] = map[string]interface{}{ - "network_ip": iface.NetworkIP, - "access_config": ac, - "alias_ip_range": flattenAliasIpRangeTgc(iface.AliasIpRanges), - "nic_type": iface.NicType, - "stack_type": iface.StackType, - "ipv6_access_config": flattenIpv6AccessConfigs(iface.Ipv6AccessConfigs), - "ipv6_address": iface.Ipv6Address, - "network": tpgresource.ConvertSelfLinkToV1(iface.Network), - "subnetwork": tpgresource.ConvertSelfLinkToV1(iface.Subnetwork), - "internal_ipv6_prefix_length": iface.InternalIpv6PrefixLength, - } - - subnetProject := utils.ParseFieldValue(iface.Subnetwork, "projects") - if subnetProject != project { - flattened[i]["subnetwork_project"] = subnetProject - } - - // The field name is computed, no it is not converted. - - if iface.StackType != "IPV4_ONLY" { - flattened[i]["stack_type"] = iface.StackType - } - - if iface.QueueCount != 0 { - flattened[i]["queue_count"] = iface.QueueCount - } - - if internalIP == "" { - internalIP = iface.NetworkIP - } - - if iface.NetworkAttachment != "" { - networkAttachment, err := tpgresource.GetRelativePath(iface.NetworkAttachment) - if err != nil { - return nil, "", "", err - } - flattened[i]["network_attachment"] = networkAttachment - } - - // the security_policy for a network_interface is found in one of its accessConfigs. 
- if len(iface.AccessConfigs) > 0 && iface.AccessConfigs[0].SecurityPolicy != "" { - flattened[i]["security_policy"] = iface.AccessConfigs[0].SecurityPolicy - } else if len(iface.Ipv6AccessConfigs) > 0 && iface.Ipv6AccessConfigs[0].SecurityPolicy != "" { - flattened[i]["security_policy"] = iface.Ipv6AccessConfigs[0].SecurityPolicy - } - } - return flattened, internalIP, externalIP, nil -} - -func flattenServiceAccountsTgc(serviceAccounts []*compute.ServiceAccount) []map[string]interface{} { - result := make([]map[string]interface{}, len(serviceAccounts)) - for i, serviceAccount := range serviceAccounts { - scopes := serviceAccount.Scopes - if len(scopes) == 0 { - scopes = []string{} - } - result[i] = map[string]interface{}{ - "email": serviceAccount.Email, - "scopes": scopes, - } - } - return result -} - -func flattenGuestAcceleratorsTgc(accelerators []*compute.AcceleratorConfig) []map[string]interface{} { - acceleratorsSchema := make([]map[string]interface{}, len(accelerators)) - for i, accelerator := range accelerators { - acceleratorsSchema[i] = map[string]interface{}{ - "count": accelerator.AcceleratorCount, - "type": tpgresource.GetResourceNameFromSelfLink(accelerator.AcceleratorType), - } - } - return acceleratorsSchema -} - -func flattenReservationAffinityTgc(affinity *compute.ReservationAffinity) []map[string]interface{} { - if affinity == nil { - return nil - } - - // The values of ConsumeReservationType in cai assets are NO_ALLOCATION, SPECIFIC_ALLOCATION, ANY_ALLOCATION - crt := strings.ReplaceAll(affinity.ConsumeReservationType, "_ALLOCATION", "_RESERVATION") - flattened := map[string]interface{}{ - "type": crt, - } - - if crt == "SPECIFIC_RESERVATION" { - flattened["specific_reservation"] = []map[string]interface{}{{ - "key": affinity.Key, - "values": affinity.Values, - }} - } - - return []map[string]interface{}{flattened} -} diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go 
b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go index 8c84fe583098..2226456fd24e 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go @@ -5,7 +5,6 @@ import ( "fmt" "strings" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" hashicorpcty "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/zclconf/go-cty/cty" @@ -23,25 +22,6 @@ func ParseFieldValue(url string, name string) string { return "" } -// Remove the Terraform attribution label "goog-terraform-provisioned" from labels -func RemoveTerraformAttributionLabel(raw interface{}) interface{} { - if raw == nil { - return nil - } - - if labels, ok := raw.(map[string]string); ok { - delete(labels, "goog-terraform-provisioned") - return labels - } - - if labels, ok := raw.(map[string]interface{}); ok { - delete(labels, "goog-terraform-provisioned") - return labels - } - - return nil -} - // DecodeJSON decodes the map object into the target struct. func DecodeJSON(data map[string]interface{}, v interface{}) error { b, err := json.Marshal(data) @@ -90,10 +70,6 @@ func hashicorpCtyTypeToZclconfCtyType(t hashicorpcty.Type) (cty.Type, error) { return ret, nil } -func NewConfig() *transport_tpg.Config { - return &transport_tpg.Config{} -} - // normalizeFlattenedObj traverses the output map recursively, removes fields which are // not a part of TF schema and converts unmarshallable "schema.Set" objects to arrays. 
func normalizeFlattenedObj(obj interface{}, schemaPerProp map[string]*schema.Schema) interface{} { diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go index d83dfcce47e3..bf0d6c5c80c5 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go @@ -1,10 +1,11 @@ -package utils +package utils_test import ( "testing" "github.com/stretchr/testify/assert" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" tpg_provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -18,7 +19,7 @@ func TestSubsetOfFieldsMapsToCtyValue(t *testing.T) { "name": "forwarding-rule-1", } - val, err := MapToCtyValWithSchema(outputMap, schema) + val, err := utils.MapToCtyValWithSchema(outputMap, schema) assert.Nil(t, err) assert.Equal(t, "forwarding-rule-1", val.GetAttr("name").AsString()) @@ -31,7 +32,7 @@ func TestWrongFieldTypeBreaksConversion(t *testing.T) { "description": []string{"unknownValue"}, // string is required, not array. 
} - val, err := MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) assert.True(t, val.IsNull()) assert.Contains(t, err.Error(), "string is required") @@ -44,7 +45,7 @@ func TestNilValue(t *testing.T) { "description": nil, } - val, err := MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) assert.Equal(t, cty.Value(cty.StringVal("fr-1")), val.GetAttr("name")) @@ -57,7 +58,7 @@ func TestNilValueInRequiredField(t *testing.T) { "name": nil, } - val, err := MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) // In future we may want to fail in this case. assert.Nil(t, err) @@ -71,7 +72,7 @@ func TestFieldsWithTypeSlice(t *testing.T) { "resource_policies": []string{"test"}, } - val, err := MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) @@ -85,7 +86,7 @@ func TestMissingFieldDoesNotBreakConversionConversion(t *testing.T) { "unknownField": "unknownValue", } - val, err := MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) @@ -100,7 +101,7 @@ func TestFieldWithTypeSchemaSet(t *testing.T) { "resource_policies": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface([]string{"test"})), } - val, err := MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) assert.Equal(t, []cty.Value{cty.StringVal("test")}, val.GetAttr("resource_policies").AsValueSlice()) @@ -128,7 +129,7 @@ func TestFieldWithTypeSchemaListAndNestedObject(t *testing.T) { }, } - val, err := MapToCtyValWithSchema(flattenedMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(flattenedMap, resourceSchema) assert.Nil(t, 
err) assert.Equal(t, @@ -167,7 +168,7 @@ func TestFieldWithTypeSchemaSetAndNestedObject(t *testing.T) { }), } - val, err := MapToCtyValWithSchema(flattenedMap, resourceSchema) + val, err := utils.MapToCtyValWithSchema(flattenedMap, resourceSchema) assert.Nil(t, err) assert.Equal(t, diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go index 5b6555834009..0951b17299df 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go @@ -5,7 +5,7 @@ import ( ) // Converter interface for resources. -type Converter interface { +type Cai2hclConverter interface { // Convert turns asset into hcl blocks. Convert(asset caiasset.Asset) ([]*TerraformResourceBlock, error) } diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go similarity index 72% rename from mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go rename to mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go index 3eea13261684..0de28812aadd 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go @@ -1,26 +1,23 @@ package compute import ( - "errors" - "fmt" "strings" - compute "google.golang.org/api/compute/v0.beta" - "google.golang.org/api/googleapi" - + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" + compute "google.golang.org/api/compute/v0.beta" ) +// ComputeInstanceAssetType is the CAI asset type name for compute instance. const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" +// ComputeInstanceSchemaName is the TF resource schema name for compute instance. +const ComputeInstanceSchemaName string = "google_compute_instance" + var ( advancedMachineFeaturesKeys = []string{ "advanced_machine_features.0.enable_nested_virtualization", @@ -1430,638 +1427,186 @@ be from 0 to 999,999,999 inclusive.`, } } -func ResourceConverterComputeInstance() cai.ResourceConverter { - return cai.ResourceConverter{ - Convert: GetComputeInstanceAndDisksCaiObjects, +func flattenAliasIpRangeTgc(ranges []*compute.AliasIpRange) []map[string]interface{} { + rangesSchema := make([]map[string]interface{}, 0, len(ranges)) + for _, ipRange := range ranges { + rangesSchema = append(rangesSchema, map[string]interface{}{ + "ip_cidr_range": ipRange.IpCidrRange, + "subnetwork_range_name": ipRange.SubnetworkRangeName, + }) } + return rangesSchema } -func GetComputeInstanceAndDisksCaiObjects(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { - if instanceAsset, err := GetComputeInstanceCaiObject(d, config); err == nil { - assets := []caiasset.Asset{instanceAsset} - if diskAsset, err := GetComputeInstanceDiskCaiObject(d, config); err == nil { - assets = append(assets, diskAsset) - return assets, nil - } else { - return []caiasset.Asset{}, err - } - } else { - return []caiasset.Asset{}, err - } -} +func 
flattenSchedulingTgc(resp *compute.Scheduling) []map[string]interface{} { + schedulingMap := make(map[string]interface{}, 0) -func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { - name, err := cai.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/instances/{{name}}") - if err != nil { - return caiasset.Asset{}, err - } - if data, err := GetComputeInstanceData(d, config); err == nil { - location, _ := tpgresource.GetLocation(d, config) - return caiasset.Asset{ - Name: name, - Type: ComputeInstanceAssetType, - Resource: &caiasset.AssetResource{ - Version: "v1", - DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", - DiscoveryName: "Instance", - Data: data, - Location: location, - }, - }, nil - } else { - return caiasset.Asset{}, err - } -} + // gracefulShutdown is not in the cai asset, so graceful_shutdown is skipped. -func GetComputeInstanceData(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { - project, err := tpgresource.GetProject(d, config) - if err != nil { - return nil, err + if resp.InstanceTerminationAction != "" { + schedulingMap["instance_termination_action"] = resp.InstanceTerminationAction } - instance, err := expandComputeInstance(project, d, config) - if err != nil { - return nil, err + if resp.MinNodeCpus != 0 { + schedulingMap["min_node_cpus"] = resp.MinNodeCpus } - return cai.JsonMap(instance) -} - -func expandComputeInstance(project string, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (*compute.Instance, error) { - // Get the machine type - var machineTypeUrl string - if mt, ok := d.GetOk("machine_type"); ok { - machineType, err := tpgresource.ParseMachineTypesFieldValue(mt.(string), d, config) - if err != nil { - return nil, fmt.Errorf( - "Error loading machine type: %s", - err) - } - machineTypeUrl = machineType.RelativeLink() - } + 
schedulingMap["on_host_maintenance"] = resp.OnHostMaintenance - // Build up the list of disks - disks := []*compute.AttachedDisk{} - if _, hasBootDisk := d.GetOk("boot_disk"); hasBootDisk { - bootDisk, err := expandBootDisk(d, config, project) - if err != nil { - return nil, err - } - disks = append(disks, bootDisk) + if resp.AutomaticRestart != nil && !*resp.AutomaticRestart { + schedulingMap["automatic_restart"] = *resp.AutomaticRestart } - if _, hasScratchDisk := d.GetOk("scratch_disk"); hasScratchDisk { - scratchDisks, err := expandScratchDisks(d, config, project) - if err != nil { - return nil, err - } - disks = append(disks, scratchDisks...) + if resp.Preemptible { + schedulingMap["preemptible"] = resp.Preemptible } - attachedDisksCount := d.Get("attached_disk.#").(int) - - for i := 0; i < attachedDisksCount; i++ { - diskConfig := d.Get(fmt.Sprintf("attached_disk.%d", i)).(map[string]interface{}) - disk, err := expandAttachedDisk(diskConfig, d, config) - if err != nil { - return nil, err + if resp.NodeAffinities != nil && len(resp.NodeAffinities) > 0 { + nodeAffinities := []map[string]interface{}{} + for _, na := range resp.NodeAffinities { + nodeAffinities = append(nodeAffinities, map[string]interface{}{ + "key": na.Key, + "operator": na.Operator, + "values": tpgresource.ConvertStringArrToInterface(na.Values), + }) } - - disks = append(disks, disk) + schedulingMap["node_affinities"] = nodeAffinities } - scheduling, err := expandSchedulingTgc(d.Get("scheduling")) - if err != nil { - return nil, fmt.Errorf("error creating scheduling: %s", err) - } + schedulingMap["provisioning_model"] = resp.ProvisioningModel - params, err := expandParams(d) - if err != nil { - return nil, fmt.Errorf("Error creating params: %s", err) + if resp.AvailabilityDomain != 0 { + schedulingMap["availability_domain"] = resp.AvailabilityDomain } - metadata, err := resourceInstanceMetadata(d) - if err != nil { - return nil, fmt.Errorf("Error creating metadata: %s", err) + if 
resp.MaxRunDuration != nil { + schedulingMap["max_run_duration"] = flattenComputeMaxRunDuration(resp.MaxRunDuration) } - partnerMetadata, err := resourceInstancePartnerMetadata(d) - if err != nil { - return nil, fmt.Errorf("Error creating partner metadata: %s", err) + if resp.OnInstanceStopAction != nil { + schedulingMap["on_instance_stop_action"] = flattenOnInstanceStopAction(resp.OnInstanceStopAction) } - networkInterfaces, err := expandNetworkInterfacesTgc(d, config) - if err != nil { - return nil, fmt.Errorf("Error creating network interfaces: %s", err) + if resp.HostErrorTimeoutSeconds != 0 { + schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds } - networkPerformanceConfig, err := expandNetworkPerformanceConfig(d, config) - if err != nil { - return nil, fmt.Errorf("Error creating network performance config: %s", err) + if resp.MaintenanceInterval != "" { + schedulingMap["maintenance_interval"] = resp.MaintenanceInterval } - accels, err := expandInstanceGuestAccelerators(d, config) - if err != nil { - return nil, fmt.Errorf("Error creating guest accelerators: %s", err) + if resp.LocalSsdRecoveryTimeout != nil { + schedulingMap["local_ssd_recovery_timeout"] = flattenComputeLocalSsdRecoveryTimeout(resp.LocalSsdRecoveryTimeout) } - reservationAffinity, err := expandReservationAffinity(d) - if err != nil { - return nil, fmt.Errorf("Error creating reservation affinity: %s", err) + if len(schedulingMap) == 0 { + return nil } - // Create the instance information - return &compute.Instance{ - CanIpForward: d.Get("can_ip_forward").(bool), - Description: d.Get("description").(string), - Disks: disks, - MachineType: machineTypeUrl, - Metadata: metadata, - PartnerMetadata: partnerMetadata, - Name: d.Get("name").(string), - Zone: d.Get("zone").(string), - NetworkInterfaces: networkInterfaces, - NetworkPerformanceConfig: networkPerformanceConfig, - Tags: resourceInstanceTags(d), - Params: params, - Labels: tpgresource.ExpandLabels(d), - 
ServiceAccounts: expandServiceAccounts(d.Get("service_account").([]interface{})), - GuestAccelerators: accels, - MinCpuPlatform: d.Get("min_cpu_platform").(string), - Scheduling: scheduling, - DeletionProtection: d.Get("deletion_protection").(bool), - Hostname: d.Get("hostname").(string), - ConfidentialInstanceConfig: expandConfidentialInstanceConfig(d), - AdvancedMachineFeatures: expandAdvancedMachineFeatures(d), - ShieldedInstanceConfig: expandShieldedVmConfigs(d), - DisplayDevice: expandDisplayDevice(d), - ResourcePolicies: tpgresource.ConvertStringArr(d.Get("resource_policies").([]interface{})), - ReservationAffinity: reservationAffinity, - KeyRevocationActionType: d.Get("key_revocation_action_type").(string), - InstanceEncryptionKey: expandComputeInstanceEncryptionKey(d), - }, nil + return []map[string]interface{}{schedulingMap} } -func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.TerraformResourceData, meta interface{}) (*compute.AttachedDisk, error) { - config := meta.(*transport_tpg.Config) - - s := diskConfig["source"].(string) - var sourceLink string - if strings.Contains(s, "regions/") { - source, err := tpgresource.ParseRegionDiskFieldValue(s, d, config) - if err != nil { - return nil, err - } - sourceLink = source.RelativeLink() - } else { - source, err := tpgresource.ParseDiskFieldValue(s, d, config) - if err != nil { - return nil, err - } - sourceLink = source.RelativeLink() - } - - disk := &compute.AttachedDisk{ - Source: fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", sourceLink), - } - - if v, ok := diskConfig["mode"]; ok { - disk.Mode = v.(string) - } - - if v, ok := diskConfig["device_name"]; ok { - disk.DeviceName = v.(string) - } - - keyValue, keyOk := diskConfig["disk_encryption_key_raw"] - if keyOk { - if keyValue != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - RawKey: keyValue.(string), - } - } - } - - keyValue, keyOk = diskConfig["disk_encryption_key_rsa"] - if keyOk { - if keyValue != "" 
{ - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - RsaEncryptedKey: keyValue.(string), - } - } - } - - kmsValue, kmsOk := diskConfig["kms_key_self_link"] - if kmsOk { - if keyOk && keyValue != "" && kmsValue != "" { - return nil, errors.New("Only one of kms_key_self_link and disk_encryption_key_raw can be set") - } - if kmsValue != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - KmsKeyName: kmsValue.(string), - } - } - } - - kmsServiceAccount, kmsServiceAccountOk := diskConfig["disk_encryption_service_account"] - if kmsServiceAccountOk { - if kmsServiceAccount != "" { - if disk.DiskEncryptionKey == nil { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - KmsKeyServiceAccount: kmsServiceAccount.(string), - } - } - disk.DiskEncryptionKey.KmsKeyServiceAccount = kmsServiceAccount.(string) +func flattenNetworkInterfacesTgc(networkInterfaces []*compute.NetworkInterface, project string) ([]map[string]interface{}, string, string, error) { + flattened := make([]map[string]interface{}, len(networkInterfaces)) + var internalIP, externalIP string + + for i, iface := range networkInterfaces { + var ac []map[string]interface{} + ac, externalIP = flattenAccessConfigs(iface.AccessConfigs) + + flattened[i] = map[string]interface{}{ + "network_ip": iface.NetworkIP, + "access_config": ac, + "alias_ip_range": flattenAliasIpRangeTgc(iface.AliasIpRanges), + "nic_type": iface.NicType, + "stack_type": iface.StackType, + "ipv6_access_config": flattenIpv6AccessConfigs(iface.Ipv6AccessConfigs), + "ipv6_address": iface.Ipv6Address, + "network": tpgresource.ConvertSelfLinkToV1(iface.Network), + "subnetwork": tpgresource.ConvertSelfLinkToV1(iface.Subnetwork), + "internal_ipv6_prefix_length": iface.InternalIpv6PrefixLength, } - } - return disk, nil -} -// See comment on expandInstanceTemplateGuestAccelerators regarding why this -// code is duplicated. 
-func expandInstanceGuestAccelerators(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]*compute.AcceleratorConfig, error) {
-	configs, ok := d.GetOk("guest_accelerator")
-	if !ok {
-		return nil, nil
-	}
-	accels := configs.([]interface{})
-	guestAccelerators := make([]*compute.AcceleratorConfig, 0, len(accels))
-	for _, raw := range accels {
-		data := raw.(map[string]interface{})
-		if data["count"].(int) == 0 {
-			continue
-		}
-		at, err := tpgresource.ParseAcceleratorFieldValue(data["type"].(string), d, config)
-		if err != nil {
-			return nil, fmt.Errorf("cannot parse accelerator type: %v", err)
+		subnetProject := utils.ParseFieldValue(iface.Subnetwork, "projects")
+		if subnetProject != project {
+			flattened[i]["subnetwork_project"] = subnetProject
 		}
-		guestAccelerators = append(guestAccelerators, &compute.AcceleratorConfig{
-			AcceleratorCount: int64(data["count"].(int)),
-			AcceleratorType:  at.RelativeLink(),
-		})
-	}
-	return guestAccelerators, nil
-}
+		// The field name is computed, so it is not converted.
-func expandParams(d tpgresource.TerraformResourceData) (*compute.InstanceParams, error) { - if _, ok := d.GetOk("params.0.resource_manager_tags"); ok { - params := &compute.InstanceParams{ - ResourceManagerTags: tpgresource.ExpandStringMap(d, "params.0.resource_manager_tags"), + if iface.StackType != "IPV4_ONLY" { + flattened[i]["stack_type"] = iface.StackType } - return params, nil - } - - return nil, nil -} - -func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.Config, project string) (*compute.AttachedDisk, error) { - disk := &compute.AttachedDisk{ - AutoDelete: d.Get("boot_disk.0.auto_delete").(bool), - Boot: true, - } - - if v, ok := d.GetOk("boot_disk.0.device_name"); ok { - disk.DeviceName = v.(string) - } - if v, ok := d.GetOk("boot_disk.0.interface"); ok { - disk.Interface = v.(string) - } - - if v, ok := d.GetOk("boot_disk.0.guest_os_features"); ok { - disk.GuestOsFeatures = expandComputeInstanceGuestOsFeatures(v) - } - - if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_raw"); ok { - if v != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - RawKey: v.(string), - } + if iface.QueueCount != 0 { + flattened[i]["queue_count"] = iface.QueueCount } - } - if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_rsa"); ok { - if v != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - RsaEncryptedKey: v.(string), - } + if internalIP == "" { + internalIP = iface.NetworkIP } - } - if v, ok := d.GetOk("boot_disk.0.kms_key_self_link"); ok { - if v != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - KmsKeyName: v.(string), + if iface.NetworkAttachment != "" { + networkAttachment, err := tpgresource.GetRelativePath(iface.NetworkAttachment) + if err != nil { + return nil, "", "", err } + flattened[i]["network_attachment"] = networkAttachment } - } - - if v, ok := d.GetOk("boot_disk.0.disk_encryption_service_account"); ok { - if v != "" { - disk.DiskEncryptionKey.KmsKeyServiceAccount = v.(string) 
- } - } - - // disk_encryption_key_sha256 is computed, so it is not converted. - if v, ok := d.GetOk("boot_disk.0.source"); ok { - var err error - var source interface { - RelativeLink() string - } - if strings.Contains(v.(string), "regions/") { - source, err = tpgresource.ParseRegionDiskFieldValue(v.(string), d, config) - } else { - source, err = tpgresource.ParseDiskFieldValue(v.(string), d, config) - } - if err != nil { - return nil, err + // the security_policy for a network_interface is found in one of its accessConfigs. + if len(iface.AccessConfigs) > 0 && iface.AccessConfigs[0].SecurityPolicy != "" { + flattened[i]["security_policy"] = iface.AccessConfigs[0].SecurityPolicy + } else if len(iface.Ipv6AccessConfigs) > 0 && iface.Ipv6AccessConfigs[0].SecurityPolicy != "" { + flattened[i]["security_policy"] = iface.Ipv6AccessConfigs[0].SecurityPolicy } - disk.Source = fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", source.RelativeLink()) - } - - if _, ok := d.GetOk("boot_disk.0.initialize_params"); ok { - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.size"); ok { - disk.DiskSizeGb = int64(v.(int)) - } - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.architecture"); ok { - disk.Architecture = v.(string) - } - - if v, ok := d.GetOk("boot_disk.0.mode"); ok { - disk.Mode = v.(string) - } - - return disk, nil -} - -func expandScratchDisks(d tpgresource.TerraformResourceData, config *transport_tpg.Config, project string) ([]*compute.AttachedDisk, error) { - diskType, err := readDiskType(config, d, "local-ssd") - if err != nil { - return nil, fmt.Errorf("Error loading disk type 'local-ssd': %s", err) - } - - n := d.Get("scratch_disk.#").(int) - scratchDisks := make([]*compute.AttachedDisk, 0, n) - for i := 0; i < n; i++ { - scratchDisks = append(scratchDisks, &compute.AttachedDisk{ - AutoDelete: true, - Type: "SCRATCH", - Interface: d.Get(fmt.Sprintf("scratch_disk.%d.interface", i)).(string), - DeviceName: 
d.Get(fmt.Sprintf("scratch_disk.%d.device_name", i)).(string), - DiskSizeGb: int64(d.Get(fmt.Sprintf("scratch_disk.%d.size", i)).(int)), - InitializeParams: &compute.AttachedDiskInitializeParams{ - DiskType: diskType.RelativeLink(), - }, - }) - } - - return scratchDisks, nil -} - -func expandStoragePool(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - // ExpandStoragePoolUrl is generated by MMv1 - // return ExpandStoragePoolUrl(v, d, config) - return nil, nil -} - -func GetComputeInstanceDiskCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { - name, err := cai.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/disks/{{name}}") - if err != nil { - return caiasset.Asset{}, err - } - if data, err := GetComputeDiskData(d, config); err == nil { - location, _ := tpgresource.GetLocation(d, config) - return caiasset.Asset{ - Name: name, - Type: ComputeDiskAssetType, - Resource: &caiasset.AssetResource{ - Version: "v1", - DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", - DiscoveryName: "Disk", - Data: data, - Location: location, - }, - }, nil - } else { - return caiasset.Asset{}, err } + return flattened, internalIP, externalIP, nil } -func GetComputeDiskData(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { - project, err := tpgresource.GetProject(d, config) - if err != nil { - return nil, err - } - - diskApiObj, err := expandBootDisk(d, config, project) - if err != nil { - return nil, err - } - - diskDetails, err := cai.JsonMap(diskApiObj) - if err != nil { - return nil, err - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.type"); ok { - diskTypeName := v.(string) - diskType, err := readDiskType(config, d, diskTypeName) - if err != nil { - return nil, fmt.Errorf("Error loading disk type '%s': %s", diskTypeName, err) +func 
flattenServiceAccountsTgc(serviceAccounts []*compute.ServiceAccount) []map[string]interface{} { + result := make([]map[string]interface{}, len(serviceAccounts)) + for i, serviceAccount := range serviceAccounts { + scopes := serviceAccount.Scopes + if len(scopes) == 0 { + scopes = []string{} } - diskDetails["DiskType"] = diskType.RelativeLink() - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.image"); ok { - diskDetails["SourceImage"] = v.(string) - } - - if _, ok := d.GetOk("boot_disk.0.initialize_params.0.labels"); ok { - diskDetails["Labels"] = tpgresource.ExpandStringMap(d, "boot_disk.0.initialize_params.0.labels") - } - - if _, ok := d.GetOk("boot_disk.0.initialize_params.0.resource_policies"); ok { - diskDetails["ResourcePolicies"] = tpgresource.ConvertStringArr(d.Get("boot_disk.0.initialize_params.0.resource_policies").([]interface{})) - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.provisioned_iops"); ok { - diskDetails["ProvisionedIops"] = int64(v.(int)) - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.provisioned_throughput"); ok { - diskDetails["ProvisionedThroughput"] = int64(v.(int)) - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.enable_confidential_compute"); ok { - diskDetails["EnableConfidentialCompute"] = v.(bool) - } - - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.storage_pool"); ok { - storagePoolUrl, err := expandStoragePool(v, d, config) - if err != nil { - return nil, fmt.Errorf("Error resolving storage pool name '%s': '%s'", v.(string), err) + result[i] = map[string]interface{}{ + "email": serviceAccount.Email, + "scopes": scopes, } - diskDetails["StoragePool"] = storagePoolUrl.(string) } - - return diskDetails, nil + return result } -func expandNetworkInterfacesTgc(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]*compute.NetworkInterface, error) { - configs := d.Get("network_interface").([]interface{}) - ifaces := make([]*compute.NetworkInterface, len(configs)) 
- for i, raw := range configs { - data := raw.(map[string]interface{}) - - var networkAttachment = "" - network := data["network"].(string) - subnetwork := data["subnetwork"].(string) - if networkAttachmentObj, ok := data["network_attachment"]; ok { - networkAttachment = networkAttachmentObj.(string) - } - // Checks if networkAttachment is not specified in resource, network or subnetwork have to be specified. - if networkAttachment == "" && network == "" && subnetwork == "" { - return nil, fmt.Errorf("exactly one of network, subnetwork, or network_attachment must be provided") - } - - ifaces[i] = &compute.NetworkInterface{ - NetworkIP: data["network_ip"].(string), - Network: network, - NetworkAttachment: networkAttachment, - Subnetwork: subnetwork, - AccessConfigs: expandAccessConfigs(data["access_config"].([]interface{})), - AliasIpRanges: expandAliasIpRanges(data["alias_ip_range"].([]interface{})), - NicType: data["nic_type"].(string), - StackType: data["stack_type"].(string), - QueueCount: int64(data["queue_count"].(int)), - Ipv6AccessConfigs: expandIpv6AccessConfigs(data["ipv6_access_config"].([]interface{})), - Ipv6Address: data["ipv6_address"].(string), - InternalIpv6PrefixLength: int64(data["internal_ipv6_prefix_length"].(int)), +func flattenGuestAcceleratorsTgc(accelerators []*compute.AcceleratorConfig) []map[string]interface{} { + acceleratorsSchema := make([]map[string]interface{}, len(accelerators)) + for i, accelerator := range accelerators { + acceleratorsSchema[i] = map[string]interface{}{ + "count": accelerator.AcceleratorCount, + "type": tpgresource.GetResourceNameFromSelfLink(accelerator.AcceleratorType), } } - return ifaces, nil + return acceleratorsSchema } -func expandSchedulingTgc(v interface{}) (*compute.Scheduling, error) { - if v == nil { - // We can't set default values for lists. 
- return &compute.Scheduling{ - AutomaticRestart: googleapi.Bool(true), - }, nil - } - - ls := v.([]interface{}) - if len(ls) == 0 { - // We can't set default values for lists - return &compute.Scheduling{ - AutomaticRestart: googleapi.Bool(true), - }, nil - } - - if len(ls) > 1 || ls[0] == nil { - return nil, fmt.Errorf("expected exactly one scheduling block") - } - - original := ls[0].(map[string]interface{}) - scheduling := &compute.Scheduling{ - ForceSendFields: make([]string, 0, 4), - } - - if v, ok := original["automatic_restart"]; ok { - scheduling.AutomaticRestart = googleapi.Bool(v.(bool)) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "AutomaticRestart") +func flattenReservationAffinityTgc(affinity *compute.ReservationAffinity) []map[string]interface{} { + if affinity == nil { + return nil } - if v, ok := original["preemptible"]; ok { - scheduling.Preemptible = v.(bool) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "Preemptible") + // The values of ConsumeReservationType in cai assets are NO_ALLOCATION, SPECIFIC_ALLOCATION, ANY_ALLOCATION + crt := strings.ReplaceAll(affinity.ConsumeReservationType, "_ALLOCATION", "_RESERVATION") + flattened := map[string]interface{}{ + "type": crt, } - if v, ok := original["on_host_maintenance"]; ok { - scheduling.OnHostMaintenance = v.(string) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnHostMaintenance") - } - - if v, ok := original["node_affinities"]; ok && v != nil { - naSet := v.(*schema.Set).List() - scheduling.NodeAffinities = make([]*compute.SchedulingNodeAffinity, 0) - for _, nodeAffRaw := range naSet { - if nodeAffRaw == nil { - continue - } - nodeAff := nodeAffRaw.(map[string]interface{}) - transformed := &compute.SchedulingNodeAffinity{ - Key: nodeAff["key"].(string), - Operator: nodeAff["operator"].(string), - Values: tpgresource.ConvertStringArr(nodeAff["values"].(*schema.Set).List()), - } - scheduling.NodeAffinities = 
append(scheduling.NodeAffinities, transformed) - } + if crt == "SPECIFIC_RESERVATION" { + flattened["specific_reservation"] = []map[string]interface{}{{ + "key": affinity.Key, + "values": affinity.Values, + }} } - if v, ok := original["min_node_cpus"]; ok { - scheduling.MinNodeCpus = int64(v.(int)) - } - if v, ok := original["provisioning_model"]; ok { - scheduling.ProvisioningModel = v.(string) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "ProvisioningModel") - } - if v, ok := original["instance_termination_action"]; ok { - scheduling.InstanceTerminationAction = v.(string) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "InstanceTerminationAction") - } - if v, ok := original["availability_domain"]; ok && v != nil { - scheduling.AvailabilityDomain = int64(v.(int)) - } - if v, ok := original["max_run_duration"]; ok { - transformedMaxRunDuration, err := expandComputeMaxRunDuration(v) - if err != nil { - return nil, err - } - scheduling.MaxRunDuration = transformedMaxRunDuration - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "MaxRunDuration") - } - - if v, ok := original["on_instance_stop_action"]; ok { - transformedOnInstanceStopAction, err := expandComputeOnInstanceStopAction(v) - if err != nil { - return nil, err - } - scheduling.OnInstanceStopAction = transformedOnInstanceStopAction - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnInstanceStopAction") - } - if v, ok := original["host_error_timeout_seconds"]; ok { - if v != nil && v != 0 { - scheduling.HostErrorTimeoutSeconds = int64(v.(int)) - } - } - - if v, ok := original["maintenance_interval"]; ok { - scheduling.MaintenanceInterval = v.(string) - } - - if v, ok := original["graceful_shutdown"]; ok { - transformedGracefulShutdown, err := expandGracefulShutdown(v) - if err != nil { - return nil, err - } - scheduling.GracefulShutdown = transformedGracefulShutdown - scheduling.ForceSendFields = append(scheduling.ForceSendFields, 
"GracefulShutdown") - } - if v, ok := original["local_ssd_recovery_timeout"]; ok { - transformedLocalSsdRecoveryTimeout, err := expandComputeLocalSsdRecoveryTimeout(v) - if err != nil { - return nil, err - } - scheduling.LocalSsdRecoveryTimeout = transformedLocalSsdRecoveryTimeout - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "LocalSsdRecoveryTimeout") - } - if v, ok := original["termination_time"]; ok { - scheduling.TerminationTime = v.(string) - } - return scheduling, nil + return []map[string]interface{}{flattened} } diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_cai2hcl.go similarity index 85% rename from mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go rename to mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_cai2hcl.go index 3edeba398e2e..c39a7f2a623d 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go +++ b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_cai2hcl.go @@ -1,43 +1,37 @@ package compute import ( - "encoding/json" "fmt" "strings" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" compute "google.golang.org/api/compute/v0.beta" ) -// ComputeInstanceAssetType is the CAI asset type name for compute instance. -const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" - -// ComputeInstanceSchemaName is the TF resource schema name for compute instance. 
-const ComputeInstanceSchemaName string = "google_compute_instance" - -// ComputeInstanceConverter for compute instance resource. -type ComputeInstanceConverter struct { +// ComputeInstanceCai2hclConverter for compute instance resource. +type ComputeInstanceCai2hclConverter struct { name string schema map[string]*schema.Schema } -// NewComputeInstanceConverter returns an HCL converter for compute instance. -func NewComputeInstanceConverter(provider *schema.Provider) models.Converter { +// NewComputeInstanceCai2hclConverter returns an HCL converter for compute instance. +func NewComputeInstanceCai2hclConverter(provider *schema.Provider) models.Cai2hclConverter { schema := provider.ResourcesMap[ComputeInstanceSchemaName].Schema - return &ComputeInstanceConverter{ + return &ComputeInstanceCai2hclConverter{ name: ComputeInstanceSchemaName, schema: schema, } } // Convert converts asset to HCL resource blocks. -func (c *ComputeInstanceConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { +func (c *ComputeInstanceCai2hclConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { var blocks []*models.TerraformResourceBlock block, err := c.convertResourceData(asset) if err != nil { @@ -47,7 +41,7 @@ func (c *ComputeInstanceConverter) Convert(asset caiasset.Asset) ([]*models.Terr return blocks, nil } -func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { +func (c *ComputeInstanceCai2hclConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { if asset.Resource == nil || asset.Resource.Data == nil { return nil, fmt.Errorf("asset resource data is nil") } @@ -78,7 +72,7 @@ func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*m hclData["tags"] = tpgresource.ConvertStringArrToInterface(instance.Tags.Items) } - hclData["labels"] = utils.RemoveTerraformAttributionLabel(instance.Labels) + 
hclData["labels"] = tgcresource.RemoveTerraformAttributionLabel(instance.Labels) hclData["service_account"] = flattenServiceAccountsTgc(instance.ServiceAccounts) hclData["resource_policies"] = instance.ResourcePolicies @@ -240,19 +234,3 @@ func flattenScratchDisk(disk *compute.AttachedDisk) map[string]interface{} { return result } - -func flattenPartnerMetadata(partnerMetadata map[string]compute.StructuredEntries) (map[string]string, error) { - partnerMetadataMap := make(map[string]string) - for key, value := range partnerMetadata { - - jsonString, err := json.Marshal(&value) - if err != nil { - return nil, err - } - if value.Entries != nil { - partnerMetadataMap[key] = string(jsonString) - } - - } - return partnerMetadataMap, nil -} diff --git a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go new file mode 100644 index 000000000000..a135cf6cb332 --- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go @@ -0,0 +1,654 @@ +package compute + +import ( + "errors" + "fmt" + "strings" + + compute "google.golang.org/api/compute/v0.beta" + "google.golang.org/api/googleapi" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" +) + +func ComputeInstanceTfplan2caiConverter() cai.Tfplan2caiConverter { + return cai.Tfplan2caiConverter{ + Convert: GetComputeInstanceAndDisksCaiObjects, + } +} + +func GetComputeInstanceAndDisksCaiObjects(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { + if instanceAsset, err := 
GetComputeInstanceCaiObject(d, config); err == nil { + assets := []caiasset.Asset{instanceAsset} + if diskAsset, err := GetComputeInstanceDiskCaiObject(d, config); err == nil { + assets = append(assets, diskAsset) + return assets, nil + } else { + return []caiasset.Asset{}, err + } + } else { + return []caiasset.Asset{}, err + } +} + +func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { + name, err := cai.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/instances/{{name}}") + if err != nil { + return caiasset.Asset{}, err + } + if data, err := GetComputeInstanceData(d, config); err == nil { + location, _ := tpgresource.GetLocation(d, config) + return caiasset.Asset{ + Name: name, + Type: ComputeInstanceAssetType, + Resource: &caiasset.AssetResource{ + Version: "v1", + DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", + DiscoveryName: "Instance", + Data: data, + Location: location, + }, + }, nil + } else { + return caiasset.Asset{}, err + } +} + +func GetComputeInstanceData(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { + project, err := tpgresource.GetProject(d, config) + if err != nil { + return nil, err + } + + instance, err := expandComputeInstance(project, d, config) + if err != nil { + return nil, err + } + + return cai.JsonMap(instance) +} + +func expandComputeInstance(project string, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (*compute.Instance, error) { + // Get the machine type + var machineTypeUrl string + if mt, ok := d.GetOk("machine_type"); ok { + machineType, err := tpgresource.ParseMachineTypesFieldValue(mt.(string), d, config) + if err != nil { + return nil, fmt.Errorf( + "Error loading machine type: %s", + err) + } + machineTypeUrl = machineType.RelativeLink() + } + + // Build up the list of disks + disks := 
[]*compute.AttachedDisk{} + if _, hasBootDisk := d.GetOk("boot_disk"); hasBootDisk { + bootDisk, err := expandBootDisk(d, config, project) + if err != nil { + return nil, err + } + disks = append(disks, bootDisk) + } + + if _, hasScratchDisk := d.GetOk("scratch_disk"); hasScratchDisk { + scratchDisks, err := expandScratchDisks(d, config, project) + if err != nil { + return nil, err + } + disks = append(disks, scratchDisks...) + } + + attachedDisksCount := d.Get("attached_disk.#").(int) + + for i := 0; i < attachedDisksCount; i++ { + diskConfig := d.Get(fmt.Sprintf("attached_disk.%d", i)).(map[string]interface{}) + disk, err := expandAttachedDisk(diskConfig, d, config) + if err != nil { + return nil, err + } + + disks = append(disks, disk) + } + + scheduling, err := expandSchedulingTgc(d.Get("scheduling")) + if err != nil { + return nil, fmt.Errorf("error creating scheduling: %s", err) + } + + params, err := expandParams(d) + if err != nil { + return nil, fmt.Errorf("Error creating params: %s", err) + } + + metadata, err := resourceInstanceMetadata(d) + if err != nil { + return nil, fmt.Errorf("Error creating metadata: %s", err) + } + + partnerMetadata, err := resourceInstancePartnerMetadata(d) + if err != nil { + return nil, fmt.Errorf("Error creating partner metadata: %s", err) + } + + networkInterfaces, err := expandNetworkInterfacesTgc(d, config) + if err != nil { + return nil, fmt.Errorf("Error creating network interfaces: %s", err) + } + + networkPerformanceConfig, err := expandNetworkPerformanceConfig(d, config) + if err != nil { + return nil, fmt.Errorf("Error creating network performance config: %s", err) + } + + accels, err := expandInstanceGuestAccelerators(d, config) + if err != nil { + return nil, fmt.Errorf("Error creating guest accelerators: %s", err) + } + + reservationAffinity, err := expandReservationAffinity(d) + if err != nil { + return nil, fmt.Errorf("Error creating reservation affinity: %s", err) + } + + // Create the instance information + 
return &compute.Instance{ + CanIpForward: d.Get("can_ip_forward").(bool), + Description: d.Get("description").(string), + Disks: disks, + MachineType: machineTypeUrl, + Metadata: metadata, + PartnerMetadata: partnerMetadata, + Name: d.Get("name").(string), + Zone: d.Get("zone").(string), + NetworkInterfaces: networkInterfaces, + NetworkPerformanceConfig: networkPerformanceConfig, + Tags: resourceInstanceTags(d), + Params: params, + Labels: tpgresource.ExpandLabels(d), + ServiceAccounts: expandServiceAccounts(d.Get("service_account").([]interface{})), + GuestAccelerators: accels, + MinCpuPlatform: d.Get("min_cpu_platform").(string), + Scheduling: scheduling, + DeletionProtection: d.Get("deletion_protection").(bool), + Hostname: d.Get("hostname").(string), + ConfidentialInstanceConfig: expandConfidentialInstanceConfig(d), + AdvancedMachineFeatures: expandAdvancedMachineFeatures(d), + ShieldedInstanceConfig: expandShieldedVmConfigs(d), + DisplayDevice: expandDisplayDevice(d), + ResourcePolicies: tpgresource.ConvertStringArr(d.Get("resource_policies").([]interface{})), + ReservationAffinity: reservationAffinity, + KeyRevocationActionType: d.Get("key_revocation_action_type").(string), + InstanceEncryptionKey: expandComputeInstanceEncryptionKey(d), + }, nil +} + +func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.TerraformResourceData, meta interface{}) (*compute.AttachedDisk, error) { + config := meta.(*transport_tpg.Config) + + s := diskConfig["source"].(string) + var sourceLink string + if strings.Contains(s, "regions/") { + source, err := tpgresource.ParseRegionDiskFieldValue(s, d, config) + if err != nil { + return nil, err + } + sourceLink = source.RelativeLink() + } else { + source, err := tpgresource.ParseDiskFieldValue(s, d, config) + if err != nil { + return nil, err + } + sourceLink = source.RelativeLink() + } + + disk := &compute.AttachedDisk{ + Source: fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", sourceLink), + } + + if v, ok 
:= diskConfig["mode"]; ok { + disk.Mode = v.(string) + } + + if v, ok := diskConfig["device_name"]; ok { + disk.DeviceName = v.(string) + } + + keyValue, keyOk := diskConfig["disk_encryption_key_raw"] + if keyOk { + if keyValue != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + RawKey: keyValue.(string), + } + } + } + + keyValue, keyOk = diskConfig["disk_encryption_key_rsa"] + if keyOk { + if keyValue != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + RsaEncryptedKey: keyValue.(string), + } + } + } + + kmsValue, kmsOk := diskConfig["kms_key_self_link"] + if kmsOk { + if keyOk && keyValue != "" && kmsValue != "" { + return nil, errors.New("Only one of kms_key_self_link and disk_encryption_key_raw can be set") + } + if kmsValue != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + KmsKeyName: kmsValue.(string), + } + } + } + + kmsServiceAccount, kmsServiceAccountOk := diskConfig["disk_encryption_service_account"] + if kmsServiceAccountOk { + if kmsServiceAccount != "" { + if disk.DiskEncryptionKey == nil { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + KmsKeyServiceAccount: kmsServiceAccount.(string), + } + } + disk.DiskEncryptionKey.KmsKeyServiceAccount = kmsServiceAccount.(string) + } + } + return disk, nil +} + +// See comment on expandInstanceTemplateGuestAccelerators regarding why this +// code is duplicated. 
+func expandInstanceGuestAccelerators(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]*compute.AcceleratorConfig, error) { + configs, ok := d.GetOk("guest_accelerator") + if !ok { + return nil, nil + } + accels := configs.([]interface{}) + guestAccelerators := make([]*compute.AcceleratorConfig, 0, len(accels)) + for _, raw := range accels { + data := raw.(map[string]interface{}) + if data["count"].(int) == 0 { + continue + } + at, err := tpgresource.ParseAcceleratorFieldValue(data["type"].(string), d, config) + if err != nil { + return nil, fmt.Errorf("cannot parse accelerator type: %v", err) + } + guestAccelerators = append(guestAccelerators, &compute.AcceleratorConfig{ + AcceleratorCount: int64(data["count"].(int)), + AcceleratorType: at.RelativeLink(), + }) + } + + return guestAccelerators, nil +} + +func expandParams(d tpgresource.TerraformResourceData) (*compute.InstanceParams, error) { + if _, ok := d.GetOk("params.0.resource_manager_tags"); ok { + params := &compute.InstanceParams{ + ResourceManagerTags: tpgresource.ExpandStringMap(d, "params.0.resource_manager_tags"), + } + return params, nil + } + + return nil, nil +} + +func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.Config, project string) (*compute.AttachedDisk, error) { + disk := &compute.AttachedDisk{ + AutoDelete: d.Get("boot_disk.0.auto_delete").(bool), + Boot: true, + } + + if v, ok := d.GetOk("boot_disk.0.device_name"); ok { + disk.DeviceName = v.(string) + } + + if v, ok := d.GetOk("boot_disk.0.interface"); ok { + disk.Interface = v.(string) + } + + if v, ok := d.GetOk("boot_disk.0.guest_os_features"); ok { + disk.GuestOsFeatures = expandComputeInstanceGuestOsFeatures(v) + } + + if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_raw"); ok { + if v != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + RawKey: v.(string), + } + } + } + + if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_rsa"); ok { + if v != "" { + 
disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + RsaEncryptedKey: v.(string), + } + } + } + + if v, ok := d.GetOk("boot_disk.0.kms_key_self_link"); ok { + if v != "" { + disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ + KmsKeyName: v.(string), + } + } + } + + if v, ok := d.GetOk("boot_disk.0.disk_encryption_service_account"); ok { + if v != "" { + disk.DiskEncryptionKey.KmsKeyServiceAccount = v.(string) + } + } + + // disk_encryption_key_sha256 is computed, so it is not converted. + + if v, ok := d.GetOk("boot_disk.0.source"); ok { + var err error + var source interface { + RelativeLink() string + } + if strings.Contains(v.(string), "regions/") { + source, err = tpgresource.ParseRegionDiskFieldValue(v.(string), d, config) + } else { + source, err = tpgresource.ParseDiskFieldValue(v.(string), d, config) + } + if err != nil { + return nil, err + } + disk.Source = fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", source.RelativeLink()) + } + + if _, ok := d.GetOk("boot_disk.0.initialize_params"); ok { + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.size"); ok { + disk.DiskSizeGb = int64(v.(int)) + } + } + + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.architecture"); ok { + disk.Architecture = v.(string) + } + + if v, ok := d.GetOk("boot_disk.0.mode"); ok { + disk.Mode = v.(string) + } + + return disk, nil +} + +func expandScratchDisks(d tpgresource.TerraformResourceData, config *transport_tpg.Config, project string) ([]*compute.AttachedDisk, error) { + diskType, err := readDiskType(config, d, "local-ssd") + if err != nil { + return nil, fmt.Errorf("Error loading disk type 'local-ssd': %s", err) + } + + n := d.Get("scratch_disk.#").(int) + scratchDisks := make([]*compute.AttachedDisk, 0, n) + for i := 0; i < n; i++ { + scratchDisks = append(scratchDisks, &compute.AttachedDisk{ + AutoDelete: true, + Type: "SCRATCH", + Interface: d.Get(fmt.Sprintf("scratch_disk.%d.interface", i)).(string), + DeviceName: 
d.Get(fmt.Sprintf("scratch_disk.%d.device_name", i)).(string), + DiskSizeGb: int64(d.Get(fmt.Sprintf("scratch_disk.%d.size", i)).(int)), + InitializeParams: &compute.AttachedDiskInitializeParams{ + DiskType: diskType.RelativeLink(), + }, + }) + } + + return scratchDisks, nil +} + +func expandStoragePool(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + // ExpandStoragePoolUrl is generated by MMv1 + // return ExpandStoragePoolUrl(v, d, config) + return nil, nil +} + +func GetComputeInstanceDiskCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { + name, err := cai.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/disks/{{name}}") + if err != nil { + return caiasset.Asset{}, err + } + if data, err := GetComputeDiskData(d, config); err == nil { + location, _ := tpgresource.GetLocation(d, config) + return caiasset.Asset{ + Name: name, + Type: ComputeDiskAssetType, + Resource: &caiasset.AssetResource{ + Version: "v1", + DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", + DiscoveryName: "Disk", + Data: data, + Location: location, + }, + }, nil + } else { + return caiasset.Asset{}, err + } +} + +func GetComputeDiskData(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { + project, err := tpgresource.GetProject(d, config) + if err != nil { + return nil, err + } + + diskApiObj, err := expandBootDisk(d, config, project) + if err != nil { + return nil, err + } + + diskDetails, err := cai.JsonMap(diskApiObj) + if err != nil { + return nil, err + } + + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.type"); ok { + diskTypeName := v.(string) + diskType, err := readDiskType(config, d, diskTypeName) + if err != nil { + return nil, fmt.Errorf("Error loading disk type '%s': %s", diskTypeName, err) + } + diskDetails["DiskType"] = diskType.RelativeLink() + } 
+ + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.image"); ok { + diskDetails["SourceImage"] = v.(string) + } + + if _, ok := d.GetOk("boot_disk.0.initialize_params.0.labels"); ok { + diskDetails["Labels"] = tpgresource.ExpandStringMap(d, "boot_disk.0.initialize_params.0.labels") + } + + if _, ok := d.GetOk("boot_disk.0.initialize_params.0.resource_policies"); ok { + diskDetails["ResourcePolicies"] = tpgresource.ConvertStringArr(d.Get("boot_disk.0.initialize_params.0.resource_policies").([]interface{})) + } + + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.provisioned_iops"); ok { + diskDetails["ProvisionedIops"] = int64(v.(int)) + } + + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.provisioned_throughput"); ok { + diskDetails["ProvisionedThroughput"] = int64(v.(int)) + } + + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.enable_confidential_compute"); ok { + diskDetails["EnableConfidentialCompute"] = v.(bool) + } + + if v, ok := d.GetOk("boot_disk.0.initialize_params.0.storage_pool"); ok { + storagePoolUrl, err := expandStoragePool(v, d, config) + if err != nil { + return nil, fmt.Errorf("Error resolving storage pool name '%s': '%s'", v.(string), err) + } + diskDetails["StoragePool"] = storagePoolUrl.(string) + } + + return diskDetails, nil +} + +func expandNetworkInterfacesTgc(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]*compute.NetworkInterface, error) { + configs := d.Get("network_interface").([]interface{}) + ifaces := make([]*compute.NetworkInterface, len(configs)) + for i, raw := range configs { + data := raw.(map[string]interface{}) + + var networkAttachment = "" + network := data["network"].(string) + subnetwork := data["subnetwork"].(string) + if networkAttachmentObj, ok := data["network_attachment"]; ok { + networkAttachment = networkAttachmentObj.(string) + } + // Checks if networkAttachment is not specified in resource, network or subnetwork have to be specified. 
+ if networkAttachment == "" && network == "" && subnetwork == "" { + return nil, fmt.Errorf("exactly one of network, subnetwork, or network_attachment must be provided") + } + + ifaces[i] = &compute.NetworkInterface{ + NetworkIP: data["network_ip"].(string), + Network: network, + NetworkAttachment: networkAttachment, + Subnetwork: subnetwork, + AccessConfigs: expandAccessConfigs(data["access_config"].([]interface{})), + AliasIpRanges: expandAliasIpRanges(data["alias_ip_range"].([]interface{})), + NicType: data["nic_type"].(string), + StackType: data["stack_type"].(string), + QueueCount: int64(data["queue_count"].(int)), + Ipv6AccessConfigs: expandIpv6AccessConfigs(data["ipv6_access_config"].([]interface{})), + Ipv6Address: data["ipv6_address"].(string), + InternalIpv6PrefixLength: int64(data["internal_ipv6_prefix_length"].(int)), + } + } + return ifaces, nil +} + +func expandSchedulingTgc(v interface{}) (*compute.Scheduling, error) { + if v == nil { + // We can't set default values for lists. 
+ return &compute.Scheduling{ + AutomaticRestart: googleapi.Bool(true), + }, nil + } + + ls := v.([]interface{}) + if len(ls) == 0 { + // We can't set default values for lists + return &compute.Scheduling{ + AutomaticRestart: googleapi.Bool(true), + }, nil + } + + if len(ls) > 1 || ls[0] == nil { + return nil, fmt.Errorf("expected exactly one scheduling block") + } + + original := ls[0].(map[string]interface{}) + scheduling := &compute.Scheduling{ + ForceSendFields: make([]string, 0, 4), + } + + if v, ok := original["automatic_restart"]; ok { + scheduling.AutomaticRestart = googleapi.Bool(v.(bool)) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "AutomaticRestart") + } + + if v, ok := original["preemptible"]; ok { + scheduling.Preemptible = v.(bool) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "Preemptible") + } + + if v, ok := original["on_host_maintenance"]; ok { + scheduling.OnHostMaintenance = v.(string) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnHostMaintenance") + } + + if v, ok := original["node_affinities"]; ok && v != nil { + naSet := v.(*schema.Set).List() + scheduling.NodeAffinities = make([]*compute.SchedulingNodeAffinity, 0) + for _, nodeAffRaw := range naSet { + if nodeAffRaw == nil { + continue + } + nodeAff := nodeAffRaw.(map[string]interface{}) + transformed := &compute.SchedulingNodeAffinity{ + Key: nodeAff["key"].(string), + Operator: nodeAff["operator"].(string), + Values: tpgresource.ConvertStringArr(nodeAff["values"].(*schema.Set).List()), + } + scheduling.NodeAffinities = append(scheduling.NodeAffinities, transformed) + } + } + + if v, ok := original["min_node_cpus"]; ok { + scheduling.MinNodeCpus = int64(v.(int)) + } + if v, ok := original["provisioning_model"]; ok { + scheduling.ProvisioningModel = v.(string) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "ProvisioningModel") + } + if v, ok := original["instance_termination_action"]; ok { + 
scheduling.InstanceTerminationAction = v.(string) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "InstanceTerminationAction") + } + if v, ok := original["availability_domain"]; ok && v != nil { + scheduling.AvailabilityDomain = int64(v.(int)) + } + if v, ok := original["max_run_duration"]; ok { + transformedMaxRunDuration, err := expandComputeMaxRunDuration(v) + if err != nil { + return nil, err + } + scheduling.MaxRunDuration = transformedMaxRunDuration + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "MaxRunDuration") + } + + if v, ok := original["on_instance_stop_action"]; ok { + transformedOnInstanceStopAction, err := expandComputeOnInstanceStopAction(v) + if err != nil { + return nil, err + } + scheduling.OnInstanceStopAction = transformedOnInstanceStopAction + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnInstanceStopAction") + } + if v, ok := original["host_error_timeout_seconds"]; ok { + if v != nil && v != 0 { + scheduling.HostErrorTimeoutSeconds = int64(v.(int)) + } + } + + if v, ok := original["maintenance_interval"]; ok { + scheduling.MaintenanceInterval = v.(string) + } + + if v, ok := original["graceful_shutdown"]; ok { + transformedGracefulShutdown, err := expandGracefulShutdown(v) + if err != nil { + return nil, err + } + scheduling.GracefulShutdown = transformedGracefulShutdown + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "GracefulShutdown") + } + if v, ok := original["local_ssd_recovery_timeout"]; ok { + transformedLocalSsdRecoveryTimeout, err := expandComputeLocalSsdRecoveryTimeout(v) + if err != nil { + return nil, err + } + scheduling.LocalSsdRecoveryTimeout = transformedLocalSsdRecoveryTimeout + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "LocalSsdRecoveryTimeout") + } + if v, ok := original["termination_time"]; ok { + scheduling.TerminationTime = v.(string) + } + return scheduling, nil +} diff --git 
a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go similarity index 54% rename from mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go rename to mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go index af57aa05dd0d..c1ee60c4989f 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go +++ b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go @@ -1,23 +1,19 @@ package resourcemanager import ( - "fmt" - "strconv" "strings" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" +// ProjectAssetType is the CAI asset type name for project. +const ProjectAssetType string = "cloudresourcemanager.googleapis.com/Project" - "google.golang.org/api/cloudbilling/v1" - "google.golang.org/api/cloudresourcemanager/v1" -) +// ProjectSchemaName is the TF resource schema name for resourcemanager project. 
+const ProjectSchemaName string = "google_project" func ParseFolderId(v interface{}) string { folderId := v.(string) @@ -119,136 +115,3 @@ func ResourceGoogleProject() *schema.Resource { UseJSONNumber: true, } } - -func ResourceConverterProject() cai.ResourceConverter { - return cai.ResourceConverter{ - Convert: GetProjectAndBillingInfoCaiObjects, - } -} - -func GetProjectAndBillingInfoCaiObjects(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { - if projectAsset, err := GetProjectCaiObject(d, config); err == nil { - assets := []caiasset.Asset{projectAsset} - if _, ok := d.GetOk("billing_account"); !ok { - return assets, nil - } else { - if billingAsset, err := GetProjectBillingInfoCaiObject(d, config); err == nil { - assets = append(assets, billingAsset) - return assets, nil - } else { - return []caiasset.Asset{}, err - } - } - } else { - return []caiasset.Asset{}, err - } -} - -func GetProjectCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { - linkTmpl := "//cloudresourcemanager.googleapis.com/projects/{{number}}" - name, err := cai.AssetName(d, config, linkTmpl) - if err != nil { - return caiasset.Asset{}, err - } - if data, err := GetProjectData(d, config); err == nil { - return caiasset.Asset{ - Name: name, - Type: "cloudresourcemanager.googleapis.com/Project", - Resource: &caiasset.AssetResource{ - Version: "v1", - DiscoveryDocumentURI: "https://cloudresourcemanager.googleapis.com/$discovery/rest?version=v1", - DiscoveryName: "Project", - Data: data, - }, - }, nil - } else { - return caiasset.Asset{}, err - } -} - -func GetProjectData(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { - pid := d.Get("project_id").(string) - - project := &cloudresourcemanager.Project{ - ProjectId: pid, - Name: d.Get("name").(string), - } - - if res, ok := d.GetOk("number"); ok { - num, err := strconv.ParseInt(res.(string), 10, 64) 
- if err != nil { - return nil, err - } - - project.ProjectNumber = num - } - - if err := getParentResourceId(d, project); err != nil { - return nil, err - } - - if _, ok := d.GetOk("effective_labels"); ok { - project.Labels = tpgresource.ExpandEffectiveLabels(d) - } - - return cai.JsonMap(project) -} - -func getParentResourceId(d tpgresource.TerraformResourceData, p *cloudresourcemanager.Project) error { - orgId := d.Get("org_id").(string) - folderId := d.Get("folder_id").(string) - - if orgId != "" && folderId != "" { - return fmt.Errorf("'org_id' and 'folder_id' cannot be both set.") - } - - if orgId != "" { - p.Parent = &cloudresourcemanager.ResourceId{ - Id: orgId, - Type: "organization", - } - } - - if folderId != "" { - p.Parent = &cloudresourcemanager.ResourceId{ - Id: strings.TrimPrefix(folderId, "folders/"), - Type: "folder", - } - } - - return nil -} - -func GetProjectBillingInfoCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { - linkTmpl := "//cloudbilling.googleapis.com/projects/{{project_id_or_project}}/billingInfo" - name, err := cai.AssetName(d, config, linkTmpl) - if err != nil { - return caiasset.Asset{}, err - } - project := strings.Split(name, "/")[4] - if data, err := GetProjectBillingInfoData(d, project); err == nil { - return caiasset.Asset{ - Name: name, - Type: "cloudbilling.googleapis.com/ProjectBillingInfo", - Resource: &caiasset.AssetResource{ - Version: "v1", - DiscoveryDocumentURI: "https://cloudbilling.googleapis.com/$discovery/rest", - DiscoveryName: "ProjectBillingInfo", - Data: data, - Location: "global", - }, - }, nil - } else { - return caiasset.Asset{}, err - } -} - -func GetProjectBillingInfoData(d tpgresource.TerraformResourceData, project string) (map[string]interface{}, error) { - ba := &cloudbilling.ProjectBillingInfo{ - BillingAccountName: fmt.Sprintf("billingAccounts/%s", d.Get("billing_account")), - Name: fmt.Sprintf("projects/%s/billingInfo", project), - ProjectId: 
d.Get("project_id").(string), - } - - return cai.JsonMap(ba) -} diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/resourcemanager/project.go b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_cai2hcl.go similarity index 69% rename from mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/resourcemanager/project.go rename to mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_cai2hcl.go index 9ca8b5769e25..699326a59f42 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/resourcemanager/project.go +++ b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_cai2hcl.go @@ -7,34 +7,29 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" tfschema "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -// ProjectAssetType is the CAI asset type name for project. -const ProjectAssetType string = "cloudresourcemanager.googleapis.com/Project" - -// ProjectSchemaName is the TF resource schema name for resourcemanager project. -const ProjectSchemaName string = "google_project" - -// ProjectConverter for compute project resource. -type ProjectConverter struct { +// ProjectCai2hclConverter for compute project resource. +type ProjectCai2hclConverter struct { name string schema map[string]*tfschema.Schema } // NewProjectConverter returns an HCL converter for compute project. 
-func NewProjectConverter(provider *tfschema.Provider) models.Converter { +func NewProjectCai2hclConverter(provider *tfschema.Provider) models.Cai2hclConverter { schema := provider.ResourcesMap[ProjectSchemaName].Schema - return &ProjectConverter{ + return &ProjectCai2hclConverter{ name: ProjectSchemaName, schema: schema, } } // Convert converts asset resource data. -func (c *ProjectConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { +func (c *ProjectCai2hclConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { var blocks []*models.TerraformResourceBlock block, err := c.convertResourceData(asset) if err != nil { @@ -44,7 +39,7 @@ func (c *ProjectConverter) Convert(asset caiasset.Asset) ([]*models.TerraformRes return blocks, nil } -func (c *ProjectConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { +func (c *ProjectCai2hclConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { if asset.Resource == nil || asset.Resource.Data == nil { return nil, fmt.Errorf("asset resource data is nil") } @@ -54,7 +49,7 @@ func (c *ProjectConverter) convertResourceData(asset caiasset.Asset) (*models.Te hclData := make(map[string]interface{}) hclData["name"] = assetResourceData["name"] hclData["project_id"] = assetResourceData["projectId"] - hclData["labels"] = utils.RemoveTerraformAttributionLabel(assetResourceData["labels"]) + hclData["labels"] = tgcresource.RemoveTerraformAttributionLabel(assetResourceData["labels"]) if strings.Contains(asset.Resource.Parent, "folders/") { hclData["folder_id"] = utils.ParseFieldValue(asset.Resource.Parent, "folders") } else if strings.Contains(asset.Resource.Parent, "organizations/") { diff --git a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go new file mode 100644 index 000000000000..cbe380607eb8 
--- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go @@ -0,0 +1,149 @@ +package resourcemanager + +import ( + "fmt" + "strconv" + "strings" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + + "google.golang.org/api/cloudbilling/v1" + "google.golang.org/api/cloudresourcemanager/v1" +) + +func ProjectTfplan2caiConverter() cai.Tfplan2caiConverter { + return cai.Tfplan2caiConverter{ + Convert: GetProjectAndBillingInfoCaiObjects, + } +} + +func GetProjectAndBillingInfoCaiObjects(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { + if projectAsset, err := GetProjectCaiObject(d, config); err == nil { + assets := []caiasset.Asset{projectAsset} + if _, ok := d.GetOk("billing_account"); !ok { + return assets, nil + } else { + if billingAsset, err := GetProjectBillingInfoCaiObject(d, config); err == nil { + assets = append(assets, billingAsset) + return assets, nil + } else { + return []caiasset.Asset{}, err + } + } + } else { + return []caiasset.Asset{}, err + } +} + +func GetProjectCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { + linkTmpl := "//cloudresourcemanager.googleapis.com/projects/{{number}}" + name, err := cai.AssetName(d, config, linkTmpl) + if err != nil { + return caiasset.Asset{}, err + } + if data, err := GetProjectData(d, config); err == nil { + return caiasset.Asset{ + Name: name, + Type: "cloudresourcemanager.googleapis.com/Project", + Resource: &caiasset.AssetResource{ + Version: "v1", + DiscoveryDocumentURI: "https://cloudresourcemanager.googleapis.com/$discovery/rest?version=v1", + DiscoveryName: "Project", + 
Data: data, + }, + }, nil + } else { + return caiasset.Asset{}, err + } +} + +func GetProjectData(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { + pid := d.Get("project_id").(string) + + project := &cloudresourcemanager.Project{ + ProjectId: pid, + Name: d.Get("name").(string), + } + + if res, ok := d.GetOk("number"); ok { + num, err := strconv.ParseInt(res.(string), 10, 64) + if err != nil { + return nil, err + } + + project.ProjectNumber = num + } + + if err := getParentResourceId(d, project); err != nil { + return nil, err + } + + if _, ok := d.GetOk("effective_labels"); ok { + project.Labels = tpgresource.ExpandEffectiveLabels(d) + } + + return cai.JsonMap(project) +} + +func getParentResourceId(d tpgresource.TerraformResourceData, p *cloudresourcemanager.Project) error { + orgId := d.Get("org_id").(string) + folderId := d.Get("folder_id").(string) + + if orgId != "" && folderId != "" { + return fmt.Errorf("'org_id' and 'folder_id' cannot be both set.") + } + + if orgId != "" { + p.Parent = &cloudresourcemanager.ResourceId{ + Id: orgId, + Type: "organization", + } + } + + if folderId != "" { + p.Parent = &cloudresourcemanager.ResourceId{ + Id: strings.TrimPrefix(folderId, "folders/"), + Type: "folder", + } + } + + return nil +} + +func GetProjectBillingInfoCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { + linkTmpl := "//cloudbilling.googleapis.com/projects/{{project_id_or_project}}/billingInfo" + name, err := cai.AssetName(d, config, linkTmpl) + if err != nil { + return caiasset.Asset{}, err + } + project := strings.Split(name, "/")[4] + if data, err := GetProjectBillingInfoData(d, project); err == nil { + return caiasset.Asset{ + Name: name, + Type: "cloudbilling.googleapis.com/ProjectBillingInfo", + Resource: &caiasset.AssetResource{ + Version: "v1", + DiscoveryDocumentURI: "https://cloudbilling.googleapis.com/$discovery/rest", + DiscoveryName: 
"ProjectBillingInfo", + Data: data, + Location: "global", + }, + }, nil + } else { + return caiasset.Asset{}, err + } +} + +func GetProjectBillingInfoData(d tpgresource.TerraformResourceData, project string) (map[string]interface{}, error) { + ba := &cloudbilling.ProjectBillingInfo{ + BillingAccountName: fmt.Sprintf("billingAccounts/%s", d.Get("billing_account")), + Name: fmt.Sprintf("projects/%s/billingInfo", project), + ProjectId: d.Get("project_id").(string), + } + + return cai.JsonMap(ba) +} diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go index abe5e18fbe14..49759bdd7888 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go @@ -14,7 +14,7 @@ type ConvertFunc func(d tpgresource.TerraformResourceData, config *transport_tpg // by Terraform, like IAM policies managed with member/binding resources. 
type FetchFullResourceFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) -type ResourceConverter struct { +type Tfplan2caiConverter struct { Convert ConvertFunc FetchFullResource FetchFullResourceFunc } diff --git a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go index ba7fecf54d59..03adc1ac46f0 100644 --- a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go +++ b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go @@ -7,6 +7,25 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" ) +// Remove the Terraform attribution label "goog-terraform-provisioned" from labels +func RemoveTerraformAttributionLabel(raw interface{}) interface{} { + if raw == nil { + return nil + } + + if labels, ok := raw.(map[string]string); ok { + delete(labels, "goog-terraform-provisioned") + return labels + } + + if labels, ok := raw.(map[string]interface{}); ok { + delete(labels, "goog-terraform-provisioned") + return labels + } + + return nil +} + func GetComputeSelfLink(config *transport_tpg.Config, raw interface{}) interface{} { if raw == nil { return nil diff --git a/mmv1/third_party/tgc_next/pkg/transport/config_tgc.go b/mmv1/third_party/tgc_next/pkg/transport/config_tgc.go new file mode 100644 index 000000000000..27dd46309d2b --- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/transport/config_tgc.go @@ -0,0 +1,5 @@ +package transport + +func NewConfig() *Config { + return &Config{} +} From c5fb67d2f163aff8fd858b073d932d99a43efed3 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 8 Jul 2025 11:24:46 -0400 Subject: [PATCH 491/884] reform config change + testing to show the functionality a bit more (#14373) --- .../terraform/acctest/vcr_utils.go | 16 ++-- .../terraform/acctest/vcr_utils_test.go | 76 +++++++++++++++++++ 2 files changed, 86 insertions(+), 6 deletions(-) diff --git 
a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index d670c637fe2c..a41625ed8c4f 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -247,10 +247,10 @@ func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.Te for _, testStep := range c.Steps { if testStep.Config != "" { ogConfig := testStep.Config - testStep.Config = reformConfigWithProvider(ogConfig, localProviderName) + testStep.Config = ReformConfigWithProvider(ogConfig, localProviderName) if testStep.ExpectError == nil && testStep.PlanOnly == false { newStep := resource.TestStep{ - Config: reformConfigWithProvider(ogConfig, releaseProvider), + Config: ReformConfigWithProvider(ogConfig, releaseProvider), } testStep.PlanOnly = true testStep.ExpectNonEmptyPlan = false @@ -267,19 +267,23 @@ func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.Te return c } -func reformConfigWithProvider(config, provider string) string { +func ReformConfigWithProvider(config, provider string) string { configBytes := []byte(config) providerReplacement := fmt.Sprintf("provider = %s", provider) providerReplacementBytes := []byte(providerReplacement) providerBlock := regexp.MustCompile(`provider *=.*google-beta.*`) if providerBlock.Match(configBytes) { - return string(providerBlock.ReplaceAll(configBytes, providerReplacementBytes)) + out := string(providerBlock.ReplaceAll(configBytes, providerReplacementBytes)) + return out } - providerReplacement = fmt.Sprintf("${1}\n\t%s", providerReplacement) + providerReplacement = fmt.Sprintf("${1}\n %s\n", providerReplacement) providerReplacementBytes = []byte(providerReplacement) - resourceHeader := regexp.MustCompile(`(resource .*google_.* .*\w+.*\{.*)`) + // Match resource and data blocks that use google_ provider + // regex matches for labels resource and data blocks that use google_ provider + + resourceHeader := 
regexp.MustCompile(`((resource|data) .*google_.* .*\w+.*\{ *)`) return string(resourceHeader.ReplaceAll(configBytes, providerReplacementBytes)) } diff --git a/mmv1/third_party/terraform/acctest/vcr_utils_test.go b/mmv1/third_party/terraform/acctest/vcr_utils_test.go index 7ed5b87ecce2..8481ca7c200b 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils_test.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils_test.go @@ -373,3 +373,79 @@ func prepareCassetteRequest(d requestDescription) cassette.Request { return req } + +func TestReformConfigWithProvider(t *testing.T) { + + type testCase struct { + name string + initialConfig string + providerToInsert string + expectedConfig string + } + + cases := map[string]testCase{ + "replaces_google_beta_with_local": { + name: "Replaces 'google-beta' provider with 'google-local'", + initialConfig: `resource "google_new_resource" { + provider = google-beta +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_new_resource" { + provider = google-local +}`, + }, + "inserts_local_provider_into_empty_config": { + name: "Inserts 'google-local' provider when no provider block exists", + initialConfig: `resource "google_alloydb_cluster" "default" { + location = "us-central1" + network_config { + network = google_compute_network.default.id + } +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_alloydb_cluster" "default" { + provider = google-local + + location = "us-central1" + network_config { + network = google_compute_network.default.id + } +}`, + }, + "no_change_if_target_provider_already_present": { + name: "Does not change config if target provider is already present", + initialConfig: `resource "google_new_resource" { + provider = google-local +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_new_resource" { + provider = google-local +}`, + }, + "inserts_provider_with_other_attributes": { + name: "Inserts provider into a resource block with other 
attributes but no existing provider", + initialConfig: `resource "google_compute_instance" "test" { + name = "test-instance" + machine_type = "e2-medium" +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_compute_instance" "test" { + provider = google-local + + name = "test-instance" + machine_type = "e2-medium" +}`, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + newConfig := acctest.ReformConfigWithProvider(tc.initialConfig, tc.providerToInsert) + + if newConfig != tc.expectedConfig { + t.Fatalf("Test Case: %s\nExpected config to be reformatted to:\n%q\nbut got:\n%q", tc.name, tc.expectedConfig, newConfig) + } + t.Logf("Test Case: %s\nReformed config:\n%s", tc.name, newConfig) + }) + } +} From d0141205dae41ed7cb070dc4dabf6397737656f6 Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Tue, 8 Jul 2025 21:20:44 +0530 Subject: [PATCH 492/884] added delay b/w storage pool creation & volume creation (#14460) --- .../services/netapp/resource_netapp_backup_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index b71381591517..3f4c16cd3fb8 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -184,6 +184,9 @@ func TestAccNetappBackup_NetappFlexBackup(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, Steps: []resource.TestStep{ { Config: testAccNetappBackup_FlexBackup(context), @@ -214,6 +217,11 @@ resource "google_netapp_storage_pool" "default" { replica_zone = "us-east4-b" } +resource 
"time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} + resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location From 1a5d843710965d7e5f729a116fd3d4f23982275e Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 8 Jul 2025 09:04:55 -0700 Subject: [PATCH 493/884] tgc-revival: fix TestAccComputeBackendBucket_backendBucketSecurityPolicyExample (#14468) --- mmv1/products/compute/BackendBucket.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index d7892ff265e0..a0d701088e96 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -72,6 +72,7 @@ examples: vars: backend_bucket_name: 'image-backend-bucket' bucket_name: 'image-store-bucket' + tgc_test_ignore_in_asset: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec'] - name: 'backend_bucket_query_string_whitelist' primary_resource_id: 'image_backend' vars: From ec019d7ba2761ef22eda76368e59e4b3a7c940d3 Mon Sep 17 00:00:00 2001 From: Niharika <35183015+niharika-98@users.noreply.github.com> Date: Tue, 8 Jul 2025 23:18:10 +0530 Subject: [PATCH 494/884] Supporting update BP (#14336) --- mmv1/products/backupdr/BackupPlan.yaml | 7 +- .../resource_backup_dr_backup_plan_test.go | 219 ++++++++++++++++++ 2 files changed, 225 insertions(+), 1 deletion(-) create mode 100644 mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index b02c79a1b5e6..ffc566c48d3a 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -15,7 +15,8 @@ name: 'BackupPlan' base_url: projects/{{project}}/locations/{{location}}/backupPlans create_url: 
projects/{{project}}/locations/{{location}}/backupPlans/?backup_plan_id={{backup_plan_id}} self_link: projects/{{project}}/locations/{{location}}/backupPlans/{{backup_plan_id}} -immutable: true +update_verb: 'PATCH' +update_mask: true description: A backup plan defines when and how to back up a resource, including the backup's schedule, retention, and location. import_format: - 'projects/{{project}}/locations/{{location}}/backupPlans/{{backup_plan_id}}' @@ -26,6 +27,7 @@ references: autogen_async: true timeouts: insert_minutes: 60 + update_minutes: 60 delete_minutes: 60 examples: - name: 'backup_dr_backup_plan_simple' @@ -48,12 +50,14 @@ parameters: type: String required: true url_param_only: true + immutable: true description: | The location for the backup plan - name: 'backup_plan_id' type: String required: true url_param_only: true + immutable: true description: |- The ID of the backup plan properties: @@ -73,6 +77,7 @@ properties: description: | Backup vault where the backups gets stored using this Backup plan. 
required: true + immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: 'backupVaultServiceAccount' type: String diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go new file mode 100644 index 000000000000..2b01509366a9 --- /dev/null +++ b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go @@ -0,0 +1,219 @@ +package backupdr_test + +import ( + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" +) + +func TestAccBackupDRBackupPlan_fullUpdate(t *testing.T) { + + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccBackupDRBackupPlan_fullCreate(context), + }, + { + ResourceName: "google_backup_dr_backup_plan.bp", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"resource"}, + }, + { + Config: testAccBackupDRBackupPlan_fullUpdate(context), + }, + { + ResourceName: "google_backup_dr_backup_plan.bp", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"resource"}, + }, + }, + }) +} + +func testAccBackupDRBackupPlan_fullCreate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_service_account" "default" { + account_id = "tf-test-my-custom-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + +resource "google_compute_instance" "default" { + name = "tf-test-compute-instance-%{random_suffix}" + machine_type = 
"n2-standard-2" + zone = "us-central1-a" + tags = ["foo", "bar"] + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + labels = { + my_label = "value" + } + } + } + // Local SSD disk + scratch_disk { + interface = "NVME" + } + network_interface { + network = "default" + access_config { + // Ephemeral public IP + } + } + service_account { + # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. + email = google_service_account.default.email + scopes = ["cloud-platform"] + } +} +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a second backup vault built by Terraform." + backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + +resource "google_backup_dr_backup_plan" "bp" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "compute.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 366 + + standard_schedule { + recurrence_type = "YEARLY" + months = ["JANUARY"] + days_of_month = [15] + time_zone = "UTC" + + backup_window { + start_hour_of_day = 2 # Backup starts at 2:00 AM UTC + end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM + } + } + } +} + +`, context) +} + +func testAccBackupDRBackupPlan_fullUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_service_account" "default" { + account_id = "tf-test-my-custom-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + +resource "google_compute_instance" "default" { + name = "tf-test-compute-instance-%{random_suffix}" + 
machine_type = "n2-standard-2" + zone = "us-central1-a" + tags = ["foo", "bar"] + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + labels = { + my_label = "value" + } + } + } + // Local SSD disk + scratch_disk { + interface = "NVME" + } + network_interface { + network = "default" + access_config { + // Ephemeral public IP + } + } + service_account { + # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. + email = google_service_account.default.email + scopes = ["cloud-platform"] + } +} + +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a second backup vault built by Terraform." + backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + +resource "google_backup_dr_backup_plan" "bp" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "compute.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 366 + standard_schedule { + recurrence_type = "MONTHLY" # Updated recurrence_type from YEARLY + days_of_month = [1, 15] # Updated days_of_month + time_zone = "America/New_York" # Updated time_zone + + backup_window { + start_hour_of_day = 1 # Updated start hour + end_hour_of_day = 7 # Updated end hour + } + } + } + backup_rules { + # Adding a second rule to test weekly schedule + rule_id = "rule-2" + backup_retention_days = 60 # Different retention for rule-2 + + standard_schedule { + recurrence_type = "WEEKLY" + days_of_week = ["MONDAY", "FRIDAY"] # Added days_of_week + time_zone = "UTC" + + backup_window { + start_hour_of_day = 1 # 
Different backup window for rule-2 + end_hour_of_day = 7 + } + } + } +} +`, context) +} From af71f5563d3e5700b254cce01232501ba10d6121 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 8 Jul 2025 10:54:36 -0700 Subject: [PATCH 495/884] use project name in TestAccComputeBackendBucket_backendBucketGlobalIlbExample (#14475) --- .../terraform/examples/backend_bucket_global_ilb.tf.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl index 2f2de2d13064..9497c721747b 100644 --- a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl @@ -14,7 +14,7 @@ resource "google_project_service" "project" { resource "google_compute_backend_bucket" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "backend_bucket_name"}}" - project = google_project.unarmored.number + project = google_project.unarmored.name bucket_name = google_storage_bucket.{{$.PrimaryResourceId}}.name load_balancing_scheme = "INTERNAL_MANAGED" From 639b7f6eed6bd19696b4a8afc67ae1bdef2ec11f Mon Sep 17 00:00:00 2001 From: chasevedder Date: Tue, 8 Jul 2025 12:24:32 -0700 Subject: [PATCH 496/884] Update service account creation to prevent failures due to eventual consistency (#14447) --- .../resource_google_service_account.go | 65 +++++++++++-------- 1 file changed, 37 insertions(+), 28 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index 901b1d0ca975..3c810a7f790b 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -122,54 +122,59 @@ func resourceGoogleServiceAccountCreate(d 
*schema.ResourceData, meta interface{} ServiceAccount: sa, } - sa, err = config.NewIamClient(userAgent).Projects.ServiceAccounts.Create("projects/"+project, r).Do() + d.SetId(fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project)) + + iamClient := config.NewIamClient(userAgent) + sa, err = iamClient.Projects.ServiceAccounts.Create("projects/"+project, r).Do() if err != nil { gerr, ok := err.(*googleapi.Error) alreadyExists := ok && gerr.Code == 409 && d.Get("create_ignore_already_exists").(bool) if alreadyExists { - sa = &iam.ServiceAccount{ - Name: fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project), - } + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + sa, saerr := iamClient.Projects.ServiceAccounts.Get(d.Id()).Do() + + if saerr != nil { + return saerr + } + return populateResourceData(d, sa) + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ + transport_tpg.IsNotFoundRetryableError("service account creation"), + }, + }) + + return nil } else { return fmt.Errorf("Error creating service account: %s", err) } } - d.SetId(sa.Name) - - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: func() (operr error) { - _, saerr := config.NewIamClient(userAgent).Projects.ServiceAccounts.Get(d.Id()).Do() - return saerr - }, - Timeout: d.Timeout(schema.TimeoutCreate), - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ - transport_tpg.IsNotFoundRetryableError("service account creation"), - transport_tpg.IsForbiddenIamServiceAccountRetryableError("service account creation"), - }, - }) - - if err != nil { - return fmt.Errorf("Error reading service account after creation: %s", err) - } - // We poll until the resource is found due to eventual consistency issue - // on part of the api https://cloud.google.com/iam/docs/overview#consistency + // on part of the api 
https://cloud.google.com/iam/docs/overview#consistency. + // Wait for at least 3 successful responses in a row to ensure result is consistent. // IAM API returns 403 when the queried SA is not found, so we must ignore both 404 & 403 errors - err = transport_tpg.PollingWaitTime(resourceServiceAccountPollRead(d, meta), transport_tpg.PollCheckForExistenceWith403, "Creating Service Account", d.Timeout(schema.TimeoutCreate), 1) + transport_tpg.PollingWaitTime( + resourceServiceAccountPollRead(d, meta), + transport_tpg.PollCheckForExistence, + "Creating Service Account", + d.Timeout(schema.TimeoutCreate), + 3, // Number of consecutive occurences. + ) - if err != nil { - return err - } + populateResourceData(d, sa) // We can't guarantee complete consistency even after polling, // so sleep for some additional time to reduce the likelihood of // eventual consistency failures. time.Sleep(10 * time.Second) - return resourceGoogleServiceAccountRead(d, meta) + return nil } +// PollReadFunc for checking Service Account existence. +// If resourceData is not nil, it will be updated with the response. 
func resourceServiceAccountPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { return func() (map[string]interface{}, error) { config := meta.(*transport_tpg.Config) @@ -201,6 +206,10 @@ func resourceGoogleServiceAccountRead(d *schema.ResourceData, meta interface{}) return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Service Account %q", d.Id())) } + return populateResourceData(d, sa) +} + +func populateResourceData(d *schema.ResourceData, sa *iam.ServiceAccount) error { if err := d.Set("email", sa.Email); err != nil { return fmt.Errorf("Error setting email: %s", err) } From d240b1b6badff97a477f52b65676a2c964960601 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Tue, 8 Jul 2025 12:43:02 -0700 Subject: [PATCH 497/884] No longer skip if env IdentityUserEnvVars not set (#14478) --- mmv1/third_party/terraform/envvar/envvar_utils.go | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/third_party/terraform/envvar/envvar_utils.go b/mmv1/third_party/terraform/envvar/envvar_utils.go index ff0856ee38e2..fb34c9ec481f 100644 --- a/mmv1/third_party/terraform/envvar/envvar_utils.go +++ b/mmv1/third_party/terraform/envvar/envvar_utils.go @@ -146,7 +146,6 @@ func GetTestCredsFromEnv() string { // Returns googleapis.com if there's no universe set. func GetTestUniverseDomainFromEnv(t *testing.T) string { - SkipIfEnvNotSet(t, IdentityUserEnvVars...) 
return transport_tpg.MultiEnvSearch(UniverseDomainEnvVars) } From bba0340518856e05488c698e8056562e516bcc35 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 8 Jul 2025 13:12:54 -0700 Subject: [PATCH 498/884] tgc-revival: support google_compute_backend_service (#14474) --- mmv1/api/resource.go | 6 +++- mmv1/products/compute/BackendService.yaml | 20 ++++++++++++ mmv1/provider/template_data.go | 2 ++ .../terraform/expand_property_method.go.tmpl | 10 ++++-- .../terraform/flatten_property_method.go.tmpl | 4 +++ .../flatten_property_method_tgc.go.tmpl | 32 +++++++++++++++++++ .../cai2hcl/resource_converter.go.tmpl | 18 ++--------- .../custom_expand/original_value.go.tmpl | 3 ++ .../custom_expand/set_to_slice_or_nil.go.tmpl | 7 ++++ ...rvice_signed_url_cache_max_age_sec.go.tmpl | 20 ++++++++++++ .../tgc_next/decoders/backend_service.go.tmpl | 31 ++++++++++++++++++ .../expand_property_method_tgc.go.tmpl | 25 +++++++++++++++ .../tfplan2cai/resource_converter.go.tmpl | 10 +----- 13 files changed, 159 insertions(+), 29 deletions(-) create mode 100644 mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl create mode 100644 mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl create mode 100644 mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl create mode 100644 mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl create mode 100644 mmv1/templates/tgc_next/decoders/backend_service.go.tmpl create mode 100644 mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 3af4f4067e6f..66bb50c8c6d9 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1988,7 +1988,7 @@ func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string for _, tp := range r.VirtualFields { props = append(props, google.Underscore(tp.Name)) } - for _, tp := range r.AllUserProperties() { + for _, tp := range 
r.AllNestedProperties(r.RootProperties()) { if tp.UrlParamOnly { props = append(props, google.Underscore(tp.Name)) } else if tp.IsMissingInCai { @@ -2019,3 +2019,7 @@ func (r Resource) ApiResourceType() string { return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.Name) } + +func (r Resource) IsTgcCompiler() bool { + return r.Compiler == "terraformgoogleconversionnext-codegen" +} diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 20ebcec22508..af98a78f7547 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -43,6 +43,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true iam_policy: allowed_iam_role: 'roles/compute.admin' parent_resource_attribute: 'name' @@ -55,6 +56,7 @@ custom_code: decoder: 'templates/terraform/decoders/backend_service.go.tmpl' post_create: 'templates/terraform/post_create/compute_backend_service_security_policy.go.tmpl' post_update: 'templates/terraform/post_create/compute_backend_service_security_policy.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/backend_service.go.tmpl' schema_version: 1 examples: - name: 'backend_service_basic' @@ -96,12 +98,18 @@ examples: vars: backend_service_name: 'backend-service' health_check_name: 'health-check' + tgc_test_ignore_in_asset: + - 'RESOURCE.enableCDN' # It has false value in CAI asset - name: 'backend_service_traffic_director_ring_hash' primary_resource_id: 'default' min_version: 'beta' vars: backend_service_name: 'backend-service' health_check_name: 'health-check' + tgc_test_ignore_extra: + - outlier_detection.enforcing_consecutive_gateway_failure # Ignore the 0 value in config + tgc_test_ignore_in_asset: + - 'RESOURCE.enableCDN' # It has false value in CAI asset - name: 'backend_service_stateful_session_affinity' primary_resource_id: 'default' min_version: 'beta' @@ -147,6 +155,8 @@ examples: min_version: 'beta' vars: 
backend_service_name: 'backend-service' + tgc_test_ignore_in_asset: + - 'RESOURCE.enableCDN' # It has false value in CAI asset parameters: properties: - name: 'affinityCookieTtlSec' @@ -158,6 +168,7 @@ properties: maximum allowed value for TTL is one day. When the load balancing scheme is INTERNAL, this field is not used. + include_empty_value_in_cai: true - name: 'backend' type: Array description: | @@ -327,6 +338,7 @@ properties: description: | If true, the metric data is collected and reported to Cloud Monitoring, but is not used for load balancing. + include_empty_value_in_cai: true - name: 'maxUtilization' type: Double description: | @@ -608,6 +620,7 @@ properties: - 'cdn_policy.0.cache_key_policy.0.include_named_cookies' item_type: type: String + custom_tgc_expand: 'templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl' - name: 'queryStringWhitelist' type: Array description: | @@ -619,6 +632,7 @@ properties: delimiters. is_set: true send_empty_value: true + custom_tgc_expand: 'templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl' at_least_one_of: - 'cdn_policy.0.cache_key_policy.0.include_host' - 'cdn_policy.0.cache_key_policy.0.include_protocol' @@ -677,6 +691,7 @@ properties: - 'cdn_policy.0.cache_key_policy' - 'cdn_policy.0.signed_url_cache_max_age_sec' default_value: 3600 + custom_tgc_flatten: 'templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl' - name: 'defaultTtl' type: Integer description: | @@ -795,6 +810,7 @@ properties: type: Boolean description: | If true, enable Cloud CDN for this BackendService. + include_empty_value_in_cai: true - name: 'healthChecks' type: Array description: | @@ -844,6 +860,7 @@ properties: ignore_read: true sensitive: true send_empty_value: true + is_missing_in_cai: true - name: 'oauth2ClientSecretSha256' type: String description: | @@ -1103,6 +1120,7 @@ properties: required: true description: | If true, the metric data is not used for load balancing. 
+ include_empty_value_in_cai: true - name: 'name' type: String description: | @@ -1405,6 +1423,7 @@ properties: diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' resource: 'ClientTlsPolicy' imports: 'name' + custom_tgc_expand: 'templates/tgc_next/custom_expand/original_value.go.tmpl' - name: 'subjectAltNames' type: Array description: | @@ -1660,6 +1679,7 @@ properties: Dynamic forwarding configuration. This field is used to configure the backend service with dynamic forwarding feature which together with Service Extension allows customized and complex routing logic. min_version: beta + is_missing_in_cai: true properties: - name: 'ipPortSelection' type: NestedObject diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index fd05e0aa0042..7076e8711e5c 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -200,6 +200,8 @@ func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, r "templates/terraform/flatten_property_method.go.tmpl", "templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl", "templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl", + "templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl", + "templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) 
} diff --git a/mmv1/templates/terraform/expand_property_method.go.tmpl b/mmv1/templates/terraform/expand_property_method.go.tmpl index 87da148953e8..21845c284957 100644 --- a/mmv1/templates/terraform/expand_property_method.go.tmpl +++ b/mmv1/templates/terraform/expand_property_method.go.tmpl @@ -31,7 +31,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T transformed{{$prop.TitlelizeProperty}}, err := expand{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ underscore $prop.Name }}"], d, config) if err != nil { return nil, err - {{- if $prop.SendEmptyValue }} + {{- if or ($prop.SendEmptyValue) (and $prop.IncludeEmptyValueInCai $.ResourceMetadata.IsTgcCompiler) }} } else { transformed["{{$prop.ApiName}}"] = transformed{{$prop.TitlelizeProperty}} {{- else }} @@ -69,7 +69,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T transformed{{$prop.TitlelizeProperty}}, err := expand{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"), d, config) if err != nil { return nil, err - {{- if $prop.SendEmptyValue }} + {{- if or ($prop.SendEmptyValue) (and $prop.IncludeEmptyValueInCai $.ResourceMetadata.IsTgcCompiler) }} } else { transformed["{{$prop.ApiName}}"] = transformed{{$prop.TitlelizeProperty}} {{- else }} @@ -122,7 +122,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T transformed{{$prop.TitlelizeProperty}}, err := expand{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ underscore $prop.Name }}"], d, config) if err != nil { return nil, err - {{- if $prop.SendEmptyValue }} + {{- if or ($prop.SendEmptyValue) (and $prop.IncludeEmptyValueInCai $.ResourceMetadata.IsTgcCompiler) }} } else { transformed["{{$prop.ApiName}}"] = transformed{{$prop.TitlelizeProperty}} {{- else }} @@ -160,7 +160,11 @@ func 
expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T {{ if $.NestedProperties }} {{- range $prop := $.NestedProperties }} {{- if not (and (hasPrefix $prop.Type "KeyValue") $prop.IgnoreWrite) }} + {{- if $.ResourceMetadata.IsTgcCompiler }} + {{- template "expandTgcPropertyMethod" $prop -}} + {{- else }} {{- template "expandPropertyMethod" $prop -}} + {{- end }} {{- end }} {{- end }} {{- end }}{{/* if $.NestedProperties */}} diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index 5737fd68ad04..b38732d1525e 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -151,7 +151,11 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso } {{- if $.NestedProperties }} {{- range $prop := $.NestedProperties }} + {{- if $.ResourceMetadata.IsTgcCompiler }} + {{ template "flattenTgcPropertyMethod" $prop -}} + {{- else }} {{ template "flattenPropertyMethod" $prop -}} + {{- end }} {{- end }} {{- end }} {{- end }} diff --git a/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl new file mode 100644 index 000000000000..4cb7d48d8b92 --- /dev/null +++ b/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl @@ -0,0 +1,32 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. */ -}} +{{- define "flattenTgcPropertyMethod" }} + {{- if $.CustomTgcFlatten }} +{{ $.CustomTemplate $.CustomTgcFlatten true -}} + {{- else if $.IsA "KeyValueLabels" }} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return tgcresource.RemoveTerraformAttributionLabel(v) +} + {{- else if or (and (eq $.Name "zone") $.ResourceMetadata.HasZone) (and (eq $.Name "region") $.ResourceMetadata.HasRegion) -}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return tpgresource.GetResourceNameFromSelfLink(v.(string)) +} + {{- else }} +{{ template "flattenPropertyMethod" $ -}} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index c671618eee58..ec805842aacc 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -33,6 +33,7 @@ import ( "google.golang.org/api/bigtableadmin/v2" "google.golang.org/api/googleapi" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" @@ -136,22 +137,7 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass } {{ range $prop := $.ReadPropertiesForTgc }} - {{- if $prop.CustomTgcFlatten }} -{{ $prop.CustomTemplate $prop.CustomTgcFlatten true -}} - {{- else if $prop.IsA "KeyValueLabels" }} -func 
flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return tgcresource.RemoveTerraformAttributionLabel(v) -} - {{- else if or (and (eq $prop.Name "zone") $.HasZone) (and (eq $prop.Name "region") $.HasRegion) -}} -func flatten{{$prop.GetPrefix}}{{$prop.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) -} - {{- else }} -{{ template "flattenPropertyMethod" $prop -}} - {{- end }} +{{- template "flattenTgcPropertyMethod" $prop -}} {{- end }} {{- if $.CustomCode.TgcDecoder }} diff --git a/mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl b/mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl new file mode 100644 index 000000000000..8566619d94b7 --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl @@ -0,0 +1,3 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} diff --git a/mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl b/mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl new file mode 100644 index 000000000000..b081d2b53cb5 --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl @@ -0,0 +1,7 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + vSet := v.(*schema.Set) + if vSet.Len() == 0 { + return nil, nil + } + return vSet.List(), nil +} diff --git a/mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl new file mode 100644 index 000000000000..612e2e446a02 
--- /dev/null +++ b/mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl @@ -0,0 +1,20 @@ +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return 3600 + } + + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/backend_service.go.tmpl b/mmv1/templates/tgc_next/decoders/backend_service.go.tmpl new file mode 100644 index 000000000000..de931a039dea --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/backend_service.go.tmpl @@ -0,0 +1,31 @@ +{{/* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} + +if v, ok := res["backends"]; ok { + backends := v.([]interface{}) + for _, vBackend := range backends { + backend := vBackend.(map[string]interface{}) + if vCms, ok := backend["customMetrics"]; ok { + cms := vCms.([]interface{}) + for _, vCm := range cms { + cm := vCm.(map[string]interface{}) + if vMu, ok := cm["maxUtilization"]; ok { + mu := vMu.(float64) + cm["maxUtilization"] = fmt.Sprintf("%.1f", mu) + } + } + } + } +} + +return res, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl new file mode 100644 index 000000000000..a2b200b52b17 --- /dev/null +++ b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl @@ -0,0 +1,25 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
*/ -}} +{{- define "expandTgcPropertyMethod" }} + {{ if $.CustomTgcExpand }} +{{- $.CustomTemplate $.CustomTgcExpand false -}} + {{ else if and ($.IsA "Array") ($.ItemType.IsA "ResourceRef")}} +{{- template "expandArrayResourcerefWithValidation" $ -}} + {{ else if ($.IsA "ResourceRef") }} +{{- template "expandResourcerefWithValidation" $ -}} + {{ else }} +{{- template "expandPropertyMethod" $ -}} + {{ end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index f94588e6a36b..351ff01d054a 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -124,13 +124,5 @@ func resource{{ $.ResourceName -}}TgcEncoder(d tpgresource.TerraformResourceData {{- end}} {{ range $prop := $.SettableProperties }} - {{ if $prop.CustomTgcExpand }} - {{- $prop.CustomTemplate $prop.CustomTgcExpand false -}} - {{ else if and ($prop.IsA "Array") ($prop.ItemType.IsA "ResourceRef")}} - {{- template "expandArrayResourcerefWithValidation" $prop -}} - {{ else if ($prop.IsA "ResourceRef") }} - {{- template "expandResourcerefWithValidation" $prop -}} - {{ else }} - {{- template "expandPropertyMethod" $prop -}} - {{ end }} + {{- template "expandTgcPropertyMethod" $prop -}} {{- end}} \ No newline at end of file From 88ecca4890837c8b8f2d369593578706ed68b7f2 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 8 Jul 2025 13:39:06 -0700 Subject: [PATCH 499/884] added tf-test prefix to dataproc tests (#14477) --- .../dataproc/resource_dataproc_job_test.go.tmpl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl index 9b6faf85a2d6..3617979e9f3c 100644 --- 
a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl @@ -48,7 +48,7 @@ func TestAccDataprocJob_updatable(t *testing.T) { var job dataproc.Job rnd := acctest.RandString(t, 10) - jobId := fmt.Sprintf("dproc-update-job-id-%s", rnd) + jobId := fmt.Sprintf("tf-test-dproc-update-job-id-%s", rnd) networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) @@ -81,7 +81,7 @@ func TestAccDataprocJob_PySpark(t *testing.T) { var job dataproc.Job rnd := acctest.RandString(t, 10) - jobId := fmt.Sprintf("dproc-custom-job-id-%s", rnd) + jobId := fmt.Sprintf("tf-test-dproc-custom-job-id-%s", rnd) networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) @@ -659,7 +659,7 @@ func matchError(attr, tf interface{}, gcp interface{}) string { var singleNodeClusterConfig = ` resource "google_dataproc_cluster" "basic" { - name = "dproc-job-test-%s" + name = "tf-test-dproc-job-%s" region = "us-central1" cluster_config { @@ -714,7 +714,7 @@ resource "google_dataproc_job" "pyspark" { cluster_name = google_dataproc_cluster.basic.name } reference { - job_id = "dproc-custom-job-id-%s" + job_id = "tf-test-dproc-custom-job-id-%s" } region = google_dataproc_cluster.basic.region @@ -883,7 +883,7 @@ resource "google_dataproc_job" "sparksql" { func testAccDataprocJob_presto(rnd, subnetworkName string) string { return fmt.Sprintf(` resource "google_dataproc_cluster" "basic" { - name = "dproc-job-test-%s" + name = "tf-test-dproc-job-%s" region = "us-central1" cluster_config { From b8e7ec3e77edddf83f4f51b75f240678469d7e73 Mon Sep 17 00:00:00 
2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Tue, 8 Jul 2025 23:24:30 +0200 Subject: [PATCH 500/884] Feature gap: Implement `aggregateReservation` and `enableEmergentMaintenance` (#14452) Signed-off-by: Cezary Sobczak --- mmv1/products/compute/FutureReservation.yaml | 61 +++++++++++++++++++ ..._reservation_aggregate_reservation.tf.tmpl | 28 +++++++++ 2 files changed, 89 insertions(+) create mode 100644 mmv1/templates/terraform/examples/future_reservation_aggregate_reservation.tf.tmpl diff --git a/mmv1/products/compute/FutureReservation.yaml b/mmv1/products/compute/FutureReservation.yaml index e05fe8eac7a6..c05e20f89e99 100644 --- a/mmv1/products/compute/FutureReservation.yaml +++ b/mmv1/products/compute/FutureReservation.yaml @@ -57,6 +57,14 @@ examples: project: 'PROJECT_NAME' org_id: 'ORG_ID' billing_account: 'BILLING_ACCT' + - name: 'future_reservation_aggregate_reservation' + primary_resource_id: 'gce_future_reservation' + vars: + future_reservation_name: 'gce-future-reservation-aggregate-reservation' + test_env_vars: + project: 'PROJECT_NAME' + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' - name: 'shared_future_reservation' primary_resource_id: 'gce_future_reservation' vars: @@ -545,3 +553,56 @@ properties: type: Integer description: | Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. + - name: 'aggregateReservation' + type: NestedObject + description: | + Aggregate reservation details for the future reservation. + immutable: true + properties: + - name: 'vmFamily' + type: Enum + description: | + The VM family that all instances scheduled against this reservation must belong to. 
+ immutable: true + enum_values: + - 'VM_FAMILY_CLOUD_TPU_DEVICE_CT3' + - 'VM_FAMILY_CLOUD_TPU_LITE_DEVICE_CT5L' + - 'VM_FAMILY_CLOUD_TPU_LITE_POD_SLICE_CT5LP' + - 'VM_FAMILY_CLOUD_TPU_LITE_POD_SLICE_CT6E' + - 'VM_FAMILY_CLOUD_TPU_POD_SLICE_CT3P' + - 'VM_FAMILY_CLOUD_TPU_POD_SLICE_CT4P' + - 'VM_FAMILY_CLOUD_TPU_POD_SLICE_CT5P' + - name: reservedResources + type: Array + description: | + futureReservations.list of reserved resources (CPUs, memory, accelerators). + required: true + immutable: true + item_type: + type: NestedObject + properties: + - name: 'accelerator' + type: NestedObject + description: | + Properties of accelerator resources in this reservation. + immutable: true + properties: + - name: 'acceleratorCount' + type: Integer + description: | + Number of accelerators of specified type. + immutable: true + - name: 'acceleratorType' + type: String + description: | + Full or partial URL to accelerator type. e.g. "projects/{PROJECT}/zones/{ZONE}/acceleratorTypes/ct4l" + immutable: true + - name: 'workloadType' + type: Enum + description: | + The workload type of the instances that will target this reservation. 
+ immutable: true + enum_values: + - 'BATCH' + - 'SERVING' + - 'UNSPECIFIED' diff --git a/mmv1/templates/terraform/examples/future_reservation_aggregate_reservation.tf.tmpl b/mmv1/templates/terraform/examples/future_reservation_aggregate_reservation.tf.tmpl new file mode 100644 index 000000000000..bbedca1323a2 --- /dev/null +++ b/mmv1/templates/terraform/examples/future_reservation_aggregate_reservation.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_compute_future_reservation" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "future_reservation_name"}}" + project = "{{index $.TestEnvVars "project"}}" + auto_delete_auto_created_reservations = true + planning_status = "DRAFT" + name_prefix = "fr-basic" + time_window { + start_time = "2025-11-01T00:00:00Z" + end_time = "2025-11-02T00:00:00Z" + } + aggregate_reservation { + vm_family = "VM_FAMILY_CLOUD_TPU_DEVICE_CT3" + workload_type = "UNSPECIFIED" + reserved_resources { + accelerator { + accelerator_count = 32 + accelerator_type = "projects/{{index $.TestEnvVars "project"}}/zones/us-central1-a/acceleratorTypes/ct3" + } + } + reserved_resources { + accelerator { + accelerator_count = 2 + accelerator_type = "projects/{{index $.TestEnvVars "project"}}/zones/us-central1-a/acceleratorTypes/ct3" + } + } + } +} From 1bb68aa01bfc0f515ac21a62d4691be8dc232823 Mon Sep 17 00:00:00 2001 From: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Date: Tue, 8 Jul 2025 20:37:08 -0300 Subject: [PATCH 501/884] Added support for SecurityPolicy requestBodyInspectionSize (#14434) --- .../resource_compute_security_policy.go.tmpl | 35 ++++++++++++++----- ...ource_compute_security_policy_test.go.tmpl | 35 +++++++++++++++++++ .../r/compute_security_policy.html.markdown | 2 ++ 3 files changed, 64 insertions(+), 8 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl index 4ef570415e62..a91284724b3f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl @@ -535,6 +535,15 @@ func ResourceComputeSecurityPolicy() *schema.Resource { Description: `An optional list of case-insensitive request header names to use for resolving the callers client IP address.`, Elem: &schema.Schema{Type: schema.TypeString}, }, + {{- if ne $.TargetVersionName "ga" }} + "request_body_inspection_size": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{"8KB", "16KB", "32KB", "48KB", "64KB"}, false), + Description: `The maximum request size chosen by the customer with Waf enabled. Values supported are "8KB", "16KB, "32KB", "48KB" and "64KB". Values are case insensitive.`, + }, + {{- end }} }, }, }, @@ -889,7 +898,11 @@ func resourceComputeSecurityPolicyUpdate(d *schema.ResourceData, meta interface{ if d.HasChange("advanced_options_config") { securityPolicy.AdvancedOptionsConfig = expandSecurityPolicyAdvancedOptionsConfig(d.Get("advanced_options_config").([]interface{})) +{{ if eq $.TargetVersionName `ga` }} securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "AdvancedOptionsConfig", "advancedOptionsConfig.jsonParsing", "advancedOptionsConfig.jsonCustomConfig", "advancedOptionsConfig.logLevel") +{{- else }} + securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "AdvancedOptionsConfig", "advancedOptionsConfig.jsonParsing", "advancedOptionsConfig.jsonCustomConfig", "advancedOptionsConfig.logLevel", "advancedOptionsConfig.requestBodyInspectionSize") +{{- end }} securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "advanceOptionConfig.userIpRequestHeaders") if len(securityPolicy.AdvancedOptionsConfig.UserIpRequestHeaders) == 
0 { // to clean this list we must send the updateMask of this field on the request. @@ -1352,10 +1365,13 @@ func expandSecurityPolicyAdvancedOptionsConfig(configured []interface{}) *comput data := configured[0].(map[string]interface{}) return &compute.SecurityPolicyAdvancedOptionsConfig{ - JsonParsing: data["json_parsing"].(string), - JsonCustomConfig: expandSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(data["json_custom_config"].([]interface{})), - LogLevel: data["log_level"].(string), - UserIpRequestHeaders: tpgresource.ConvertStringArr(data["user_ip_request_headers"].(*schema.Set).List()), + JsonParsing: data["json_parsing"].(string), + JsonCustomConfig: expandSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(data["json_custom_config"].([]interface{})), + LogLevel: data["log_level"].(string), + UserIpRequestHeaders: tpgresource.ConvertStringArr(data["user_ip_request_headers"].(*schema.Set).List()), + {{- if ne $.TargetVersionName "ga" }} + RequestBodyInspectionSize: data["request_body_inspection_size"].(string), + {{- end }} } } @@ -1365,10 +1381,13 @@ func flattenSecurityPolicyAdvancedOptionsConfig(conf *compute.SecurityPolicyAdva } data := map[string]interface{}{ - "json_parsing": conf.JsonParsing, - "json_custom_config": flattenSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(conf.JsonCustomConfig), - "log_level": conf.LogLevel, - "user_ip_request_headers": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(conf.UserIpRequestHeaders)), + "json_parsing": conf.JsonParsing, + "json_custom_config": flattenSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(conf.JsonCustomConfig), + "log_level": conf.LogLevel, + "user_ip_request_headers": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(conf.UserIpRequestHeaders)), + {{- if ne $.TargetVersionName "ga" }} + "request_body_inspection_size": conf.RequestBodyInspectionSize, + {{- end }} } return []map[string]interface{}{data} diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl index bfe19233e0c2..59d7bdb90a4a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl @@ -295,6 +295,17 @@ func TestAccComputeSecurityPolicy_withAdvancedOptionsConfig(t *testing.T) { ImportState: true, ImportStateVerify: true, }, + {{- if ne $.TargetVersionName "ga" }} + // Add request_body_inspection_size value + { + Config: testAccComputeSecurityPolicy_withAdvancedOptionsConfig_update4(spName), + }, + { + ResourceName: "google_compute_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + {{- end }} { Config: testAccComputeSecurityPolicy_basic(spName, "CLOUD_ARMOR"), }, @@ -1474,6 +1485,30 @@ resource "google_compute_security_policy" "policy" { `, spName) } +{{- if ne $.TargetVersionName "ga" }} +func testAccComputeSecurityPolicy_withAdvancedOptionsConfig_update4(spName string) string { + return fmt.Sprintf(` +resource "google_compute_security_policy" "policy" { + name = "%s" + description = "updated description changing json_parsing to STANDARD_WITH_GRAPHQL" + + advanced_options_config { + json_parsing = "STANDARD_WITH_GRAPHQL" + json_custom_config { + content_types = [ + "application/json", + "application/vnd.hyper+json" + ] + } + log_level = "NORMAL" + user_ip_request_headers = [] + request_body_inspection_size = "64KB" + } +} +`, spName) +} +{{- end }} + func testAccComputeSecurityPolicy_withoutAdaptiveProtection(spName string) string { return fmt.Sprintf(` resource "google_compute_security_policy" "policy" { diff --git a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown index 
c1c4d8267d9d..ba850c6a7007 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown @@ -207,6 +207,8 @@ The following arguments are supported: * `user_ip_request_headers` - (Optional) An optional list of case-insensitive request header names to use for resolving the callers client IP address. +* `request_body_inspection_size` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) The maximum request size chosen by the customer with Waf enabled. Values supported are "8KB", "16KB, "32KB", "48KB" and "64KB". Values are case insensitive. + The `json_custom_config` block supports: * `content_types` - A list of custom Content-Type header values to apply the JSON parsing. The From eb8327944ac802282976a06c1996a53b9a2e444b Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 9 Jul 2025 08:02:54 -0700 Subject: [PATCH 502/884] tgc-revival: fix build errors from *AssetType constants (#14469) --- mmv1/api/product.go | 3 ++ mmv1/api/resource.go | 33 ++++++++++++++++++- .../tgc_next/services/resource.go.tmpl | 7 ++-- .../tfplan2cai/resource_converter.go.tmpl | 22 +++++++------ 4 files changed, 49 insertions(+), 16 deletions(-) diff --git a/mmv1/api/product.go b/mmv1/api/product.go index 10b93a67bf88..c373ed15856a 100644 --- a/mmv1/api/product.go +++ b/mmv1/api/product.go @@ -58,6 +58,9 @@ type Product struct { // base URL. Specific to defining the resource as a CAI asset. CaiBaseUrl string + // ApiResourceType of resources that already have an AssetType constant defined in the product. + ResourcesWithCaiAssetType map[string]struct{} + // A function reference designed for the rare case where you // need to use retries in operation calls. 
Used for the service api // as it enables itself (self referential) and can result in occasional diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 66bb50c8c6d9..04fc40fab29a 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1824,6 +1824,31 @@ func (r Resource) CaiProductBackendName(caiProductBaseUrl string) string { return strings.ToLower(backendUrl) } +// Returns the asset type for this resource. +func (r Resource) CaiAssetType() string { + baseURL := r.CaiProductBaseUrl() + productBackendName := r.CaiProductBackendName(baseURL) + assetName := r.Name + if r.ApiResourceTypeKind != "" { + assetName = r.ApiResourceTypeKind + } + return fmt.Sprintf("%s.googleapis.com/%s", productBackendName, assetName) +} + +// DefineAssetTypeForResourceInProduct marks the AssetType constant for this resource as defined. +// It returns true if this is the first time it's been called for this resource, +// and false otherwise, preventing duplicate definitions. +func (r Resource) DefineAssetTypeForResourceInProduct() bool { + if r.ProductMetadata.ResourcesWithCaiAssetType == nil { + r.ProductMetadata.ResourcesWithCaiAssetType = make(map[string]struct{}, 1) + } + if _, alreadyDefined := r.ProductMetadata.ResourcesWithCaiAssetType[r.ApiResourceType()]; alreadyDefined { + return false + } + r.ProductMetadata.ResourcesWithCaiAssetType[r.ApiResourceType()] = struct{}{} + return true +} + // Gets the Cai asset name template, which could include version // For example: //monitoring.googleapis.com/v3/projects/{{project}}/services/{{service_id}} func (r Resource) rawCaiAssetNameTemplate(productBackendName string) string { @@ -1984,7 +2009,13 @@ func (r Resource) MarkdownHeader(templatePath string) string { // ==================== // Lists fields that test.BidirectionalConversion should ignore func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string { - var props []string + props := []string{ + "depends_on", + "count", + "for_each", + 
"provider", + "lifecycle", + } for _, tp := range r.VirtualFields { props = append(props, google.Underscore(tp.Name)) } diff --git a/mmv1/templates/tgc_next/services/resource.go.tmpl b/mmv1/templates/tgc_next/services/resource.go.tmpl index f43c29904a24..91ed7204d627 100644 --- a/mmv1/templates/tgc_next/services/resource.go.tmpl +++ b/mmv1/templates/tgc_next/services/resource.go.tmpl @@ -26,11 +26,8 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" ) -{{- $caiProductBaseUrl := $.CaiProductBaseUrl }} -{{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} - -{{- if not $.ApiResourceTypeKind }} -const {{ $.ApiResourceType -}}AssetType string = "{{ $productBackendName }}.googleapis.com/{{ $.Name -}}" +{{ if $.DefineAssetTypeForResourceInProduct -}} +const {{ $.ApiResourceType -}}AssetType string = "{{ $.CaiAssetType }}" {{- end }} const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 351ff01d054a..05ed8ce32418 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -62,17 +62,19 @@ func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, conf if location == "" && strings.Contains(name, "/global/") { location = "global" } - return []caiasset.Asset{{"{{"}} - Name: name, - Type: {{ $.ApiResourceType -}}AssetType, - Resource: &caiasset.AssetResource{ - Version: "{{ $apiVersion }}", - DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", - DiscoveryName: "{{ or $.ApiResourceTypeKind $.Name }}", - Data: obj, - Location: location, + return []caiasset.Asset{ + { + Name: name, + Type: {{ $.ApiResourceType -}}AssetType, + Resource: &caiasset.AssetResource{ + Version: "{{ $apiVersion }}", + 
DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", + DiscoveryName: "{{ or $.ApiResourceTypeKind $.Name }}", + Data: obj, + Location: location, + }, }, - {{"}}"}}, nil + }, nil } else { return []caiasset.Asset{}, err } From b025ee356cd08711b75ce18ae386c4f0ad986032 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 9 Jul 2025 17:04:14 +0200 Subject: [PATCH 503/884] compute: add support for `provisioned_iops` and `provisioned_throughput` in `google_compute_region_disk` (#14318) --- mmv1/products/compute/RegionDisk.yaml | 13 +++ .../resource_compute_region_disk_test.go.tmpl | 106 ++++++++++++++++++ 2 files changed, 119 insertions(+) diff --git a/mmv1/products/compute/RegionDisk.yaml b/mmv1/products/compute/RegionDisk.yaml index 1955f37b3e09..e4a6f499c1de 100644 --- a/mmv1/products/compute/RegionDisk.yaml +++ b/mmv1/products/compute/RegionDisk.yaml @@ -395,6 +395,19 @@ properties: default_from_api: true update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=accessMode' update_verb: 'PATCH' + - name: 'provisionedIops' + type: Integer + description: | + Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second + that the disk can handle. Values must be between 10,000 and 120,000. + For more details, see the Extreme persistent disk [documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). + default_from_api: true + - name: 'provisionedThroughput' + type: Integer + description: | + Indicates how much throughput to provision for the disk. This sets the number of throughput + mb per second that the disk can handle. Values must be greater than or equal to 1. 
+ default_from_api: true virtual_fields: - name: 'create_snapshot_before_destroy' type: Boolean diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl index 159c6091fe2a..8f3d5d54d835 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl @@ -62,6 +62,50 @@ func TestAccComputeRegionDisk_basic(t *testing.T) { }) } +func TestAccComputeRegionDisk_hyperdisk(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + var disk compute.Disk + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionDiskDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionDisk_hyperdisk(diskName, "self_link"), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeRegionDiskExists( + t, "google_compute_region_disk.regiondisk", &disk), + ), + }, + { + ResourceName: "google_compute_region_disk.regiondisk", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccComputeRegionDisk_hyperdiskUpdated(diskName, "name"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "access_mode", "READ_WRITE_SINGLE"), + resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "provisioned_iops", "20000"), + resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "provisioned_throughput", "250"), + testAccCheckComputeRegionDiskExists(t, "google_compute_region_disk.regiondisk", &disk), + ), + }, + { + ResourceName: "google_compute_region_disk.regiondisk", 
+ ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + func TestAccComputeRegionDisk_basicUpdate(t *testing.T) { t.Parallel() @@ -399,6 +443,68 @@ func testAccCheckComputeRegionDiskInstances(n string, disk *compute.Disk) resour } } +func testAccComputeRegionDisk_hyperdisk(diskName, refSelector string) string { + return fmt.Sprintf(` +resource "google_compute_disk" "disk" { + name = "%s" + image = "debian-cloud/debian-11" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "snapdisk" { + name = "%s" + source_disk = google_compute_disk.disk.name + zone = "us-central1-a" +} + +resource "google_compute_region_disk" "regiondisk" { + name = "%s" + snapshot = google_compute_snapshot.snapdisk.%s + type = "hyperdisk-balanced-high-availability" + size = 50 + replica_zones = ["us-central1-a", "us-central1-f"] + + access_mode = "READ_WRITE_MANY" + provisioned_iops = 10000 + provisioned_throughput = 190 +} +`, diskName, diskName, diskName, refSelector) +} + +func testAccComputeRegionDisk_hyperdiskUpdated(diskName, refSelector string) string { + return fmt.Sprintf(` +resource "google_compute_disk" "disk" { + name = "%s" + image = "debian-cloud/debian-11" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "snapdisk" { + name = "%s" + source_disk = google_compute_disk.disk.name + zone = "us-central1-a" +} + +resource "google_compute_region_disk" "regiondisk" { + name = "%s" + snapshot = google_compute_snapshot.snapdisk.%s + type = "hyperdisk-balanced-high-availability" + region = "us-central1" + + replica_zones = ["us-central1-a", "us-central1-f"] + + size = 100 + access_mode = "READ_WRITE_SINGLE" + provisioned_iops = 20000 + provisioned_throughput = 250 +} +`, diskName, diskName, diskName, refSelector) +} + func testAccComputeRegionDisk_basic(diskName, refSelector string) string { return fmt.Sprintf(` resource 
"google_compute_disk" "disk" { From c3dc0cbf59ba4e584cb6f16fee61fa0c1bcd997a Mon Sep 17 00:00:00 2001 From: Niharika <35183015+niharika-98@users.noreply.github.com> Date: Wed, 9 Jul 2025 21:52:49 +0530 Subject: [PATCH 504/884] Adding Service_config resource to Backupdr (#14471) --- mmv1/products/backupdr/ServiceConfig.yaml | 59 +++++++++++++++++++ .../examples/backup_dr_service_config.tf.tmpl | 4 ++ 2 files changed, 63 insertions(+) create mode 100644 mmv1/products/backupdr/ServiceConfig.yaml create mode 100644 mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl diff --git a/mmv1/products/backupdr/ServiceConfig.yaml b/mmv1/products/backupdr/ServiceConfig.yaml new file mode 100644 index 000000000000..e641cd27ac2d --- /dev/null +++ b/mmv1/products/backupdr/ServiceConfig.yaml @@ -0,0 +1,59 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'ServiceConfig' +description: | + Initializes a Project-level default Backupdr config. It creates default Backupvault and default Backup Plan in same project for customers to protect instances. 
+references: + guides: + api: 'https://cloud.google.com/backup-disaster-recovery/docs/reference/rest/v1/projects.locations.serviceConfig' + +base_url: 'projects/{{project}}/locations/{{location}}/serviceConfig' + +create_url: 'projects/{{project}}/locations/{{location}}/serviceConfig:initialize' + +exclude_read: true +exclude_delete: true +exclude_import: true +exclude_sweeper: true + +immutable: true + +async: + actions: ['create'] + operation: + base_url: '{{op_id}}' + +examples: + - name: "backup_dr_service_config" + primary_resource_id: "my-service-config" + exclude_import_test: true + vars: + resource_type: 'compute.googleapis.com/Instance' + test_env_vars: + project: 'PROJECT_NAME' + +parameters: + - name: 'location' + type: String + required: true + url_param_only: true + description: | + The location in which the Service config is to be initialized. + +properties: + - name: 'resourceType' + type: String + required: true + description: The resource type to which the default service config will be applied. 
diff --git a/mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl new file mode 100644 index 000000000000..076400311210 --- /dev/null +++ b/mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl @@ -0,0 +1,4 @@ +resource "google_backup_dr_service_config" "bpa1" { + location = "us-central1" + resource_type= "compute.googleapis.com/Instance" +} \ No newline at end of file From 9d62929357b807ddfb77b0913ec861a49fa7b1b1 Mon Sep 17 00:00:00 2001 From: Mark van Holsteijn Date: Wed, 9 Jul 2025 18:38:42 +0200 Subject: [PATCH 505/884] feat: Add a Firestore Document data source (#14466) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_google_firestore_document.go | 51 ++++++++++ ...a_source_google_firestore_document_test.go | 98 +++++++++++++++++++ .../docs/d/firestore_document.html.markdown | 43 ++++++++ 4 files changed, 193 insertions(+) create mode 100644 mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go create mode 100644 mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 859406a75bdf..a288f6ff57aa 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -166,6 +166,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_firebase_hosting_channel": firebasehosting.DataSourceGoogleFirebaseHostingChannel(), "google_firebase_web_app": firebase.DataSourceGoogleFirebaseWebApp(), {{- end }} + "google_firestore_document": firestore.DataSourceGoogleFirestoreDocument(), "google_folder": resourcemanager.DataSourceGoogleFolder(), 
"google_folders": resourcemanager.DataSourceGoogleFolders(), "google_folder_organization_policy": resourcemanager.DataSourceGoogleFolderOrganizationPolicy(), diff --git a/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go b/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go new file mode 100644 index 000000000000..6799e0b41cba --- /dev/null +++ b/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go @@ -0,0 +1,51 @@ +package firestore + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleFirestoreDocument() *schema.Resource { + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceFirestoreDocument().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "collection") + tpgresource.AddRequiredFieldsToSchema(dsSchema, "document_id") + tpgresource.AddRequiredFieldsToSchema(dsSchema, "database") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: DataSourceGoogleFirestoreDocumentRead, + Schema: dsSchema, + } +} + +func DataSourceGoogleFirestoreDocumentRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + collection := d.Get("collection").(string) + document_id := d.Get("document_id").(string) + database := d.Get("database").(string) + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project: %s", err) + } + + name := fmt.Sprintf("projects/%s/databases/%s/documents/%s/%s", project, database, collection, document_id) + d.SetId(name) + if err = d.Set("name", name); err != nil { + return err + } + err = resourceFirestoreDocumentRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + 
return fmt.Errorf("%s not found", name) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go b/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go new file mode 100644 index 000000000000..5c7d40638477 --- /dev/null +++ b/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go @@ -0,0 +1,98 @@ +package firestore_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDatasourceFirestoreDocument_simple(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccDatasourceFirestoreDocument_simple(randomSuffix, orgId, "doc-id-1", "val1"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_firestore_document.instance", "fields", + "{\"something\":{\"mapValue\":{\"fields\":{\"yo\":{\"stringValue\":\"val1\"}}}}}"), + resource.TestCheckResourceAttr("data.google_firestore_document.instance", + "id", fmt.Sprintf("projects/tf-test-%s/databases/(default)/documents/somenewcollection/doc-id-1", randomSuffix)), + resource.TestCheckResourceAttr("data.google_firestore_document.instance", + "name", fmt.Sprintf("projects/tf-test-%s/databases/(default)/documents/somenewcollection/doc-id-1", randomSuffix)), + resource.TestCheckResourceAttr("data.google_firestore_document.instance", + "collection", "somenewcollection"), + 
resource.TestCheckResourceAttr("data.google_firestore_document.instance", + "database", "(default)"), + resource.TestCheckResourceAttrSet("data.google_firestore_document.instance", "path"), + resource.TestCheckResourceAttrSet("data.google_firestore_document.instance", "create_time"), + resource.TestCheckResourceAttrSet("data.google_firestore_document.instance", "update_time"), + ), + }, + }, + }) +} + +func testAccDatasourceFirestoreDocument_simple_basicDeps(randomSuffix, orgId string) string { + return fmt.Sprintf(` +resource "google_project" "project" { + project_id = "tf-test-%s" + name = "tf-test-%s" + org_id = "%s" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + depends_on = [google_project.project] + + create_duration = "60s" +} + +resource "google_project_service" "firestore" { + project = google_project.project.project_id + service = "firestore.googleapis.com" + + # Needed for CI tests for permissions to propagate, should not be needed for actual usage + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_firestore_database" "database" { + project = google_project.project.project_id + name = "(default)" + location_id = "nam5" + type = "FIRESTORE_NATIVE" + + depends_on = [google_project_service.firestore] +} +`, randomSuffix, randomSuffix, orgId) +} + +func testAccDatasourceFirestoreDocument_simple(randomSuffix, orgId, name, val string) string { + return testAccDatasourceFirestoreDocument_simple_basicDeps(randomSuffix, orgId) + fmt.Sprintf(` +resource "google_firestore_document" "instance" { + project = google_project.project.project_id + database = google_firestore_database.database.name + collection = "somenewcollection" + document_id = "%s" + fields = "{\"something\":{\"mapValue\":{\"fields\":{\"yo\":{\"stringValue\":\"%s\"}}}}}" +} + +data "google_firestore_document" "instance" { + project = google_firestore_document.instance.project + database = google_firestore_document.instance.database + collection = 
google_firestore_document.instance.collection + document_id = google_firestore_document.instance.document_id +} +`, name, val) +} diff --git a/mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown b/mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown new file mode 100644 index 000000000000..7d0237e8f3f6 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown @@ -0,0 +1,43 @@ +--- +subcategory: "Firestore" +description: |- + Read a document from a Firestore database +--- + + +# google_firestore_document + +Reads a document from a Firestore database. +See [the official documentation](https://cloud.google.com/firestore/native/docs/) +and +[API](https://cloud.google.com/firestore/docs/reference/rest/v1/projects.databases.documents/get/). + + +## Example Usage + +Retrieve a document from the Firestore database. + +```hcl +resource "google_firestore_document" "mydoc" { + project = google_firestore_database.database.project + database = google_firestore_database.database.name + collection = "somenewcollection" + document_id = "my-doc-id" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `database` - (Required) The name of the Firestore database. + +* `collection` - (Required) The name of the collection of documents. + +* `document_id` - (Required) The id of the document to get. + +* `project` - (Optional) The project in which the database resides. + +## Attributes Reference + +See [google_firestore_document](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_firestore_document) resource for details of the available attributes. 
From 8c29e2f4ad8a467a953d7ee93a8d430c2a2cbf3c Mon Sep 17 00:00:00 2001 From: veraz0818 Date: Wed, 9 Jul 2025 09:58:04 -0700 Subject: [PATCH 506/884] promote admin cluster resource to GA (#14448) --- mmv1/products/gkeonprem/VmwareAdminCluster.yaml | 4 ---- .../examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl | 1 - .../examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl | 1 - .../examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl | 1 - 4 files changed, 7 deletions(-) diff --git a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml index a7e7c45413f2..9bc1ed558dcc 100644 --- a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml @@ -16,7 +16,6 @@ name: "VmwareAdminCluster" description: "A Google VMware Admin Cluster." references: api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareAdminClusters' -min_version: beta base_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters" create_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters?vmware_admin_cluster_id={{name}}" update_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}" @@ -42,21 +41,18 @@ taint_resource_on_failed_create: true examples: - name: "gkeonprem_vmware_admin_cluster_basic" primary_resource_id: "admin-cluster-basic" - min_version: beta vars: name: "basic" test_env_vars: project: "fake-backend-360322" - name: 'gkeonprem_vmware_admin_cluster_full' primary_resource_id: 'admin-cluster-full' - min_version: beta vars: name: 'full' test_env_vars: project: 'fake-backend-360322' - name: 'gkeonprem_vmware_admin_cluster_metallb' primary_resource_id: 'admin-cluster-metallb' - min_version: beta vars: name: 'metallb' test_env_vars: diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl 
b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl index c29c374ecb83..1d7cba073c7d 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_basic.tf.tmpl @@ -1,5 +1,4 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "name"}}" location = "us-west1" description = "test admin cluster" diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl index cbf9945bd06f..e038d9b00553 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl @@ -1,5 +1,4 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "name"}}" location = "us-west1" description = "test admin cluster" diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl index b8f499ec7ec8..aedef5e78f71 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl @@ -1,5 +1,4 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "name"}}" location = "us-west1" description = "test admin cluster" From d1ae76c4f13d91de8c85b4812918992cedcf62ab Mon Sep 17 00:00:00 2001 From: porky256 <61063240+porky256@users.noreply.github.com> Date: Wed, 9 Jul 2025 20:22:04 +0200 Subject: [PATCH 507/884] Promote network_security.backend_authentication_config to GA (#14213) --- .../certificatemanager/Certificate.yaml | 2 +- 
mmv1/products/compute/BackendService.yaml | 2 -- .../BackendAuthenticationConfig.yaml | 2 -- .../backend_service_tls_settings.tf.tmpl | 3 --- ..._backend_authentication_config_basic.tf.tmpl | 1 - ...y_backend_authentication_config_full.tf.tmpl | 3 --- ...urity_backend_authentication_config_test.go} | 17 +++-------------- 7 files changed, 4 insertions(+), 26 deletions(-) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_backend_authentication_config_test.go.tmpl => resource_network_security_backend_authentication_config_test.go} (92%) diff --git a/mmv1/products/certificatemanager/Certificate.yaml b/mmv1/products/certificatemanager/Certificate.yaml index 2ba2854f40c3..b10f1dabef22 100644 --- a/mmv1/products/certificatemanager/Certificate.yaml +++ b/mmv1/products/certificatemanager/Certificate.yaml @@ -50,7 +50,7 @@ sweeper: - region: "us-west1" dependencies: - "google_compute_region_target_https_proxy" - # - "google_network_security_backend_authentication_config" (beta only) + - "google_network_security_backend_authentication_config" - "google_network_services_gateway" - "google_compute_target_https_proxy" - "google_compute_network" diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index af98a78f7547..9ffafa2ee70c 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -140,7 +140,6 @@ examples: network_name: 'network' - name: 'backend_service_tls_settings' primary_resource_id: 'default' - min_version: 'beta' vars: backend_service_name: 'backend-service' health_check_name: 'health-check' @@ -1584,7 +1583,6 @@ properties: type: NestedObject description: | Configuration for Backend Authenticated TLS and mTLS. May only be specified when the backend protocol is SSL, HTTPS or HTTP2. 
- min_version: beta properties: - name: 'sni' type: String diff --git a/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml b/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml index c398f039c558..27bc817ba6f3 100644 --- a/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml +++ b/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml @@ -24,7 +24,6 @@ create_url: 'projects/{{project}}/locations/{{location}}/backendAuthenticationCo update_verb: 'PATCH' update_mask: true autogen_async: true -min_version: 'beta' async: actions: ['create', 'delete', 'update'] type: 'OpAsync' @@ -46,7 +45,6 @@ examples: trust_config_name: 'my-trust-config' - name: 'backend_service_tls_settings' primary_resource_id: 'default' - min_version: 'beta' vars: backend_service_name: 'backend-service' health_check_name: 'health-check' diff --git a/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl index 2fc850f1e11b..f9fd2e28ce8b 100644 --- a/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl @@ -1,5 +1,4 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "backend_service_name"}}" health_checks = [google_compute_health_check.default.id] load_balancing_scheme = "EXTERNAL_MANAGED" @@ -17,7 +16,6 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { } resource "google_compute_health_check" "default" { - provider = google-beta name = "{{index $.Vars "health_check_name"}}" http_health_check { port = 80 @@ -25,7 +23,6 @@ resource "google_compute_health_check" "default" { } resource "google_network_security_backend_authentication_config" "default" { - provider = google-beta name = "{{index $.Vars "authentication_name"}}" well_known_roots = "PUBLIC_ROOTS" } \ No newline at end of file diff 
--git a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl index fa24aefb84fc..81cc4daf64ad 100644 --- a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl @@ -1,5 +1,4 @@ resource "google_network_security_backend_authentication_config" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "resource_name"}}" labels = { foo = "bar" diff --git a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl index e39905e9345d..7e9cbb321dd4 100644 --- a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl @@ -1,5 +1,4 @@ resource "google_certificate_manager_certificate" "certificate" { - provider = google-beta name = "{{index $.Vars "certificate_name"}}" labels = { foo = "bar" @@ -13,7 +12,6 @@ resource "google_certificate_manager_certificate" "certificate" { } resource "google_certificate_manager_trust_config" "trust_config" { - provider = google-beta name = "{{index $.Vars "trust_config_name"}}" description = "sample description for the trust config" location = "global" @@ -33,7 +31,6 @@ resource "google_certificate_manager_trust_config" "trust_config" { } resource "google_network_security_backend_authentication_config" "default" { - provider = google-beta name = "{{index $.Vars "resource_name"}}" labels = { bar = "foo" diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go.tmpl 
b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go similarity index 92% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go.tmpl rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go index 1c0a4594fb53..4c79cbc74ce7 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go @@ -1,11 +1,11 @@ package networksecurity_test -{{- if ne $.TargetVersionName "ga" }} + import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" - + "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -18,7 +18,7 @@ func TestAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAut acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAuthenticationConfigFullExample_full(context), @@ -50,7 +50,6 @@ func TestAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAut func testAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAuthenticationConfigFullExample_full(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_certificate_manager_certificate" "certificate" { - provider = google-beta name = "tf-test-my-certificate%{random_suffix}" location = "global" self_managed { @@ -61,7 +60,6 @@ resource "google_certificate_manager_certificate" "certificate" { 
} resource "google_certificate_manager_trust_config" "trust_config" { - provider = google-beta name = "tf-test-my-trust-config%{random_suffix}" description = "sample description for the trust config" location = "global" @@ -77,7 +75,6 @@ resource "google_certificate_manager_trust_config" "trust_config" { } resource "google_network_security_backend_authentication_config" "default" { - provider = google-beta name = "tf-test-my-backend-authentication-config%{random_suffix}" location = "global" description = "my description" @@ -90,11 +87,7 @@ resource "google_network_security_backend_authentication_config" "default" { func testAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAuthenticationConfigFullExample_update(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" { - provider = google-beta -} resource "google_certificate_manager_certificate" "certificate" { - provider = google-beta name = "tf-test-my-certificate%{random_suffix}" location = "global" self_managed { @@ -105,7 +98,6 @@ resource "google_certificate_manager_certificate" "certificate" { } resource "google_certificate_manager_trust_config" "trust_config" { - provider = google-beta name = "tf-test-my-trust-config%{random_suffix}" description = "sample description for the trust config" location = "global" @@ -121,7 +113,6 @@ resource "google_certificate_manager_trust_config" "trust_config" { } resource "google_network_security_backend_authentication_config" "default" { - provider = google-beta name = "tf-test-my-backend-authentication-config%{random_suffix}" location = "global" description = "updated description" @@ -131,5 +122,3 @@ resource "google_network_security_backend_authentication_config" "default" { } `, context) } - -{{ end }} \ No newline at end of file From 2237d747b6273e287e99fae83838aecf0bbc208c Mon Sep 17 00:00:00 2001 From: Jared Date: Wed, 9 Jul 2025 11:29:33 -0700 Subject: [PATCH 508/884] artifactregistry: fix invalid cleanup 
policy in plan (#14451) --- .../artifact_registry_repository.go.tmpl | 70 ++++++++++++++----- ..._artifact_registry_repository_test.go.tmpl | 68 ++++++++++++++++++ 2 files changed, 119 insertions(+), 19 deletions(-) diff --git a/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl b/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl index d4e0e3d0a716..cd0c2dfc157e 100644 --- a/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl +++ b/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl @@ -68,23 +68,11 @@ func parseDurationAsSeconds(v string) (int, bool) { // Like tpgresource.DurationDiffSuppress, but supports 'd' func durationDiffSuppress(k, oldr, newr string, d *schema.ResourceData) bool { - o, n := d.GetChange(k) - old, ok := o.(string) + oldSeconds, ok := parseDurationAsSeconds(oldr) if !ok { return false } - new, ok := n.(string) - if !ok { - return false - } - if old == new { - return true - } - oldSeconds, ok := parseDurationAsSeconds(old) - if !ok { - return false - } - newSeconds, ok := parseDurationAsSeconds(new) + newSeconds, ok := parseDurationAsSeconds(newr) if !ok { return false } @@ -92,15 +80,59 @@ func durationDiffSuppress(k, oldr, newr string, d *schema.ResourceData) bool { } func mapHashID(v any) int { - obj, ok := v.(map[string]any) + replaceNestedValue(v, []string{"condition", "older_than"}, expandDuration) + replaceNestedValue(v, []string{"condition", "newer_than"}, expandDuration) + return schema.HashString(fmt.Sprintf("%v", v)) +} + +func expandDuration(v any) (any, bool) { + if val, ok := v.(string); ok { + if secs, ok := parseDurationAsSeconds(val); ok { + return fmt.Sprintf("%ds", secs), true + } + } + return nil, false + +} + +// Replace a value in a schema object, if it exists. 
+// Nested maps follow the pattern map[string]any -> [1]any -> map[string]any +func replaceNestedValue(obj any, keys []string, replaceFunc func(any) (any, bool)) { + if len(keys) == 0 { + return + } + next := obj + for _, key := range keys[:len(keys)-1] { + nextMap, ok := next.(map[string]any) + if !ok { + return + } + arrObj, ok := nextMap[key] + if !ok { + return + } + arr, ok := arrObj.([]any) + if !ok { + return + } + if len(arr) != 1 { + return + } + next = arr[0] + } + lastMap, ok := next.(map[string]any) if !ok { - return 0 + return } - s, ok := obj["id"].(string) + lastKey := keys[len(keys)-1] + last, ok := lastMap[lastKey] if !ok { - return 0 + return + } + result, ok := replaceFunc(last) + if ok { + lastMap[lastKey] = result } - return schema.HashString(s) } func isDefaultEnum(val any) bool { diff --git a/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl b/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl index 8da32e1976f7..b6a73470e45a 100644 --- a/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl +++ b/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl @@ -138,6 +138,74 @@ func TestAccArtifactRegistryRepository_kfp(t *testing.T) { }) } +func TestAccArtifactRegistryRepository_cleanup(t *testing.T) { + t.Parallel() + + repositoryID := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckArtifactRegistryRepositoryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccArtifactRegistryRepository_cleanup(repositoryID), + }, + { + ResourceName: "google_artifact_registry_repository.test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: 
testAccArtifactRegistryRepository_cleanup2(repositoryID), + PlanOnly: true, + ExpectNonEmptyPlan: true, + }, + { + Config: testAccArtifactRegistryRepository_cleanup2(repositoryID), + }, + }, + }) +} + +func testAccArtifactRegistryRepository_cleanup(repositoryID string)string { + return fmt.Sprintf(` +resource "google_artifact_registry_repository" "test" { + repository_id = "%s" + location = "us-central1" + description = "cleanup with non-second time" + format = "DOCKER" + + cleanup_policies { + id = "delete" + action = "DELETE" + condition { + older_than = "7d" + } + } +} +`, repositoryID) +} + +func testAccArtifactRegistryRepository_cleanup2(repositoryID string)string { + return fmt.Sprintf(` +resource "google_artifact_registry_repository" "test" { + repository_id = "%s" + location = "us-central1" + description = "cleanup with non-second time" + format = "DOCKER" + + cleanup_policies { + id = "delete" + action = "DELETE" + condition { + older_than = "10d" + } + } +} +`, repositoryID) +} + func testAccArtifactRegistryRepository_update(repositoryID string) string { return fmt.Sprintf(` resource "google_artifact_registry_repository" "test" { From 2fa2e22b0d4b5f21d199ff6696d6d5c0d32b1550 Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Wed, 9 Jul 2025 15:00:59 -0400 Subject: [PATCH 509/884] Add SecureSourceManager UpdateRepository (#14339) --- .../securesourcemanager/Repository.yaml | 13 ++- ...e_source_manager_repository_update_test.go | 100 ++++++++++++++++++ 2 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index 599baf8e11d1..f585a5d6f665 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -21,7 +21,8 @@ references: docs: base_url: 
'projects/{{project}}/locations/{{location}}/repositories?repository_id={{repository_id}}' self_link: 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}' -immutable: true +update_verb: 'PATCH' +update_mask: true import_format: - 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}' - '{{repository_id}}' @@ -31,7 +32,7 @@ timeouts: delete_minutes: 20 autogen_async: true async: - actions: ['create', 'delete'] + actions: ['create', 'update', 'delete'] type: 'OpAsync' operation: base_url: '{{op_id}}' @@ -72,12 +73,14 @@ examples: parameters: - name: 'location' type: String + immutable: true description: | The location for the Repository. url_param_only: true required: true - name: 'repository_id' type: String + immutable: true description: | The ID for the Repository. url_param_only: true @@ -97,6 +100,7 @@ properties: description: | The name of the instance in which the repository is hosted. required: true + immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: 'uid' type: String @@ -139,13 +143,16 @@ properties: description: | Initial configurations for the repository. ignore_read: true + immutable: true properties: - name: 'defaultBranch' type: String + immutable: true description: | Default branch name of the repository. - name: 'gitignores' type: Array + immutable: true description: | List of gitignore template names user can choose from. Valid values can be viewed at https://cloud.google.com/secure-source-manager/docs/reference/rest/v1/projects.locations.repositories#initialconfig. @@ -153,11 +160,13 @@ properties: type: String - name: 'license' type: String + immutable: true description: | License template name user can choose from. Valid values can be viewed at https://cloud.google.com/secure-source-manager/docs/reference/rest/v1/projects.locations.repositories#initialconfig. - name: 'readme' type: String + immutable: true description: | README template name. 
Valid values can be viewed at https://cloud.google.com/secure-source-manager/docs/reference/rest/v1/projects.locations.repositories#initialconfig. diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go new file mode 100644 index 000000000000..4a5264e2c8b7 --- /dev/null +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go @@ -0,0 +1,100 @@ +package securesourcemanager_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "prevent_destroy": false, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_basic(context), + }, + { + ResourceName: "google_secure_source_manager_repository.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id"}, + }, + { + Config: testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_secure_source_manager_repository.default", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_secure_source_manager_repository.default", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id"}, + }, + }, + }) +} + +func testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "tf-test-my-instance%{random_suffix}" + + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} + +resource "google_secure_source_manager_repository" "default" { + location = "us-central1" + repository_id = "tf-test-my-repository%{random_suffix}" + instance = google_secure_source_manager_instance.instance.name + + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} +`, context) +} + +func testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "tf-test-my-instance%{random_suffix}" + + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} + +resource "google_secure_source_manager_repository" "default" { + location = "us-central1" + repository_id = "tf-test-my-repository%{random_suffix}" + instance = google_secure_source_manager_instance.instance.name + + description = "new description" + + # Prevent accidental deletions. 
+ lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} +`, context) +} From 31f6746662930dc3bc70b14e4450bbb705a23a32 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 9 Jul 2025 12:10:49 -0700 Subject: [PATCH 510/884] fix apigee instance tests (#14484) --- .../terraform/examples/apigee_instance_full_test.tf.tmpl | 2 +- .../services/apigee/resource_apigee_instance_update_test.go | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl index 0b6d6ee1e0fe..0c97983bd2f6 100644 --- a/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl @@ -137,7 +137,7 @@ resource "google_apigee_instance" "{{$.PrimaryResourceId}}" { disk_encryption_key_name = google_kms_crypto_key.apigee_key.id access_logging_config { - enabled = true, + enabled = true filter = "status_code >= 200 && status_code < 300" } } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go index eb0aa901e152..c2c5097689d2 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go @@ -122,7 +122,7 @@ resource "google_apigee_instance" "apigee_instance" { ] access_logging_config { - enabled = false, + enabled = false filter = "status_code >= 0 && status_code < 600" } } @@ -211,7 +211,7 @@ resource "google_apigee_instance" "apigee_instance" { ] access_logging_config { - enabled = true, + enabled = true filter = "status_code >= 200 && status_code < 300" } } From ae19e6d286bac44cc7e4da15c88cb3f1c1c00f7d Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Thu, 10 Jul 2025 01:36:20 +0530 Subject: [PATCH 511/884] Adding support for 
Routines in Listings (#14472) --- .../bigqueryanalyticshub/Listing.yaml | 24 ++++++++ ...y_analyticshub_listing_dcr_routine.tf.tmpl | 58 +++++++++++++++++++ 2 files changed, 82 insertions(+) create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_dcr_routine.tf.tmpl diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index 3c06cce7a17e..ccec2c620280 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -86,6 +86,17 @@ examples: listing_id: 'tf_test_pubsub_listing' pubsub_topic_name: 'test_pubsub' description: 'Example for pubsub topic source' + - name: 'bigquery_analyticshub_listing_dcr_routine' + primary_resource_id: 'listing' + primary_resource_name: 'fmt.Sprintf("tf_test_pubsub_de%s", context["random_suffix"]),fmt.Sprintf("tf_test_listing%s", context["random_suffix"])' + region_override: 'US' + min_version: beta + vars: + data_exchange_id: 'tf_test_data_exchange' + listing_id: 'tf_test_listing_routine' + dataset_id: 'tf_test_dataset' + routine_id: 'tf_test_routine' + desc: 'Example for listing with routine' parameters: properties: - name: 'name' @@ -198,6 +209,19 @@ properties: Format: For table: projects/{projectId}/datasets/{datasetId}/tables/{tableId} Example:"projects/test_project/datasets/test_dataset/tables/test_table" immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + exactly_one_of: + - 'table' + - 'routine' + - name: 'routine' + min_version: beta + type: String + description: | + Format: For routine: projects/{projectId}/datasets/{datasetId}/routines/{routineId} Example:"projects/test_project/datasets/test_dataset/routines/test_routine" + immutable: true + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + exactly_one_of: + - 'table' + - 'routine' - name: 'pubsubTopic' type: NestedObject description: Pub/Sub topic source. 
diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_dcr_routine.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_dcr_routine.tf.tmpl new file mode 100644 index 000000000000..7871e97b5d3b --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_dcr_routine.tf.tmpl @@ -0,0 +1,58 @@ +resource "google_bigquery_analytics_hub_data_exchange" "dcr_data_exchange_example" { + provider = google-beta + location = "us" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "desc"}}" + sharing_environment_config { + dcr_exchange_config {} + } +} + +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { + provider = google-beta + dataset_id = "{{index $.Vars "dataset_id"}}" + friendly_name = "{{index $.Vars "dataset_id"}}" + description = "{{index $.Vars "desc"}}" + location = "us" +} + +resource "google_bigquery_routine" "{{$.PrimaryResourceId}}" { + provider = google-beta + dataset_id = google_bigquery_dataset.{{$.PrimaryResourceId}}.dataset_id + routine_id = "{{index $.Vars "routine_id"}}" + routine_type = "TABLE_VALUED_FUNCTION" + language = "SQL" + description = "A DCR routine example." 
+ definition_body = <<-EOS + SELECT 1 + value AS value + EOS + arguments { + name = "value" + argument_kind = "FIXED_TYPE" + data_type = jsonencode({ "typeKind" : "INT64" }) + } + return_table_type = jsonencode({ + "columns" : [ + { "name" : "value", "type" : { "typeKind" : "INT64" } }, + ] + }) +} + +resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { + provider = google-beta + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.dcr_data_exchange_example.data_exchange_id + listing_id = "{{index $.Vars "listing_id"}}" + display_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "desc"}}" + bigquery_dataset { + dataset = google_bigquery_dataset.{{$.PrimaryResourceId}}.id + selected_resources { + routine = google_bigquery_routine.{{$.PrimaryResourceId}}.id + } + } + restricted_export_config { + enabled = true + } +} \ No newline at end of file From c8fb9ca05b09df0ed5a41e55f48e404c99c83d6e Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Thu, 10 Jul 2025 02:58:22 +0530 Subject: [PATCH 512/884] Added DataExchangeSubscription Resource to BigQueryAnalyticsHub (#14370) --- .../DataExchangeSubscription.yaml | 257 ++++++++++++++++++ ...ticshub_data_exchange_subscription.go.tmpl | 19 ++ ...ticshub_data_exchange_subscription.go.tmpl | 14 + ...ticshub_data_exchange_subscription.go.tmpl | 36 +++ ...ub_dataexchange_subscription_basic.tf.tmpl | 94 +++++++ ...ticshub_data_exchange_subscription.go.tmpl | 51 ++++ ..._hub_dataexchangesubscription_test.go.tmpl | 153 +++++++++++ 7 files changed, 624 insertions(+) create mode 100644 mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml create mode 100644 mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl create mode 100644 mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl create mode 100644 
mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl create mode 100644 mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchangesubscription_test.go.tmpl diff --git a/mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml b/mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml new file mode 100644 index 000000000000..1e6743d18bef --- /dev/null +++ b/mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml @@ -0,0 +1,257 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: 'DataExchangeSubscription' +min_version: beta +api_resource_type_kind: Subscription +description: A Bigquery Analytics Hub Data Exchange subscription +references: + guides: + 'Official Documentation': 'https://cloud.google.com/bigquery/docs/analytics-hub-introduction' + api: 'https://cloud.google.com/bigquery/docs/reference/analytics-hub/rest/v1/projects.locations.subscriptions' +docs: + note: |- + When importing the resource with `terraform import`, provide the destination/subscriber's project and location + in the format projects/{{subscriber_project}}/locations/{{subscriber_location}}/subscriptions/{{subscription_id}} +base_url: 'projects/{{project}}/locations/{{location}}/subscriptions' +self_link: 'projects/{{project}}/locations/{{location}}/subscriptions/{{subscription_id}}' +create_url: 'projects/{{data_exchange_project}}/locations/{{data_exchange_location}}/dataExchanges/{{data_exchange_id}}:subscribe' +import_format: + - 'projects/{{project}}/locations/{{location}}/subscriptions/{{subscription_id}}' +custom_code: + decoder: 'templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' + encoder: 'templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' + post_read: 'templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' + custom_update: 'templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' +sweeper: + url_substitutions: + - region: "us" +examples: + - name: 'bigquery_analyticshub_dataexchange_subscription_basic' + primary_resource_id: 'subscription' + primary_resource_name: 'fmt.Sprintf("tf_test_subscription_%s", context["random_suffix"])' + region_override: 'us' + ignore_read_extra: + - 'last_modify_time' + - 'state' + - 'linked_dataset_map' + - 'linked_resources' + vars: + data_exchange_id: 'my_test_dataexchange' + listing_dataset_id: 'listing_src_dataset' + listing_table_id: 'listing_src_table' + listing_id: 
'my_test_listing' + subscription_id: 'my_subscription_id' + subscriber_contact_email: 'testuser@example.com' + # Variables for the Destination Dataset created by the Subscription + destination_dataset_id: 'subscribed_dest_dataset' + destination_dataset_friendly_name: 'Subscribed Destination Dataset' +virtual_fields: + - name: 'refresh_policy' + type: Enum + description: |- + Controls when the subscription is automatically refreshed by the provider. + * `ON_READ`: Default value if not specified. The subscription will be refreshed every time Terraform performs a read operation (e.g., `terraform plan`, `terraform apply`, `terraform refresh`). This ensures the state is always up-to-date. + * `ON_STALE`: The subscription will only be refreshed when its reported `state` (an output-only field from the API) is `STATE_STALE` during a Terraform read operation. + * `NEVER`: The provider will not automatically refresh the subscription. + default_value: 'ON_READ' + enum_values: + - 'ON_READ' + - 'ON_STALE' + - 'NEVER' +parameters: + - name: 'dataExchangeId' + type: String + immutable: true + description: |- + The ID of the data exchange. Must contain only Unicode letters, numbers (0-9), underscores (_). Should not use characters that require URL-escaping, or characters outside of ASCII, spaces. + url_param_only: true + required: true + - name: 'dataExchangeProject' + type: String + immutable: true + description: |- + The ID of the Google Cloud project where the Data Exchange is located. + url_param_only: true + required: true + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: 'dataExchangeLocation' + type: String + immutable: true + description: |- + The name of the location of the Data Exchange. + url_param_only: true + required: true + - name: 'location' + type: String + immutable: true + description: | + The geographic location where the Subscription (and its linked dataset) should reside. 
+ This is the subscriber's desired location for the created resources. + See https://cloud.google.com/bigquery/docs/locations for supported locations. + url_param_only: true + required: true + custom_flatten: 'templates/terraform/custom_flatten/bigquery_dataset_location.go.tmpl' + diff_suppress_func: 'tpgresource.CaseDiffSuppress' + - name: 'subscriptionId' + type: String + immutable: true + description: |- + Name of the subscription to create. + required: true + custom_flatten: 'templates/terraform/custom_flatten/id_from_name.tmpl' + - name: 'subscriberContact' + type: String + immutable: true + description: |- + Email of the subscriber. + - name: 'destinationDataset' + type: NestedObject + immutable: true + description: + BigQuery destination dataset to create for the subscriber. + ignore_read: true + properties: + - name: 'location' + type: String + immutable: true + description: | + The geographic location where the dataset should reside. + See https://cloud.google.com/bigquery/docs/locations for supported locations. + required: true + custom_flatten: 'templates/terraform/custom_flatten/bigquery_dataset_location.go.tmpl' + diff_suppress_func: 'tpgresource.CaseDiffSuppress' + - name: 'datasetReference' + type: NestedObject + immutable: true + required: true + description: A reference that identifies the destination dataset. + properties: + - name: 'datasetId' + type: String + immutable: true + description: A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. + required: true + - name: 'projectId' + type: String + immutable: true + description: The ID of the project containing this dataset. + required: true + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: 'friendlyName' + type: String + immutable: true + description: A descriptive name for the dataset. 
+ - name: 'description' + type: String + immutable: true + description: A user-friendly description of the dataset. + - name: 'labels' + type: KeyValuePairs + immutable: true + description: | + The labels associated with this dataset. You can use these to + organize and group your datasets. +properties: + - name: 'name' + type: String + description: |- + The resource name of the subscription. e.g. "projects/myproject/locations/us/subscriptions/123" + output: true + - name: 'creationTime' + type: Time + description: |- + Timestamp when the subscription was created. + output: true + - name: 'lastModifyTime' + type: Time + description: |- + Timestamp when the subscription was last modified. + output: true + - name: 'organizationId' + type: String + description: |- + Organization of the project this subscription belongs to. + output: true + - name: 'organizationDisplayName' + type: String + description: |- + Display name of the project of this subscription. + output: true + - name: 'state' + type: String + description: |- + Current state of the subscription. + output: true + - name: 'resourceType' + type: String + description: |- + Listing shared asset type. + output: true + - name: 'linkedDatasetMap' + output: true + type: Map + description: |- + Output only. Map of listing resource names to associated linked resource, + e.g. projects/123/locations/us/dataExchanges/456/listings/789 -> projects/123/datasets/my_dataset + For Data Exchange subscriptions, this map may contain multiple entries if the Data Exchange has multiple listings. + key_name: resource_name + key_description: The associated linked resource + value_type: + name: linked_resource + type: NestedObject + properties: + - name: 'listing' + type: string + description: Output only. Listing for which linked resource is created. + output: true + - name: 'linkedDataset' + type: string + description: Output only. Name of the linked dataset, e.g. 
projects/subscriberproject/datasets/linkedDataset + output: true + exactly_one_of: + - 'linkedDataset' + - 'linkedPubsubSubscription' + - name: 'linkedPubsubSubscription' + type: string + description: Output only. Name of the Pub/Sub subscription, e.g. projects/subscriberproject/subscriptions/subscriptions/sub_id + output: true + exactly_one_of: + - 'linkedDataset' + - 'linkedPubsubSubscription' + - name: 'linkedResources' + type: Array + description: | + Output only. Linked resources created in the subscription. Only contains values if state = STATE_ACTIVE. + output: true + item_type: + type: NestedObject + properties: + - name: 'listing' + type: string + description: Output only. Listing for which linked resource is created. + output: true + - name: 'linkedDataset' + type: string + description: Output only. Name of the linked dataset, e.g. projects/subscriberproject/datasets/linkedDataset + output: true + - name: 'dataExchange' + type: String + description: |- + Output only. Resource name of the source Data Exchange. e.g. projects/123/locations/us/dataExchanges/456 + output: true + - name: 'logLinkedDatasetQueryUserEmail' + type: Boolean + description: 'Output only. By default, false. If true, the Subscriber agreed to the email sharing mandate that is enabled for DataExchange/Listing.' + output: true diff --git a/mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl new file mode 100644 index 000000000000..3faa515f48a0 --- /dev/null +++ b/mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl @@ -0,0 +1,19 @@ +//If a mutable field is added later in the subscription resource, an update API endpoint will be created +//and this custom_update will have to be changed and will call a Update API as well as done by mutable resources. 
+// all other fields are immutable for now, don't do anything else + +_ = config + +// We can get here if 'refresh_policy' was updated in the HCL config. +// Since 'refresh_policy' has a default, d.Get("refresh_policy") will always return a string. +// We check if its value has actually changed from the prior state. +if d.HasChange("refresh_policy") { + // If 'refresh_policy' was changed by the user, ensure its new value is set in the state. + // For an Optional+Computed-false field like this, Terraform usually handles this, + // but this explicit Set operation mirrors the previous boolean field handling. + if err := d.Set("refresh_policy", d.Get("refresh_policy")); err != nil { + return fmt.Errorf("Error updating refresh_policy: %s", err) + } +} + +return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl new file mode 100644 index 000000000000..2848a08eef05 --- /dev/null +++ b/mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl @@ -0,0 +1,14 @@ +if v, ok := res["name"]; ok && v != nil { + name := v.(string) + parts := strings.Split(name, "/") + if len(parts) > 0 { + // The last part of the resource name is the subscription ID. 
+ d.Set("subscription_id", parts[len(parts)-1]) + } +} + +if v, ok := res["subscriberContact"]; ok && v != nil { + d.Set("subscriber_contact", v.(string)) +} + +return res, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl new file mode 100644 index 000000000000..106f0bbdb4c6 --- /dev/null +++ b/mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl @@ -0,0 +1,36 @@ +config := meta.(*transport_tpg.Config) +if v, ok := d.GetOk("subscription_id"); ok { + obj["subscription"] = v.(string) + // Remove the auto-generated "subscriptionId" if it was added by MM, as it conflicts. + delete(obj, "subscriptionId") +} + +// The API expects a 'destination' field in the request body for the subscriber's +// project and location, e.g., "projects/my-project/locations/us-central1". +// This is derived from the 'project' and 'location' fields of the resource. +project := d.Get("project").(string) +location := d.Get("location").(string) +obj["destination"] = fmt.Sprintf("projects/%s/locations/%s", project, location) + +if v, ok := d.GetOk("subscriber_contact"); ok { + obj["subscriberContact"] = v.(string) +} + +if v, ok := d.GetOk("destination_dataset"); ok && v != nil { + expandedDataset, err := expandBigqueryAnalyticsHubDataExchangeSubscriptionDestinationDataset(v, d, config) + if err != nil { + return nil, fmt.Errorf("error expanding destination_dataset: %w", err) + } + // Ensure the expanded dataset is not empty before assigning, to avoid sending empty objects. + if expandedDataset != nil && !tpgresource.IsEmptyValue(reflect.ValueOf(expandedDataset)) { + obj["destinationDataset"] = expandedDataset + } else { + // If the expanded dataset is empty, remove it from the payload to avoid API errors. 
+ delete(obj, "destinationDataset") + } +} else { + // If destination_dataset is not provided by the user, ensure it's not in the payload. + delete(obj, "destinationDataset") +} + +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl new file mode 100644 index 000000000000..c50103d3cb2f --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl @@ -0,0 +1,94 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { + provider = google-beta + location = "us" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "Test Data Exchange" + sharing_environment_config { + dcr_exchange_config {} + } +} + +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { + provider = google-beta + dataset_id = "{{index $.Vars "listing_dataset_id"}}" + friendly_name = "{{index $.Vars "listing_dataset_id"}}" + description = "Dataset for Listing" + location = "us" +} + +resource "google_bigquery_table" "{{$.PrimaryResourceId}}" { + provider = google-beta + deletion_protection = false + table_id = "{{index $.Vars "listing_table_id"}}" + dataset_id = google_bigquery_dataset.{{$.PrimaryResourceId}}.dataset_id + schema = < Date: Thu, 10 Jul 2025 01:31:37 +0200 Subject: [PATCH 513/884] Feature gap: Add missed fields for Reservation resource (#14181) Signed-off-by: Cezary Sobczak Co-authored-by: Riley Karson --- mmv1/products/compute/Reservation.yaml | 107 +++++++++++++++- .../examples/reservation_basic_beta.tf.tmpl | 16 +++ .../reservation_sharing_policy.tf.tmpl | 50 ++++++++ ...servation_source_instance_template.tf.tmpl | 48 +++++++ .../examples/shared_reservation_beta.tf.tmpl | 55 ++++++++ .../update_encoder/reservation.go.tmpl | 
28 ++++ .../resource_compute_reservation_test.go | 120 ++++++++++++++++++ 7 files changed, 423 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/reservation_basic_beta.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl diff --git a/mmv1/products/compute/Reservation.yaml b/mmv1/products/compute/Reservation.yaml index ee9aba108e44..31f9617d4af8 100644 --- a/mmv1/products/compute/Reservation.yaml +++ b/mmv1/products/compute/Reservation.yaml @@ -58,6 +58,19 @@ examples: primary_resource_id: 'gce_reservation' vars: reservation_name: 'gce-reservation' + - name: 'reservation_basic_beta' + primary_resource_id: 'gce_reservation' + vars: + reservation_name: 'gce-reservation' + min_version: 'beta' + - name: 'reservation_source_instance_template' + primary_resource_id: 'gce_reservation_source_instance_template' + vars: + reservation_name: 'gce-reservation-source-instance-template' + - name: 'reservation_sharing_policy' + primary_resource_id: 'gce_reservation_sharing_policy' + vars: + reservation_name: 'gce-reservation-sharing-policy' - name: 'shared_reservation_basic' primary_resource_id: 'gce_reservation' vars: @@ -69,6 +82,18 @@ examples: exclude_docs: true # Resource creation race skip_vcr: true + - name: 'shared_reservation_beta' + primary_resource_id: 'gce_reservation' + vars: + reservation_name: 'gce-shared-reservation-beta' + test_env_vars: + project: 'PROJECT_NAME' + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + exclude_docs: true + # Resource creation race + skip_vcr: true + min_version: 'beta' parameters: - name: 'zone' type: ResourceRef @@ -154,6 +179,13 @@ properties: type: String description: | The project id/number, should be same as the key of this project config in the project map. 
+ - name: 'projects' + type: Array + description: | + List of project IDs with which the reservation is shared. + item_type: + type: String + min_version: 'beta' - name: 'specificReservation' type: NestedObject description: | @@ -178,8 +210,11 @@ properties: type: NestedObject description: | The instance properties for the reservation. - required: true immutable: true + default_from_api: true + exactly_one_of: + - 'specific_reservation.0.instance_properties' + - 'specific_reservation.0.source_instance_template' properties: - name: 'machineType' type: String @@ -245,3 +280,73 @@ properties: The size of the disk in base-2 GB. required: true immutable: true + - name: 'maintenanceInterval' + type: Enum + description: | + Specifies the frequency of planned maintenance events. + enum_values: + - 'AS_NEEDED' + - 'PERIODIC' + - 'RECURRENT' + min_version: 'beta' + immutable: true + - name: 'sourceInstanceTemplate' + type: String + description: | + Specifies the instance template to create the reservation. If you use this field, you must exclude the + instanceProperties field. + exactly_one_of: + - 'specific_reservation.0.instance_properties' + - 'specific_reservation.0.source_instance_template' + - name: 'deleteAtTime' + type: String + description: | + Absolute time in future when the reservation will be auto-deleted by Compute Engine. Timestamp is represented in RFC3339 text format. + Cannot be used with delete_after_duration. + immutable: true + default_from_api: true + conflicts: + - 'delete_after_duration.0.seconds' + - 'delete_after_duration.0.nanos' + - name: 'deleteAfterDuration' + type: NestedObject + description: | + Duration after which the reservation will be auto-deleted by Compute Engine. Cannot be used with delete_at_time. + ignore_read: true + properties: + - name: 'seconds' + type: String + description: | + Number of seconds for the auto-delete duration. 
+ immutable: true + conflicts: + - 'delete_at_time' + - name: 'nanos' + type: Integer + description: | + Number of nanoseconds for the auto-delete duration. + immutable: true + conflicts: + - 'delete_at_time' + - name: 'reservationSharingPolicy' + type: NestedObject + description: | + Sharing policy for reservations with Google Cloud managed services. + default_from_api: true + properties: + - name: 'serviceShareType' + type: Enum + description: | + Sharing config for all Google Cloud services. + enum_values: + - 'ALLOW_ALL' + - 'DISALLOW_ALL' + default_from_api: true + immutable: true + - name: 'enableEmergentMaintenance' + type: Boolean + description: | + Indicates if this group of VMs have emergent maintenance enabled. + immutable: true + ignore_read: true + min_version: 'beta' diff --git a/mmv1/templates/terraform/examples/reservation_basic_beta.tf.tmpl b/mmv1/templates/terraform/examples/reservation_basic_beta.tf.tmpl new file mode 100644 index 000000000000..eea6e709bddc --- /dev/null +++ b/mmv1/templates/terraform/examples/reservation_basic_beta.tf.tmpl @@ -0,0 +1,16 @@ +resource "google_compute_reservation" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "reservation_name"}}" + zone = "us-central1-a" + + specific_reservation { + count = 1 + instance_properties { + min_cpu_platform = "Intel Cascade Lake" + machine_type = "n2-standard-2" + maintenance_interval = "PERIODIC" + } + } + + enable_emergent_maintenance = true +} diff --git a/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl b/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl new file mode 100644 index 000000000000..9a1d93400eaa --- /dev/null +++ b/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl @@ -0,0 +1,50 @@ +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "foobar" { + name = "tf-test-instance-template" + machine_type 
= "g2-standard-4" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + } + + network_interface { + network = "default" + } + + scheduling { + preemptible = false + automatic_restart = true + } + + metadata = { + foo = "bar" + } + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } + labels = { + my_label = "foobar" + } +} + +resource "google_compute_reservation" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "reservation_name"}}" + zone = "us-central1-b" + + specific_reservation { + count = 2 + source_instance_template = google_compute_instance_template.foobar.self_link + } + + reservation_sharing_policy { + service_share_type = "ALLOW_ALL" + } +} diff --git a/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl b/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl new file mode 100644 index 000000000000..66424494d49c --- /dev/null +++ b/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl @@ -0,0 +1,48 @@ +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "foobar" { + name = "tf-test-instance-template" + machine_type = "n2-standard-2" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + } + + network_interface { + network = "default" + } + + scheduling { + preemptible = false + automatic_restart = true + } + + metadata = { + foo = "bar" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } + + labels = { + my_label = "foobar" + } +} + +resource "google_compute_reservation" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "reservation_name"}}" + zone = "us-central1-a" + + specific_reservation { + count = 1 + 
source_instance_template = google_compute_instance_template.foobar.self_link + } +} diff --git a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl new file mode 100644 index 000000000000..de72467ac91c --- /dev/null +++ b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl @@ -0,0 +1,55 @@ +resource "google_project" "owner_project" { + provider = google-beta + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + + +resource "google_project_service" "compute" { + provider = google-beta + project = google_project.owner_project.project_id + service = "compute.googleapis.com" + disable_on_destroy = false +} + +resource "google_project" "guest_project" { + provider = google-beta + project_id = "tf-test-2%{random_suffix}" + name = "tf-test-2%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + deletion_policy = "DELETE" +} + +resource "google_organization_policy" "shared_reservation_org_policy" { + provider = google-beta + org_id = "{{index $.TestEnvVars "org_id"}}" + constraint = "constraints/compute.sharedReservationsOwnerProjects" + list_policy { + allow { + values = ["projects/${google_project.owner_project.number}"] + } + } +} + +resource "google_compute_reservation" "{{$.PrimaryResourceId}}" { + provider = google-beta + project = google_project.owner_project.project_id + name = "{{index $.Vars "reservation_name"}}" + zone = "us-central1-a" + + specific_reservation { + count = 1 + instance_properties { + min_cpu_platform = "Intel Cascade Lake" + machine_type = "n2-standard-2" + } + } + share_settings { + share_type = "SPECIFIC_PROJECTS" + projects = [google_project.guest_project.name] + } + depends_on = [google_organization_policy.shared_reservation_org_policy,google_project_service.compute] +} \ 
No newline at end of file diff --git a/mmv1/templates/terraform/update_encoder/reservation.go.tmpl b/mmv1/templates/terraform/update_encoder/reservation.go.tmpl index 0aa16a8869a7..2037e35568e8 100644 --- a/mmv1/templates/terraform/update_encoder/reservation.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/reservation.go.tmpl @@ -15,8 +15,36 @@ maskId := "" firstProject := true urlUpdateMask := "" +{{- if ne $.TargetVersionName "ga" }} + + if d.HasChange("share_settings.0.projects") { + // Get name. + nameProp, err := expandComputeReservationName(d.Get("name"), d, config) + if err != nil { + return nil, fmt.Errorf("Invalid value for name: %s", err) + } else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { + newObj["name"] = nameProp + } + // Get zone. + zoneProp, err := expandComputeReservationZone(d.Get("zone"), d, config) + if err != nil { + return nil, fmt.Errorf("Invalid value for zone: %s", err) + } else if v, ok := d.GetOkExists("zone"); !tpgresource.IsEmptyValue(reflect.ValueOf(zoneProp)) && (ok || !reflect.DeepEqual(v, zoneProp)) { + newObj["zone"] = zoneProp + } + transformed := make(map[string]interface{}) + // Set shareType and projects. + transformed["shareType"] = "SPECIFIC_PROJECTS" + transformed["projects"] = obj["shareSettings"].(map[string]interface{})["projects"] + urlUpdateMask = "?paths=shareSettings.projects" + newObj["shareSettings"] = transformed + newObj["urlUpdateMask"] = urlUpdateMask + + } else if d.HasChange("share_settings") { +{{- else }} if d.HasChange("share_settings") { +{{- end }} // Get name. 
nameProp, err := expandComputeReservationName(d.Get("name"), d, config) if err != nil { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go index 6b72e8d4417d..e7a98248d138 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go @@ -2,7 +2,9 @@ package compute_test import ( "fmt" + "regexp" "testing" + "time" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" @@ -38,6 +40,65 @@ func TestAccComputeReservation_update(t *testing.T) { }) } +func TestAccComputeReservation_deleteAtTime(t *testing.T) { + acctest.SkipIfVcr(t) // timestamp + t.Parallel() + + reservationName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + deleteTime := time.Now().UTC().Add(24 * time.Hour) // Set delete_at_time to 24 hours in the future + deleteAtTimeRFC3339 := deleteTime.Format(time.RFC3339) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeReservationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeReservation_deleteAtTime_deleteAfterDuration(reservationName, deleteAtTimeRFC3339, deleteTime.Unix()), + ExpectError: regexp.MustCompile("Conflicting configuration arguments"), + }, + { + Config: testAccComputeReservation_deleteAtTime(reservationName, deleteAtTimeRFC3339), + }, + { + ResourceName: "google_compute_reservation.reservation", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeReservation_deleteAfterDuration(t *testing.T) { + acctest.SkipIfVcr(t) // timestamp + t.Parallel() + + reservationName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) 
+ deleteTime := time.Now().UTC().Add(24 * time.Hour) // Set delete_at_time to 24 hours in the future + deleteAtTimeRFC3339 := deleteTime.Format(time.RFC3339) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeReservationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeReservation_deleteAtTime_deleteAfterDuration(reservationName, deleteAtTimeRFC3339, deleteTime.Unix()), + ExpectError: regexp.MustCompile("Conflicting configuration arguments"), + }, + { + Config: testAccComputeReservation_deleteAfterDuration(reservationName, deleteTime.Unix()), + }, + { + ResourceName: "google_compute_reservation.reservation", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"delete_after_duration"}, + }, + }, + }) +} + func testAccComputeReservation_basic(reservationName, count string) string { return fmt.Sprintf(` resource "google_compute_reservation" "reservation" { @@ -54,3 +115,62 @@ resource "google_compute_reservation" "reservation" { } `, reservationName, count) } + +func testAccComputeReservation_deleteAtTime(reservationName, time string) string { + return fmt.Sprintf(` +resource "google_compute_reservation" "reservation" { + name = "%s" + zone = "us-central1-a" + delete_at_time = "%s" + + specific_reservation { + count = 2 + instance_properties { + min_cpu_platform = "Intel Cascade Lake" + machine_type = "n2-standard-2" + } + } +} +`, reservationName, time) +} + +func testAccComputeReservation_deleteAfterDuration(reservationName string, duration int64) string { + return fmt.Sprintf(` +resource "google_compute_reservation" "reservation" { + name = "%s" + zone = "us-central1-a" + delete_after_duration { + seconds = %d + } + + specific_reservation { + count = 2 + instance_properties { + min_cpu_platform = "Intel Cascade Lake" + machine_type = "n2-standard-2" + } + } +} +`, 
reservationName, duration) +} + +func testAccComputeReservation_deleteAtTime_deleteAfterDuration(reservationName, time string, duration int64) string { + return fmt.Sprintf(` +resource "google_compute_reservation" "reservation" { + name = "%s" + zone = "us-central1-a" + delete_at_time = "%s" + delete_after_duration { + seconds = %d + } + + specific_reservation { + count = 2 + instance_properties { + min_cpu_platform = "Intel Cascade Lake" + machine_type = "n2-standard-2" + } + } +} +`, reservationName, time, duration) +} From a18d63d9fdb51d6e80275ea5d288993a1189d574 Mon Sep 17 00:00:00 2001 From: Jahnavi Malhotra <90177675+jahnavi2k@users.noreply.github.com> Date: Thu, 10 Jul 2025 08:09:23 +0530 Subject: [PATCH 514/884] alloydb: Revert incorrect removal of machine_type from machine_config (it is GA) (#14470) --- mmv1/products/alloydb/Instance.yaml | 1 - ...nstance_test.go.tmpl => resource_alloydb_instance_test.go} | 4 ---- 2 files changed, 5 deletions(-) rename mmv1/third_party/terraform/services/alloydb/{resource_alloydb_instance_test.go.tmpl => resource_alloydb_instance_test.go} (99%) diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index b791d5aeb06f..069500936ff0 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -312,7 +312,6 @@ properties: E.g. "n2-highmem-4", "n2-highmem-8", "c4a-highmem-4-lssd". `cpu_count` must match the number of vCPUs in the machine type. 
default_from_api: true - min_version: 'beta' - name: 'clientConnectionConfig' type: NestedObject description: | diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go.tmpl b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go similarity index 99% rename from mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go.tmpl rename to mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index 89dcdb83bb0f..72cec88fa08c 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -81,9 +81,7 @@ resource "google_alloydb_instance" "default" { machine_config { cpu_count = 4 - {{- if ne $.TargetVersionName "ga" }} machine_type = "n2-highmem-4" - {{ end }} } labels = { @@ -956,9 +954,7 @@ resource "google_alloydb_instance" "default" { instance_type = "PRIMARY" machine_config { cpu_count = 2 - {{- if ne $.TargetVersionName "ga" }} machine_type = "n2-highmem-2" - {{ end }} } psc_instance_config { allowed_consumer_projects = ["${data.google_project.project.number}"] From e0ec099cc6af8fec19043dcc3dcd7b986976d780 Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 9 Jul 2025 20:41:55 -0700 Subject: [PATCH 515/884] Remove noisy grpc logs and compress the ones we want into the test logs (#14488) --- mmv1/third_party/terraform/transport/config.go.tmpl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index 5d3a60bee1ac..b45ecfb7bbbe 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -564,10 +564,9 @@ func (c *Config) LoadAndValidate(ctx context.Context) error { TimestampFormat: "2006/01/02 15:04:05", LogFormat: "%time% [%lvl%] %msg% \n", }) + 
logger.SetOutput(log.Writer()) alwaysLoggingDeciderClient := func(ctx context.Context, fullMethodName string) bool { return true } - grpc_logrus.ReplaceGrpcLogger(logrus.NewEntry(logger)) - c.gRPCLoggingOptions = append( c.gRPCLoggingOptions, option.WithGRPCDialOption(grpc.WithUnaryInterceptor( grpc_logrus.PayloadUnaryClientInterceptor(logrus.NewEntry(logger), alwaysLoggingDeciderClient))), From ba09ec30b0fa4b8ab6d32816158df3fa0b147f98 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 10 Jul 2025 08:47:06 -0700 Subject: [PATCH 516/884] tgc-revival: rework comparison of terraform configs (#14490) --- .../tgc_next/test/assert_test_files.go | 77 +++++------ mmv1/third_party/tgc_next/test/hcl.go | 120 ++++++++++++++++++ mmv1/third_party/tgc_next/test/setup.go | 105 ++++----------- 3 files changed, 186 insertions(+), 116 deletions(-) create mode 100644 mmv1/third_party/tgc_next/test/hcl.go diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 2510f3e56897..a84ae68e0a2b 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -6,6 +6,8 @@ import ( "log" "os" "path/filepath" + "sort" + "strconv" "strings" "sync" "testing" @@ -137,13 +139,13 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData return fmt.Errorf("missing hcl after cai2hcl conversion for resource %s", testData.ResourceType) } - ignoredFieldMap := make(map[string]bool, 0) + ignoredFieldSet := make(map[string]struct{}, 0) for _, f := range ignoredFields { - ignoredFieldMap[f] = true + ignoredFieldSet[f] = struct{}{} } parsedExportConfig := exportResources[0].Attributes - missingKeys := compareHCLFields(testData.ParsedRawConfig, parsedExportConfig, "", ignoredFieldMap) + missingKeys := compareHCLFields(testData.ParsedRawConfig, parsedExportConfig, ignoredFieldSet) if len(missingKeys) > 0 { return fmt.Errorf("missing fields in address %s 
after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) } @@ -243,49 +245,52 @@ func getAncestryCache(assets []caiasset.Asset) map[string]string { return ancestryCache } -// Compares HCL and finds all of the keys in map1 are in map2 -func compareHCLFields(map1, map2 map[string]interface{}, path string, ignoredFields map[string]bool) []string { +// Compares HCL and finds all of the keys in map1 that are not in map2 +func compareHCLFields(map1, map2, ignoredFields map[string]struct{}) []string { var missingKeys []string - for key, value1 := range map1 { - if value1 == nil { + for key := range map1 { + if isIgnored(key, ignoredFields) { continue } - currentPath := path + "." + key - if path == "" { - currentPath = key + if _, ok := map2[key]; !ok { + missingKeys = append(missingKeys, key) } + } + sort.Strings(missingKeys) + return missingKeys +} - if ignoredFields[currentPath] { - continue - } +// Returns true if the given key should be ignored according to the given set of ignored fields. +func isIgnored(key string, ignoredFields map[string]struct{}) bool { + // Check for exact match first. + if _, ignored := ignoredFields[key]; ignored { + return true + } - value2, ok := map2[key] - if !ok || value2 == nil { - missingKeys = append(missingKeys, currentPath) - continue + // Check for partial matches. + parts := strings.Split(key, ".") + if len(parts) < 2 { + return false + } + var nonIntegerParts []string + for _, part := range parts { + if _, err := strconv.Atoi(part); err != nil { + nonIntegerParts = append(nonIntegerParts, part) } - - switch v1 := value1.(type) { - case map[string]interface{}: - v2, _ := value2.(map[string]interface{}) - missingKeys = append(missingKeys, compareHCLFields(v1, v2, currentPath, ignoredFields)...) 
- case []interface{}: - v2, _ := value2.([]interface{}) - - for i := 0; i < len(v1); i++ { - nestedMap1, ok1 := v1[i].(map[string]interface{}) - nestedMap2, ok2 := v2[i].(map[string]interface{}) - if ok1 && ok2 { - keys := compareHCLFields(nestedMap1, nestedMap2, fmt.Sprintf("%s[%d]", currentPath, i), ignoredFields) - missingKeys = append(missingKeys, keys...) - } - } - default: + } + var partialKey string + for _, part := range nonIntegerParts { + if partialKey == "" { + partialKey = part + } else { + partialKey += "." + part + } + if _, ignored := ignoredFields[partialKey]; ignored { + return true } } - - return missingKeys + return false } // Converts a tfplan to CAI asset, and then converts the CAI asset into HCL diff --git a/mmv1/third_party/tgc_next/test/hcl.go b/mmv1/third_party/tgc_next/test/hcl.go new file mode 100644 index 000000000000..5cfbdef07499 --- /dev/null +++ b/mmv1/third_party/tgc_next/test/hcl.go @@ -0,0 +1,120 @@ +package test + +import ( + "fmt" + "log" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/hashicorp/hcl/v2/hclsyntax" +) + +func parseHCLBytes(src []byte, filePath string) (map[string]map[string]struct{}, error) { + parser := hclparse.NewParser() + hclFile, diags := parser.ParseHCL(src, filePath) + if diags.HasErrors() { + return nil, fmt.Errorf("parse HCL: %w", diags) + } + + if hclFile == nil { + return nil, fmt.Errorf("parsed HCL file %s is nil cannot proceed", filePath) + } + + parsed := make(map[string]map[string]struct{}) + + for _, block := range hclFile.Body.(*hclsyntax.Body).Blocks { + if block.Type == "resource" { + if len(block.Labels) != 2 { + log.Printf("Skipping address block with unexpected number of labels: %v", block.Labels) + continue + } + + resType := block.Labels[0] + resName := block.Labels[1] + addr := fmt.Sprintf("%s.%s", resType, resName) + attrs, procDiags := parseHCLBody(block.Body) + + if procDiags.HasErrors() { + log.Printf("Diagnostics while processing address %s.%s 
body in %s:", resType, resName, filePath) + for _, diag := range procDiags { + log.Printf(" - %s (Severity)", diag.Error()) + } + } + + flattenedAttrs := make(map[string]struct{}) + flatten(attrs, "", flattenedAttrs) + parsed[addr] = flattenedAttrs + } + } + return parsed, nil +} + +// parseHCLBody recursively parses attributes and nested blocks from an HCL body. +func parseHCLBody(body hcl.Body) ( + attributes map[string]any, + diags hcl.Diagnostics, +) { + attributes = make(map[string]any) + var allDiags hcl.Diagnostics + + if syntaxBody, ok := body.(*hclsyntax.Body); ok { + for _, attr := range syntaxBody.Attributes { + insert(struct{}{}, attr.Name, attributes) + } + + for _, block := range syntaxBody.Blocks { + nestedAttr, diags := parseHCLBody(block.Body) + if diags.HasErrors() { + allDiags = append(allDiags, diags...) + } + + insert(nestedAttr, block.Type, attributes) + } + } else { + allDiags = append(allDiags, &hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: "Body type assertion to *hclsyntax.Body failed", + Detail: fmt.Sprintf("Cannot directly parse attributes for body of type %T. Attribute parsing may be incomplete.", body), + }) + } + + return attributes, allDiags +} + +func insert(data any, key string, parent map[string]any) { + if existing, ok := parent[key]; ok { + if existingSlice, ok := existing.([]any); ok { + existingSlice = append(existingSlice, data) + } else { + // Until we see a second instance of a repeated block or attribute, it will look non-repeated. + parent[key] = []any{existing, data} + } + } else { + parent[key] = data + } +} + +func flatten(data interface{}, prefix string, result map[string]struct{}) { + switch v := data.(type) { + case map[string]interface{}: + for key, value := range v { + newPrefix := key + if prefix != "" { + newPrefix = prefix + "." 
+ key + } + flatten(value, newPrefix, result) + } + case []interface{}: + if len(v) == 0 && prefix != "" { + result[prefix] = struct{}{} + } + for i, value := range v { + newPrefix := fmt.Sprintf("%s.%d", prefix, i) + flatten(value, newPrefix, result) + } + default: + if prefix != "" { + result[prefix] = struct{}{} + } + } +} diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index e6050de5ef3c..3b4032d0cc66 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -7,13 +7,11 @@ import ( "io" "log" "os" + "strings" "time" "cloud.google.com/go/storage" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/hashicorp/hcl/v2" - "github.com/hashicorp/hcl/v2/hclparse" - "github.com/hashicorp/hcl/v2/hclsyntax" ) type ResourceMetadata struct { @@ -37,10 +35,16 @@ type TgcMetadataPayload struct { } type ResourceTestData struct { - ParsedRawConfig map[string]interface{} `json:"parsed_raw_config"` + ParsedRawConfig map[string]struct{} `json:"parsed_raw_config"` ResourceMetadata `json:"resource_metadata"` } +type Resource struct { + Type string `json:"type"` + Name string `json:"name"` + Attributes map[string]struct{} `json:"attributes"` +} + var ( TestsMetadata = make(map[string]TgcMetadataPayload) setupDone = false @@ -86,8 +90,9 @@ func ReadTestsDataFromGcs() (map[string]TgcMetadataPayload, error) { } } - // Uncomment this line to debug issues locally - // writeJSONFile("../../tests_metadata.json", TestsMetadata) + if os.Getenv("WRITE_FILES") != "" { + writeJSONFile("../../tests_metadata.json", TestsMetadata) + } setupDone = true } return TestsMetadata, nil @@ -140,44 +145,6 @@ func prepareTestData(testName string) (map[string]ResourceTestData, string, erro return resourceTestData, testMetadata.PrimaryResource, nil } -type Resource struct { - Type string `json:"type"` - Name string `json:"name"` - Attributes map[string]interface{} `json:"attributes"` 
-} - -// parseHCLBody recursively parses attributes and nested blocks from an HCL body. -func parseHCLBody(body hcl.Body, filePath string) ( - attributes map[string]interface{}, - diags hcl.Diagnostics, -) { - attributes = make(map[string]interface{}) - var allDiags hcl.Diagnostics - - if syntaxBody, ok := body.(*hclsyntax.Body); ok { - for _, attr := range syntaxBody.Attributes { - attributes[attr.Name] = true - } - - for _, block := range syntaxBody.Blocks { - nestedAttr, diags := parseHCLBody(block.Body, filePath) - if diags.HasErrors() { - allDiags = append(allDiags, diags...) - } - - attributes[block.Type] = nestedAttr - } - } else { - allDiags = append(allDiags, &hcl.Diagnostic{ - Severity: hcl.DiagWarning, - Summary: "Body type assertion to *hclsyntax.Body failed", - Detail: fmt.Sprintf("Cannot directly parse attributes for body of type %T. Attribute parsing may be incomplete.", body), - }) - } - - return attributes, allDiags -} - // Parses a Terraform configuation file written with HCL func parseResourceConfigs(filePath string) ([]Resource, error) { src, err := os.ReadFile(filePath) @@ -185,51 +152,29 @@ func parseResourceConfigs(filePath string) ([]Resource, error) { return nil, fmt.Errorf("failed to read file %s: %s", filePath, err) } - parser := hclparse.NewParser() - hclFile, diags := parser.ParseHCL(src, filePath) - if diags.HasErrors() { - return nil, fmt.Errorf("parse HCL: %w", diags) - } - - if hclFile == nil { - return nil, fmt.Errorf("parsed HCL file %s is nil cannot proceed", filePath) + topLevel, err := parseHCLBytes(src, filePath) + if err != nil { + return nil, fmt.Errorf("failed to parse hcl bytes: %s", err) } var allParsedResources []Resource - - for _, block := range hclFile.Body.(*hclsyntax.Body).Blocks { - if block.Type == "resource" { - if len(block.Labels) != 2 { - log.Printf("Skipping address block with unexpected number of labels: %v", block.Labels) - continue - } - - resType := block.Labels[0] - resName := block.Labels[1] - attrs, 
procDiags := parseHCLBody(block.Body, filePath) - - if procDiags.HasErrors() { - log.Printf("Diagnostics while processing address %s.%s body in %s:", resType, resName, filePath) - for _, diag := range procDiags { - log.Printf(" - %s (Severity)", diag.Error()) - } - } - - gr := Resource{ - Type: resType, - Name: resName, - Attributes: attrs, - } - allParsedResources = append(allParsedResources, gr) + for addr, attrs := range topLevel { + addrParts := strings.Split(addr, ".") + if len(addrParts) != 2 { + return nil, fmt.Errorf("invalid resource address %s", addr) } + allParsedResources = append(allParsedResources, Resource{ + Type: addrParts[0], + Name: addrParts[1], + Attributes: attrs, + }) } - return allParsedResources, nil } // Converts the slice to map with resource address as the key -func convertToConfigMap(resources []Resource) map[string]map[string]interface{} { - configMap := make(map[string]map[string]interface{}, 0) +func convertToConfigMap(resources []Resource) map[string]map[string]struct{} { + configMap := make(map[string]map[string]struct{}, 0) for _, r := range resources { addr := fmt.Sprintf("%s.%s", r.Type, r.Name) From 22067ac14fd7f753b355676010e2eed1dd8079d8 Mon Sep 17 00:00:00 2001 From: Abhijeet Jha Date: Thu, 10 Jul 2025 17:44:19 +0000 Subject: [PATCH 517/884] Add application awareness on interconnect (#14321) --- mmv1/products/compute/Interconnect.yaml | 89 ++++++++++++ ...connect_application_awareness_test.go.tmpl | 131 ++++++++++++++++++ 2 files changed, 220 insertions(+) create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index 019498527e14..0943605e9a7d 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -434,3 +434,92 @@ properties: is_set: true item_type: type: String + - name: 'aaiEnabled' + type: Boolean + 
description: | + Enable or disable the Application Aware Interconnect(AAI) feature on this interconnect. + min_version: beta + - name: 'applicationAwareInterconnect' + type: NestedObject + description: | + Configuration that enables Media Access Control security (MACsec) on the Cloud + Interconnect connection between Google and your on-premises router. + min_version: beta + properties: + - name: 'profileDescription' + type: String + description: | + A description for the AAI profile on this interconnect. + min_version: beta + - name: 'strictPriorityPolicy' + type: NestedObject + allow_empty_object: true + description: | + Specify configuration for StrictPriorityPolicy. + properties: [] + min_version: beta + - name: 'bandwidthPercentagePolicy' + type: NestedObject + description: | + Bandwidth Percentage policy allows you to have granular control over how your Interconnect + bandwidth is utilized among your workloads mapping to different traffic classes. + min_version: beta + properties: + - name: bandwidthPercentage + type: Array + description: | + Specify bandwidth percentages for various traffic classes for queuing + type Bandwidth Percent. + api_name: bandwidthPercentages + min_version: beta + item_type: + type: NestedObject + properties: + - name: trafficClass + type: Enum + description: | + Enum representing the various traffic classes offered by AAI. + default_value: "TC_UNSPECIFIED" + enum_values: + - 'TC_UNSPECIFIED' + - 'TC1' + - 'TC2' + - 'TC3' + - 'TC4' + - 'TC5' + - 'TC6' + min_version: beta + - name: percentage + type: Integer + description: | + Bandwidth percentage for a specific traffic class. 
+ min_version: beta + - name: shapeAveragePercentage + type: Array + description: | + Optional field to specify a list of shape average percentages to be + applied in conjunction with StrictPriorityPolicy or BandwidthPercentagePolicy + min_version: beta + api_name: shapeAveragePercentages + item_type: + type: NestedObject + properties: + - name: trafficClass + type: Enum + description: | + Enum representing the various traffic classes offered by AAI. + default_value: "TC_UNSPECIFIED" + enum_values: + - 'TC_UNSPECIFIED' + - 'TC1' + - 'TC2' + - 'TC3' + - 'TC4' + - 'TC5' + - 'TC6' + min_version: beta + - name: percentage + type: Integer + description: | + Bandwidth percentage for a specific traffic class. + min_version: beta diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl new file mode 100644 index 000000000000..fa2406ef7f8f --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl @@ -0,0 +1,131 @@ +{{ if ne $.TargetVersionName `ga` -}} +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccComputeInterconnect_computeInterconnectBasicTestExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInterconnectDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInterconnect_computeInterconnect_create(context), + }, + { + ResourceName: "google_compute_interconnect.example-interconnect", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, + }, + { + Config: testAccComputeInterconnect_computeInterconnect_enable_aai(context), + }, + { + ResourceName: "google_compute_interconnect.example-interconnect", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, + }, + }, + }) +} + +func testAccComputeInterconnect_computeInterconnect_create(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_compute_interconnect" "example-interconnect" { + name = "tf-test-example-interconnect%{random_suffix}" + customer_name = "internal_customer" # Special customer only available for Google testing. + interconnect_type = "DEDICATED" + link_type = "LINK_TYPE_ETHERNET_100G_LR" + location = "https://www.googleapis.com/compute/v1/projects/${data.google_project.project.name}/global/interconnectLocations/z2z-us-east4-zone1-pniada-a" # Special location only available for Google testing. + requested_link_count = 1 + admin_enabled = true + description = "example description" + macsec_enabled = false + noc_contact_email = "user@example.com" + labels = { + mykey = "myvalue" + } +} +`, context) +} + +func testAccComputeInterconnect_computeInterconnect_enable_aai(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_compute_interconnect" "example-interconnect" { + name = "tf-test-example-interconnect%{random_suffix}" + customer_name = "internal_customer" # Special customer only available for Google testing. + interconnect_type = "DEDICATED" + link_type = "LINK_TYPE_ETHERNET_100G_LR" + location = "https://www.googleapis.com/compute/v1/projects/${data.google_project.project.name}/global/interconnectLocations/z2z-us-east4-zone1-pniada-a" # Special location only available for Google testing. 
+ requested_link_count = 1 + admin_enabled = true + description = "example description" + macsec_enabled = false + noc_contact_email = "user@example.com" + labels = { + mykey = "myvalue" + } + aai_enabled = true + application_aware_interconnect { + profile_description = "application awareness config with BandwidthPercentage policy." + bandwidth_percentage_policy { + bandwidth_percentage { + traffic_class = "TC1" + percentage = 20 + } + bandwidth_percentage { + traffic_class = "TC2" + percentage = 20 + } + bandwidth_percentage { + traffic_class = "TC3" + percentage = 20 + } + bandwidth_percentage { + traffic_class = "TC4" + percentage = 20 + } + bandwidth_percentage { + traffic_class = "TC5" + percentage = 10 + } + bandwidth_percentage { + traffic_class = "TC6" + percentage = 10 + } + } + shape_average_percentage { + traffic_class = "TC1" + percentage = 30 + } + shape_average_percentage { + traffic_class = "TC2" + percentage = 25 + } + shape_average_percentage { + traffic_class = "TC3" + percentage = 25 + } + } +} +`, context) +} +{{- end }} \ No newline at end of file From 20efa818c0a79bf7054877fe2a0b4da47a5aec62 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Thu, 10 Jul 2025 17:45:08 +0000 Subject: [PATCH 518/884] feat: (storage) add flag to force empty content type (#14320) --- .../storage/resource_storage_bucket_object.go | 25 ++++++++++--- .../resource_storage_bucket_object_test.go | 37 +++++++++++++++++++ .../r/storage_bucket_object.html.markdown | 2 + 3 files changed, 58 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go index 53de5580f67c..648485f3fab6 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go @@ -83,11 +83,20 @@ func ResourceStorageBucketObject() *schema.Resource { }, 
"content_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - Description: `Content-Type of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".`, + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + ConflictsWith: []string{"force_empty_content_type"}, + Description: `Content-Type of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".`, + }, + + "force_empty_content_type": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + ConflictsWith: []string{"content_type"}, + Description: `Flag to set empty Content-Type.`, }, "content": { @@ -379,7 +388,11 @@ func resourceStorageBucketObjectCreate(d *schema.ResourceData, meta interface{}) insertCall := objectsService.Insert(bucket, object) insertCall.Name(name) - insertCall.Media(media) + if v, ok := d.GetOk("force_empty_content_type"); ok && v.(bool) { + insertCall.Media(media, googleapi.ContentType("")) + } else { + insertCall.Media(media) + } // This is done late as we need to add headers to enable customer encryption if v, ok := d.GetOk("customer_encryption"); ok { diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go index dadf2976f75d..8fb5da2a36ed 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go @@ -129,6 +129,26 @@ func TestAccStorageObject_content(t *testing.T) { "google_storage_bucket_object.object", "storage_class", "STANDARD"), ), }, + { + Config: testGoogleStorageBucketsObjectEmptyContentType(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObject(t, bucketName, objectName, dataMd5), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_type", 
""), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "storage_class", "STANDARD"), + ), + }, + { + Config: testGoogleStorageBucketsObjectContent(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObject(t, bucketName, objectName, dataMd5), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_type", "text/plain; charset=utf-8"), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "storage_class", "STANDARD"), + ), + }, }, }) } @@ -708,6 +728,23 @@ resource "google_storage_bucket_object" "object" { `, bucketName, objectName, content) } +func testGoogleStorageBucketsObjectEmptyContentType(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = google_storage_bucket.bucket.name + content = "%s" + force_empty_content_type = true +} +`, bucketName, objectName, content) +} + func testGoogleStorageBucketsFolder(bucketName, folderName string) string { return fmt.Sprintf(` resource "google_storage_bucket" "bucket" { diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 3416c6551ec4..2fe778b50363 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -89,6 +89,8 @@ One of the following is required: * `source_md5hash` - (Optional) User-provided md5hash to trigger replacement of object in storage bucket, Must be Base 64 MD5 hash of the object data. The usual way to set this is filemd5("file.zip"), where "file.zip" is the local filename +* `force_empty_content_type` - (Optional) When set to true, it ensure the object's Content-Type is empty. 
+ --- The `customer_encryption` block supports: From b64304107afb35c90338aaae0451cdbf1dc33946 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 10 Jul 2025 11:19:57 -0700 Subject: [PATCH 519/884] tgc-revival: skip TestAccComputeBackendBucket_backendBucketGlobalIlbExample (#14496) --- mmv1/api/resource/examples.go | 3 +++ mmv1/products/compute/BackendBucket.yaml | 4 ++++ mmv1/templates/tgc_next/test/test_file.go.tmpl | 3 +++ 3 files changed, 10 insertions(+) diff --git a/mmv1/api/resource/examples.go b/mmv1/api/resource/examples.go index 344985a146fc..2040a19e535f 100644 --- a/mmv1/api/resource/examples.go +++ b/mmv1/api/resource/examples.go @@ -189,6 +189,9 @@ type Examples struct { // Example: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec']. // "RESOURCE" means that the property is for resource data in CAI asset. TGCTestIgnoreInAsset []string `yaml:"tgc_test_ignore_in_asset,omitempty"` + // The reason to skip a test. For example, a link to a ticket explaining the issue that needs to be resolved before + // unskipping the test. If this is not empty, the test will be skipped. + TGCSkipTest string `yaml:"tgc_skip_test,omitempty"` } // Set default value for fields diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index a0d701088e96..4f40bb4dff4e 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -115,6 +115,9 @@ examples: backend_bucket_name: 'global-ilb-backend-bucket' bucket_name: 'global-ilb-bucket' exclude_docs: true + tgc_skip_test: 'Skip the test temporarily, as it takes time to fix it.' + tgc_test_ignore_extra: + - 'project' parameters: properties: - name: 'bucketName' @@ -289,6 +292,7 @@ properties: validation: regex: '^(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)$' - name: 'loadBalancingScheme' + is_missing_in_cai: true type: Enum description: | The value can only be INTERNAL_MANAGED for cross-region internal layer 7 load balancer. 
diff --git a/mmv1/templates/tgc_next/test/test_file.go.tmpl b/mmv1/templates/tgc_next/test/test_file.go.tmpl index f6b86c09be08..43d14d982ea1 100644 --- a/mmv1/templates/tgc_next/test/test_file.go.tmpl +++ b/mmv1/templates/tgc_next/test/test_file.go.tmpl @@ -22,6 +22,9 @@ import ( {{ range $e := $.TestExamples }} func TestAcc{{ $e.TestSlug $.ProductMetadata.Name $.Name }}(t *testing.T) { + {{- if $e.TGCSkipTest }} + t.Skip("{{$e.TGCSkipTest}}") + {{- end }} t.Parallel() test.BidirectionalConversion( From 19ccb8f3c41fa1d6b081d367d10a8c38e952c343 Mon Sep 17 00:00:00 2001 From: jingqizz Date: Thu, 10 Jul 2025 20:22:52 +0000 Subject: [PATCH 520/884] Firestore Tags R2401 terraform support (#14489) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- mmv1/products/firestore/Database.yaml | 27 ++++++++++ .../firestore_database_with_tags.tf.tmpl | 11 ++++ .../resource_firestore_database_test.go | 54 +++++++++++++++++++ 3 files changed, 92 insertions(+) create mode 100644 mmv1/templates/terraform/examples/firestore_database_with_tags.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go diff --git a/mmv1/products/firestore/Database.yaml b/mmv1/products/firestore/Database.yaml index 64ef2120724e..d20ac330081e 100644 --- a/mmv1/products/firestore/Database.yaml +++ b/mmv1/products/firestore/Database.yaml @@ -72,6 +72,22 @@ examples: - 'project' - 'etag' - 'deletion_policy' + - name: 'firestore_database_with_tags' + primary_resource_id: 'database' + vars: + database_id: 'database-with-tags-id' + delete_protection_state: 'DELETE_PROTECTION_ENABLED' + tag_key_id: 'keyname' + tag_value_id: 'valuename' + test_env_vars: + project_id: 'PROJECT_NAME' + test_vars_overrides: + 'delete_protection_state': '"DELETE_PROTECTION_DISABLED"' + ignore_read_extra: + - 'project' + - 'etag' + - 'deletion_policy' + exclude_test: true - name: 'firestore_cmek_database' primary_resource_id: 'database' vars: @@ -315,3 +331,14 @@ 
properties: output: true item_type: type: String + - name: 'tags' + type: KeyValuePairs + description: | + Input only. A map of resource manager tags. Resource manager tag keys + and values have the same definition as resource manager tags. + Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. + The field is ignored when empty. The field is immutable and causes + resource replacement when mutated. To apply tags to an existing resource, see + the `google_tags_tag_value` resource. + immutable: true + ignore_read: true diff --git a/mmv1/templates/terraform/examples/firestore_database_with_tags.tf.tmpl b/mmv1/templates/terraform/examples/firestore_database_with_tags.tf.tmpl new file mode 100644 index 000000000000..11b65d9acf7e --- /dev/null +++ b/mmv1/templates/terraform/examples/firestore_database_with_tags.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_firestore_database" "{{$.PrimaryResourceId}}" { + project = "{{index $.TestEnvVars "project_id"}}" + name = "{{index $.Vars "database_id"}}" + location_id = "nam5" + type = "FIRESTORE_NATIVE" + delete_protection_state = "{{index $.Vars "delete_protection_state"}}" + deletion_policy = "DELETE" + tags = { + "{{index $.Vars "tag_key_id"}}" = "{{index $.Vars "tag_value_id"}}" + } +} diff --git a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go new file mode 100644 index 000000000000..1a17e8ba1906 --- /dev/null +++ b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go @@ -0,0 +1,54 @@ +package firestore_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirestoreDatabase_tags(t *testing.T) { + t.Parallel() + + // Bootstrap shared tag key and value + 
tagKey := acctest.BootstrapSharedTestProjectTagKey(t, "firestore-databases-tagkey", map[string]interface{}{}) + context := map[string]interface{}{ + "pid": envvar.GetTestProjectFromEnv(), + "tagKey": tagKey, + "tagValue": acctest.BootstrapSharedTestProjectTagValue(t, "firestore-databases-tagvalue", tagKey), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckFirestoreDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirestoreDatabaseTags(context), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project", "etag", "deletion_policy", "tags"}, + }, + }, + }) +} + +func testAccFirestoreDatabaseTags(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_firestore_database" "database" { + name = "tf-test-database-%{random_suffix}" + location_id = "nam5" + type = "FIRESTORE_NATIVE" + delete_protection_state = "DELETE_PROTECTION_DISABLED" + deletion_policy = "DELETE" + tags = { + "%{pid}/%{tagKey}" = "%{tagValue}" + } + } + `, context) +} From 49832642228f99ea4d4139a0aac679887eb7f855 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 10 Jul 2025 22:46:22 +0200 Subject: [PATCH 521/884] compute: data source for `google_compute_network_attachment` (#14396) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...ource_google_compute_network_attachment.go | 70 ++++++ ..._google_compute_network_attachment_test.go | 202 ++++++++++++++++++ .../compute_network_attachment.html.markdown | 38 ++++ 4 files changed, 311 insertions(+) create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go create mode 100644 
mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index a288f6ff57aa..a63f4b38e2c6 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -97,6 +97,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_compute_machine_types": compute.DataSourceGoogleComputeMachineTypes(), "google_compute_network": compute.DataSourceGoogleComputeNetwork(), "google_compute_networks": compute.DataSourceGoogleComputeNetworks(), + "google_compute_network_attachment": compute.DataSourceGoogleComputeNetworkAttachment(), "google_compute_network_endpoint_group": compute.DataSourceGoogleComputeNetworkEndpointGroup(), "google_compute_network_peering": compute.DataSourceComputeNetworkPeering(), "google_compute_node_types": compute.DataSourceGoogleComputeNodeTypes(), diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go new file mode 100644 index 000000000000..94043e6780e8 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go @@ -0,0 +1,70 @@ +package compute + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleComputeNetworkAttachment() *schema.Resource { + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceComputeNetworkAttachment().Schema) + + 
tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "region") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceComputeNetworkAttachmentRead, + Schema: dsSchema, + } +} + +func dataSourceComputeNetworkAttachmentRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project: %s", err) + } + + name := d.Get("name").(string) + region := d.Get("region").(string) + + id := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", project, region, name) + d.SetId(id) + + err = resourceComputeNetworkAttachmentRead(d, meta) + if err != nil { + return fmt.Errorf("Error reading Network Attachment %q: %s", id, err) + } + + // normalize fields to ensure they are in the correct format + // the API returns a full URL here for fields such as `network` and `region` and not just the resource name + if v, ok := d.Get("network").(string); ok && v != "" { + d.Set("network", tpgresource.GetResourceNameFromSelfLink(v)) + } + + if v, ok := d.Get("region").(string); ok && v != "" { + d.Set("region", tpgresource.GetResourceNameFromSelfLink(v)) + } + + if v, ok := d.Get("subnetworks").([]interface{}); ok && len(v) > 0 { + var subnetworks []string + for _, s := range v { + subnetworks = append(subnetworks, tpgresource.GetResourceNameFromSelfLink(s.(string))) + } + if err := d.Set("subnetworks", subnetworks); err != nil { + return fmt.Errorf("Error setting subnetworks: %s", err) + } + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go new file mode 100644 index 
000000000000..9b584cd54ed6 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go @@ -0,0 +1,202 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 +package compute_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataSourceComputeNetworkAttachment_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkAttachment_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "name", fmt.Sprintf("tf-test-basic-network-attachment-%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "region", "us-central1"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "description", "my basic network attachment"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "connection_preference", "ACCEPT_AUTOMATIC"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "subnetworks.#", "1"), + ), + }, + }, + }) +} + +func TestAccDataSourceComputeNetworkAttachment_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceComputeNetworkAttachment_full(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "name", fmt.Sprintf("tf-test-basic-network-attachment-%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "region", "us-central1"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "description", "basic network attachment description"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "connection_preference", "ACCEPT_MANUAL"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "subnetworks.#", "1"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "subnetworks.0", fmt.Sprintf("tf-test-basic-subnetwork1-%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_accept_lists.#", "2"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_accept_lists.0", fmt.Sprintf("tf-test-prj-accept1-%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_accept_lists.1", fmt.Sprintf("tf-test-prj-accept2-%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_reject_lists.#", "2"), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_reject_lists.0", fmt.Sprintf("tf-test-prj-reject1-%s", context["random_suffix"])), + resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_reject_lists.1", fmt.Sprintf("tf-test-prj-reject2-%s", context["random_suffix"])), + ), + }, + }, + }) +} + +func 
testAccComputeNetworkAttachment_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + name = "tf-test-basic-network%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "tf-test-basic-subnetwork%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_network_attachment" "default" { + name = "tf-test-basic-network-attachment-%{random_suffix}" + region = "us-central1" + description = "my basic network attachment" + + subnetworks = [google_compute_subnetwork.default.id] + connection_preference = "ACCEPT_AUTOMATIC" +} + +data "google_compute_network_attachment" "default" { + name = google_compute_network_attachment.default.name + region = google_compute_network_attachment.default.region + project = google_compute_network_attachment.default.project + depends_on = [ + google_compute_network.default, + google_compute_subnetwork.default, + google_compute_network_attachment.default, + ] +} +`, context) +} + +func testAccDataSourceComputeNetworkAttachment_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network_attachment" "default" { + name = "tf-test-basic-network-attachment-%{random_suffix}" + region = "us-central1" + description = "basic network attachment description" + connection_preference = "ACCEPT_MANUAL" + + subnetworks = [ + google_compute_subnetwork.net1.self_link + ] + + producer_accept_lists = [ + google_project.accepted_producer_project1.project_id, + google_project.accepted_producer_project2.project_id + ] + + producer_reject_lists = [ + google_project.rejected_producer_project1.project_id, + google_project.rejected_producer_project2.project_id + ] +} + +resource "google_compute_network" "default" { + name = "tf-test-basic-network-%{random_suffix}" + auto_create_subnetworks = false +} + 
+resource "google_compute_subnetwork" "net1" { + name = "tf-test-basic-subnetwork1-%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_subnetwork" "net2" { + name = "tf-test-basic-subnetwork2-%{random_suffix}" + region = "us-central1" + + network = google_compute_network.default.id + ip_cidr_range = "10.1.0.0/16" +} + +resource "google_project" "rejected_producer_project1" { + project_id = "tf-test-prj-reject1-%{random_suffix}" + name = "tf-test-prj-reject1-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "rejected_producer_project2" { + project_id = "tf-test-prj-reject2-%{random_suffix}" + name = "tf-test-prj-reject2-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "accepted_producer_project1" { + project_id = "tf-test-prj-accept1-%{random_suffix}" + name = "tf-test-prj-accept1-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "google_project" "accepted_producer_project2" { + project_id = "tf-test-prj-accept2-%{random_suffix}" + name = "tf-test-prj-accept2-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +data "google_compute_network_attachment" "default" { + name = google_compute_network_attachment.default.name + region = google_compute_network_attachment.default.region + project = google_compute_network_attachment.default.project + depends_on = [ + google_compute_network_attachment.default, + google_compute_network.default, + google_compute_subnetwork.net1, + google_compute_subnetwork.net2, + google_project.accepted_producer_project1, + google_project.accepted_producer_project2, + google_project.rejected_producer_project1, + 
google_project.rejected_producer_project2,
+  ]
+}
+`, context)
+}
diff --git a/mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown
new file mode 100644
index 000000000000..61dd7aafed0a
--- /dev/null
+++ b/mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown
@@ -0,0 +1,38 @@
+---
+subcategory: "Compute Engine"
+description: |-
+  A data source to retrieve a network attachment
+---
+
+# `google_compute_network_attachment`
+
+Get a specific network attachment within a region. For more information see
+the [official documentation](https://cloud.google.com/vpc/docs/about-network-attachments)
+and [API](https://cloud.google.com/compute/docs/reference/rest/v1/networkAttachments/get).
+
+## Example Usage
+
+```hcl
+data "google_compute_network_attachment" "default" {
+  project = "my-project"
+  name    = "my-network-attachment"
+  region  = "europe-west1"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name of the network attachment to retrieve.
+  The name must be unique within the region.
+
+* `region` - (Required) The region in which the network attachment resides.
+  For example, `europe-west1`.
+
+* `project` - (Optional) The ID of the project in which the resource belongs.
+  If it is not provided, the provider project is used.
+
+## Attributes Reference
+
+See [google_compute_network_attachment](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/compute_network_attachment#attributes-reference) resource for details of the available attributes.
\ No newline at end of file From b55239a0de1eaa3984929aa464ffa5a10a18cef6 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 10 Jul 2025 13:47:03 -0700 Subject: [PATCH 522/884] Mark accessLoggingConfig immutable in apigee instance (#14494) --- mmv1/products/apigee/Instance.yaml | 3 +++ .../services/apigee/resource_apigee_instance_update_test.go | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/mmv1/products/apigee/Instance.yaml b/mmv1/products/apigee/Instance.yaml index 05939782e1dc..b51712e9f528 100644 --- a/mmv1/products/apigee/Instance.yaml +++ b/mmv1/products/apigee/Instance.yaml @@ -206,6 +206,7 @@ properties: output: true - name: 'accessLoggingConfig' type: NestedObject + immutable: true description: | Access logging configuration enables the access logging feature at the instance. Apigee customers can enable access logging to ship the access logs to their own project's cloud logging. @@ -213,10 +214,12 @@ properties: - name: 'enabled' type: Boolean required: true + immutable: true description: | Boolean flag that specifies whether the customer access log feature is enabled. - name: 'filter' type: String + immutable: true description: | Ship the access log entries that match the statusCode defined in the filter. The statusCode is the only expected/supported filter field. 
(Ex: statusCode) diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go index c2c5097689d2..5027d34de265 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go @@ -211,8 +211,8 @@ resource "google_apigee_instance" "apigee_instance" { ] access_logging_config { - enabled = true - filter = "status_code >= 200 && status_code < 300" + enabled = false + filter = "status_code >= 0 && status_code < 600" } } `, context) From 3825ace7647d3ea0f5707b1d4fb75783b075e94b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 10 Jul 2025 14:17:46 -0700 Subject: [PATCH 523/884] tgc-revival: support google_compute_subnetwork (#14491) --- mmv1/api/type.go | 14 +++++++++++ mmv1/products/compute/BackendService.yaml | 1 - mmv1/products/compute/Subnetwork.yaml | 6 +++++ .../custom_expand/original_value.go.tmpl | 3 --- .../decoders/compute_subnetwork.go.tmpl | 23 +++++++++++++++++++ .../tgc_next/services/resource.go.tmpl | 1 + .../expand_property_method_tgc.go.tmpl | 6 +++++ .../tgc_next/test/assert_test_files.go | 6 ++++- 8 files changed, 55 insertions(+), 5 deletions(-) delete mode 100644 mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl create mode 100644 mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 07894d9a1c22..6bc841686112 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -856,6 +856,20 @@ func (t Type) ResourceRef() *Resource { return resources[0] } +// Checks if the referenced resource is in the same product or not +func (t Type) IsResourceRefFound() bool { + if !t.IsA("ResourceRef") { + return false + } + + product := t.ResourceMetadata.ProductMetadata + resources := google.Select(product.Objects, func(obj *Resource) bool { + return obj.Name == 
t.Resource + }) + + return len(resources) != 0 +} + // TODO rewrite: validation // func (t *Type) check_resource_ref_property_exists // return unless defined?(resource_ref.all_user_properties) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 9ffafa2ee70c..3bb0fb56697f 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -1422,7 +1422,6 @@ properties: diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' resource: 'ClientTlsPolicy' imports: 'name' - custom_tgc_expand: 'templates/tgc_next/custom_expand/original_value.go.tmpl' - name: 'subjectAltNames' type: Array description: | diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index c22c26b95c6b..eeb93d7f7a55 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -45,6 +45,7 @@ references: docs: base_url: 'projects/{{project}}/regions/{{region}}/subnetworks' has_self_link: true +include_in_tgc_next_DO_NOT_USE: true immutable: true timeouts: insert_minutes: 20 @@ -67,6 +68,7 @@ custom_code: extra_schema_entry: 'templates/terraform/extra_schema_entry/subnetwork.tmpl' constants: 'templates/terraform/constants/subnetwork.tmpl' post_update: 'templates/terraform/post_update/compute_subnetwork.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/compute_subnetwork.go.tmpl' custom_diff: - 'customdiff.ForceNewIfChange("ip_cidr_range", IsShrinkageIpCidr)' - 'sendSecondaryIpRangeIfEmptyDiff' @@ -325,6 +327,7 @@ properties: default_from_api: true update_url: 'projects/{{project}}/regions/{{region}}/subnetworks/{{name}}/setPrivateIpGoogleAccess' update_verb: 'POST' + include_empty_value_in_cai: true - name: 'privateIpv6GoogleAccess' type: String description: The private IPv6 google access type for the VMs in this subnet. 
@@ -472,6 +475,7 @@ properties: - name: 'ipCollection' type: String ignore_read: true + is_missing_in_cai: true description: | Resource reference of a PublicDelegatedPrefix. The PDP must be a sub-PDP in EXTERNAL_IPV6_SUBNETWORK_CREATION mode. @@ -507,6 +511,7 @@ properties: update_url: 'projects/{{project}}/regions/{{region}}/subnetworks/{{name}}' update_verb: 'PATCH' fingerprint_name: 'fingerprint' + is_missing_in_cai: true - name: 'enableFlowLogs' type: Boolean description: | @@ -515,6 +520,7 @@ properties: org policy, if there is no org policy specified, then it will default to disabled. This field isn't supported if the subnet purpose field is set to REGIONAL_MANAGED_PROXY. default_from_api: true + include_empty_value_in_cai: true deprecation_message: 'This field is being removed in favor of log_config. If log_config is present, flow logs are enabled.' - name: 'state' type: Enum diff --git a/mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl b/mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl deleted file mode 100644 index 8566619d94b7..000000000000 --- a/mmv1/templates/tgc_next/custom_expand/original_value.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} diff --git a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl new file mode 100644 index 000000000000..906a333a5ab2 --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl @@ -0,0 +1,23 @@ +{{/* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +// In the GET API response, the field stackType is not present. +// In CAI asset, "stackType" has value "UNSPECIFIED_STACK_TYPE" +// So set the value to empty string in this case. +if raw, ok := res["stackType"]; ok { + v := raw.(string) + if v == "UNSPECIFIED_STACK_TYPE" { + res["stackType"] = "" + } +} + +return res, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/services/resource.go.tmpl b/mmv1/templates/tgc_next/services/resource.go.tmpl index 91ed7204d627..343957bc0b4d 100644 --- a/mmv1/templates/tgc_next/services/resource.go.tmpl +++ b/mmv1/templates/tgc_next/services/resource.go.tmpl @@ -20,6 +20,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/apparentlymart/go-cidr/cidr" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl index a2b200b52b17..072c45383ee0 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl @@ -18,7 +18,13 @@ {{ else if and ($.IsA "Array") ($.ItemType.IsA "ResourceRef")}} {{- template "expandArrayResourcerefWithValidation" $ -}} {{ else if ($.IsA "ResourceRef") 
}} + {{- if $.IsResourceRefFound }} {{- template "expandResourcerefWithValidation" $ -}} + {{- else }} +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + {{- end }} {{ else }} {{- template "expandPropertyMethod" $ -}} {{ end }} diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index a84ae68e0a2b..3ae15f85a407 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -76,6 +76,10 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData log.Printf("%s is not supported in tfplan2cai conversion.", resourceType) } + if testData.Cai == nil { + return fmt.Errorf("cai asset is unavailable for resource %s", testData.ResourceAddress) + } + assets := make([]caiasset.Asset, 0) for assetName, assetData := range testData.Cai { assets = append(assets, assetData.CaiAsset) @@ -147,7 +151,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData parsedExportConfig := exportResources[0].Attributes missingKeys := compareHCLFields(testData.ParsedRawConfig, parsedExportConfig, ignoredFieldSet) if len(missingKeys) > 0 { - return fmt.Errorf("missing fields in address %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) + return fmt.Errorf("missing fields in resource %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) } log.Printf("Step 1 passes for resource %s. 
All of the fields in raw config are in export config", testData.ResourceAddress) From db60c259a1c68d7de8638c0f0cc5ed5c57b7a89b Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 10 Jul 2025 14:55:51 -0700 Subject: [PATCH 524/884] tgc-revival: sort all list elements before comparing hcl (#14500) --- mmv1/third_party/tgc_next/test/hcl.go | 64 ++++++-- mmv1/third_party/tgc_next/test/hcl_test.go | 172 +++++++++++++++++++++ 2 files changed, 226 insertions(+), 10 deletions(-) create mode 100644 mmv1/third_party/tgc_next/test/hcl_test.go diff --git a/mmv1/third_party/tgc_next/test/hcl.go b/mmv1/third_party/tgc_next/test/hcl.go index 5cfbdef07499..6702e53cfec7 100644 --- a/mmv1/third_party/tgc_next/test/hcl.go +++ b/mmv1/third_party/tgc_next/test/hcl.go @@ -3,6 +3,8 @@ package test import ( "fmt" "log" + "sort" + "strings" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" @@ -94,9 +96,9 @@ func insert(data any, key string, parent map[string]any) { } } -func flatten(data interface{}, prefix string, result map[string]struct{}) { +func flatten(data any, prefix string, result map[string]struct{}) { switch v := data.(type) { - case map[string]interface{}: + case map[string]any: for key, value := range v { newPrefix := key if prefix != "" { @@ -104,17 +106,59 @@ func flatten(data interface{}, prefix string, result map[string]struct{}) { } flatten(value, newPrefix, result) } - case []interface{}: - if len(v) == 0 && prefix != "" { - result[prefix] = struct{}{} - } - for i, value := range v { - newPrefix := fmt.Sprintf("%s.%d", prefix, i) - flatten(value, newPrefix, result) - } + case []any: + flattenSlice(prefix, v, result) default: if prefix != "" { result[prefix] = struct{}{} } } } + +func flattenSlice(prefix string, v []any, result map[string]struct{}) { + if len(v) == 0 && prefix != "" { + result[prefix] = struct{}{} + return + } + + type sortableElement struct { + flatKeys string + flattened map[string]struct{} + } + + sortable := 
make([]sortableElement, len(v)) + for i, value := range v { + flattened := make(map[string]struct{}) + flatten(value, "", flattened) + keys := make([]string, 0, len(flattened)) + for k := range flattened { + keys = append(keys, k) + } + sort.Strings(keys) + sortable[i] = sortableElement{ + flatKeys: strings.Join(keys, ";"), + flattened: flattened, + } + } + + sort.Slice(sortable, func(i, j int) bool { + return sortable[i].flatKeys < sortable[j].flatKeys + }) + + for i, element := range sortable { + newPrefix := fmt.Sprintf("%s.%d", prefix, i) + if len(element.flattened) == 0 { + if newPrefix != "" { + result[newPrefix] = struct{}{} + } + } else { + for k := range element.flattened { + newKey := newPrefix + if k != "" { + newKey = newPrefix + "." + k + } + result[newKey] = struct{}{} + } + } + } +} diff --git a/mmv1/third_party/tgc_next/test/hcl_test.go b/mmv1/third_party/tgc_next/test/hcl_test.go new file mode 100644 index 000000000000..1356e617f012 --- /dev/null +++ b/mmv1/third_party/tgc_next/test/hcl_test.go @@ -0,0 +1,172 @@ +package test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" +) + +var ( + basicHCL = ` +resource "google_project_service" "project" { + service = "iam.googleapis.com" +} +` + nestedBlocksHCL = ` +resource "google_storage_bucket" "bucket" { + name = "my-bucket" + location = "US" + force_destroy = true + + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 30 + } + } +} +` + multipleResourcesHCL = ` +resource "google_project_service" "project" { + service = "iam.googleapis.com" +} + +resource "google_storage_bucket" "bucket" { + name = "my-bucket" +} +` + listOfNestedObjectsHCL = ` +resource "google_compute_firewall" "default" { + name = "test-firewall" + network = google_compute_network.default.name + + allow { + protocol = "icmp" + } + + allow { + protocol = "tcp" + ports = ["80", "8080", "1000-2000"] + } + + source_tags = ["web"] +} +` + listOfMultiLevelNestedObjectsHCL = ` +resource 
"google_compute_firewall" "default" { + name = "test-firewall" + network = google_compute_network.default.name + + allow { + protocol = "icmp" + } + + allow { + protocol = "tcp" + ports = ["80", "8080", "1000-2000"] + } + + source_tags = ["web"] +} +` +) + +func TestParseHCLBytes(t *testing.T) { + t.Parallel() + cases := []struct { + name string + hcl string + exp map[string]map[string]struct{} + expectErr bool + }{ + { + name: "basic", + hcl: basicHCL, + exp: map[string]map[string]struct{}{ + "google_project_service.project": { + "service": {}, + }, + }, + }, + { + name: "nested blocks", + hcl: nestedBlocksHCL, + exp: map[string]map[string]struct{}{ + "google_storage_bucket.bucket": { + "name": {}, + "location": {}, + "force_destroy": {}, + "lifecycle_rule.action.type": {}, + "lifecycle_rule.condition.age": {}, + }, + }, + }, + { + name: "multiple resources", + hcl: multipleResourcesHCL, + exp: map[string]map[string]struct{}{ + "google_project_service.project": { + "service": {}, + }, + "google_storage_bucket.bucket": { + "name": {}, + }, + }, + }, + { + name: "resource with a list of nested objects", + hcl: listOfNestedObjectsHCL, + exp: map[string]map[string]struct{}{ + "google_compute_firewall.default": { + "allow.0.ports": {}, // "ports" appears in first element due to sorting + "allow.0.protocol": {}, + "allow.1.protocol": {}, + "name": {}, + "network": {}, + "source_tags": {}, + }, + }, + }, + { + name: "resource with a list of multi-level nested objects", + hcl: listOfMultiLevelNestedObjectsHCL, + exp: map[string]map[string]struct{}{ + "google_compute_firewall.default": { + "allow.0.ports": {}, // "ports" appears in first element due to sorting + "allow.0.protocol": {}, + "allow.1.protocol": {}, + "name": {}, + "network": {}, + "source_tags": {}, + }, + }, + }, + { + name: "invalid hcl", + hcl: `resource "google_project_service" "project" {`, + expectErr: true, + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + 
t.Parallel() + got, err := parseHCLBytes([]byte(tc.hcl), "test.hcl") + if tc.expectErr { + if err == nil { + t.Fatal("expected error, got nil") + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if diff := cmp.Diff(tc.exp, got); diff != "" { + t.Errorf("unexpected diff (-want +got): %s", diff) + } + }) + } +} From 7ab56cffd1c4afd2926d379db320529d85dbb3cc Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 10 Jul 2025 15:47:52 -0700 Subject: [PATCH 525/884] tgc-revival: fixed test case for hcl parsing (#14502) --- mmv1/third_party/tgc_next/test/hcl_test.go | 26 ++++++++++++++-------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/tgc_next/test/hcl_test.go b/mmv1/third_party/tgc_next/test/hcl_test.go index 1356e617f012..b6d4b7f2fb18 100644 --- a/mmv1/third_party/tgc_next/test/hcl_test.go +++ b/mmv1/third_party/tgc_next/test/hcl_test.go @@ -60,12 +60,18 @@ resource "google_compute_firewall" "default" { network = google_compute_network.default.name allow { - protocol = "icmp" + protocol = "tcp" + ports = ["80", "8080", "1000-2000"] } allow { - protocol = "tcp" - ports = ["80", "8080", "1000-2000"] + protocol = "icmp" + a_second_level { + b = true + } + a_second_level { + a = false + } } source_tags = ["web"] @@ -134,12 +140,14 @@ func TestParseHCLBytes(t *testing.T) { hcl: listOfMultiLevelNestedObjectsHCL, exp: map[string]map[string]struct{}{ "google_compute_firewall.default": { - "allow.0.ports": {}, // "ports" appears in first element due to sorting - "allow.0.protocol": {}, - "allow.1.protocol": {}, - "name": {}, - "network": {}, - "source_tags": {}, + "allow.0.a_second_level.0.a": {}, + "allow.0.a_second_level.1.b": {}, + "allow.0.protocol": {}, + "allow.1.ports": {}, + "allow.1.protocol": {}, + "name": {}, + "network": {}, + "source_tags": {}, }, }, }, From c71eb1ad8fc358cd8e82e82d4f30d72a501db495 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wiatrowski?= Date: Fri, 11 Jul 2025 18:41:12 
+0200 Subject: [PATCH 526/884] Update Gemini GeminiGcpEnablementSetting resource -- add the web_grounding_type field (#14443) Co-authored-by: Cameron Thornton --- mmv1/products/gemini/GeminiGcpEnablementSetting.yaml | 12 +++++++++++- ...emini_gemini_gcp_enablement_setting_basic.tf.tmpl | 2 +- ...mini_gcp_enablement_setting_binding_basic.tf.tmpl | 2 +- ...ini_gemini_gcp_enablement_setting_binding_test.go | 4 ++-- ...urce_gemini_gemini_gcp_enablement_setting_test.go | 4 ++-- 5 files changed, 17 insertions(+), 7 deletions(-) diff --git a/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml b/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml index 49a01ab3e820..0020c68ffd8f 100644 --- a/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml +++ b/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml @@ -70,4 +70,14 @@ properties: description: Whether customer data sharing should be enabled. - name: disableWebGrounding type: Boolean - description: Whether web grounding should be disabled. + description: |- + Whether web grounding should be disabled. + deprecation_message: |- + `disable_web_grounding` is deprecated. Use `web_grounding_type` instead. + - name: webGroundingType + type: String + description: |- + Web grounding type. 
+ Possible values: + GROUNDING_WITH_GOOGLE_SEARCH + WEB_GROUNDING_FOR_ENTERPRISE diff --git a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl index a614e3fa2e38..0eb3055eded4 100644 --- a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_basic.tf.tmpl @@ -3,5 +3,5 @@ resource "google_gemini_gemini_gcp_enablement_setting" "{{$.PrimaryResourceId}}" location = "global" labels = {"my_key": "my_value"} enable_customer_data_sharing = true - disable_web_grounding = true + web_grounding_type = "WEB_GROUNDING_FOR_ENTERPRISE" } diff --git a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl index ed6fe1601b99..071d7f389155 100644 --- a/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/gemini_gemini_gcp_enablement_setting_binding_basic.tf.tmpl @@ -3,7 +3,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key": "my_value"} enable_customer_data_sharing = true - disable_web_grounding = true + web_grounding_type = "WEB_GROUNDING_FOR_ENTERPRISE" } resource "google_gemini_gemini_gcp_enablement_setting_binding" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go index af5ade59619a..a69ef8ff388f 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go +++ 
b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go @@ -59,7 +59,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key": "my_value"} enable_customer_data_sharing = true - disable_web_grounding = true + web_grounding_type = "WEB_GROUNDING_FOR_ENTERPRISE" } resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { @@ -81,7 +81,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = false - disable_web_grounding = false + web_grounding_type = "GROUNDING_WITH_GOOGLE_SEARCH" } resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go index 7461e47c1525..93eb4a8f5afe 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go @@ -51,7 +51,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = true - disable_web_grounding = true + web_grounding_type = "WEB_GROUNDING_FOR_ENTERPRISE" } `, context) } @@ -62,7 +62,7 @@ resource "google_gemini_gemini_gcp_enablement_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = false - disable_web_grounding = false + web_grounding_type = "GROUNDING_WITH_GOOGLE_SEARCH" } `, context) } From f6234b822930dfe2fa6153bd321728a10e06080b Mon Sep 17 00:00:00 2001 From: zhihaos Date: Fri, 11 Jul 2025 13:37:52 -0400 Subject: [PATCH 527/884] Adding flexible webhook support; (#14459) --- 
mmv1/products/dialogflowcx/Webhook.yaml | 303 +++++++++++++++--- ... => dialogflowcx_webhook_flexible.tf.tmpl} | 11 + ...webhook_service_directory_flexible.tf.tmpl | 37 +++ ...webhook_service_directory_standard.tf.tmpl | 45 +++ .../dialogflowcx_webhook_standard.tf.tmpl | 42 +++ 5 files changed, 399 insertions(+), 39 deletions(-) rename mmv1/templates/terraform/examples/{dialogflowcx_webhook_full.tf.tmpl => dialogflowcx_webhook_flexible.tf.tmpl} (60%) create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_flexible.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_standard.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_webhook_standard.tf.tmpl diff --git a/mmv1/products/dialogflowcx/Webhook.yaml b/mmv1/products/dialogflowcx/Webhook.yaml index 57b270cc6f73..126b21034e9f 100644 --- a/mmv1/products/dialogflowcx/Webhook.yaml +++ b/mmv1/products/dialogflowcx/Webhook.yaml @@ -38,8 +38,20 @@ custom_code: custom_import: 'templates/terraform/custom_import/dialogflowcx_webhook.go.tmpl' exclude_sweeper: true examples: - - name: 'dialogflowcx_webhook_full' - primary_resource_id: 'basic_webhook' + - name: 'dialogflowcx_webhook_standard' + primary_resource_id: 'standard_webhook' + vars: + agent_name: 'dialogflowcx-agent' + - name: 'dialogflowcx_webhook_flexible' + primary_resource_id: 'flexible_webhook' + vars: + agent_name: 'dialogflowcx-agent' + - name: 'dialogflowcx_webhook_service_directory_standard' + primary_resource_id: 'standard_webhook' + vars: + agent_name: 'dialogflowcx-agent' + - name: 'dialogflowcx_webhook_service_directory_flexible' + primary_resource_id: 'flexible_webhook' vars: agent_name: 'dialogflowcx-agent' parameters: @@ -71,27 +83,134 @@ properties: type: Boolean description: | Indicates whether the webhook is disabled. 
- - name: 'genericWebService' + - name: genericWebService type: NestedObject - description: | - Configuration for a generic web service. + description: Represents configuration for a generic web service. properties: - - name: 'uri' - type: String - description: | - Whether to use speech adaptation for speech recognition. - required: true - - name: 'requestHeaders' - type: KeyValuePairs - description: | - The HTTP request headers to send together with webhook requests. - immutable: true - - name: 'allowedCaCerts' + - name: allowedCaCerts type: Array - description: | - Specifies a list of allowed custom CA certificates (in DER format) for HTTPS verification. + description: |- + Specifies a list of allowed custom CA certificates (in DER format) for + HTTPS verification. This overrides the default SSL trust store. If this + is empty or unspecified, Dialogflow will use Google's default trust store + to verify certificates. + N.B. Make sure the HTTPS server certificates are signed with "subject alt + name". For instance a certificate can be self-signed using the following + command, + openssl x509 -req -days 200 -in example.com.csr \ + -signkey example.com.key \ + -out example.com.crt \ + -extfile <(printf "\nsubjectAltName='DNS:www.example.com'") item_type: type: String + - name: httpMethod + type: Enum + description: |- + HTTP method for the flexible webhook calls. Standard webhook always uses + POST. + enum_values: + - 'POST' + - 'GET' + - 'HEAD' + - 'PUT' + - 'DELETE' + - 'PATCH' + - 'OPTIONS' + - name: oauthConfig + type: NestedObject + description: |- + Represents configuration of OAuth client credential flow for 3rd party + API authentication. + properties: + - name: clientId + type: String + description: The client ID provided by the 3rd party platform. + required: true + - name: clientSecret + type: String + description: |- + The client secret provided by the 3rd party platform. 
If the + `secret_version_for_client_secret` field is set, this field will be + ignored. + ignore_read: true + - name: scopes + type: Array + description: The OAuth scopes to grant. + item_type: + type: String + - name: secretVersionForClientSecret + type: String + description: |- + The name of the SecretManager secret version resource storing the + client secret. If this field is set, the `client_secret` field will be + ignored. + Format: `projects/{project}/secrets/{secret}/versions/{version}` + - name: tokenEndpoint + type: String + description: |- + The token endpoint provided by the 3rd party platform to exchange an + access token. + required: true + - name: parameterMapping + type: KeyValuePairs + description: |- + Maps the values extracted from specific fields of the flexible webhook + response into session parameters. + - Key: session parameter name + - Value: field path in the webhook response + - name: requestBody + type: String + description: Defines a custom JSON object as request body to send to flexible webhook. + - name: requestHeaders + type: KeyValuePairs + description: The HTTP request headers to send together with webhook requests. + - name: secretVersionForUsernamePassword + type: String + description: |- + The SecretManager secret version resource storing the username:password + pair for HTTP Basic authentication. + Format: `projects/{project}/secrets/{secret}/versions/{version}` + - name: secretVersionsForRequestHeaders + type: Map + description: |- + The HTTP request headers to send together with webhook requests. Header + values are stored in SecretManager secret versions. + + When the same header name is specified in both `request_headers` and + `secret_versions_for_request_headers`, the value in + `secret_versions_for_request_headers` will be used. 
+ key_name: 'key' + value_type: + name: 'secretVersionsForRequestHeader' + type: NestedObject + properties: + - name: secretVersion + type: String + description: | + The SecretManager secret version resource storing the header value. + Format: `projects/{project}/secrets/{secret}/versions/{version}` + required: true + - name: serviceAgentAuth + type: Enum + description: |- + Indicate the auth token type generated from the [Diglogflow service + agent](https://cloud.google.com/iam/docs/service-agents#dialogflow-service-agent). + The generated token is sent in the Authorization header. + enum_values: + - 'NONE' + - 'ID_TOKEN' + - 'ACCESS_TOKEN' + - name: uri + type: String + description: The webhook URI for receiving POST requests. It must use https protocol. + required: true + - name: webhookType + type: Enum + description: |- + Type of the webhook. + enum_values: + - 'STANDARD' + - 'FLEXIBLE' - name: 'serviceDirectory' type: NestedObject description: | @@ -102,42 +221,148 @@ properties: description: | The name of Service Directory service. required: true - - name: 'genericWebService' + - name: genericWebService type: NestedObject - description: | - The name of Service Directory service. - required: true + description: Represents configuration for a generic web service. properties: - - name: 'uri' - type: String - description: | - Whether to use speech adaptation for speech recognition. - required: true - - name: 'requestHeaders' - type: KeyValuePairs - description: | - The HTTP request headers to send together with webhook requests. - immutable: true - - name: 'allowedCaCerts' + - name: allowedCaCerts type: Array - description: | - Specifies a list of allowed custom CA certificates (in DER format) for HTTPS verification. + description: |- + Specifies a list of allowed custom CA certificates (in DER format) for + HTTPS verification. This overrides the default SSL trust store. 
If this + is empty or unspecified, Dialogflow will use Google's default trust store + to verify certificates. + N.B. Make sure the HTTPS server certificates are signed with "subject alt + name". For instance a certificate can be self-signed using the following + command, + openssl x509 -req -days 200 -in example.com.csr \ + -signkey example.com.key \ + -out example.com.crt \ + -extfile <(printf "\nsubjectAltName='DNS:www.example.com'") item_type: type: String + - name: httpMethod + type: Enum + description: |- + HTTP method for the flexible webhook calls. Standard webhook always uses + POST. + enum_values: + - 'POST' + - 'GET' + - 'HEAD' + - 'PUT' + - 'DELETE' + - 'PATCH' + - 'OPTIONS' + - name: oauthConfig + type: NestedObject + description: |- + Represents configuration of OAuth client credential flow for 3rd party + API authentication. + properties: + - name: clientId + type: String + description: The client ID provided by the 3rd party platform. + required: true + - name: clientSecret + type: String + description: |- + The client secret provided by the 3rd party platform. If the + `secret_version_for_client_secret` field is set, this field will be + ignored. + ignore_read: true + - name: scopes + type: Array + description: The OAuth scopes to grant. + item_type: + type: String + - name: secretVersionForClientSecret + type: String + description: |- + The name of the SecretManager secret version resource storing the + client secret. If this field is set, the `client_secret` field will be + ignored. + Format: `projects/{project}/secrets/{secret}/versions/{version}` + - name: tokenEndpoint + type: String + description: |- + The token endpoint provided by the 3rd party platform to exchange an + access token. + required: true + - name: parameterMapping + type: KeyValuePairs + description: |- + Maps the values extracted from specific fields of the flexible webhook + response into session parameters. 
+ - Key: session parameter name + - Value: field path in the webhook response + - name: requestBody + type: String + description: Defines a custom JSON object as request body to send to flexible webhook. + - name: requestHeaders + type: KeyValuePairs + description: The HTTP request headers to send together with webhook requests. + - name: secretVersionForUsernamePassword + type: String + description: |- + The SecretManager secret version resource storing the username:password + pair for HTTP Basic authentication. + Format: `projects/{project}/secrets/{secret}/versions/{version}` + - name: secretVersionsForRequestHeaders + type: Map + description: |- + The HTTP request headers to send together with webhook requests. Header + values are stored in SecretManager secret versions. + + When the same header name is specified in both `request_headers` and + `secret_versions_for_request_headers`, the value in + `secret_versions_for_request_headers` will be used. + key_name: 'key' + value_type: + name: 'secretVersionsForRequestHeader' + type: NestedObject + properties: + - name: secretVersion + type: String + description: | + The SecretManager secret version resource storing the header value. + Format: `projects/{project}/secrets/{secret}/versions/{version}` + required: true + - name: serviceAgentAuth + type: Enum + description: |- + Indicate the auth token type generated from the [Diglogflow service + agent](https://cloud.google.com/iam/docs/service-agents#dialogflow-service-agent). + The generated token is sent in the Authorization header. + enum_values: + - 'NONE' + - 'ID_TOKEN' + - 'ACCESS_TOKEN' + - name: uri + type: String + description: The webhook URI for receiving POST requests. It must use https protocol. + required: true + - name: webhookType + type: Enum + description: |- + Type of the webhook. + enum_values: + - 'STANDARD' + - 'FLEXIBLE' - name: 'startFlow' type: String description: | - Name of the start flow in this agent. 
A start flow will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: projects//locations//agents//flows/. + Deprecated. Name of the start flow in this agent. A start flow will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: projects//locations//agents//flows/. output: true - name: 'securitySettings' type: String description: | - Name of the SecuritySettings reference for the agent. Format: projects//locations//securitySettings/. + Deprecated. Name of the SecuritySettings reference for the agent. Format: projects//locations//securitySettings/. - name: 'enableStackdriverLogging' type: Boolean description: | - Determines whether this agent should log conversation queries. + Deprecated. Determines whether this agent should log conversation queries. - name: 'enableSpellCorrection' type: Boolean description: | - Indicates if automatic spell correction is enabled in detect intent requests. + Deprecated. Indicates if automatic spell correction is enabled in detect intent requests. 
diff --git a/mmv1/templates/terraform/examples/dialogflowcx_webhook_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_webhook_flexible.tf.tmpl similarity index 60% rename from mmv1/templates/terraform/examples/dialogflowcx_webhook_full.tf.tmpl rename to mmv1/templates/terraform/examples/dialogflowcx_webhook_flexible.tf.tmpl index dc90b4f0fb7c..7913fd7595b5 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_webhook_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_webhook_flexible.tf.tmpl @@ -19,5 +19,16 @@ resource "google_dialogflow_cx_webhook" "{{$.PrimaryResourceId}}" { display_name = "MyFlow" generic_web_service { uri = "https://example.com" + request_headers = { "example-key": "example-value" } + webhook_type = "FLEXIBLE" + oauth_config { + client_id = "example-client-id" + client_secret = "projects/example-proj/secrets/example-secret/versions/example-version" + token_endpoint = "https://example.com" + } + service_agent_auth = "NONE" + http_method = "POST" + request_body = "{\"example-key\": \"example-value\"}" + parameter_mapping = { "example-parameter": "examplePath" } } } diff --git a/mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_flexible.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_flexible.tf.tmpl new file mode 100644 index 000000000000..db43d7142f29 --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_flexible.tf.tmpl @@ -0,0 +1,37 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "us-central1" + default_language_code = "en" + supported_language_codes = ["it","de","es"] + time_zone = "America/New_York" + description = "Example description." 
+ avatar_uri = "https://cloud.google.com/_static/images/cloud/icons/favicons/onecloud/super_cloud.png" + enable_stackdriver_logging = true + enable_spell_correction = true + speech_to_text_settings { + enable_speech_adaptation = true + } +} + + +resource "google_dialogflow_cx_webhook" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "MyFlow" + service_directory { + service = "projects/example-proj/locations/us-central1/namespaces/example-namespace/services/example-service" + generic_web_service { + uri = "https://example.com" + request_headers = { "example-key": "example-value" } + webhook_type = "FLEXIBLE" + oauth_config { + client_id = "example-client-id" + client_secret = "projects/example-proj/secrets/example-secret/versions/example-version" + token_endpoint = "https://example.com" + } + service_agent_auth = "NONE" + http_method = "POST" + request_body = "{\"example-key\": \"example-value\"}" + parameter_mapping = { "example-parameter": "examplePath" } + } + } +} diff --git a/mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_standard.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_standard.tf.tmpl new file mode 100644 index 000000000000..562769c0c31e --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_standard.tf.tmpl @@ -0,0 +1,45 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "us-central1" + default_language_code = "en" + supported_language_codes = ["it","de","es"] + time_zone = "America/New_York" + description = "Example description." 
+ avatar_uri = "https://cloud.google.com/_static/images/cloud/icons/favicons/onecloud/super_cloud.png" + enable_stackdriver_logging = true + enable_spell_correction = true + speech_to_text_settings { + enable_speech_adaptation = true + } +} + + +resource "google_dialogflow_cx_webhook" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "MyFlow" + service_directory { + service = "projects/example-proj/locations/us-central1/namespaces/example-namespace/services/example-service" + generic_web_service { + allowed_ca_certs = ["BQA="] + uri = "https://example.com" + request_headers = { "example-key": "example-value" } + webhook_type = "STANDARD" + oauth_config { + client_id = "example-client-id" + secret_version_for_client_secret = "projects/example-proj/secrets/example-secret/versions/example-version" + token_endpoint = "https://example.com" + scopes = ["example-scope"] + } + service_agent_auth = "NONE" + secret_version_for_username_password = "projects/example-proj/secrets/example-secret/versions/example-version" + secret_versions_for_request_headers { + key = "example-key-1" + secret_version = "projects/example-proj/secrets/example-secret/versions/example-version" + } + secret_versions_for_request_headers { + key = "example-key-2" + secret_version = "projects/example-proj/secrets/example-secret/versions/example-version-2" + } + } + } +} diff --git a/mmv1/templates/terraform/examples/dialogflowcx_webhook_standard.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_webhook_standard.tf.tmpl new file mode 100644 index 000000000000..7a96560b50ac --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_webhook_standard.tf.tmpl @@ -0,0 +1,42 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + default_language_code = "en" + supported_language_codes = ["it","de","es"] + time_zone = "America/New_York" + description = "Example description." 
+ avatar_uri = "https://cloud.google.com/_static/images/cloud/icons/favicons/onecloud/super_cloud.png" + enable_stackdriver_logging = true + enable_spell_correction = true + speech_to_text_settings { + enable_speech_adaptation = true + } +} + + +resource "google_dialogflow_cx_webhook" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "MyFlow" + generic_web_service { + allowed_ca_certs = ["BQA="] + uri = "https://example.com" + request_headers = { "example-key": "example-value" } + webhook_type = "STANDARD" + oauth_config { + client_id = "example-client-id" + secret_version_for_client_secret = "projects/example-proj/secrets/example-secret/versions/example-version" + token_endpoint = "https://example.com" + scopes = ["example-scope"] + } + service_agent_auth = "NONE" + secret_version_for_username_password = "projects/example-proj/secrets/example-secret/versions/example-version" + secret_versions_for_request_headers { + key = "example-key-1" + secret_version = "projects/example-proj/secrets/example-secret/versions/example-version" + } + secret_versions_for_request_headers { + key = "example-key-2" + secret_version = "projects/example-proj/secrets/example-secret/versions/example-version-2" + } + } +} From 7d188847cd3340abdb7ef137768d3a9ef3c85b28 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Fri, 11 Jul 2025 10:55:06 -0700 Subject: [PATCH 528/884] tgc-revival: support google_compute_firewall (#14504) --- mmv1/products/compute/Firewall.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/products/compute/Firewall.yaml b/mmv1/products/compute/Firewall.yaml index d379841f3c8c..8ea2600cd31d 100644 --- a/mmv1/products/compute/Firewall.yaml +++ b/mmv1/products/compute/Firewall.yaml @@ -50,6 +50,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: extra_schema_entry: 
'templates/terraform/extra_schema_entry/firewall.tmpl' constants: 'templates/terraform/constants/firewall.tmpl' @@ -70,6 +71,8 @@ examples: firewall_name: 'my-firewall-rule' test_env_vars: project: 'PROJECT_NAME' + tgc_test_ignore_extra: + - 'project' parameters: properties: # TODO(nelsonjr): [nice to have] Make the format here simpler to use, in From 8850d26a9a0049b206305240fdae5e40592fb2aa Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Fri, 11 Jul 2025 12:28:26 -0700 Subject: [PATCH 529/884] Revert "Update service account creation to prevent failures due to eventual consistency" (#14506) --- .../resource_google_service_account.go | 65 ++++++++----------- 1 file changed, 28 insertions(+), 37 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index 3c810a7f790b..901b1d0ca975 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -122,59 +122,54 @@ func resourceGoogleServiceAccountCreate(d *schema.ResourceData, meta interface{} ServiceAccount: sa, } - d.SetId(fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project)) - - iamClient := config.NewIamClient(userAgent) - sa, err = iamClient.Projects.ServiceAccounts.Create("projects/"+project, r).Do() + sa, err = config.NewIamClient(userAgent).Projects.ServiceAccounts.Create("projects/"+project, r).Do() if err != nil { gerr, ok := err.(*googleapi.Error) alreadyExists := ok && gerr.Code == 409 && d.Get("create_ignore_already_exists").(bool) if alreadyExists { - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: func() (operr error) { - sa, saerr := iamClient.Projects.ServiceAccounts.Get(d.Id()).Do() - - if saerr != nil { - return saerr - } - return populateResourceData(d, sa) - 
}, - Timeout: d.Timeout(schema.TimeoutCreate), - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ - transport_tpg.IsNotFoundRetryableError("service account creation"), - }, - }) - - return nil + sa = &iam.ServiceAccount{ + Name: fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project), + } } else { return fmt.Errorf("Error creating service account: %s", err) } } + d.SetId(sa.Name) + + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, saerr := config.NewIamClient(userAgent).Projects.ServiceAccounts.Get(d.Id()).Do() + return saerr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ + transport_tpg.IsNotFoundRetryableError("service account creation"), + transport_tpg.IsForbiddenIamServiceAccountRetryableError("service account creation"), + }, + }) + + if err != nil { + return fmt.Errorf("Error reading service account after creation: %s", err) + } + // We poll until the resource is found due to eventual consistency issue - // on part of the api https://cloud.google.com/iam/docs/overview#consistency. - // Wait for at least 3 successful responses in a row to ensure result is consistent. + // on part of the api https://cloud.google.com/iam/docs/overview#consistency // IAM API returns 403 when the queried SA is not found, so we must ignore both 404 & 403 errors - transport_tpg.PollingWaitTime( - resourceServiceAccountPollRead(d, meta), - transport_tpg.PollCheckForExistence, - "Creating Service Account", - d.Timeout(schema.TimeoutCreate), - 3, // Number of consecutive occurences. 
- ) + err = transport_tpg.PollingWaitTime(resourceServiceAccountPollRead(d, meta), transport_tpg.PollCheckForExistenceWith403, "Creating Service Account", d.Timeout(schema.TimeoutCreate), 1) - populateResourceData(d, sa) + if err != nil { + return err + } // We can't guarantee complete consistency even after polling, // so sleep for some additional time to reduce the likelihood of // eventual consistency failures. time.Sleep(10 * time.Second) - return nil + return resourceGoogleServiceAccountRead(d, meta) } -// PollReadFunc for checking Service Account existence. -// If resourceData is not nil, it will be updated with the response. func resourceServiceAccountPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { return func() (map[string]interface{}, error) { config := meta.(*transport_tpg.Config) @@ -206,10 +201,6 @@ func resourceGoogleServiceAccountRead(d *schema.ResourceData, meta interface{}) return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Service Account %q", d.Id())) } - return populateResourceData(d, sa) -} - -func populateResourceData(d *schema.ResourceData, sa *iam.ServiceAccount) error { if err := d.Set("email", sa.Email); err != nil { return fmt.Errorf("Error setting email: %s", err) } From e0cf9294dbb7d6f67f62ac6064dd59c9b5fd0afd Mon Sep 17 00:00:00 2001 From: Ron Gal <125445217+ron-gal@users.noreply.github.com> Date: Fri, 11 Jul 2025 17:02:34 -0400 Subject: [PATCH 530/884] feat(bigtable): Add support for schema bundles (#14458) --- mmv1/products/bigtable/SchemaBundle.yaml | 98 +++++++++++++++++++ .../examples/bigtable_schema_bundle.tf.tmpl | 30 ++++++ .../resource_bigtable_schema_bundle_test.go | 98 +++++++++++++++++++ .../test-fixtures/proto_schema_bundle.pb | 6 ++ .../test-fixtures/proto_schema_bundle.proto | 22 +++++ .../updated_proto_schema_bundle.pb | 7 ++ .../updated_proto_schema_bundle.proto | 23 +++++ 7 files changed, 284 insertions(+) create mode 100644 mmv1/products/bigtable/SchemaBundle.yaml create mode 
100644 mmv1/templates/terraform/examples/bigtable_schema_bundle.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/bigtable/resource_bigtable_schema_bundle_test.go create mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.pb create mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.proto create mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.pb create mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.proto diff --git a/mmv1/products/bigtable/SchemaBundle.yaml b/mmv1/products/bigtable/SchemaBundle.yaml new file mode 100644 index 000000000000..129906179825 --- /dev/null +++ b/mmv1/products/bigtable/SchemaBundle.yaml @@ -0,0 +1,98 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'SchemaBundle' +kind: 'bigtable#schemaBundle' +description: | + A schema bundle object that can be referenced in SQL queries. 
+references: + guides: + api: 'https://cloud.google.com/bigtable/docs/reference/admin/rest/v2/projects.instances.tables.schemaBundles' +docs: +id_format: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' +base_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles?schemaBundleId={{schema_bundle_id}}' +self_link: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' +create_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles?schemaBundleId={{schema_bundle_id}}' +update_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}?ignoreWarnings={{ignore_warnings}}' +update_verb: 'PATCH' +update_mask: true +delete_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' +import_format: + - 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' +timeouts: + insert_minutes: 10 + update_minutes: 10 + delete_minutes: 10 +exclude_sweeper: true +examples: + - name: 'bigtable_schema_bundle' + primary_resource_id: 'schema_bundle' + vars: + instance_name: 'bt-instance' + table_name: 'bt-table' + schema_bundle_name: 'bt-schema-bundle' + ignore_read_extra: + - 'ignore_warnings' + # bigtable instance does not use the shared HTTP client, this test creates an instance + skip_vcr: true +parameters: + - name: 'schemaBundleId' + type: String + description: + 'The unique name of the schema bundle in the form + `[_a-zA-Z0-9][-_.a-zA-Z0-9]*`.' + url_param_only: true + required: true + immutable: true + - name: 'instance' + type: String + description: 'The name of the instance to create the schema bundle within.' + url_param_only: true + immutable: true + diff_suppress_func: 'tpgresource.CompareResourceNames' + - name: 'table' + type: String + description: 'The name of the table to create the schema bundle within.' 
+ url_param_only: true + immutable: true + diff_suppress_func: 'tpgresource.CompareResourceNames' + - name: 'ignoreWarnings' + type: Boolean + description: + 'If true, allow backwards incompatible changes.' + url_param_only: true + default_value: false +properties: + - name: 'name' + type: String + description: + 'The unique name of the requested schema bundle. Values are of the form + `projects//instances//tables//schemaBundles/`.' + output: true + - name: 'protoSchema' + type: NestedObject + description: | + File descriptor set, generated by protoc. + To generate, use protoc with imports and source info included. For an example test.proto file, the following command would put the value in a new file named out.pb. + + $ protoc --include_imports --include_source_info test.proto -o out.pb + required: true + properties: + - name: 'protoDescriptors' + type: String + description: | + Base64 encoded content of the file. + required: true + validation: + function: 'verify.ValidateBase64String' diff --git a/mmv1/templates/terraform/examples/bigtable_schema_bundle.tf.tmpl b/mmv1/templates/terraform/examples/bigtable_schema_bundle.tf.tmpl new file mode 100644 index 000000000000..0e666bf09e36 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigtable_schema_bundle.tf.tmpl @@ -0,0 +1,30 @@ +resource "google_bigtable_instance" "instance" { + name = "{{index $.Vars "instance_name"}}" + cluster { + cluster_id = "cluster-1" + zone = "us-east1-b" + num_nodes = 1 + storage_type = "HDD" + } + + deletion_protection = false +} + +resource "google_bigtable_table" "table" { + name = "{{index $.Vars "table_name"}}" + instance_name = google_bigtable_instance.instance.name + + column_family { + family = "CF" + } +} + +resource "google_bigtable_schema_bundle" "{{$.PrimaryResourceId}}" { + schema_bundle_id = "{{index $.Vars "schema_bundle_name"}}" + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + + proto_schema { + proto_descriptors = 
filebase64("test-fixtures/proto_schema_bundle.pb") + } +} diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_schema_bundle_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_schema_bundle_test.go new file mode 100644 index 000000000000..73293bf94f10 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_schema_bundle_test.go @@ -0,0 +1,98 @@ +package bigtable_test + +import ( + "fmt" + + "testing" + + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestAccBigtableSchemaBundle_update(t *testing.T) { + // bigtable instance does not use the shared HTTP client, this test creates an instance + acctest.SkipIfVcr(t) + t.Parallel() + + instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + sbName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccBigtableSchemaBundle_update(instanceName, tableName, sbName, "proto_schema_bundle"), + }, + { + ResourceName: "google_bigtable_schema_bundle.schema_bundle", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ignore_warnings"}, + }, + { + Config: testAccBigtableSchemaBundle_update(instanceName, tableName, sbName, "updated_proto_schema_bundle"), + + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_bigtable_schema_bundle.schema_bundle", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_bigtable_schema_bundle.schema_bundle", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"ignore_warnings"}, + }, + { + Config: testAccBigtableSchemaBundle_update(instanceName, tableName, sbName, "proto_schema_bundle"), + }, + { + ResourceName: "google_bigtable_schema_bundle.schema_bundle", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ignore_warnings"}, + }, + }, + }) +} + +func testAccBigtableSchemaBundle_update(instanceName, tableName, sbName, fileName string) string { + return fmt.Sprintf(` +resource "google_bigtable_instance" "instance" { + name = "%s" + cluster { + cluster_id = "%s-c" + zone = "us-east1-b" + } + + deletion_protection = false +} + +resource "google_bigtable_table" "table" { + name = "%s" + instance_name = google_bigtable_instance.instance.id + + column_family { + family = "CF" + } +} + +resource "google_bigtable_schema_bundle" "schema_bundle" { + schema_bundle_id = "%s" + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + + proto_schema { + proto_descriptors = filebase64("test-fixtures/%s.pb") + } + + ignore_warnings = true +} +`, instanceName, instanceName, tableName, sbName, fileName) +} diff --git a/mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.pb b/mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.pb new file mode 100644 index 000000000000..c9ac4086f5d7 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.pb @@ -0,0 +1,6 @@ + +q +proto_schema_bundle.proto#gcloud.bigtable.schema_bundles.test"' +Author + +first_name ( R firstNamebproto3 \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.proto b/mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.proto new file mode 100644 index 000000000000..e03f0ccccbef --- /dev/null +++ b/mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.proto @@ 
-0,0 +1,22 @@ +/* +Copyright 2025 Google LLC +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +// The `proto_schema_bundle.pb` binary is generated from this source file, via command: +// protoc --include_imports --descriptor_set_out=proto_schema_bundle.pb proto_schema_bundle.proto + +syntax = "proto3"; + +package gcloud.bigtable.schema_bundles.test; + +message Author { + string first_name = 1; +} diff --git a/mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.pb b/mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.pb new file mode 100644 index 000000000000..21f877a2fe11 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.pb @@ -0,0 +1,7 @@ + + +!updated_proto_schema_bundle.proto#gcloud.bigtable.schema_bundles.test"D +Author + +first_name ( R firstName + last_name ( RlastNamebproto3 \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.proto b/mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.proto new file mode 100644 index 000000000000..e9894cabd20a --- /dev/null +++ b/mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.proto @@ -0,0 +1,23 @@ +/* +Copyright 2025 Google LLC +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +// The `updated_proto_schema_bundle.pb` binary is generated from this source file, via command: +// protoc --include_imports --descriptor_set_out=updated_proto_schema_bundle.pb updated_proto_schema_bundle.proto + +syntax = "proto3"; + +package gcloud.bigtable.schema_bundles.test; + +message Author { + string first_name = 1; + string last_name = 2; +} From 8191167fdb7cb193a3dd7f6c5a7c025f171aa2b3 Mon Sep 17 00:00:00 2001 From: Yanwei Guo Date: Fri, 11 Jul 2025 14:04:20 -0700 Subject: [PATCH 531/884] Add support for GPU fields in Cloud Run v2 Job (GA) (#14423) --- mmv1/products/cloudrunv2/Job.yaml | 2 -- .../templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl | 1 - .../cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl | 6 +++--- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index c86b38b70eb2..57ae1e01c290 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -126,7 +126,6 @@ examples: ignore_read_extra: - 'deletion_protection' - name: 'cloudrunv2_job_gpu' - min_version: 'beta' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-job%s", context["random_suffix"])' vars: @@ -774,7 +773,6 @@ properties: send_empty_value: true default_value: 3 - name: 'nodeSelector' - min_version: beta type: NestedObject description: Node Selector describes the hardware requirements of the resources. 
properties: diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl index 4961d934d95b..5581f4bece44 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl @@ -1,5 +1,4 @@ resource "google_cloud_run_v2_job" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "cloud_run_job_name"}}" location = "us-central1" deletion_protection = false diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index 0cdd6861aa33..083f4f82c6bf 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -998,6 +998,8 @@ func testAccCloudRunV2Job_cloudrunv2JobWithRunExecutionToken(context map[string] } `, context) } +{{- end }} + func TestAccCloudRunV2Job_cloudrunv2JobWithGpuUpdate(t *testing.T) { acctest.SkipIfVcr(t) @@ -1010,7 +1012,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGpuUpdate(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), Steps: []resource.TestStep{ { @@ -1038,7 +1040,6 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGpuUpdate(t *testing.T) { func testAccCloudRunV2Job_cloudrunv2JobWithGpu(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_cloud_run_v2_job" "default" { - provider = google-beta name = "%{job_name}" location = "us-central1" launch_stage = "BETA" @@ -1068,4 +1069,3 @@ func testAccCloudRunV2Job_cloudrunv2JobWithGpu(context 
map[string]interface{}) s } `, context) } -{{- end }} From 7c186245dea2466a349c1e97d6930273e6c879b5 Mon Sep 17 00:00:00 2001 From: Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Date: Fri, 11 Jul 2025 19:44:14 -0300 Subject: [PATCH 532/884] Fix: Container Node Pool - Resource Manager Tags (test) (#14401) --- .../resource_container_node_pool_test.go.tmpl | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 0cd39c7a7b88..10b731caf5d9 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -5111,11 +5111,6 @@ resource "google_container_cluster" "primary" { deletion_protection = false network = "%{network}" subnetwork = "%{subnet}" - - timeouts { - create = "30m" - update = "40m" - } } # Separately Managed Node Pool @@ -5163,11 +5158,6 @@ resource "google_container_cluster" "primary" { deletion_protection = false network = "%{network}" subnetwork = "%{subnet}" - - timeouts { - create = "30m" - update = "40m" - } } # Separately Managed Node Pool @@ -5216,11 +5206,6 @@ resource "google_container_cluster" "primary" { deletion_protection = false network = "%{network}" subnetwork = "%{subnet}" - - timeouts { - create = "30m" - update = "40m" - } } # Separately Managed Node Pool From 4a5a206f842c4562ae164b9cdcdd00f6c6dee9fa Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 11 Jul 2025 16:28:41 -0700 Subject: [PATCH 533/884] add tf-test prefix to compute reservation and dialogflowcx tests (#14509) --- mmv1/products/compute/Reservation.yaml | 2 ++ mmv1/products/dialogflowcx/Flow.yaml | 1 + mmv1/products/dialogflowcx/Page.yaml | 1 + mmv1/products/dialogflowcx/Tool.yaml | 1 + 
.../templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl | 2 +- .../templates/terraform/examples/dialogflowcx_page_full.tf.tmpl | 2 +- .../terraform/examples/dialogflowcx_tool_data_store.tf.tmpl | 2 +- .../terraform/examples/reservation_sharing_policy.tf.tmpl | 2 +- .../examples/reservation_source_instance_template.tf.tmpl | 2 +- .../services/dialogflowcx/resource_dialogflowcx_page_test.go | 2 +- 10 files changed, 11 insertions(+), 6 deletions(-) diff --git a/mmv1/products/compute/Reservation.yaml b/mmv1/products/compute/Reservation.yaml index 31f9617d4af8..acdf22082a24 100644 --- a/mmv1/products/compute/Reservation.yaml +++ b/mmv1/products/compute/Reservation.yaml @@ -66,10 +66,12 @@ examples: - name: 'reservation_source_instance_template' primary_resource_id: 'gce_reservation_source_instance_template' vars: + instance-template: 'instance-template' reservation_name: 'gce-reservation-source-instance-template' - name: 'reservation_sharing_policy' primary_resource_id: 'gce_reservation_sharing_policy' vars: + instance-template: 'instance-template' reservation_name: 'gce-reservation-sharing-policy' - name: 'shared_reservation_basic' primary_resource_id: 'gce_reservation' diff --git a/mmv1/products/dialogflowcx/Flow.yaml b/mmv1/products/dialogflowcx/Flow.yaml index 7bd0cba5e376..2eab19e025e9 100644 --- a/mmv1/products/dialogflowcx/Flow.yaml +++ b/mmv1/products/dialogflowcx/Flow.yaml @@ -49,6 +49,7 @@ examples: vars: agent_name: 'dialogflowcx-agent' bucket_name: 'dialogflowcx-bucket' + data-store: 'datastore-flow-full' - name: 'dialogflowcx_flow_default_start_flow' primary_resource_id: 'default_start_flow' vars: diff --git a/mmv1/products/dialogflowcx/Page.yaml b/mmv1/products/dialogflowcx/Page.yaml index d6980fec118c..d20ef5fb368f 100644 --- a/mmv1/products/dialogflowcx/Page.yaml +++ b/mmv1/products/dialogflowcx/Page.yaml @@ -42,6 +42,7 @@ examples: primary_resource_id: 'basic_page' vars: agent_name: 'dialogflowcx-agent' + data-store: 'datastore-page-full' 
parameters: - name: 'parent' type: String diff --git a/mmv1/products/dialogflowcx/Tool.yaml b/mmv1/products/dialogflowcx/Tool.yaml index 07d48ceaa6a1..7f8671fb0f29 100644 --- a/mmv1/products/dialogflowcx/Tool.yaml +++ b/mmv1/products/dialogflowcx/Tool.yaml @@ -46,6 +46,7 @@ examples: primary_resource_id: 'data_store_tool' vars: agent_name: 'dialogflowcx-agent-data-store' + data_store: 'datastore-tool' - name: 'dialogflowcx_tool_function' primary_resource_id: 'function_tool' vars: diff --git a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl index 455507b5be80..0a4b1cfa7d77 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl @@ -410,7 +410,7 @@ resource "google_dialogflow_cx_flow" "{{$.PrimaryResourceId}}" { resource "google_discovery_engine_data_store" "my_datastore" { location = "global" - data_store_id = "datastore-flow-full" + data_store_id = "{{index $.Vars "data-store"}}" display_name = "datastore-flow-full" industry_vertical = "GENERIC" content_config = "NO_CONTENT" diff --git a/mmv1/templates/terraform/examples/dialogflowcx_page_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_page_full.tf.tmpl index 8a1e8a28c666..8f4b1b4ba633 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_page_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_page_full.tf.tmpl @@ -672,7 +672,7 @@ resource "google_dialogflow_cx_page" "my_page2" { resource "google_discovery_engine_data_store" "my_datastore" { location = "global" - data_store_id = "datastore-page-full" + data_store_id = "{{index $.Vars "data-store"}}" display_name = "datastore-page-full" industry_vertical = "GENERIC" content_config = "NO_CONTENT" diff --git a/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl index 
74da8056d4bb..b75752d4f3e4 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl @@ -30,7 +30,7 @@ resource "google_dialogflow_cx_tool" "{{$.PrimaryResourceId}}" { resource "google_discovery_engine_data_store" "my_datastore" { location = "global" - data_store_id = "datastore-tool-test-%{random_suffix}" + data_store_id = "{{index $.Vars "data_store"}}" display_name = "datastore for Tool test" industry_vertical = "GENERIC" content_config = "NO_CONTENT" diff --git a/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl b/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl index 9a1d93400eaa..6353c88f895f 100644 --- a/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl +++ b/mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl @@ -4,7 +4,7 @@ data "google_compute_image" "my_image" { } resource "google_compute_instance_template" "foobar" { - name = "tf-test-instance-template" + name = "{{index $.Vars "instance-template"}}" machine_type = "g2-standard-4" can_ip_forward = false tags = ["foo", "bar"] diff --git a/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl b/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl index 66424494d49c..33b900bf7d94 100644 --- a/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl +++ b/mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl @@ -4,7 +4,7 @@ data "google_compute_image" "my_image" { } resource "google_compute_instance_template" "foobar" { - name = "tf-test-instance-template" + name = "{{index $.Vars "instance-template"}}" machine_type = "n2-standard-2" can_ip_forward = false tags = ["foo", "bar"] diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_page_test.go 
b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_page_test.go index f438837e55e6..a23053ea67c4 100644 --- a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_page_test.go +++ b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_page_test.go @@ -640,7 +640,7 @@ func testAccDialogflowCXPage_full(context map[string]interface{}) string { resource "google_discovery_engine_data_store" "my_datastore" { location = "global" - data_store_id = "datastore-page-update" + data_store_id = "tf-test-datastore-page-update%{random_suffix}" display_name = "datastore-page-update" industry_vertical = "GENERIC" content_config = "NO_CONTENT" From 8fe80be7392a0caaa782e6802479acc9ab769bc5 Mon Sep 17 00:00:00 2001 From: himanikh Date: Mon, 14 Jul 2025 06:40:44 -0700 Subject: [PATCH 534/884] Update redis instance example (#14512) --- mmv1/templates/terraform/examples/redis_instance_cmek.tf.tmpl | 3 +-- mmv1/templates/terraform/examples/redis_instance_full.tf.tmpl | 2 +- mmv1/templates/terraform/examples/redis_instance_mrr.tf.tmpl | 3 +-- .../terraform/examples/redis_instance_private_service.tf.tmpl | 2 +- .../examples/redis_instance_private_service_test.tf.tmpl | 2 +- 5 files changed, 5 insertions(+), 7 deletions(-) diff --git a/mmv1/templates/terraform/examples/redis_instance_cmek.tf.tmpl b/mmv1/templates/terraform/examples/redis_instance_cmek.tf.tmpl index a18f28dae0d1..fc9aff616fb4 100644 --- a/mmv1/templates/terraform/examples/redis_instance_cmek.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_instance_cmek.tf.tmpl @@ -8,9 +8,8 @@ resource "google_redis_instance" "{{$.PrimaryResourceId}}" { authorized_network = data.google_compute_network.redis-network.id - redis_version = "REDIS_6_X" + redis_version = "REDIS_7_2" display_name = "Terraform Test Instance" - reserved_ip_range = "192.168.0.0/29" labels = { my_key = "my_val" diff --git a/mmv1/templates/terraform/examples/redis_instance_full.tf.tmpl 
b/mmv1/templates/terraform/examples/redis_instance_full.tf.tmpl index adeec52b088f..68297f64d313 100644 --- a/mmv1/templates/terraform/examples/redis_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_instance_full.tf.tmpl @@ -8,7 +8,7 @@ resource "google_redis_instance" "{{$.PrimaryResourceId}}" { authorized_network = data.google_compute_network.redis-network.id - redis_version = "REDIS_4_0" + redis_version = "REDIS_7_2" display_name = "Terraform Test Instance" reserved_ip_range = "192.168.0.0/29" diff --git a/mmv1/templates/terraform/examples/redis_instance_mrr.tf.tmpl b/mmv1/templates/terraform/examples/redis_instance_mrr.tf.tmpl index e366bb220da2..a2e3f0c4de17 100644 --- a/mmv1/templates/terraform/examples/redis_instance_mrr.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_instance_mrr.tf.tmpl @@ -8,9 +8,8 @@ resource "google_redis_instance" "{{$.PrimaryResourceId}}" { authorized_network = data.google_compute_network.redis-network.id - redis_version = "REDIS_6_X" + redis_version = "REDIS_7_2" display_name = "Terraform Test Instance" - reserved_ip_range = "192.168.0.0/28" replica_count = 5 read_replicas_mode = "READ_REPLICAS_ENABLED" diff --git a/mmv1/templates/terraform/examples/redis_instance_private_service.tf.tmpl b/mmv1/templates/terraform/examples/redis_instance_private_service.tf.tmpl index 1a0c7b84baa7..90fb26d70ada 100644 --- a/mmv1/templates/terraform/examples/redis_instance_private_service.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_instance_private_service.tf.tmpl @@ -35,7 +35,7 @@ resource "google_redis_instance" "{{$.PrimaryResourceId}}" { authorized_network = google_compute_network.redis-network.id connect_mode = "PRIVATE_SERVICE_ACCESS" - redis_version = "REDIS_4_0" + redis_version = "REDIS_7_2" display_name = "Terraform Test Instance" depends_on = [google_service_networking_connection.private_service_connection] diff --git a/mmv1/templates/terraform/examples/redis_instance_private_service_test.tf.tmpl 
b/mmv1/templates/terraform/examples/redis_instance_private_service_test.tf.tmpl index f54737bab145..4197ed585612 100644 --- a/mmv1/templates/terraform/examples/redis_instance_private_service_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_instance_private_service_test.tf.tmpl @@ -21,7 +21,7 @@ resource "google_redis_instance" "{{$.PrimaryResourceId}}" { authorized_network = data.google_compute_network.redis-network.id connect_mode = "PRIVATE_SERVICE_ACCESS" - redis_version = "REDIS_4_0" + redis_version = "REDIS_7_2" display_name = "Terraform Test Instance" lifecycle { From 5746c7c76bf085fc57b92ce67850c36e52c2a0ec Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Mon, 14 Jul 2025 09:46:26 -0400 Subject: [PATCH 535/884] Add dialogflowcx generator (#14511) --- mmv1/products/dialogflowcx/Generator.yaml | 138 ++++++++++++++++++ .../dialogflowcx_generator.go.tmpl | 18 +++ .../dialogflowcx_generator_basic.tf.tmpl | 24 +++ .../pre_create/dialogflowcx_generator.go.tmpl | 24 +++ .../resource_dialogflow_cx_generator_test.go | 88 +++++++++++ 5 files changed, 292 insertions(+) create mode 100644 mmv1/products/dialogflowcx/Generator.yaml create mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_generator_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl create mode 100644 mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go diff --git a/mmv1/products/dialogflowcx/Generator.yaml b/mmv1/products/dialogflowcx/Generator.yaml new file mode 100644 index 000000000000..1be77ea63593 --- /dev/null +++ b/mmv1/products/dialogflowcx/Generator.yaml @@ -0,0 +1,138 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Generator +description: + Generators contain prompt to be sent to the LLM model to generate text. + The prompt can contain parameters which will be resolved before calling the model. + It can optionally contain banned phrases to ensure the model responses are safe. +references: + guides: + 'Official Documentation': 'https://cloud.google.com/dialogflow/cx/docs' + api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.generators' +id_format: '{{parent}}/generators/{{name}}' +base_url: '{{parent}}/generators' +update_verb: 'PATCH' +update_mask: true +import_format: + - '{{parent}}/generators/{{name}}' +timeouts: + insert_minutes: 40 + update_minutes: 40 + delete_minutes: 20 +custom_code: + pre_create: 'templates/terraform/pre_create/dialogflowcx_generator.go.tmpl' + pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/dialogflowcx_set_location_skip_default_obj.go.tmpl' + custom_import: 'templates/terraform/custom_import/dialogflowcx_generator.go.tmpl' +exclude_sweeper: true +examples: + - name: 'dialogflowcx_generator_basic' + primary_resource_id: 'generator' + vars: + agent_name: 'dialogflowcx-agent-fucntion' +parameters: + - name: 'parent' + type: String + description: | + The agent to create a Generator for. + Format: projects//locations//agents/. 
+ url_param_only: true + immutable: true + - name: 'languageCode' + type: String + description: | + The language to create generators for the following fields: + * Generator.prompt_text.text + If not specified, the agent's default language is used. + url_param_only: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The unique identifier of the Generator. + Format: projects//locations//agents//generators/. + output: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + - name: 'displayName' + type: String + description: | + The human-readable name of the generator, unique within the agent. + required: true + - name: 'llmModelSettings' + type: NestedObject + description: | + The LLM model settings. + properties: + - name: 'model' + type: String + description: | + The selected LLM model. + - name: 'promptText' + type: String + description: | + The custom prompt to use. + - name: 'modelParameter' + type: NestedObject + description: | + Parameters passed to the LLM to configure its behavior. + properties: + - name: 'temperature' + type: Double + description: | + The temperature used for sampling. Temperature sampling occurs after both topP and topK have been applied. + Valid range: [0.0, 1.0] Low temperature = less random. High temperature = more random. + - name: 'maxDecodeSteps' + type: Integer + description: | + The maximum number of tokens to generate. + - name: 'topP' + type: Double + description: | + If set, only the tokens comprising the top topP probability mass are considered. + If both topP and topK are set, topP will be used for further refining candidates selected with topK. + Valid range: (0.0, 1.0]. Small topP = less random. Large topP = more random. + - name: 'topK' + type: Integer + description: | + If set, the sampling process in each step is limited to the topK tokens with highest probabilities. + Valid range: [1, 40] or 1000+. Small topK = less random. Large topK = more random. 
+ - name: 'placeholders' + type: Array + description: | + List of custom placeholders in the prompt text. + item_type: + type: NestedObject + properties: + - name: 'id' + type: String + description: | + Unique ID used to map custom placeholder to parameters in fulfillment. + - name: 'name' + type: String + description: | + Custom placeholder value in the prompt text. + - name: 'promptText' + type: NestedObject + required: true + ignore_read: true + description: | + Prompt for the LLM model. + properties: + - name: 'text' + type: String + description: | + Text input which can be used for prompt or banned phrases. diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl new file mode 100644 index 000000000000..bcdaa7503d69 --- /dev/null +++ b/mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl @@ -0,0 +1,18 @@ +config := meta.(*transport_tpg.Config) + +// current import_formats can't import fields with forward slashes in their value and parent contains slashes +if err := tpgresource.ParseImportId([]string{ + "(?P.+)/generators/(?P[^/]+)", + "(?P.+)/(?P[^/]+)", +}, d, config); err != nil { + return nil, err +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/generators/{{"{{"}}name{{"}}"}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/dialogflowcx_generator_basic.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_generator_basic.tf.tmpl new file mode 100644 index 000000000000..30f870b0ee8c --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_generator_basic.tf.tmpl @@ -0,0 +1,24 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + 
default_language_code = "en" + supported_language_codes = ["fr","de","es"] + time_zone = "America/New_York" + description = "Example description." +} + +resource "google_dialogflow_cx_generator" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + language_code = "fr" + display_name = "TF Prompt generator" + llm_model_settings { + model = "gemini-2.0-flash-001" + prompt_text = "Return me some great results" + } + prompt_text { + text = "Send me great results in french" + } + model_parameter { + temperature = 0.55 + } +} diff --git a/mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl new file mode 100644 index 000000000000..d822da017eaf --- /dev/null +++ b/mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl @@ -0,0 +1,24 @@ +// extract location from the parent +location := "" + +if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(d.Get("parent").(string)); parts != nil { + location = parts[1] +} else { + return fmt.Errorf( + "Saw %s when the parent is expected to contains location %s", + d.Get("parent"), + "projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/...", + ) +} + +// only insert location into url if the base_url in products/dialogflowcx/product.yaml is used +if strings.HasPrefix(url, "https://-dialogflow.googleapis.com/v3/") { + url = strings.Replace(url,"-dialogflow",fmt.Sprintf("%s-dialogflow",location),1) +} + +if v, ok := d.GetOk("language_code") ; ok { + url, err = transport_tpg.AddQueryParams(url, map[string]string{"languageCode": fmt.Sprintf("%v", v)}) + if err != nil { + return err + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go new file mode 100644 index 000000000000..828b5602d19d --- /dev/null +++ 
b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go @@ -0,0 +1,88 @@ +package dialogflowcx_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestAccDialogflowCXGenerator_dialogflowcxGeneratorUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDialogflowCXGeneratorDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDialogflowCXGenerator_dialogflowcxGeneratorBasicExample(context), + }, + { + ResourceName: "google_dialogflow_cx_generator.generator", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"language_code", "parent", "prompt_text"}, + }, + { + Config: testAccDialogflowCXGenerator_dialogflowcxGeneratorUpdate(context), + }, + { + ResourceName: "google_dialogflow_cx_generator.generator", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"language_code", "parent", "prompt_text"}, + }, + { + Config: testAccDialogflowCXGenerator_dialogflowcxGeneratorBasicExample(context), + }, + { + ResourceName: "google_dialogflow_cx_generator.generator", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"language_code", "parent", "prompt_text"}, + }, + }, + }) +} + +func testAccDialogflowCXGenerator_dialogflowcxGeneratorUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dialogflow_cx_agent" "agent" { + display_name = "tf-test-dialogflowcx-agent-fucntion%{random_suffix}" + location = "global" + default_language_code = "en" + supported_language_codes = ["fr","de","es"] + time_zone = "America/New_York" + 
description = "Example description." +} + +resource "google_dialogflow_cx_generator" "generator" { + parent = google_dialogflow_cx_agent.agent.id + language_code = "es" + display_name = "TF Prompt generator different" + llm_model_settings { + model = "gemini-2.0-flash-001" + prompt_text = "Other results" + } + prompt_text { + text = "Send me great results in Spanish for $placeholder" + } + model_parameter { + temperature = 0.58 + max_decode_steps = 10 + top_p = 0.1 + top_k = 2000 + } + placeholders { + id = "my-id" + name = "placeholder" + } +} +`, context) +} From 7b55b294a194b0e7bcc384297136c568c56332af Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Mon, 14 Jul 2025 12:15:43 -0400 Subject: [PATCH 536/884] Add deletion policy to Secure Source Manager instance (#14454) --- .../securesourcemanager/BranchRule.yaml | 6 +++ .../securesourcemanager/Instance.yaml | 49 ++++++++++++------- .../securesourcemanager/Repository.yaml | 6 +++ ...e_source_manager_branch_rule_basic.tf.tmpl | 5 +- ...ce_manager_branch_rule_with_fields.tf.tmpl | 5 +- ...cure_source_manager_instance_basic.tf.tmpl | 4 +- ...ecure_source_manager_instance_cmek.tf.tmpl | 4 +- ...re_source_manager_instance_private.tf.tmpl | 4 +- ...nager_instance_private_psc_backend.tf.tmpl | 4 +- ...ager_instance_private_psc_endpoint.tf.tmpl | 4 +- ...ance_workforce_identity_federation.tf.tmpl | 4 +- ...re_source_manager_repository_basic.tf.tmpl | 4 +- ..._manager_repository_initial_config.tf.tmpl | 6 +-- .../securesourcemanager_instance.go.tmpl | 7 +++ 14 files changed, 63 insertions(+), 49 deletions(-) create mode 100644 mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml index 4462bae669e2..ddb1dc08e4ac 100644 --- a/mmv1/products/securesourcemanager/BranchRule.yaml +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -48,10 +48,13 @@ examples: repository_id: 
'my-basic-repository' instance_id: 'my-basic-instance' prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' - name: 'secure_source_manager_branch_rule_with_fields' primary_resource_id: 'default' vars: @@ -59,10 +62,13 @@ examples: repository_id: 'my-initial-repository' instance_id: 'my-initial-instance' prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' parameters: - name: 'branch_rule_id' type: String diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index c49835376553..c97176c5f4ab 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -52,17 +52,18 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/instances/{{instance_id}}' - '{{instance_id}}' custom_code: + pre_delete: 'templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl' examples: - name: 'secure_source_manager_instance_basic' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' - prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' - name: 'secure_source_manager_instance_cmek' @@ -71,12 +72,12 @@ examples: vars: instance_id: 'my-instance' kms_key_name: 'my-key' - prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' 'kms_key_name': 
'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-secure-source-manager-key1").CryptoKey.Name' oics_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' - name: 'secure_source_manager_instance_private' @@ -86,11 +87,11 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' external_providers: ["time"] ignore_read_extra: - 'update_time' @@ -109,11 +110,11 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' external_providers: ["time"] ignore_read_extra: - 'update_time' @@ -129,11 +130,11 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' external_providers: ["time"] ignore_read_extra: - 'update_time' @@ -142,11 +143,11 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' - prevent_destroy: 'true' + deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' + 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' parameters: @@ -164,6 +165,18 @@ parameters: url_param_only: true required: true immutable: true +virtual_fields: 
+ - name: 'deletion_policy' + type: String + description: | + The deletion policy for the instance. Setting `ABANDON` allows the resource + to be abandoned, rather than deleted. Setting `DELETE` deletes the resource + and all its contents. Setting `PREVENT` prevents the resource from being deleted. + Default is `DELETE`. Possible values are: + * DELETE + * PREVENT + * ABANDON + default_value: 'DELETE' properties: - name: 'name' type: String diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index f585a5d6f665..bb0871c329f4 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -54,10 +54,13 @@ examples: vars: repository_id: 'my-repository' instance_id: 'my-instance' + deletion_policy: '"PREVENT"' prevent_destroy: 'true' test_vars_overrides: + 'deletion_policy': '"DELETE"' 'prevent_destroy': 'false' oics_vars_overrides: + 'deletion_policy': '"DELETE"' 'prevent_destroy': 'false' - name: 'secure_source_manager_repository_initial_config' primary_resource_id: 'default' @@ -65,10 +68,13 @@ examples: vars: repository_id: 'my-repository' instance_id: 'my-instance' + deletion_policy: '"PREVENT"' prevent_destroy: 'true' test_vars_overrides: + 'deletion_policy': '"DELETE"' 'prevent_destroy': 'false' oics_vars_overrides: + 'deletion_policy': '"DELETE"' 'prevent_destroy': 'false' parameters: - name: 'location' diff --git a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl index 395020b4a1be..cb795c3967d2 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl @@ -1,10 +1,9 @@ resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "{{index $.Vars "instance_id"}}" + # 
Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } resource "google_secure_source_manager_repository" "repository" { diff --git a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl index be6b581b660a..3a6ccabc0d30 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl @@ -1,10 +1,9 @@ resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "{{index $.Vars "instance_id"}}" + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } resource "google_secure_source_manager_repository" "repository" { diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_basic.tf.tmpl index d00c77bcb263..ce3f47b3a324 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_basic.tf.tmpl @@ -6,7 +6,5 @@ resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { } # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_cmek.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_cmek.tf.tmpl index 37c085e77cbf..4f20c600fa90 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_instance_cmek.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_cmek.tf.tmpl @@ -15,9 +15,7 @@ resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { ] # Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } data "google_project" "project" {} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_private.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_private.tf.tmpl index d4f1df7b60bd..38dc1f3f4c5d 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_instance_private.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_private.tf.tmpl @@ -62,9 +62,7 @@ resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { } # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" depends_on = [ google_privateca_certificate_authority.root_ca, diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_backend.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_backend.tf.tmpl index db941e7fe8ed..7270bacb81e6 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_backend.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_backend.tf.tmpl @@ -65,9 +65,7 @@ resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { } # Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" depends_on = [ google_privateca_certificate_authority.root_ca, diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_endpoint.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_endpoint.tf.tmpl index 167222bde11d..83a6faf16b57 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_endpoint.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_private_psc_endpoint.tf.tmpl @@ -65,9 +65,7 @@ resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { } # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" depends_on = [ google_privateca_certificate_authority.root_ca, diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_workforce_identity_federation.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_workforce_identity_federation.tf.tmpl index a1702f85d64a..3a27e8ea5212 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_instance_workforce_identity_federation.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_workforce_identity_federation.tf.tmpl @@ -7,7 +7,5 @@ resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { } # Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } diff --git a/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl index ac62f0cffcd4..eb8f3a138416 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl @@ -3,9 +3,7 @@ resource "google_secure_source_manager_instance" "instance" { instance_id = "{{index $.Vars "instance_id"}}" # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } resource "google_secure_source_manager_repository" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl index b3b54bfdd790..ea07d9afa4c6 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl @@ -2,10 +2,8 @@ resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "{{index $.Vars "instance_id"}}" - # For preventing accidental deletions - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + # Prevent accidental deletions. + deletion_policy = "{{index $.Vars "deletion_policy"}}" } resource "google_secure_source_manager_repository" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl b/mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl new file mode 100644 index 000000000000..6548d8c63d6b --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl @@ -0,0 +1,7 @@ +deletionPolicy := d.Get("deletion_policy"); + +if deletionPolicy == "ABANDON" { + return nil; +} else if deletionPolicy == "PREVENT" { + return fmt.Errorf(`cannot destroy instance without setting deletion_policy="DELETE"`) +} \ No newline at end of file From cc879c1842d200d43434d868829e8c37dc59b8ce Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Mon, 14 Jul 2025 09:49:07 -0700 Subject: [PATCH 537/884] Fix TRACE log level override in VCR tests by setting TF_LOG_CORE to match TeamCity (#14486) --- .ci/magician/cmd/vcr_cassette_update_test.go | 7 +++++-- .ci/magician/vcr/tester.go | 3 +++ 2 
files changed, 8 insertions(+), 2 deletions(-) diff --git a/.ci/magician/cmd/vcr_cassette_update_test.go b/.ci/magician/cmd/vcr_cassette_update_test.go index 0d391e0f5d45..e06fa239ed95 100644 --- a/.ci/magician/cmd/vcr_cassette_update_test.go +++ b/.ci/magician/cmd/vcr_cassette_update_test.go @@ -325,6 +325,7 @@ func TestExecVCRCassetteUpdate(t *testing.T) { "SA_KEY": "sa_key", "TF_ACC": "1", "TF_LOG": "DEBUG", + "TF_LOG_CORE": "WARN", "TF_LOG_PATH_MASK": "/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_SCHEMA_PANIC_ON_ERROR": "1", @@ -339,8 +340,8 @@ func TestExecVCRCassetteUpdate(t *testing.T) { { name: "replay failed then record", cmdResults: map[string]string{ - "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 32 -v -run=TestAcc -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:32 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:REPLAYING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- FAIL: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", - "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 1 -v -run=TestAccContainerNodePool_defaultDriverInstallation$ -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:1 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/recording/beta/%s.log 
TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:RECORDING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- PASS: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", + "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 32 -v -run=TestAcc -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:32 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_CORE:WARN TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:REPLAYING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- FAIL: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", + "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 1 -v -run=TestAccContainerNodePool_defaultDriverInstallation$ -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:1 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_CORE:WARN TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/recording/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:RECORDING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- PASS: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", }, expectedCalls: map[string][]ParameterList{ "Run": { @@ -357,6 +358,7 @@ func TestExecVCRCassetteUpdate(t *testing.T) { "SA_KEY": "sa_key", "TF_ACC": "1", "TF_LOG": "DEBUG", + "TF_LOG_CORE": "WARN", "TF_LOG_PATH_MASK": "/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log", 
"TF_LOG_SDK_FRAMEWORK": "INFO", "TF_SCHEMA_PANIC_ON_ERROR": "1", @@ -375,6 +377,7 @@ func TestExecVCRCassetteUpdate(t *testing.T) { "SA_KEY": "sa_key", "TF_ACC": "1", "TF_LOG": "DEBUG", + "TF_LOG_CORE": "WARN", "TF_LOG_PATH_MASK": "/mock/dir/magic-modules/.ci/magician/testlogs/recording/beta/%s.log", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_SCHEMA_PANIC_ON_ERROR": "1", diff --git a/.ci/magician/vcr/tester.go b/.ci/magician/vcr/tester.go index 54a6cf4a7839..6f01ee352357 100644 --- a/.ci/magician/vcr/tester.go +++ b/.ci/magician/vcr/tester.go @@ -105,6 +105,7 @@ var safeToLog = map[string]bool{ "SA_KEY": false, "TF_ACC": true, "TF_LOG": true, + "TF_LOG_CORE": true, "TF_LOG_PATH_MASK": true, "TF_LOG_SDK_FRAMEWORK": true, "TF_SCHEMA_PANIC_ON_ERROR": true, @@ -258,6 +259,7 @@ func (vt *Tester) Run(opt RunOptions) (Result, error) { "GOOGLE_CREDENTIALS": vt.env["SA_KEY"], "GOOGLE_TEST_DIRECTORY": strings.Join(opt.TestDirs, " "), "TF_LOG": "DEBUG", + "TF_LOG_CORE": "WARN", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_LOG_PATH_MASK": filepath.Join(logPath, "%s.log"), "TF_ACC": "1", @@ -405,6 +407,7 @@ func (vt *Tester) runInParallel(mode Mode, version provider.Version, testDir, te "GOOGLE_CREDENTIALS": vt.env["SA_KEY"], "GOOGLE_TEST_DIRECTORY": testDir, "TF_LOG": "DEBUG", + "TF_LOG_CORE": "WARN", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_LOG_PATH_MASK": filepath.Join(logPath, "%s.log"), "TF_ACC": "1", From 0f7294bb78dcea5e3f13b11915cfa5eb0fac267c Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 14 Jul 2025 14:56:21 -0700 Subject: [PATCH 538/884] tgc-revival: convert project during cai2hcl (#14503) --- mmv1/products/compute/BackendBucket.yaml | 4 +--- mmv1/products/compute/Firewall.yaml | 2 -- mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl | 4 +++- mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl | 6 +++--- .../pkg/tfplan2cai/ancestrymanager/ancestrymanager.go | 4 +++- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git 
a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index 4f40bb4dff4e..a92ae63a5127 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -115,9 +115,6 @@ examples: backend_bucket_name: 'global-ilb-backend-bucket' bucket_name: 'global-ilb-bucket' exclude_docs: true - tgc_skip_test: 'Skip the test temporarily, as it takes time to fix it.' - tgc_test_ignore_extra: - - 'project' parameters: properties: - name: 'bucketName' @@ -277,6 +274,7 @@ properties: - name: 'enableCdn' type: Boolean description: 'If true, enable Cloud CDN for this BackendBucket.' + include_empty_value_in_cai: true - name: 'name' type: String description: | diff --git a/mmv1/products/compute/Firewall.yaml b/mmv1/products/compute/Firewall.yaml index 8ea2600cd31d..32c7b1ff2b0b 100644 --- a/mmv1/products/compute/Firewall.yaml +++ b/mmv1/products/compute/Firewall.yaml @@ -71,8 +71,6 @@ examples: firewall_name: 'my-firewall-rule' test_env_vars: project: 'PROJECT_NAME' - tgc_test_ignore_extra: - - 'project' parameters: properties: # TODO(nelsonjr): [nice to have] Make the format here simpler to use, in diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index ec805842aacc..b41ce4c49cab 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -103,7 +103,9 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass return nil, nil } {{ end}} - +{{- if $.HasProject -}} + hclData["project"] = utils.ParseFieldValue(asset.Name, "projects") +{{- end}} {{ range $prop := $.ReadPropertiesForTgc }} {{ if $prop.FlattenObject -}} // Terraform must set the top level schema field, but since this object contains collapsed properties diff --git a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl 
b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl index 906a333a5ab2..748efa32f2f4 100644 --- a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl @@ -15,9 +15,9 @@ // So set the value to empty string in this case. if raw, ok := res["stackType"]; ok { v := raw.(string) - if v == "UNSPECIFIED_STACK_TYPE" { - res["stackType"] = "" - } + if v == "UNSPECIFIED_STACK_TYPE" { + res["stackType"] = "" + } } return res, nil \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go index 3d21fabf4352..f4e14a1f6e07 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go @@ -387,7 +387,9 @@ func (m *manager) SetAncestors(d tpgresource.TerraformResourceData, config *tran return fmt.Errorf("getting resource ancestry or parent failed: %w", err) } - cai.Resource.Parent = parent + if cai.Resource != nil { + cai.Resource.Parent = parent + } cai.Ancestors = ancestors return nil } From 252760c33d8e0eb58ddd8a3e06ab3dc7beae0d84 Mon Sep 17 00:00:00 2001 From: jakebennert Date: Mon, 14 Jul 2025 17:59:44 -0400 Subject: [PATCH 539/884] Add Mirror Percent to Request Mirror Policy in Regional Url Map (#14493) --- mmv1/products/compute/RegionUrlMap.yaml | 64 +++++++++++++++++++ ...ion_url_map_default_mirror_percent.tf.tmpl | 59 +++++++++++++++++ ...ath_matcher_default_mirror_percent.tf.tmpl | 59 +++++++++++++++++ ...n_url_map_path_rule_mirror_percent.tf.tmpl | 59 +++++++++++++++++ ..._url_map_route_rule_mirror_percent.tf.tmpl | 63 ++++++++++++++++++ 5 files changed, 304 insertions(+) create mode 100644 mmv1/templates/terraform/examples/region_url_map_default_mirror_percent.tf.tmpl create mode 100644 
mmv1/templates/terraform/examples/region_url_map_path_matcher_default_mirror_percent.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/region_url_map_path_rule_mirror_percent.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/region_url_map_route_rule_mirror_percent.tf.tmpl diff --git a/mmv1/products/compute/RegionUrlMap.yaml b/mmv1/products/compute/RegionUrlMap.yaml index 1ac370f5a6df..4fe1431636ef 100644 --- a/mmv1/products/compute/RegionUrlMap.yaml +++ b/mmv1/products/compute/RegionUrlMap.yaml @@ -138,6 +138,38 @@ examples: login_region_backend_service_name: 'login' home_region_backend_service_name: 'home' region_health_check_name: 'health-check' + - name: 'region_url_map_default_mirror_percent' + primary_resource_id: 'regionurlmap' + min_version: 'beta' + vars: + region_url_map_name: 'regionurlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + region_health_check_name: 'health-check' + - name: 'region_url_map_path_matcher_default_mirror_percent' + primary_resource_id: 'regionurlmap' + min_version: 'beta' + vars: + region_url_map_name: 'regionurlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + region_health_check_name: 'health-check' + - name: 'region_url_map_path_rule_mirror_percent' + primary_resource_id: 'regionurlmap' + min_version: 'beta' + vars: + region_url_map_name: 'regionurlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + region_health_check_name: 'health-check' + - name: 'region_url_map_route_rule_mirror_percent' + primary_resource_id: 'regionurlmap' + min_version: 'beta' + vars: + region_url_map_name: 'regionurlmap' + home_backend_service_name: 'home' + mirror_backend_service_name: 'mirror' + region_health_check_name: 'health-check' parameters: - name: 'region' type: ResourceRef @@ -733,6 +765,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'RegionBackendService' 
imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -1195,6 +1235,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'RegionBackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -1802,6 +1850,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. + validation: + function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | @@ -2283,6 +2339,14 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'RegionBackendService' imports: 'selfLink' + - name: 'mirrorPercent' + min_version: beta + type: Double + description: | + The percentage of requests to be mirrored to backendService. + The value must be between 0.0 and 100.0 inclusive. 
+ validation: + function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/region_url_map_default_mirror_percent.tf.tmpl b/mmv1/templates/terraform/examples/region_url_map_default_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..05d5579a9e33 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_url_map_default_mirror_percent.tf.tmpl @@ -0,0 +1,59 @@ +resource "google_compute_region_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_url_map_name"}}" + description = "Test for default route action mirror percent" + + default_service = google_compute_region_backend_service.home.id + + default_route_action { + request_mirror_policy { + backend_service = google_compute_region_backend_service.mirror.id + mirror_percent = 50.0 + } + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_region_backend_service.home.id + } +} + +resource "google_compute_region_backend_service" "home" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_backend_service" "mirror" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_health_check" "default" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_health_check_name"}}" + http_health_check { + port = 80 + } +} + 
diff --git a/mmv1/templates/terraform/examples/region_url_map_path_matcher_default_mirror_percent.tf.tmpl b/mmv1/templates/terraform/examples/region_url_map_path_matcher_default_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..05d5579a9e33 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_url_map_path_matcher_default_mirror_percent.tf.tmpl @@ -0,0 +1,59 @@ +resource "google_compute_region_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_url_map_name"}}" + description = "Test for default route action mirror percent" + + default_service = google_compute_region_backend_service.home.id + + default_route_action { + request_mirror_policy { + backend_service = google_compute_region_backend_service.mirror.id + mirror_percent = 50.0 + } + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_region_backend_service.home.id + } +} + +resource "google_compute_region_backend_service" "home" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_backend_service" "mirror" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_health_check" "default" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_health_check_name"}}" + http_health_check { + port = 80 + } +} + diff --git 
a/mmv1/templates/terraform/examples/region_url_map_path_rule_mirror_percent.tf.tmpl b/mmv1/templates/terraform/examples/region_url_map_path_rule_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..d04ef4fbbc07 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_url_map_path_rule_mirror_percent.tf.tmpl @@ -0,0 +1,59 @@ +resource "google_compute_region_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_url_map_name"}}" + description = "Test for path matcher default route action mirror percent" + + default_service = google_compute_region_backend_service.home.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_region_backend_service.home.id + + default_route_action { + request_mirror_policy { + backend_service = google_compute_region_backend_service.mirror.id + mirror_percent = 75.0 + } + } + } +} + +resource "google_compute_region_backend_service" "home" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_backend_service" "mirror" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_health_check" "default" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_health_check_name"}}" + http_health_check { + port = 80 + } +} + diff --git a/mmv1/templates/terraform/examples/region_url_map_route_rule_mirror_percent.tf.tmpl 
b/mmv1/templates/terraform/examples/region_url_map_route_rule_mirror_percent.tf.tmpl new file mode 100644 index 000000000000..9ecd7b9cf90a --- /dev/null +++ b/mmv1/templates/terraform/examples/region_url_map_route_rule_mirror_percent.tf.tmpl @@ -0,0 +1,63 @@ +resource "google_compute_region_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_url_map_name"}}" + description = "Test for path rule route action mirror percent" + + default_service = google_compute_region_backend_service.home.id + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = google_compute_region_backend_service.home.id + + path_rule { + paths = ["/home"] + service = google_compute_region_backend_service.home.id + route_action { + request_mirror_policy { + backend_service = google_compute_region_backend_service.mirror.id + mirror_percent = 25.0 + } + } + } + } +} + +resource "google_compute_region_backend_service" "home" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "home_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_backend_service" "mirror" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "mirror_backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "INTERNAL_MANAGED" + + health_checks = [google_compute_region_health_check.default.id] +} + +resource "google_compute_region_health_check" "default" { + provider = google-beta + region = "us-central1" + name = "{{index $.Vars "region_health_check_name"}}" + http_health_check { + port = 80 + } +} + From 06813816e6aab79bf53ea0d04dc2b0c54efa9204 Mon Sep 17 00:00:00 2001 From: Anjali Date: Tue, 15 Jul 2025 
09:06:45 -0700 Subject: [PATCH 540/884] Update dataproc cluster_config.security_config to include support for identity_config (#14433) --- .../dataproc/resource_dataproc_cluster.go | 64 +++++++++- .../resource_dataproc_cluster_meta.yaml | 1 + .../resource_dataproc_cluster_test.go | 109 ++++++++++++++++++ .../docs/r/dataproc_cluster.html.markdown | 14 ++- 4 files changed, 184 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go index a7e2b57b25f1..eab121b29d9b 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go @@ -665,6 +665,7 @@ func ResourceDataprocCluster() *schema.Resource { "metadata": { Type: schema.TypeMap, Optional: true, + Computed: true, AtLeastOneOf: gceClusterConfigKeys, Elem: &schema.Schema{Type: schema.TypeString}, ForceNew: true, @@ -1300,9 +1301,13 @@ func ResourceDataprocCluster() *schema.Resource { Schema: map[string]*schema.Schema{ "kerberos_config": { Type: schema.TypeList, - Required: true, Description: "Kerberos related configuration", - MaxItems: 1, + Optional: true, + ExactlyOneOf: []string{ + "cluster_config.0.security_config.0.kerberos_config", + "cluster_config.0.security_config.0.identity_config", + }, + MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "cross_realm_trust_admin_server": { @@ -1386,6 +1391,26 @@ by Dataproc`, }, }, }, + "identity_config": { + Type: schema.TypeList, + Description: "Identity related configuration", + Optional: true, + ExactlyOneOf: []string{ + "cluster_config.0.security_config.0.kerberos_config", + "cluster_config.0.security_config.0.identity_config", + }, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "user_service_account_mapping": { + Type: schema.TypeMap, + Required: true, + Elem: 
&schema.Schema{Type: schema.TypeString}, + Description: `User to service account mappings for multi-tenancy.`, + }, + }, + }, + }, }, }, }, @@ -2286,7 +2311,28 @@ func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config func expandSecurityConfig(cfg map[string]interface{}) *dataproc.SecurityConfig { conf := &dataproc.SecurityConfig{} if kfg, ok := cfg["kerberos_config"]; ok { - conf.KerberosConfig = expandKerberosConfig(kfg.([]interface{})[0].(map[string]interface{})) + k := kfg.([]interface{}) + if len(k) > 0 { + conf.KerberosConfig = expandKerberosConfig(k[0].(map[string]interface{})) + } + } + if ifg, ok := cfg["identity_config"]; ok { + i := ifg.([]interface{}) + if len(i) > 0 { + conf.IdentityConfig = expandIdentityConfig(i[0].(map[string]interface{})) + } + } + return conf +} + +func expandIdentityConfig(cfg map[string]interface{}) *dataproc.IdentityConfig { + conf := &dataproc.IdentityConfig{} + if v, ok := cfg["user_service_account_mapping"]; ok { + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + conf.UserServiceAccountMapping = m } return conf } @@ -2966,6 +3012,7 @@ func flattenSecurityConfig(d *schema.ResourceData, sc *dataproc.SecurityConfig) } data := map[string]interface{}{ "kerberos_config": flattenKerberosConfig(d, sc.KerberosConfig), + "identity_config": flattenIdentityConfig(d, sc.IdentityConfig), } return []map[string]interface{}{data} @@ -2996,6 +3043,17 @@ func flattenKerberosConfig(d *schema.ResourceData, kfg *dataproc.KerberosConfig) return []map[string]interface{}{data} } +func flattenIdentityConfig(d *schema.ResourceData, ifg *dataproc.IdentityConfig) []map[string]interface{} { + if ifg == nil { + return nil + } + data := map[string]interface{}{ + "user_service_account_mapping": d.Get("cluster_config.0.security_config.0.identity_config.0.user_service_account_mapping").(map[string]interface{}), + } + + return []map[string]interface{}{data} +} + func 
flattenSoftwareConfig(d *schema.ResourceData, sc *dataproc.SoftwareConfig) []map[string]interface{} { data := map[string]interface{}{ "image_version": sc.ImageVersion, diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml index 464f5759cd76..fc4ee60e90b4 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml @@ -85,6 +85,7 @@ fields: - field: 'cluster_config.security_config.kerberos_config.tgt_lifetime_hours' - field: 'cluster_config.security_config.kerberos_config.truststore_password_uri' - field: 'cluster_config.security_config.kerberos_config.truststore_uri' + - field: 'cluster_config.security_config.identity_config.user_service_account_mapping' - field: 'cluster_config.software_config.image_version' - field: 'cluster_config.software_config.optional_components' - field: 'cluster_config.software_config.override_properties' diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go index a97933fcb679..bcc040593848 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go @@ -1100,6 +1100,64 @@ func TestAccDataprocCluster_withKerberos(t *testing.T) { }) } +func TestAccDataprocCluster_withIdentityConfig(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(t, 10) + networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) + acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) + + var cluster dataproc.Cluster + acctest.VcrTest(t, resource.TestCase{ + PreCheck: 
func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocClusterDestroy(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withIdentityConfig(rnd, subnetworkName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.identity_config", &cluster), + ), + }, + }, + }) +} + +// Test updating identity_config.user_service_account_mapping field +func TestAccDataprocCluster_updateIdentityConfigUserMapping(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(t, 10) + networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) + acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) + + var cluster dataproc.Cluster + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocClusterDestroy(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_updateIdentityConfig(rnd, subnetworkName, "bob@company.com", "bob-sa@iam.gserviceaccount.com"), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.identity_config_user_mapping", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.identity_config_user_mapping", "cluster_config.0.security_config.0.identity_config.0.user_service_account_mapping.bob@company.com", "bob-sa@iam.gserviceaccount.com"), + ), + }, + { + Config: testAccDataprocCluster_updateIdentityConfig(rnd, subnetworkName, "alice@company.com", "alice-sa@iam.gserviceaccount.com"), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.identity_config_user_mapping", &cluster), + 
resource.TestCheckResourceAttr("google_dataproc_cluster.identity_config_user_mapping", "cluster_config.0.security_config.0.identity_config.0.user_service_account_mapping.alice@company.com", "alice-sa@iam.gserviceaccount.com"), + ), + }, + }, + }) +} + func TestAccDataprocCluster_withAutoscalingPolicy(t *testing.T) { t.Parallel() @@ -2641,6 +2699,57 @@ resource "google_dataproc_cluster" "kerb" { `, rnd, rnd, rnd, subnetworkName, kmsKey) } +func testAccDataprocCluster_withIdentityConfig(rnd, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "identity_config" { + name = "tf-test-dataproc-identity-%s" + region = "us-central1" + cluster_config { + gce_cluster_config { + subnetwork = "%s" + } + security_config { + identity_config { + user_service_account_mapping = { + "bob@company.com" = "bob-sa@iam.gserviceaccouts.com" + } + } + } + } +} +`, rnd, subnetworkName) +} + +func testAccDataprocCluster_updateIdentityConfig(rnd, subnetworkName, user, sa string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "identity_config_user_mapping" { + name = "tf-test-dataproc-update-identity-%s" + region = "us-central1" + + cluster_config { + gce_cluster_config { + subnetwork = "%s" + } + security_config { + identity_config { + user_service_account_mapping = { + "%s" = "%s" + } + } + } + master_config { + num_instances = 1 + machine_type = "n1-standard-2" + } + worker_config { + num_instances = 2 + machine_type = "n1-standard-2" + } + } +} +`, rnd, subnetworkName, user, sa) +} + func testAccDataprocCluster_withAutoscalingPolicy(rnd, subnetworkName string) string { return fmt.Sprintf(` resource "google_dataproc_cluster" "basic" { diff --git a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown index e4e4fe15eefd..423b437406b0 100644 --- a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown @@ -716,11 +716,17 @@ cluster_config { kms_key_uri = "projects/projectId/locations/locationId/keyRings/keyRingId/cryptoKeys/keyId" root_principal_password_uri = "bucketId/o/objectId" } + identity_config { + user_service_account_mapping = { + "user@company.com" = "service-account@iam.gserviceaccounts.com" + } + } } } ``` -* `kerberos_config` (Required) Kerberos Configuration +* `kerberos_config` (Optional) Kerberos Configuration. At least one of `identity_config` + or `kerberos_config` is required. * `cross_realm_trust_admin_server` - (Optional) The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship. @@ -768,6 +774,12 @@ cluster_config { * `truststore_uri` - (Optional) The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate. +* `identity_config` (Optional) Identity Configuration. At least one of `identity_config` + or `kerberos_config` is required. + + * `user_service_account_mapping` - (Required) The end user to service account mappings + in a service account based multi-tenant cluster + - - - The `cluster_config.autoscaling_config` block supports: From 17dcdc6bdfbcc5ad3bdbb23b3b546f572aec4b07 Mon Sep 17 00:00:00 2001 From: sahil-mahajan-google Date: Tue, 15 Jul 2025 21:46:41 +0530 Subject: [PATCH 541/884] Modify allow autotiering field in netapp storage pools to mutable (#14517) --- mmv1/products/netapp/StoragePool.yaml | 1 - .../resource_netapp_storage_pool_test.go.tmpl | 34 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index 3eaa6a0f9722..cc2ae1b0352e 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -180,7 +180,6 @@ properties: description: | Optional. 
True if the storage pool supports Auto Tiering enabled volumes. Default is false. Auto-tiering can be enabled after storage pool creation but it can't be disabled once enabled. - immutable: true - name: 'customPerformanceEnabled' type: Boolean description: | diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index 8863adf9d6a5..7f283395f5b4 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -118,6 +118,15 @@ func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testi ImportStateVerify: true, ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, }, + { + Config: testAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, + }, }, }) } @@ -128,6 +137,31 @@ data "google_compute_network" "default" { name = "%{network_name}" } +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id + active_directory = "" + description = "this is a test description" + kms_config = "" + labels = { + key= "test" + value= "pool" + } + ldap_enabled = false + allow_auto_tiering = false +} +`, context) +} + +func testAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_network" "default" { + name = "%{network_name}" +} + resource "google_netapp_storage_pool" "test_pool" { name = 
"tf-test-pool%{random_suffix}" location = "us-east4" From 912e07a4e4f881805a69003f5ab4ed2b34564f25 Mon Sep 17 00:00:00 2001 From: kgala2 Date: Tue, 15 Jul 2025 18:02:07 +0000 Subject: [PATCH 542/884] feat: support PSC outbound's network attachment field (#14462) Co-authored-by: Riley Karson --- .../resource_sql_database_instance.go.tmpl | 7 + ...esource_sql_database_instance_test.go.tmpl | 175 ++++++++++++++++++ .../r/sql_database_instance.html.markdown | 27 +++ 3 files changed, 209 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 541219048dc8..e9fe3fb697ef 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -532,6 +532,11 @@ is set to true. Defaults to ZONAL.`, Set: schema.HashString, Description: `List of consumer projects that are allow-listed for PSC connections to this instance. This instance can be connected to with PSC from any network in these projects. Each consumer project in this list may be represented by a project number (numeric) or by a project id (alphanumeric).`, }, + "network_attachment_uri": { + Type: schema.TypeString, + Optional: true, + Description: `Name of network attachment resource used to authorize a producer service to connect a PSC interface to the consumer's VPC. For example: "projects/myProject/regions/myRegion/networkAttachments/myNetworkAttachment". 
This is required to enable outbound connection on a PSC instance.`, + }, "psc_auto_connections": { Type: schema.TypeList, Optional: true, @@ -1596,6 +1601,7 @@ func expandPscConfig(configured []interface{}) *sqladmin.PscConfig { return &sqladmin.PscConfig{ PscEnabled: _entry["psc_enabled"].(bool), AllowedConsumerProjects: tpgresource.ConvertStringArr(_entry["allowed_consumer_projects"].(*schema.Set).List()), + NetworkAttachmentUri: _entry["network_attachment_uri"].(string), PscAutoConnections: expandPscAutoConnectionConfig(_entry["psc_auto_connections"].([]interface{})), } } @@ -2641,6 +2647,7 @@ func flattenPscConfigs(pscConfig *sqladmin.PscConfig) interface{} { data := map[string]interface{}{ "psc_enabled": pscConfig.PscEnabled, "allowed_consumer_projects": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(pscConfig.AllowedConsumerProjects)), + "network_attachment_uri": pscConfig.NetworkAttachmentUri, "psc_auto_connections": flattenPscAutoConnections(pscConfig.PscAutoConnections), } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index e7656525a336..aa65fa71f080 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -1183,6 +1183,86 @@ func TestAccSqlDatabaseInstance_withPSCEnabled_withIpV4Enabled(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUri_thenRemoveNetworkAttachment(t *testing.T) { + t.Parallel() + + random_suffix := acctest.RandString(t, 10) + instanceName := "tf-test-" + random_suffix + projectId := envvar.GetTestProjectFromEnv() + region := "us-central1" + networkNameStr := "tf-test-cloud-sql-network-" + random_suffix + subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix + networkAttachmentNameStr := 
"tf-test-cloud-sql-update-na-" + random_suffix + networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) + subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) + networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) + networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) + + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), + Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), + Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, networkAttachmentUri)), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), + Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUriOnCreate(t *testing.T) { + t.Parallel() + 
+ random_suffix := acctest.RandString(t, 10) + instanceName := "tf-test-" + random_suffix + projectId := envvar.GetTestProjectFromEnv() + region := "us-central1" + networkNameStr := "tf-test-cloud-sql-network-" + random_suffix + subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix + networkAttachmentNameStr := "tf-test-cloud-sql-update-na-" + random_suffix + networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) + subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) + networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) + networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) + + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), + ExpectError: regexp.MustCompile(`.*Network attachment used for Private Service Connect interfaces can not be assigned with instance creation.*`), + }, + }, + }) +} + func TestAccSqlDatabaseInstance_withPrivateNetwork_withAllocatedIpRange(t *testing.T) { t.Parallel() @@ -4843,6 +4923,49 @@ func verifyPscAutoConnectionsOperation(resourceName string, isPscConfigExpected } } +func verifyPscNetorkAttachmentOperation(resourceName string, isPscConfigExpected bool, expectedPscEnabled bool, expectedNetworkAttachmentUri string ) func(*terraform.State) error { + return func(s *terraform.State) error { + resource, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Can't find %s in state", resourceName) + } + + resourceAttributes := resource.Primary.Attributes + _, ok = resourceAttributes["settings.0.ip_configuration.#"] + if !ok { + return 
fmt.Errorf("settings.0.ip_configuration.# block is not present in state for %s", resourceName) + } + + if isPscConfigExpected { + _, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.#"] + if !ok { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config property is not present or set in state of %s", resourceName) + } + + pscEnabledStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.psc_enabled"] + pscEnabled, err := strconv.ParseBool(pscEnabledStr) + if err != nil || pscEnabled != expectedPscEnabled { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.psc_enabled property value is not set as expected in state of %s, expected %v, actual %v", resourceName, expectedPscEnabled, pscEnabled) + } + + networkAttachmentUriStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.network_attachment_uri"] + if !ok { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not present in state for %s", resourceName) + } + + if networkAttachmentUriStr != expectedNetworkAttachmentUri && len(networkAttachmentUriStr) == 0 { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not set in state for %s", resourceName) + } + + if networkAttachmentUriStr != expectedNetworkAttachmentUri { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block does not match the expected value for %s", resourceName) + } + } + + return nil + } +} + func testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "instance" { @@ -4903,6 +5026,32 @@ resource "google_sql_database_instance" "instance" { `, instanceName) } +func testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + 
database_version = "MYSQL_8_0" + deletion_protection = false + settings { + tier = "db-g1-small" + ip_configuration { + psc_config { + psc_enabled = true + network_attachment_uri = "" + } + ipv4_enabled = false + } + backup_configuration { + enabled = true + binary_log_enabled = true + } + availability_type = "REGIONAL" + } +} +`, instanceName) +} + func testAccSqlDatabaseInstance_withPSCEnabled_withPscAutoConnections(instanceName string, projectId string, networkName string) string { return fmt.Sprintf(` data "google_compute_network" "testnetwork" { @@ -4936,6 +5085,32 @@ resource "google_sql_database_instance" "instance" { `, networkName, instanceName, projectId, networkName, projectId) } +func testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName string, networkAttachmentUri string) string { + return fmt.Sprintf(` + +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_8_0" + deletion_protection = false + settings { + tier = "db-g1-small" + ip_configuration { + psc_config { + psc_enabled = true + network_attachment_uri = "%s" + } + ipv4_enabled = false + } + backup_configuration { + enabled = true + binary_log_enabled = true + } + availability_type = "REGIONAL" + } +}`, instanceName, networkAttachmentUri) +} + func testAccSqlDatabaseInstance_withPrivateNetwork_withoutAllocatedIpRange(databaseName, networkName string, specifyPrivatePathOption bool, enablePrivatePath bool) string { privatePathOption := "" if specifyPrivatePathOption { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 22fac0853045..6e3e9f68ded7 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -244,6 +244,31 @@ resource "google_sql_database_instance" "main" 
{ } ``` +### Cloud SQL Instance with PSC outbound + +```hcl +resource "google_sql_database_instance" "main" { + name = "psc-enabled-main-instance" + database_version = "MYSQL_8_0" + settings { + tier = "db-f1-micro" + ip_configuration { + psc_config { + psc_enabled = true + allowed_consumer_projects = ["allowed-consumer-project-name"] + network_attachment_uri = "network-attachment-uri" + } + ipv4_enabled = false + } + backup_configuration { + enabled = true + binary_log_enabled = true + } + availability_type = "REGIONAL" + } +} +``` + ## Argument Reference The following arguments are supported: @@ -465,6 +490,8 @@ The optional `settings.ip_configuration.psc_config` sublist supports: * `consumer_network` - "The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. For example, `projects/project1/global/networks/network1`. The consumer host project of this network might be different from the consumer service project." +* `network_attachment_uri` - (Optional) Network Attachment URI in the format `projects/project1/regions/region1/networkAttachments/networkAttachment1` to enable outbound connectivity on PSC instance. + * `consumer_service_project_id` - (Optional) The project ID of consumer service project of this consumer endpoint. 
The optional `settings.location_preference` subblock supports: From 5c433cc3c370d7aa8e04f8b7bf7e8ad0560b5017 Mon Sep 17 00:00:00 2001 From: kgala2 Date: Tue, 15 Jul 2025 19:28:23 +0000 Subject: [PATCH 543/884] Revert "feat: support PSC outbound's network attachment field" (#14533) --- .../resource_sql_database_instance.go.tmpl | 7 - ...esource_sql_database_instance_test.go.tmpl | 175 ------------------ .../r/sql_database_instance.html.markdown | 27 --- 3 files changed, 209 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index e9fe3fb697ef..541219048dc8 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -532,11 +532,6 @@ is set to true. Defaults to ZONAL.`, Set: schema.HashString, Description: `List of consumer projects that are allow-listed for PSC connections to this instance. This instance can be connected to with PSC from any network in these projects. Each consumer project in this list may be represented by a project number (numeric) or by a project id (alphanumeric).`, }, - "network_attachment_uri": { - Type: schema.TypeString, - Optional: true, - Description: `Name of network attachment resource used to authorize a producer service to connect a PSC interface to the consumer's VPC. For example: "projects/myProject/regions/myRegion/networkAttachments/myNetworkAttachment". 
This is required to enable outbound connection on a PSC instance.`, - }, "psc_auto_connections": { Type: schema.TypeList, Optional: true, @@ -1601,7 +1596,6 @@ func expandPscConfig(configured []interface{}) *sqladmin.PscConfig { return &sqladmin.PscConfig{ PscEnabled: _entry["psc_enabled"].(bool), AllowedConsumerProjects: tpgresource.ConvertStringArr(_entry["allowed_consumer_projects"].(*schema.Set).List()), - NetworkAttachmentUri: _entry["network_attachment_uri"].(string), PscAutoConnections: expandPscAutoConnectionConfig(_entry["psc_auto_connections"].([]interface{})), } } @@ -2647,7 +2641,6 @@ func flattenPscConfigs(pscConfig *sqladmin.PscConfig) interface{} { data := map[string]interface{}{ "psc_enabled": pscConfig.PscEnabled, "allowed_consumer_projects": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(pscConfig.AllowedConsumerProjects)), - "network_attachment_uri": pscConfig.NetworkAttachmentUri, "psc_auto_connections": flattenPscAutoConnections(pscConfig.PscAutoConnections), } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index aa65fa71f080..e7656525a336 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -1183,86 +1183,6 @@ func TestAccSqlDatabaseInstance_withPSCEnabled_withIpV4Enabled(t *testing.T) { }) } -func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUri_thenRemoveNetworkAttachment(t *testing.T) { - t.Parallel() - - random_suffix := acctest.RandString(t, 10) - instanceName := "tf-test-" + random_suffix - projectId := envvar.GetTestProjectFromEnv() - region := "us-central1" - networkNameStr := "tf-test-cloud-sql-network-" + random_suffix - subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix - networkAttachmentNameStr := 
"tf-test-cloud-sql-update-na-" + random_suffix - networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) - subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) - networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) - networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) - - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), - Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), - Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, networkAttachmentUri)), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), - Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), - }, - }, - }) -} - -func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUriOnCreate(t *testing.T) { - t.Parallel() - 
- random_suffix := acctest.RandString(t, 10) - instanceName := "tf-test-" + random_suffix - projectId := envvar.GetTestProjectFromEnv() - region := "us-central1" - networkNameStr := "tf-test-cloud-sql-network-" + random_suffix - subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix - networkAttachmentNameStr := "tf-test-cloud-sql-update-na-" + random_suffix - networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) - subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) - networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) - networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) - - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), - ExpectError: regexp.MustCompile(`.*Network attachment used for Private Service Connect interfaces can not be assigned with instance creation.*`), - }, - }, - }) -} - func TestAccSqlDatabaseInstance_withPrivateNetwork_withAllocatedIpRange(t *testing.T) { t.Parallel() @@ -4923,49 +4843,6 @@ func verifyPscAutoConnectionsOperation(resourceName string, isPscConfigExpected } } -func verifyPscNetorkAttachmentOperation(resourceName string, isPscConfigExpected bool, expectedPscEnabled bool, expectedNetworkAttachmentUri string ) func(*terraform.State) error { - return func(s *terraform.State) error { - resource, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Can't find %s in state", resourceName) - } - - resourceAttributes := resource.Primary.Attributes - _, ok = resourceAttributes["settings.0.ip_configuration.#"] - if !ok { - return 
fmt.Errorf("settings.0.ip_configuration.# block is not present in state for %s", resourceName) - } - - if isPscConfigExpected { - _, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.#"] - if !ok { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config property is not present or set in state of %s", resourceName) - } - - pscEnabledStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.psc_enabled"] - pscEnabled, err := strconv.ParseBool(pscEnabledStr) - if err != nil || pscEnabled != expectedPscEnabled { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.psc_enabled property value is not set as expected in state of %s, expected %v, actual %v", resourceName, expectedPscEnabled, pscEnabled) - } - - networkAttachmentUriStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.network_attachment_uri"] - if !ok { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not present in state for %s", resourceName) - } - - if networkAttachmentUriStr != expectedNetworkAttachmentUri && len(networkAttachmentUriStr) == 0 { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not set in state for %s", resourceName) - } - - if networkAttachmentUriStr != expectedNetworkAttachmentUri { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block does not match the expected value for %s", resourceName) - } - } - - return nil - } -} - func testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "instance" { @@ -5026,32 +4903,6 @@ resource "google_sql_database_instance" "instance" { `, instanceName) } -func testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName string) string { - return fmt.Sprintf(` -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - 
database_version = "MYSQL_8_0" - deletion_protection = false - settings { - tier = "db-g1-small" - ip_configuration { - psc_config { - psc_enabled = true - network_attachment_uri = "" - } - ipv4_enabled = false - } - backup_configuration { - enabled = true - binary_log_enabled = true - } - availability_type = "REGIONAL" - } -} -`, instanceName) -} - func testAccSqlDatabaseInstance_withPSCEnabled_withPscAutoConnections(instanceName string, projectId string, networkName string) string { return fmt.Sprintf(` data "google_compute_network" "testnetwork" { @@ -5085,32 +4936,6 @@ resource "google_sql_database_instance" "instance" { `, networkName, instanceName, projectId, networkName, projectId) } -func testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName string, networkAttachmentUri string) string { - return fmt.Sprintf(` - -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - database_version = "MYSQL_8_0" - deletion_protection = false - settings { - tier = "db-g1-small" - ip_configuration { - psc_config { - psc_enabled = true - network_attachment_uri = "%s" - } - ipv4_enabled = false - } - backup_configuration { - enabled = true - binary_log_enabled = true - } - availability_type = "REGIONAL" - } -}`, instanceName, networkAttachmentUri) -} - func testAccSqlDatabaseInstance_withPrivateNetwork_withoutAllocatedIpRange(databaseName, networkName string, specifyPrivatePathOption bool, enablePrivatePath bool) string { privatePathOption := "" if specifyPrivatePathOption { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 6e3e9f68ded7..22fac0853045 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -244,31 +244,6 @@ resource "google_sql_database_instance" "main" 
{ } ``` -### Cloud SQL Instance with PSC outbound - -```hcl -resource "google_sql_database_instance" "main" { - name = "psc-enabled-main-instance" - database_version = "MYSQL_8_0" - settings { - tier = "db-f1-micro" - ip_configuration { - psc_config { - psc_enabled = true - allowed_consumer_projects = ["allowed-consumer-project-name"] - network_attachment_uri = "network-attachment-uri" - } - ipv4_enabled = false - } - backup_configuration { - enabled = true - binary_log_enabled = true - } - availability_type = "REGIONAL" - } -} -``` - ## Argument Reference The following arguments are supported: @@ -490,8 +465,6 @@ The optional `settings.ip_configuration.psc_config` sublist supports: * `consumer_network` - "The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. For example, `projects/project1/global/networks/network1`. The consumer host project of this network might be different from the consumer service project." -* `network_attachment_uri` - (Optional) Network Attachment URI in the format `projects/project1/regions/region1/networkAttachments/networkAttachment1` to enable outbound connectivity on PSC instance. - * `consumer_service_project_id` - (Optional) The project ID of consumer service project of this consumer endpoint. 
The optional `settings.location_preference` subblock supports: From fb0ddf23ea6cc20d3dc57a25e93df81c3b4df446 Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Wed, 16 Jul 2025 02:24:25 +0530 Subject: [PATCH 544/884] Updated Dependencies for BigQuery Go client (#14515) --- mmv1/third_party/terraform/go.mod | 13 +++++++-- mmv1/third_party/terraform/go.sum | 45 ++++++++++++++++++++++++++++--- 2 files changed, 52 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index a4caeeaaeaf8..0246c5fa176b 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -5,6 +5,7 @@ go 1.23.0 require ( cloud.google.com/go/auth v0.16.2 cloud.google.com/go/auth/oauth2adapt v0.2.8 + cloud.google.com/go/bigquery v1.69.0 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 github.com/apparentlymart/go-cidr v1.1.0 @@ -31,7 +32,7 @@ require ( github.com/sirupsen/logrus v1.8.1 github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba - golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 + golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 golang.org/x/net v0.41.0 golang.org/x/oauth2 v0.30.0 google.golang.org/api v0.238.0 @@ -44,13 +45,14 @@ require ( require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect cel.dev/expr v0.23.0 // indirect - cloud.google.com/go v0.120.0 // indirect + cloud.google.com/go v0.121.0 // indirect cloud.google.com/go/compute/metadata v0.7.0 // indirect cloud.google.com/go/iam v1.5.2 // indirect cloud.google.com/go/longrunning v0.6.7 // indirect cloud.google.com/go/monitoring v1.24.2 // indirect github.com/ProtonMail/go-crypto v1.1.3 // indirect github.com/agext/levenshtein v1.2.2 // indirect + github.com/apache/arrow/go/v15 v15.0.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect 
github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -64,8 +66,10 @@ require ( github.com/go-jose/go-jose/v4 v4.0.5 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/goccy/go-json v0.10.2 // indirect github.com/golang/glog v1.2.4 // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/google/flatbuffers v23.5.26+incompatible // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect @@ -83,6 +87,8 @@ require ( github.com/hashicorp/terraform-registry-address v0.2.4 // indirect github.com/hashicorp/terraform-svchost v0.1.1 // indirect github.com/hashicorp/yamux v0.1.1 // indirect + github.com/klauspost/compress v1.16.7 // indirect + github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect @@ -92,6 +98,7 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/oklog/run v1.0.0 // indirect + github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect @@ -100,6 +107,7 @@ require ( github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/zclconf/go-cty v1.16.2 // indirect github.com/zeebo/errs v1.4.0 // indirect + github.com/zeebo/xxh3 v1.0.2 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect @@ -115,6 +123,7 @@ require ( golang.org/x/text v0.26.0 // indirect golang.org/x/time v0.12.0 // indirect golang.org/x/tools v0.33.0 // 
indirect + golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index f71f3717050b..ff5055ce953f 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -3,33 +3,47 @@ bitbucket.org/creachadair/stringset v0.0.8/go.mod h1:AgthVMyMxC/6FK1KBJ2ALdqkZOb cel.dev/expr v0.23.0 h1:wUb94w6OYQS4uXraxo9U+wUAs9jT47Xvl4iPgAwM2ss= cel.dev/expr v0.23.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= -cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= +cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= +cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/bigquery v1.69.0 h1:rZvHnjSUs5sHK3F9awiuFk2PeOaB8suqNuim21GbaTc= +cloud.google.com/go/bigquery v1.69.0/go.mod h1:TdGLquA3h/mGg+McX+GsqG9afAzTAcldMjqhdjHTLew= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= cloud.google.com/go/compute/metadata 
v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/datacatalog v1.26.0 h1:eFgygb3DTufTWWUB8ARk+dSuXz+aefNJXTlkWlQcWwE= +cloud.google.com/go/datacatalog v1.26.0/go.mod h1:bLN2HLBAwB3kLTFT5ZKLHVPj/weNz6bR0c7nYp0LE14= cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= +cloud.google.com/go/storage v1.53.0 h1:gg0ERZwL17pJ+Cz3cD2qS60w1WMDnwcm5YPAIQBHUAw= +cloud.google.com/go/storage v1.53.0/go.mod h1:7/eO2a/srr9ImZW9k5uufcNahT2+fPb8w5it1i5boaA= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 h1:fYE9p3esPxA/C0rQ0AHhP0drtPXDRhaWiwg1DPqO7IU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0/go.mod h1:BnBReJLvVYx2CS/UHOgVz2BXKXD9wsQPxZug20nZhd0= 
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 h1:6/0iUd0xrnX7qt+mLNRwg5c0PGv8wpE8K90ryANQwMI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE= +github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= @@ -97,6 +111,8 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= 
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -115,6 +131,8 @@ github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= +github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= @@ -122,6 +140,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -192,6 +212,10 @@ github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4 github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck 
v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= +github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= +github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -229,6 +253,8 @@ github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= +github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -271,10 +297,16 @@ github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70 github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod 
h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= +github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= +github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= +github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/detectors/gcp v1.35.0 h1:bGvFt68+KTiAKFlacHW6AhA56GF2rS0bdD3aJYEnmzA= +go.opentelemetry.io/contrib/detectors/gcp v1.35.0/go.mod h1:qGWP8/+ILwMRIUf9uIVLloR1uo5ZYAslM4O6OqUi1DA= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= @@ -301,8 +333,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 h1:ESSUROHIBHg7USnszlcdmjBEwdMj9VUvU+OPk4yl2mc= -golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= +golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= 
+golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= @@ -350,6 +382,7 @@ golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= @@ -381,6 +414,10 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= +gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= google.golang.org/api 
v0.238.0 h1:+EldkglWIg/pWjkq97sd+XxH7PxakNYoe/rkSTbnvOs= google.golang.org/api v0.238.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= From 6b5339e3e9a21912606e775ac261f5d97290cc1f Mon Sep 17 00:00:00 2001 From: Veronika Herasymenko Date: Tue, 15 Jul 2025 23:26:49 +0200 Subject: [PATCH 545/884] Add resource_manager_tags support to Subnetwork api (#14398) --- mmv1/products/compute/Subnetwork.yaml | 18 ++++++ .../resource_compute_subnetwork_test.go.tmpl | 59 +++++++++++++++++++ 2 files changed, 77 insertions(+) diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index eeb93d7f7a55..ec5b99889f65 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -530,3 +530,21 @@ properties: set to INTERNAL_HTTPS_LOAD_BALANCER and indicates that connections to the load balancer are being drained. A subnetwork that is draining cannot be used or modified until it reaches a status of READY' output: true + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the subnetwork. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. The field is ignored when empty. + The field is immutable and causes resource replacement when mutated. This field is only + set at create time and modifying this field after creation will trigger recreation. + To apply tags to an existing resource, see the google_tags_tag_binding resource. 
+ ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl index e4461c0902bc..74a8e6b6c3ae 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl @@ -1,5 +1,6 @@ package compute_test + import ( "context" "fmt" @@ -8,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-plugin-testing/plancheck" tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" @@ -496,6 +498,63 @@ func TestAccComputeSubnetwork_internal_ipv6(t *testing.T) { }) } +func TestAccComputeSubnetwork_resourceManagerTags(t *testing.T) { + t.Parallel() + + var subnetwork compute.Subnetwork + org := envvar.GetTestOrgFromEnv(t) + + suffixName := acctest.RandString(t, 10) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-subnetworks-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-subnetworks-tagvalue", sharedTagkey, org) + + cnName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + subnetworkName := fmt.Sprintf("tf-test-subnetwork-resource-manager-tags-%s", suffixName) + context := map[string]interface{}{ + "subnetwork_name": subnetworkName, + "network_name": cnName, + "tag_key_id": tagKeyResult["name"], + "tag_value_id": tagValueResult["name"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeSubnetworkDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeSubnetwork_resourceManagerTags(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeSubnetworkExists( + t, "google_compute_subnetwork.acc_subnetwork_with_resource_manager_tags", &subnetwork), + ), + }, + }, + }) +} + +func testAccComputeSubnetwork_resourceManagerTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "custom-test" { + name = "%{network_name}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "acc_subnetwork_with_resource_manager_tags" { + name = "%{subnetwork_name}" + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + network = google_compute_network.custom-test.self_link + params { + resource_manager_tags = { + "%{tag_key_id}" = "%{tag_value_id}" + } + } +} +`, context) +} + func testAccCheckComputeSubnetworkExists(t *testing.T, n string, subnetwork *compute.Subnetwork) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] From caea27f09d2370dede06d07c4e808ba82a7ca447 Mon Sep 17 00:00:00 2001 From: Jared Date: Tue, 15 Jul 2025 15:59:37 -0700 Subject: [PATCH 546/884] Fix artifactRegistryRepositoryRemoteAptExample test + update docs (#14523) --- mmv1/products/artifactregistry/Repository.yaml | 4 ++-- .../examples/artifact_registry_repository_remote_apt.tf.tmpl | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index bf5e4e0e7acf..645916cc635a 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -99,7 +99,7 @@ examples: - name: 'artifact_registry_repository_remote_apt' primary_resource_id: 'my-repo' vars: - repository_id: 'debian-buster' + repository_id: 
'debian-stable' desc: 'example remote apt repository' - name: 'artifact_registry_repository_remote_yum' primary_resource_id: 'my-repo' @@ -483,7 +483,7 @@ properties: - name: 'repositoryBase' type: Enum description: |- - A common public repository base for Apt, e.g. `"debian/dists/buster"` + A common public repository base for Apt, e.g. `"debian/dists/stable"` required: true immutable: true enum_values: diff --git a/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl b/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl index ddffa4d557c1..0494ba3feaaa 100644 --- a/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl +++ b/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl @@ -5,11 +5,11 @@ resource "google_artifact_registry_repository" "{{$.PrimaryResourceId}}" { format = "APT" mode = "REMOTE_REPOSITORY" remote_repository_config { - description = "Debian buster remote repository" + description = "Debian stable remote repository" apt_repository { public_repository { repository_base = "DEBIAN" - repository_path = "debian/dists/buster" + repository_path = "debian/dists/stable" } } } From f1c1ddbcbc37444937d55931b82d5aac554ab32f Mon Sep 17 00:00:00 2001 From: srichaitanyab Date: Wed, 16 Jul 2025 04:47:04 +0530 Subject: [PATCH 547/884] switched to the v1beta API for modelarmor and add google_model_armor_floorsetting Resource (#14457) --- mmv1/products/modelarmor/product.yaml | 5 +- .../modelarmorglobal/Floorsetting.yaml | 248 ++++++++++++++++++ mmv1/products/modelarmorglobal/product.yaml | 26 ++ ...oorsetting_multilanguage_detection.go.tmpl | 19 ++ .../modelarmorglobal_floorsetting.go.tmpl | 19 ++ ..._floorsetting_ai_platform_metadata.tf.tmpl | 21 ++ .../modelarmor_floorsetting_basic.tf.tmpl | 8 + ...elarmor_floorsetting_filter_config.tf.tmpl | 27 ++ .../modelarmor_floorsetting_sleep.go.tmpl | 4 + .../components/inputs/services_beta.kt | 7 +- 
.../components/inputs/services_ga.kt | 9 +- .../resource_model_armor_template_test.go | 38 +-- .../resource_model_armor_floorsetting_test.go | 131 +++++++++ 13 files changed, 528 insertions(+), 34 deletions(-) create mode 100644 mmv1/products/modelarmorglobal/Floorsetting.yaml create mode 100644 mmv1/products/modelarmorglobal/product.yaml create mode 100644 mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl create mode 100644 mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl create mode 100644 mmv1/templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl create mode 100644 mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go diff --git a/mmv1/products/modelarmor/product.yaml b/mmv1/products/modelarmor/product.yaml index 394654191cb6..7d842c2933d7 100644 --- a/mmv1/products/modelarmor/product.yaml +++ b/mmv1/products/modelarmor/product.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google Inc. +# Copyright 2025 Google Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -20,3 +20,6 @@ versions: - name: ga base_url: 'https://modelarmor.{{location}}.rep.googleapis.com/v1/' cai_base_url: "https://modelarmor.googleapis.com/v1/" + - name: beta + base_url: 'https://modelarmor.{{location}}.rep.googleapis.com/v1beta/' + cai_base_url: "https://modelarmor.googleapis.com/v1beta/" diff --git a/mmv1/products/modelarmorglobal/Floorsetting.yaml b/mmv1/products/modelarmorglobal/Floorsetting.yaml new file mode 100644 index 000000000000..ef138bfdd1df --- /dev/null +++ b/mmv1/products/modelarmorglobal/Floorsetting.yaml @@ -0,0 +1,248 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Floorsetting +description: | + Model Armor floor settings define rules that dictate minimum requirements for all Model Armor templates created at a specific point in the Google Cloud resource hierarchy (that is, at an organization, folder, or project level). 
If there are + multiple floor settings that conflict with each other, the settings lower in the resource hierarchy take precedence + For more information, checkout https://cloud.google.com/security-command-center/docs/model_armor_floor_settings +base_url: '{{parent}}/locations/{{location}}/floorSetting' +update_mask: true +id_format: '{{parent}}/locations/{{location}}/floorSetting' +self_link: '{{parent}}/locations/{{location}}/floorSetting' +create_url: '{{parent}}/locations/{{location}}/floorSetting' +# This is a singleton resource that is already created, so create +# is really an update, and therefore should be PATCHed. +create_verb: 'PATCH' +update_url: '{{parent}}/locations/{{location}}/floorSetting' +update_verb: 'PATCH' +# This is a singleton resource that cannot be deleted, so skip delete. +exclude_delete: true +import_format: + - '{{parent}}/locations/{{location}}/floorSetting' +custom_code: + custom_import: 'templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl' + post_create: 'templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl' + post_update: 'templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl' +examples: + # Excluding tests as they modify a singleton resource and hence will result in race conditions + - name: 'modelarmor_floorsetting_basic' + primary_resource_id: 'floorsetting-basic' + test_env_vars: + project_id: 'PROJECT_NAME' + exclude_test: true + - name: 'modelarmor_floorsetting_filter_config' + primary_resource_id: 'floorsetting-filter-config' + test_env_vars: + project_id: 'PROJECT_NAME' + exclude_test: true + - name: 'modelarmor_floorsetting_ai_platform_metadata' + primary_resource_id: 'floorsetting-integrated-metadata' + test_env_vars: + project_id: 'PROJECT_NAME' + exclude_test: true +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. 
It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: parent + type: String + description: | + Will be any one of these: + + * `projects/{project}` + * `folders/{folder}` + * `organizations/{organizationId}` + immutable: true + url_param_only: true + required: true +properties: + - name: name + type: String + description: Identifier. The resource name. + output: true + - name: createTime + type: String + description: '[Output only] Create timestamp' + output: true + - name: updateTime + type: String + description: '[Output only] Update timestamp' + output: true + - name: filterConfig + type: NestedObject + description: Filters configuration. + required: true + send_empty_value: true + allow_empty_object: true + properties: + - name: maliciousUriFilterSettings + type: NestedObject + description: Malicious URI filter settings. + properties: + - name: filterEnforcement + type: String + description: |- + Tells whether the Malicious URI filter is enabled or disabled. + Possible values: + ENABLED + DISABLED + - name: raiSettings + type: NestedObject + description: Responsible AI Filter settings. + properties: + - name: raiFilters + type: Array + description: List of Responsible AI filters enabled for template. + required: true + item_type: + type: NestedObject + properties: + - name: filterType + type: String + description: |- + Possible values: + SEXUALLY_EXPLICIT + HATE_SPEECH + HARASSMENT + DANGEROUS + required: true + - name: confidenceLevel + type: String + description: |- + Possible values: + LOW_AND_ABOVE + MEDIUM_AND_ABOVE + HIGH + - name: sdpSettings + type: NestedObject + description: Sensitive Data Protection settings. + properties: + - name: advancedConfig + type: NestedObject + conflicts: + - filter_config.0.sdp_settings.0.basic_config + description: Sensitive Data Protection Advanced configuration. 
+ properties: + - name: inspectTemplate + type: String + description: |- + Sensitive Data Protection inspect template resource name + + If only inspect template is provided (de-identify template not provided), + then Sensitive Data Protection InspectContent action is performed during + Sanitization. All Sensitive Data Protection findings identified during + inspection will be returned as SdpFinding in SdpInsepctionResult. + + e.g:- + `projects/{project}/locations/{location}/inspectTemplates/{inspect_template}` + - name: deidentifyTemplate + type: String + description: |- + Optional Sensitive Data Protection Deidentify template resource name. + + If provided then DeidentifyContent action is performed during Sanitization + using this template and inspect template. The De-identified data will + be returned in SdpDeidentifyResult. + Note that all info-types present in the deidentify template must be present + in inspect template. + + e.g. + `projects/{project}/locations/{location}/deidentifyTemplates/{deidentify_template}` + - name: basicConfig + type: NestedObject + conflicts: + - filter_config.0.sdp_settings.0.advanced_config + description: Sensitive Data Protection basic configuration. + properties: + - name: filterEnforcement + type: String + description: |- + Tells whether the Sensitive Data Protection basic config is enabled or + disabled. + Possible values: + ENABLED + DISABLED + - name: piAndJailbreakFilterSettings + type: NestedObject + description: Prompt injection and Jailbreak Filter settings. + properties: + - name: filterEnforcement + type: String + description: |- + Tells whether Prompt injection and Jailbreak filter is enabled or + disabled. + Possible values: + ENABLED + DISABLED + - name: confidenceLevel + type: String + description: |- + Possible values: + LOW_AND_ABOVE + MEDIUM_AND_ABOVE + HIGH + - name: enableFloorSettingEnforcement + type: Boolean + description: Floor Settings enforcement status. 
+ send_empty_value: true + - name: integratedServices + type: Array + description: List of integrated services for which the floor setting is applicable. + item_type: + type: String + description: |- + Possible values: + AI_PLATFORM + - name: aiPlatformFloorSetting + type: NestedObject + description: AI Platform floor setting. + allow_empty_object: true + properties: + - name: inspectOnly + type: Boolean + description: |- + If true, Model Armor filters will be run in inspect only mode. No action + will be taken on the request. + exactly_one_of: + - ai_platform_floor_setting.0.inspect_only + - ai_platform_floor_setting.0.inspect_and_block + - name: inspectAndBlock + type: Boolean + description: |- + If true, Model Armor filters will be run in inspect and block mode. + Requests that trip Model Armor filters will be blocked. + exactly_one_of: + - ai_platform_floor_setting.0.inspect_only + - ai_platform_floor_setting.0.inspect_and_block + - name: enableCloudLogging + type: Boolean + description: If true, log Model Armor filter results to Cloud Logging. + - name: floorSettingMetadata + type: NestedObject + description: Metadata to enable multi language detection via floor setting. + properties: + - name: multiLanguageDetection + type: NestedObject + description: Metadata for multi language detection. + custom_flatten: 'templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl' + properties: + - name: enableMultiLanguageDetection + type: Boolean + description: If true, multi language detection will be enabled. + required: true + send_empty_value: true diff --git a/mmv1/products/modelarmorglobal/product.yaml b/mmv1/products/modelarmorglobal/product.yaml new file mode 100644 index 000000000000..84ab6097edad --- /dev/null +++ b/mmv1/products/modelarmorglobal/product.yaml @@ -0,0 +1,26 @@ +# Copyright 2025 Google Inc. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: ModelArmorGlobal +legacy_name: model_armor +display_name: Model Armor +scopes: + - https://www.googleapis.com/auth/cloud-platform +versions: + - name: ga + base_url: 'https://modelarmor.googleapis.com/v1/' + cai_base_url: "https://modelarmor.googleapis.com/v1/" + - name: beta + base_url: 'https://modelarmor.googleapis.com/v1beta/' + cai_base_url: "https://modelarmor.googleapis.com/v1beta/" diff --git a/mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl b/mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl new file mode 100644 index 000000000000..6ef741cc1780 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl @@ -0,0 +1,19 @@ +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return []interface{}{map[string]interface{}{"enable_multi_language_detection": false}} + } + original, ok := v.(map[string]interface{}) + if !ok { + return nil // Should not happen if API is consistent + } + // Populating the field even if the returned block is empty. 
+ transformed := make(map[string]interface{}) + + if val, ok := original["enableMultiLanguageDetection"]; ok { + transformed["enable_multi_language_detection"] = val + } else { + // Since the field is REQUIRED in the schema and the block exists, default to false if the key is missing from the API response. + transformed["enable_multi_language_detection"] = false + } + return []interface{}{transformed} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl b/mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl new file mode 100644 index 000000000000..c3990c07374c --- /dev/null +++ b/mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl @@ -0,0 +1,19 @@ +config := meta.(*transport_tpg.Config) + +if err := tpgresource.ParseImportId([]string{ + "^(?P.+)/locations/(?P[^/]+)/floorSetting$", + "^(?P.+)/(?P[^/]+)$", +}, d, config); err != nil { + return nil, err +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/floorSetting") + +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} + +d.SetId(id) + +return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl new file mode 100644 index 000000000000..fd46a2f384aa --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl @@ -0,0 +1,21 @@ +resource "google_model_armor_floorsetting" "floorsetting-integrated-metadata" { + location = "global" + parent = "projects/{{index $.TestEnvVars "project_id"}}" + + filter_config { + + } + + enable_floor_setting_enforcement = false + + ai_platform_floor_setting { + inspect_only = true + enable_cloud_logging = true + 
} + + floor_setting_metadata { + multi_language_detection { + enable_multi_language_detection = false + } + } +} diff --git a/mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl new file mode 100644 index 000000000000..cfdef5d108d2 --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl @@ -0,0 +1,8 @@ +resource "google_model_armor_floorsetting" "floorsetting-basic" { + parent = "projects/{{index $.TestEnvVars "project_id"}}" + location = "global" + + filter_config { + + } +} diff --git a/mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl new file mode 100644 index 000000000000..3e352a661994 --- /dev/null +++ b/mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl @@ -0,0 +1,27 @@ +resource "google_model_armor_floorsetting" "floorsetting-filter-config" { + location = "global" + parent = "project/{{index $.TestEnvVars "project_id"}}" + + filter_config { + rai_settings { + rai_filters { + filter_type = "DANGEROUS" + confidence_level = "MEDIUM_AND_ABOVE" + } + } + sdp_settings { + basic_config { + filter_enforcement = "ENABLED" + } + } + pi_and_jailbreak_filter_settings { + filter_enforcement = "ENABLED" + confidence_level = "HIGH" + } + malicious_uri_filter_settings { + filter_enforcement = "ENABLED" + } + } + + enable_floor_setting_enforcement = true +} diff --git a/mmv1/templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl b/mmv1/templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl new file mode 100644 index 000000000000..7627fe2902a0 --- /dev/null +++ b/mmv1/templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl @@ -0,0 +1,4 @@ +// This is useful if the resource in question doesn't have a perfectly consistent API +// That is, the Operation for Create 
might return before the Get operation shows the +// completed state of the resource. +time.Sleep(30 * time.Second) \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 940fabcf48c7..27669d77eca6 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -562,10 +562,15 @@ var ServicesListBeta = mapOf( "path" to "./google-beta/services/mlengine" ), "modelarmor" to mapOf( - "name" to "modelarmor", + "name" to "ModelArmor", "displayName" to "ModelArmor", "path" to "./google-beta/services/modelarmor" ), + "modelarmorglobal" to mapOf( + "name" to "ModelArmorGlobal", + "displayName" to "ModelArmorGlobal", + "path" to "./google-beta/services/modelarmorglobal" + ), "monitoring" to mapOf( "name" to "monitoring", "displayName" to "Monitoring", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index a499dfed57c3..774f10b326fb 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -557,9 +557,14 @@ var ServicesListGa = mapOf( "path" to "./google/services/mlengine" ), "modelarmor" to mapOf( - "name" to "modelarmor", + "name" to "ModelArmor", "displayName" to "ModelArmor", - "path" to "./googleservices/modelarmor" + "path" to "./google/services/modelarmor" + ), + "modelarmorglobal" to mapOf( + "name" to "ModelArmorGlobal", + "displayName" to "ModelArmorGlobal", + "path" to "./google/services/modelarmorglobal" ), "monitoring" to mapOf( "name" to "monitoring", diff --git a/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go 
b/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go index f181162f363f..e9a41f543703 100644 --- a/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go +++ b/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go @@ -1,10 +1,8 @@ package modelarmor_test import ( - "bytes" "fmt" "testing" - "text/template" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" @@ -12,20 +10,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/acctest" ) -// Helper function to expand a template -func expandTemplate(tmplStr string, data map[string]interface{}) (string, error) { - tmpl, err := template.New("config").Parse(tmplStr) - if err != nil { - return "", err - } - var buf bytes.Buffer - err = tmpl.Execute(&buf, data) - if err != nil { - return "", err - } - return buf.String(), nil -} - func TestAccModelArmorTemplate_basic(t *testing.T) { t.Parallel() @@ -42,13 +26,7 @@ func TestAccModelArmorTemplate_basic(t *testing.T) { CheckDestroy: testAccCheckModelArmorTemplateDestroyProducer(t), Steps: []resource.TestStep{ { - Config: func() string { - cfg, err := testAccModelArmorTemplate_basic_config(basicContext) - if err != nil { - t.Fatalf("Failed to expand basic config template: %v", err) - } - return cfg - }(), + Config: testAccModelArmorTemplate_basic_config(basicContext), }, { ResourceName: "google_model_armor_template.template-basic", @@ -59,19 +37,18 @@ func TestAccModelArmorTemplate_basic(t *testing.T) { }) } -func testAccModelArmorTemplate_basic_config(context map[string]interface{}) (string, error) { - const basic_template = ` +func testAccModelArmorTemplate_basic_config(context map[string]interface{}) string { + return acctest.Nprintf(` resource "google_model_armor_template" "template-basic" { - location = "{{.location}}" - template_id = "{{.templateId}}" + location = "%{location}" + template_id = 
"%{templateId}" filter_config { } template_metadata { } -}` - return expandTemplate(basic_template, context) +}`, context) } func TestAccModelArmorTemplate_update(t *testing.T) { @@ -80,6 +57,7 @@ func TestAccModelArmorTemplate_update(t *testing.T) { templateId := fmt.Sprintf("modelarmor-test-update-%s", acctest.RandString(t, 5)) context := map[string]interface{}{ + "location": "us-central1", "templateId": templateId, } @@ -118,7 +96,7 @@ func TestAccModelArmorTemplate_update(t *testing.T) { func testAccModelArmorTemplate_initial(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_model_armor_template" "test-resource" { - location = "us-central1" + location = "%{location}" template_id = "%{templateId}" labels = { "test-label" = "env-testing-initial" diff --git a/mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go b/mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go new file mode 100644 index 000000000000..e30c658aa49e --- /dev/null +++ b/mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go @@ -0,0 +1,131 @@ +package modelarmorglobal_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccModelArmorGlobalFloorsetting_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccModelArmorGlobalFloorsetting_initial(context), + }, + { + ResourceName: "google_model_armor_floorsetting.test-resource", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"location", "parent"}, + }, + { + Config: testAccModelArmorGlobalFloorsetting_updated(context), + }, + { + ResourceName: "google_model_armor_floorsetting.test-resource", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "parent"}, + }, + }, + }) +} + +func testAccModelArmorGlobalFloorsetting_initial(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_model_armor_floorsetting" "test-resource" { + location = "global" + parent = "projects/%{project_id}" + + filter_config { + rai_settings { + rai_filters { + filter_type = "DANGEROUS" + confidence_level = "LOW_AND_ABOVE" + } + } + sdp_settings { + basic_config { + filter_enforcement = "ENABLED" + } + } + pi_and_jailbreak_filter_settings { + filter_enforcement = "ENABLED" + confidence_level = "MEDIUM_AND_ABOVE" + } + malicious_uri_filter_settings { + filter_enforcement = "ENABLED" + } + } + + enable_floor_setting_enforcement = true + + integrated_services = [ "AI_PLATFORM" ] + + ai_platform_floor_setting { + inspect_only = true + enable_cloud_logging = true + } + + floor_setting_metadata { + multi_language_detection { + enable_multi_language_detection = true + } + } +} +`, context) +} + +func testAccModelArmorGlobalFloorsetting_updated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_model_armor_floorsetting" "test-resource" { + location = "global" + parent = "projects/%{project_id}" + + filter_config { + rai_settings { + rai_filters { + filter_type = "SEXUALLY_EXPLICIT" + confidence_level = "HIGH" + } + } + sdp_settings { + advanced_config { + inspect_template = "projects/modelarmor-api-test/locations/global/inspectTemplates/modelarmor-tf-test" + deidentify_template = "projects/modelarmor-api-test/locations/us-central1/deidentifyTemplates/modelarmor-tf-test" + } + } + pi_and_jailbreak_filter_settings { + filter_enforcement = "ENABLED" + confidence_level = 
"MEDIUM_AND_ABOVE" + } + malicious_uri_filter_settings { + filter_enforcement = "ENABLED" + } + } + + ai_platform_floor_setting { + inspect_and_block = false + enable_cloud_logging = false + } + + floor_setting_metadata { + multi_language_detection { + enable_multi_language_detection = false + } + } +} +`, context) +} From f45c99516fecb4697ba72ca59a7bf2c762bd7279 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Tue, 15 Jul 2025 16:48:21 -0700 Subject: [PATCH 548/884] tgc-revival: add support for google_compute_firewall_policy (#14537) --- mmv1/api/resource.go | 3 +++ mmv1/products/compute/FirewallPolicy.yaml | 2 ++ .../cai2hcl/resource_converter.go.tmpl | 5 +++++ .../ancestrymanager/ancestrymanager.go | 19 ++++++++----------- 4 files changed, 18 insertions(+), 11 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 04fc40fab29a..4ccb44674d99 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -233,6 +233,9 @@ type Resource struct { // If true, include resource in the new package of TGC (terraform-provider-conversion) IncludeInTGCNext bool `yaml:"include_in_tgc_next_DO_NOT_USE,omitempty"` + // Name of the hcl resource block used in TGC + TgcHclBlockName string `yaml:"tgc_hcl_block_name,omitempty"` + // If true, skip sweeper generation for this resource ExcludeSweeper bool `yaml:"exclude_sweeper,omitempty"` diff --git a/mmv1/products/compute/FirewallPolicy.yaml b/mmv1/products/compute/FirewallPolicy.yaml index 0eb7f969b700..5c4d516d6bbe 100644 --- a/mmv1/products/compute/FirewallPolicy.yaml +++ b/mmv1/products/compute/FirewallPolicy.yaml @@ -41,6 +41,8 @@ custom_code: post_update: 'templates/terraform/constants/compute_firewall_policy_operation.go.tmpl' custom_diff: - 'tpgresource.DefaultProviderProject' +include_in_tgc_next_DO_NOT_USE: true +tgc_hcl_block_name: 'shortName' examples: - name: 'firewall_policy' primary_resource_id: 'default' diff --git 
a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index b41ce4c49cab..283b2e011fca 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -80,8 +80,13 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass config := transport.NewConfig() d := &schema.ResourceData{} +{{ if $.TgcHclBlockName -}} + hclBlockName := res["{{ $.TgcHclBlockName -}}"].(string) + + {{- else -}} assetNameParts := strings.Split(asset.Name, "/") hclBlockName := assetNameParts[len(assetNameParts)-1] +{{ end}} hclData := make(map[string]interface{}) diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go index f4e14a1f6e07..1d70d9619bff 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go @@ -164,15 +164,6 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return nil, fmt.Errorf("organization id not found in terraform data") } key = orgKey - case "iam.googleapis.com/Role": - // google_organization_iam_custom_role or google_project_iam_custom_role - if orgOK { - key = orgKey - } else if projectKey != "" { - key = projectKey - } else { - return []string{unknownOrg}, nil - } case "cloudresourcemanager.googleapis.com/Project", "cloudbilling.googleapis.com/ProjectBillingInfo": // for google_project and google_project_iam resources var ancestors []string @@ -207,10 +198,16 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc key = projectKey default: - if projectKey == "" { + switch { + case orgOK: + key = orgKey + case folderOK: + key = folderKey + case projectKey != "": + key = projectKey + default: return []string{unknownOrg}, 
nil } - key = projectKey } return m.getAncestorsWithCache(key) } From 0fbf49ee2f6b2b01100b64b075524381a123facc Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 16 Jul 2025 08:45:51 -0700 Subject: [PATCH 549/884] tgc-revival: support google_compute_url_map (#14524) --- mmv1/api/product/version.go | 7 ++-- mmv1/api/resource.go | 14 +++++++ mmv1/products/compute/Address.yaml | 1 - mmv1/products/compute/Disk.yaml | 1 + mmv1/products/compute/UrlMap.yaml | 12 ++++++ mmv1/products/compute/product.yaml | 2 + mmv1/provider/template_data.go | 3 +- .../array_resourceref_with_validation.go.tmpl | 5 +++ .../resourceref_with_validation.go.tmpl | 5 +++ .../terraform/flatten_property_method.go.tmpl | 9 ++++ .../cai2hcl/full_to_relative_path.go.tmpl | 10 +++++ .../tgc_next/encoders/compute_address.go.tmpl | 6 --- .../tgc_next/encoders/compute_disk.go.tmpl | 5 +++ ..._array_resourceref_with_validation.go.tmpl | 41 ------------------- .../expand_property_method_tgc.go.tmpl | 10 ----- ...expand_resourceref_with_validation.go.tmpl | 23 ----------- .../tgc_next/pkg/tgcresource/utils.go | 9 ++-- .../tgc_next/test/assert_test_files.go | 5 ++- 18 files changed, 76 insertions(+), 92 deletions(-) create mode 100644 mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl delete mode 100644 mmv1/templates/tgc_next/encoders/compute_address.go.tmpl create mode 100644 mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl delete mode 100644 mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl delete mode 100644 mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl diff --git a/mmv1/api/product/version.go b/mmv1/api/product/version.go index d94c0a41e8e5..b36dfd461d5a 100644 --- a/mmv1/api/product/version.go +++ b/mmv1/api/product/version.go @@ -26,9 +26,10 @@ var ORDER = []string{"ga", "beta", "alpha", "private"} // a superset of beta, and beta a superset of GA. Each version will have a // different version url. 
type Version struct { - CaiBaseUrl string `yaml:"cai_base_url,omitempty"` - BaseUrl string `yaml:"base_url"` - Name string + CaiBaseUrl string `yaml:"cai_base_url,omitempty"` + CaiLegacyBaseUrl string `yaml:"cai_legacy_base_url,omitempty"` + BaseUrl string `yaml:"base_url"` + Name string } func (v *Version) Validate(pName string) { diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 4ccb44674d99..709cb7680702 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1820,6 +1820,20 @@ func (r Resource) CaiProductBaseUrl() string { return baseUrl } +// Gets the CAI product legacy base url. +// For example, https://www.googleapis.com/compute/v1/ for compute +func (r Resource) CaiProductLegacyBaseUrl() string { + version := r.ProductMetadata.VersionObjOrClosest(r.TargetVersionName) + baseUrl := version.CaiLegacyBaseUrl + if baseUrl == "" { + baseUrl = version.CaiBaseUrl + } + if baseUrl == "" { + baseUrl = version.BaseUrl + } + return baseUrl +} + // Returns the Cai product backend name from the version base url // base_url: https://accessapproval.googleapis.com/v1/ -> accessapproval func (r Resource) CaiProductBackendName(caiProductBaseUrl string) string { diff --git a/mmv1/products/compute/Address.yaml b/mmv1/products/compute/Address.yaml index 814a52ed7db0..7107c7d747aa 100644 --- a/mmv1/products/compute/Address.yaml +++ b/mmv1/products/compute/Address.yaml @@ -52,7 +52,6 @@ collection_url_key: 'items' include_in_tgc_next_DO_NOT_USE: true custom_code: post_create: 'templates/terraform/post_create/labels.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/compute_address.go.tmpl' sweeper: url_substitutions: - region: "us-west2" diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index 3de58de78310..63a4be3a624c 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -64,6 +64,7 @@ custom_code: update_encoder: 'templates/terraform/update_encoder/hyper_disk.go.tmpl' decoder: 
'templates/terraform/decoders/disk.tmpl' pre_delete: 'templates/terraform/pre_delete/detach_disk.tmpl' + tgc_encoder: 'templates/tgc_next/encoders/compute_disk.go.tmpl' custom_diff: - 'customdiff.ForceNewIfChange("size", IsDiskShrinkage)' - 'hyperDiskIopsUpdateDiffSuppress' diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index ca0e6baf653d..5fbee1b216be 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -36,6 +36,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'url_map_bucket_and_service' @@ -52,6 +53,8 @@ examples: url_map_name: 'urlmap' home_backend_service_name: 'home' health_check_name: 'health-check' + tgc_test_ignore_extra: + - 'path_matcher.route_rules.url_redirect.https_redirect' - name: 'url_map_traffic_director_route_partial' primary_resource_id: 'urlmap' vars: @@ -402,6 +405,7 @@ properties: For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. api_name: errorResponseRules + is_missing_in_cai: true item_type: type: NestedObject properties: @@ -562,6 +566,7 @@ properties: For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. api_name: errorResponseRules + is_missing_in_cai: true item_type: type: NestedObject properties: @@ -751,6 +756,7 @@ properties: description: | The percentage of requests to be mirrored to backendService. The value must be between 0.0 and 100.0 inclusive. 
+ is_missing_in_cai: true validation: function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' @@ -769,6 +775,7 @@ properties: properties: - name: 'nanos' type: Integer + include_empty_value_in_cai: true description: | Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 `seconds` field and a positive @@ -1865,6 +1872,7 @@ properties: - name: 'httpFilterConfigs' type: Array min_version: 'beta' + is_missing_in_cai: true description: | Outbound route specific configuration for networkservices.HttpFilter resources enabled by Traffic Director. httpFilterConfigs only applies for load balancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. @@ -1892,6 +1900,7 @@ properties: - name: 'httpFilterMetadata' type: Array min_version: 'beta' + is_missing_in_cai: true description: | Outbound route specific metadata supplied to networkservices.HttpFilter resources enabled by Traffic Director. httpFilterMetadata only applies for load balancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. @@ -2237,6 +2246,7 @@ properties: - name: 'mirrorPercent' min_version: beta type: Double + is_missing_in_cai: true description: | The percentage of requests to be mirrored to backendService. The value must be between 0.0 and 100.0 inclusive. @@ -2378,6 +2388,7 @@ properties: For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. api_name: errorResponseRules + is_missing_in_cai: true item_type: type: NestedObject properties: @@ -2866,6 +2877,7 @@ properties: imports: 'selfLink' - name: 'mirrorPercent' min_version: beta + is_missing_in_cai: true type: Double description: | The percentage of requests to be mirrored to backendService. 
diff --git a/mmv1/products/compute/product.yaml b/mmv1/products/compute/product.yaml index a77b1c0718a7..012ca3739a61 100644 --- a/mmv1/products/compute/product.yaml +++ b/mmv1/products/compute/product.yaml @@ -17,7 +17,9 @@ display_name: 'Compute Engine' versions: - name: 'ga' base_url: 'https://compute.googleapis.com/compute/v1/' + cai_legacy_base_url: 'https://www.googleapis.com/compute/v1/' - name: 'beta' base_url: 'https://compute.googleapis.com/compute/beta/' + cai_legacy_base_url: 'https://www.googleapis.com/compute/v1/' scopes: - 'https://www.googleapis.com/auth/compute' diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 7076e8711e5c..f70a50486a81 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -198,10 +198,9 @@ func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, r "templates/terraform/schema_property.go.tmpl", "templates/terraform/schema_subresource.go.tmpl", "templates/terraform/flatten_property_method.go.tmpl", - "templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl", - "templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl", "templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl", "templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl", + "templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) 
} diff --git a/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl b/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl index 3493454ce2e5..5d85d48d779d 100644 --- a/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl @@ -24,7 +24,12 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T if err != nil { return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) } + {{- if $.ResourceMetadata.IsTgcCompiler }} + url := tgcresource.GetFullUrl(config, f.RelativeLink(), "{{$.ResourceMetadata.CaiProductLegacyBaseUrl}}") + req = append(req, url) + {{- else }} req = append(req, f.RelativeLink()) + {{- end }} } return req, nil } diff --git a/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl b/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl index 973e737d8467..718548e9eead 100644 --- a/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl @@ -15,5 +15,10 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T if err != nil { return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) } + {{- if $.ResourceMetadata.IsTgcCompiler }} + url := tgcresource.GetFullUrl(config, f.RelativeLink(), "{{$.ResourceMetadata.CaiProductLegacyBaseUrl}}") + return url, nil + {{- else }} return f.RelativeLink(), nil + {{- end }} } diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index b38732d1525e..f9dae217371c 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -132,10 +132,14 @@ func 
flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso } return tpgresource.ConvertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1) {{- else if $.IsA "ResourceRef" }} + {{- if $.ResourceMetadata.IsTgcCompiler }} + {{- template "fullToRelativePath" $ -}} + {{- else }} if v == nil { return v } return tpgresource.ConvertSelfLinkToV1(v.(string)) + {{- end }} {{- else if $.IsSet }} if v == nil { return v @@ -145,6 +149,11 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso {{- else if or ($.ItemType.IsA "String") ($.ItemType.IsA "Enum") }} return schema.NewSet(schema.HashString, v.([]interface{})) {{- end }} + {{- else if and ($.ResourceMetadata.IsTgcCompiler) ($.IsA "Boolean") ($.Required) }} + if v == nil { + return false + } + return v {{- else }} return v {{- end }} diff --git a/mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl new file mode 100644 index 000000000000..5c84b3188429 --- /dev/null +++ b/mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl @@ -0,0 +1,10 @@ +{{- define "fullToRelativePath" }} + if v == nil { + return v + } + relative, err := tpgresource.GetRelativePath(v.(string)) + if err != nil { + return v + } + return relative +{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/encoders/compute_address.go.tmpl b/mmv1/templates/tgc_next/encoders/compute_address.go.tmpl deleted file mode 100644 index 34959285f3fe..000000000000 --- a/mmv1/templates/tgc_next/encoders/compute_address.go.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -config := meta.(*transport_tpg.Config) - -obj["subnetwork"] = tgcresource.GetComputeSelfLink(config, obj["subnetwork"]) -obj["network"] = tgcresource.GetComputeSelfLink(config, obj["network"]) - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl 
b/mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl new file mode 100644 index 000000000000..3936ad1725d1 --- /dev/null +++ b/mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl @@ -0,0 +1,5 @@ +config := meta.(*transport_tpg.Config) + +obj["type"] = tgcresource.GetFullUrl(config, obj["type"], "https://www.googleapis.com/compute/v1/") + +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl deleted file mode 100644 index e491e591ec05..000000000000 --- a/mmv1/templates/tgc_next/tfplan2cai/expand_array_resourceref_with_validation.go.tmpl +++ /dev/null @@ -1,41 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -{{- define "expandArrayResourcerefWithValidation" }} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { -{{- if $.IsSet }} - vSet, ok := v.(*schema.Set) - if !ok || vSet == nil { - return nil, fmt.Errorf("invalid type for v: %T, expected *schema.Set", v) - } - v = vSet.List() -{{- end }} - vSlice, ok := v.([]interface{}) - if !ok || vSlice == nil { - return nil, fmt.Errorf("invalid type for v: %T, expected []interface{}", v) - } - req := make([]interface{}, 0, len(vSlice)) - for _, raw := range vSlice { - if raw == nil { - return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: nil") - } - f, err := {{ template "expandResourceRef" dict "VarName" "raw.(string)" "ResourceRef" $.ItemType.ResourceRef "ResourceType" $.ItemType.ResourceType}} - if err != nil { - return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) - } - - fullUrl := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) - req = append(req, fullUrl) - } - return req, nil -} -{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl index 072c45383ee0..3d5b82b33b21 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl @@ -15,16 +15,6 @@ {{- define "expandTgcPropertyMethod" }} {{ if $.CustomTgcExpand }} {{- $.CustomTemplate $.CustomTgcExpand false -}} - {{ else if and ($.IsA "Array") ($.ItemType.IsA "ResourceRef")}} -{{- template "expandArrayResourcerefWithValidation" $ -}} - {{ else if ($.IsA "ResourceRef") }} - {{- if $.IsResourceRefFound }} -{{- template "expandResourcerefWithValidation" $ -}} - {{- else }} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config 
*transport_tpg.Config) (interface{}, error) { - return v, nil -} - {{- end }} {{ else }} {{- template "expandPropertyMethod" $ -}} {{ end }} diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl deleted file mode 100644 index 11f824f203cf..000000000000 --- a/mmv1/templates/tgc_next/tfplan2cai/expand_resourceref_with_validation.go.tmpl +++ /dev/null @@ -1,23 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -{{- define "expandResourcerefWithValidation" }} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - f, err := tpgresource.ParseProjectFieldValue("{{$.ResourceType}}", v.(string), "project", d, config, true) - if err != nil { - return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) - } - - fullUrl := tgcresource.GetComputeSelfLink(config, f.RelativeLink()) - return fullUrl, nil -} -{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go index 03adc1ac46f0..0f2ab5affff3 100644 --- a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go +++ b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go @@ -26,15 +26,16 @@ func RemoveTerraformAttributionLabel(raw interface{}) interface{} { return nil } -func GetComputeSelfLink(config *transport_tpg.Config, raw interface{}) interface{} { - if raw == nil { - return nil +// Gets the full url from relative url +func GetFullUrl(config *transport_tpg.Config, raw interface{}, baseUrl string) interface{} { + if raw == nil || baseUrl == "" { + return raw } v := raw.(string) if v != "" && !strings.HasPrefix(v, "https://") { if config.UniverseDomain == "" || config.UniverseDomain == "googleapis.com" { - return fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", v) + return fmt.Sprintf("%s%s", baseUrl, v) } } diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 3ae15f85a407..fa837235916a 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -77,7 +77,8 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData } if testData.Cai == nil { - return fmt.Errorf("cai asset is unavailable for resource %s", testData.ResourceAddress) + 
log.Printf("SKIP: cai asset is unavailable for resource %s", testData.ResourceAddress) + return nil } assets := make([]caiasset.Asset, 0) @@ -96,7 +97,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if primaryResource { return fmt.Errorf("conversion of the primary resource %s is not supported in tgc", testData.ResourceAddress) } else { - log.Printf("Test for %s is skipped as conversion of the resource is not supported in tgc.", resourceType) + log.Printf("SKIP: conversion of the resource %s is not supported in tgc.", resourceType) return nil } } From a459d6a9b3e2f1993d81b61535448375c8ed6b40 Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Wed, 16 Jul 2025 21:21:57 +0530 Subject: [PATCH 550/884] Add tags to Secret Manager Secret TagsR2401 (#14408) --- mmv1/products/secretmanager/Secret.yaml | 8 ++ .../resource_secret_manager_secret_test.go | 84 +++++++++++++++++++ 2 files changed, 92 insertions(+) diff --git a/mmv1/products/secretmanager/Secret.yaml b/mmv1/products/secretmanager/Secret.yaml index b284fd8d668c..e54451d30967 100644 --- a/mmv1/products/secretmanager/Secret.yaml +++ b/mmv1/products/secretmanager/Secret.yaml @@ -253,6 +253,14 @@ properties: description: | The Duration between rotation notifications. Must be in seconds and at least 3600s (1h) and at most 3153600000s (100 years). If rotationPeriod is set, `next_rotation_time` must be set. `next_rotation_time` will be advanced by this period when the service automatically sends rotation notifications. + - name: 'tags' + type: KeyValuePairs + description: | + A map of resource manager tags. + Resource manager tag keys and values have the same definition as resource manager tags. + Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_value_id}. 
+ immutable: true + ignore_read: true virtual_fields: - name: 'deletion_protection' description: | diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go index f1132a348f3a..2e5db070d291 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go @@ -491,6 +491,39 @@ func TestAccSecretManagerSecret_DeletionProtection(t *testing.T) { }) } +func TestAccSecretManagerSecret_tags(t *testing.T) { + t.Parallel() + + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "secret_manager_secret-tagkey", map[string]interface{}{}) + + context := map[string]interface{}{ + "org": envvar.GetTestOrgFromEnv(t), + "tagKey": tagKey, + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "secret_manager_secret-tagvalue", tagKey), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecretTags(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-tags", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "deletion_protection", "tags"}, + }, + { + Config: testAccSecretManagerSecretTagsDeletionProtection(context), + }, + }, + }) +} + func testAccSecretManagerSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_secret" "secret-basic" { @@ -1301,3 +1334,54 @@ resource "google_secret_manager_secret" "secret-deletionprotection" { } `, context) } + +func 
testAccSecretManagerSecretTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-tags" { + secret_id = "tf-test-secret-%{random_suffix}" + labels = { + label = "my-label" + } + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + ttl = "3600s" + tags = { + "%{org}/%{tagKey}" = "%{tagValue}" + } +} +`, context) +} + +func testAccSecretManagerSecretTagsDeletionProtection(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-tags" { + secret_id = "tf-test-secret-%{random_suffix}" + labels = { + label = "my-label" + } + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + ttl = "3600s" + tags = { + "%{org}/%{tagKey}" = "%{tagValue}" + } + deletion_protection = false +} +`, context) +} From 5bd3a4ce516d6c8e70051c8f88ae7fa34ff2424b Mon Sep 17 00:00:00 2001 From: srichaitanyab Date: Wed, 16 Jul 2025 22:51:56 +0530 Subject: [PATCH 551/884] Fixed conflicts in SDP settings for Model Armor (#14541) --- mmv1/products/modelarmor/Template.yaml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mmv1/products/modelarmor/Template.yaml b/mmv1/products/modelarmor/Template.yaml index 2bb004efba26..fa564361710d 100644 --- a/mmv1/products/modelarmor/Template.yaml +++ b/mmv1/products/modelarmor/Template.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google Inc. +# Copyright 2025 Google Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -156,7 +156,8 @@ properties: properties: - name: advancedConfig type: NestedObject - conflicts: [basicConfig] + conflicts: + - filter_config.0.sdp_settings.0.basic_config description: Sensitive Data Protection Advanced configuration. 
properties: - name: inspectTemplate @@ -182,7 +183,8 @@ properties: `projects/{project}/locations/{location}/deidentifyTemplates/{deidentify_template}` - name: basicConfig type: NestedObject - conflicts: [advancedConfig] + conflicts: + - filter_config.0.sdp_settings.0.advanced_config description: Sensitive Data Protection basic configuration. properties: - name: filterEnforcement From 8e6eb640d98c4a804a937cc103275bb25f4c456c Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 16 Jul 2025 10:48:21 -0700 Subject: [PATCH 552/884] tgc-revival: support cloudfunctions2 function in tgc next (#14539) --- mmv1/api/resource.go | 2 +- mmv1/api/type.go | 3 +++ mmv1/products/cloudfunctions2/Function.yaml | 1 + mmv1/products/compute/BackendService.yaml | 2 +- .../cai2hcl/resource_converter.go.tmpl | 19 +++++++----------- ...o.tmpl => compute_backend_service.go.tmpl} | 0 .../tgc_next/pkg/tgcresource/utils.go | 20 +++++++++++++++++++ .../tgc_next/test/assert_test_files.go | 2 +- 8 files changed, 34 insertions(+), 15 deletions(-) rename mmv1/templates/tgc_next/decoders/{backend_service.go.tmpl => compute_backend_service.go.tmpl} (100%) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 709cb7680702..30839e260a4d 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -2052,7 +2052,7 @@ func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string // Filters out computed properties during cai2hcl func (r Resource) ReadPropertiesForTgc() []*Type { return google.Reject(r.AllUserProperties(), func(v *Type) bool { - return v.Output + return v.Output || v.UrlParamOnly }) } diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 6bc841686112..e0581f7e3d55 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -912,6 +912,9 @@ func (t Type) UserProperties() []*Type { } return google.Reject(t.Properties, func(p *Type) bool { + if t.ResourceMetadata.IsTgcCompiler() { + return p.Exclude || p.Output + } return p.Exclude }) } diff --git 
a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 3d2cd85db46e..51c8b30033eb 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -32,6 +32,7 @@ timeouts: insert_minutes: 60 update_minutes: 60 delete_minutes: 60 +include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: actions: ['create', 'delete', 'update'] diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 3bb0fb56697f..e776ee67648e 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -56,7 +56,7 @@ custom_code: decoder: 'templates/terraform/decoders/backend_service.go.tmpl' post_create: 'templates/terraform/post_create/compute_backend_service_security_policy.go.tmpl' post_update: 'templates/terraform/post_create/compute_backend_service_security_policy.go.tmpl' - tgc_decoder: 'templates/tgc_next/decoders/backend_service.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/compute_backend_service.go.tmpl' schema_version: 1 examples: - name: 'backend_service_basic' diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 283b2e011fca..468f6e954430 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -108,24 +108,19 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass return nil, nil } {{ end}} + +{{ range $param := $.UserParameters }} +{{/* Attempt to parse all parameters from asset name. 
*/}} + hclData["{{ underscore $param.Name }}"] = utils.ParseFieldValue(asset.Name, "{{ underscore (plural $param.Name) }}") +{{ end }} {{- if $.HasProject -}} hclData["project"] = utils.ParseFieldValue(asset.Name, "projects") {{- end}} {{ range $prop := $.ReadPropertiesForTgc }} {{ if $prop.FlattenObject -}} - // Terraform must set the top level schema field, but since this object contains collapsed properties - // it's difficult to know what the top level should be. Instead we just loop over the map returned from flatten. if flattenedProp := flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config); flattenedProp != nil { - flattenedPropSlice, ok := flattenedProp.([]interface{}) - if !ok || len(flattenedPropSlice) == 0 { - return nil, fmt.Errorf("unexpected type returned from flattener: %T", flattenedProp) - } - flattedPropMap, ok := flattenedPropSlice[0].(map[string]interface{}) - if !ok || len(flattedPropMap) == 0 { - return nil, fmt.Errorf("unexpected type returned from flattener: %T", flattenedPropSlice) - } - for k, v := range flattedPropMap { - hclData[k] = v + if err := tgcresource.MergeFlattenedProperties(hclData, flattenedProp); err != nil { + return nil, fmt.Errorf("error merging flattened properties from {{ $prop.Name }}: %s", err) } } {{- else -}} diff --git a/mmv1/templates/tgc_next/decoders/backend_service.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl similarity index 100% rename from mmv1/templates/tgc_next/decoders/backend_service.go.tmpl rename to mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl diff --git a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go index 0f2ab5affff3..07f42af30b4e 100644 --- a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go +++ b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go @@ -41,3 +41,23 @@ func GetFullUrl(config 
*transport_tpg.Config, raw interface{}, baseUrl string) i return v } + +// Terraform must set the top level schema field, but since this object contains collapsed properties +// it's difficult to know what the top level should be. Instead we just loop over the map returned from flatten. +func MergeFlattenedProperties(hclData map[string]interface{}, flattenedProp interface{}) error { + if flattenedProp == nil { + return nil + } + flattenedPropSlice, ok := flattenedProp.([]interface{}) + if !ok || len(flattenedPropSlice) == 0 { + return fmt.Errorf("unexpected type returned from flattener: %T", flattenedProp) + } + flattedPropMap, ok := flattenedPropSlice[0].(map[string]interface{}) + if !ok || len(flattedPropMap) == 0 { + return fmt.Errorf("unexpected type returned from flattener: %T", flattenedPropSlice) + } + for k, v := range flattedPropMap { + hclData[k] = v + } + return nil +} diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index fa837235916a..9d14cd8107de 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -198,7 +198,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if diff := cmp.Diff( asset.Resource, roundtripAsset.Resource, - cmpopts.IgnoreFields(caiasset.AssetResource{}, "Version", "Data"), + cmpopts.IgnoreFields(caiasset.AssetResource{}, "Version", "Data", "DiscoveryDocumentURI"), // Consider DiscoveryDocumentURI equal if they have the same number of path segments when split by "/". 
cmp.FilterPath(func(p cmp.Path) bool { return p.Last().String() == ".DiscoveryDocumentURI" From b3f12cd568ede4b0d9a3bf0ac50bc47420cd8b87 Mon Sep 17 00:00:00 2001 From: ZeePal Date: Thu, 17 Jul 2025 05:19:46 +1000 Subject: [PATCH 553/884] added GOOGLE_CLOUD_NETAPP_VOLUMES to google_vmwareengine_network_peering (#14540) --- mmv1/products/vmwareengine/NetworkPeering.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/vmwareengine/NetworkPeering.yaml b/mmv1/products/vmwareengine/NetworkPeering.yaml index 3279552a69ca..7cc42633e210 100644 --- a/mmv1/products/vmwareengine/NetworkPeering.yaml +++ b/mmv1/products/vmwareengine/NetworkPeering.yaml @@ -138,6 +138,7 @@ properties: - 'NETAPP_CLOUD_VOLUMES' - 'THIRD_PARTY_SERVICE' - 'DELL_POWERSCALE' + - 'GOOGLE_CLOUD_NETAPP_VOLUMES' - name: 'uid' type: String description: | From cf178e71132ac5e3fe2a7340c34c5dc1876a1a98 Mon Sep 17 00:00:00 2001 From: Huici Pan Date: Wed, 16 Jul 2025 15:52:39 -0700 Subject: [PATCH 554/884] =?UTF-8?q?Adding=20support=20for=20new=20Preview?= =?UTF-8?q?=20Feature=20resource=20and=20a=20test=20into=20a=20bet?= =?UTF-8?q?=E2=80=A6=20(#14363)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mmv1/products/compute/PreviewFeature.yaml | 84 +++++++++++++++++++ .../examples/preview_feature_basic.tf.tmpl | 10 +++ .../resource_compute_preview_features_test.go | 55 ++++++++++++ 3 files changed, 149 insertions(+) create mode 100644 mmv1/products/compute/PreviewFeature.yaml create mode 100644 mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go diff --git a/mmv1/products/compute/PreviewFeature.yaml b/mmv1/products/compute/PreviewFeature.yaml new file mode 100644 index 000000000000..45967adc14bd --- /dev/null +++ b/mmv1/products/compute/PreviewFeature.yaml @@ -0,0 +1,84 @@ +# Copyright 2025 Google Inc. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +# API resource name +name: 'PreviewFeature' +kind: 'compute#PreviewFeature' +description: | + Represents a single Google Compute Engine preview feature such as Alpha API access, which can be enabled or disabled for a project. +min_version: 'beta' +references: + guides: + 'Use the Compute Engine alpha API': 'https://cloud.google.com/compute/docs/reference/rest/alpha' + api: 'https://cloud.google.com/compute/docs/reference/rest/beta/PreviewFeatures' +docs: +base_url: 'projects/{{project}}/global/previewFeatures' +has_self_link: false + +create_url: 'projects/{{project}}/global/previewFeatures/{{name}}' +create_verb: 'PATCH' +update_url: 'projects/{{project}}/global/previewFeatures/{{name}}' +update_verb: 'PATCH' +update_mask: true +exclude_delete: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + +autogen_async: true +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: false + +examples: + - name: 'preview_feature_basic' + primary_resource_id: 'gce_preview_feature' + +parameters: + - name: 'name' + type: String + required: true + immutable: true + url_param_only: true + description: | + The name of the preview feature. + +properties: + - name: 'activationStatus' + type: Enum + description: 'The activation status of the preview feature.' 
+ required: true + enum_values: + - 'ENABLED' + - 'DISABLED' + - name: 'rolloutOperation' + type: NestedObject + description: 'The rollout operation of the feature.' + ignore_read: true + properties: + - name: 'rolloutInput' + type: NestedObject + description: 'The input for the rollout operation.' + properties: + - name: 'predefinedRolloutPlan' + type: Enum + description: 'Predefined rollout plans.' + required: true + enum_values: + - 'ROLLOUT_PLAN_FAST_ROLLOUT' diff --git a/mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl b/mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl new file mode 100644 index 000000000000..3ff908fcddeb --- /dev/null +++ b/mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl @@ -0,0 +1,10 @@ +resource "google_compute_preview_feature" "{{ .PrimaryResourceId }}" { + provider = google-beta + name = "alpha-api-access" + activation_status = "DISABLED" + rollout_operation { + rollout_input { + predefined_rollout_plan = "ROLLOUT_PLAN_FAST_ROLLOUT" + } + } +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go new file mode 100644 index 000000000000..a2e6cc2a26f9 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go @@ -0,0 +1,55 @@ +package compute_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccComputePreviewFeature_update(t *testing.T) { + t.Parallel() + + // The specific feature name to test. + featureName := "alpha-api-access" + // The resource name in Terraform state. 
+ resourceName := "google_compute_preview_feature.acceptance" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + // Step 1: Disable the "alpha-api-access" feature and verify its attributes. + { + Config: testAccComputePreviewFeature_disable(featureName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", featureName), + resource.TestCheckResourceAttr(resourceName, "activation_status", "DISABLED"), + ), + }, + // Step 2: Verify that the resource can be successfully imported. + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"rollout_operation"}, + }, + }, + }) +} + +func testAccComputePreviewFeature_disable(name string) string { + return fmt.Sprintf(` +resource "google_compute_preview_feature" "acceptance" { + name = "%s" + activation_status = "DISABLED" + + rollout_operation { + rollout_input { + predefined_rollout_plan = "ROLLOUT_PLAN_FAST_ROLLOUT" + } + } +} +`, name) +} From 06e819a3f6f827bd5c914aa6df3bb3f27cfbcccf Mon Sep 17 00:00:00 2001 From: Eliza Huang Date: Wed, 16 Jul 2025 16:07:58 -0700 Subject: [PATCH 555/884] Add Endpoint for Vertex Model Garden OSS Gen AI model deployment (#14416) Co-authored-by: Francis O'Hara Aidoo Co-authored-by: francis-ohara Co-authored-by: aamorifreeman --- .../EndpointWithModelGardenDeployment.yaml | 992 ++++++++++++++++++ .../examples/vertex_ai_deploy.tf.tmpl | 9 + ...point_with_model_garden_deployment.go.tmpl | 59 ++ .../resource_vertex_ai_deploy_test.go | 80 ++ 4 files changed, 1140 insertions(+) create mode 100644 mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml create mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl create mode 100644 mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl create 
mode 100644 mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go diff --git a/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml b/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml new file mode 100644 index 000000000000..7ba7d6581224 --- /dev/null +++ b/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml @@ -0,0 +1,992 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: EndpointWithModelGardenDeployment +api_resource_type_kind: Endpoint +description: | + Create an Endpoint and deploy a Model Garden model to it. + + ~> **Note:** This resource does not currently support deletion via Terraform and must be manually deleted if not in use. + See https://cloud.google.com/vertex-ai/docs/predictions/undeploy-model for instructions on how to undeploy a model and delete an endpoint + via the Google Cloud console. 
+references: + guides: + "Use models in Model Garden": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/use-models" + "Overview of Model Garden": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/explore-models" + "Overview of self-deployed models": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/self-deployed-models" + api: "https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations/deploy" +docs: +id_format: "projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}" +base_url: "projects/{{project}}/locations/{{location}}:deploy" +self_link: "projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}" +create_url: "projects/{{project}}/locations/{{location}}:deploy" +immutable: true +exclude_read: true +exclude_delete: true # the resource does not yet support deletion +exclude_import: true # the resource does not support import +timeouts: + insert_minutes: 180 +autogen_status: RW5kcG9pbnRXaXRoTW9kZWxHYXJkZW5EZXBsb3ltZW50 +async: + actions: ["create"] + type: "OpAsync" + operation: + timeouts: + insert_minutes: 180 + base_url: "{{op_id}}" + result: + resource_inside_response: true +custom_code: + post_create: "templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl" +examples: + - name: "vertex_ai_deploy" + primary_resource_id: "deploy" + vars: + project: "vertex-ai" + publisher_model_name: "publisher_model_name" + ignore_read_extra: + - "project" + exclude_test: true +parameters: + - name: location + type: String + description: + Resource ID segment making up resource `location`. It identifies the + resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true +properties: + - name: endpoint + type: String + description: + Resource ID segment making up resource `endpoint`. 
It identifies the + resource within its parent collection as described in https://google.aip.dev/122. + url_param_only: true + output: true + - name: publisherModelName + type: String + description: |- + The Model Garden model to deploy. + Format: + `publishers/{publisher}/models/{publisher_model}@{version_id}`, or + `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`. + exactly_one_of: + - "publisher_model_name" + - "hugging_face_model_id" + - name: huggingFaceModelId + type: String + description: |- + The Hugging Face model to deploy. + Format: Hugging Face model ID like `google/gemma-2-2b-it`. + exactly_one_of: + - "publisher_model_name" + - "hugging_face_model_id" + - name: modelConfig + type: NestedObject + description: The model config to use for the deployment. + properties: + - name: huggingFaceCacheEnabled + type: Boolean + description: |- + If true, the model will deploy with a cached version instead of directly + downloading the model artifacts from Hugging Face. This is suitable for + VPC-SC users with limited internet access. + - name: modelDisplayName + type: String + description: |- + The user-specified display name of the uploaded model. If not + set, a default name will be used. + - name: containerSpec + type: NestedObject + description: |- + Specification of a container for serving predictions. Some fields in this + message correspond to fields in the [Kubernetes Container v1 core + specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + properties: + - name: ports + type: Array + description: |- + List of ports to expose from the container. Vertex AI sends any + prediction requests that it receives to the first port on this list. Vertex + AI also sends + [liveness and health + checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#liveness) + to this port. 
+ + If you do not specify this field, it defaults to following value: + + ```json + [ + { + "containerPort": 8080 + } + ] + ``` + + Vertex AI does not use ports other than the first one listed. This field + corresponds to the `ports` field of the Kubernetes Containers + [v1 core + API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + immutable: true + item_type: + type: NestedObject + properties: + - name: containerPort + type: Integer + description: |- + The number of the port to expose on the pod's IP address. + Must be a valid port number, between 1 and 65535 inclusive. + - name: predictRoute + type: String + description: |- + HTTP path on the container to send prediction requests to. Vertex AI + forwards requests sent using + projects.locations.endpoints.predict to this + path on the container's IP address and port. Vertex AI then returns the + container's response in the API response. + + For example, if you set this field to `/foo`, then when Vertex AI + receives a prediction request, it forwards the request body in a POST + request to the `/foo` path on the port of your container specified by the + first value of this `ModelContainerSpec`'s + ports field. + + If you don't specify this field, it defaults to the following value when + you deploy this Model to an Endpoint:/v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict + The placeholders in this value are replaced as follows: + + * ENDPOINT: The last segment (following `endpoints/`)of the + Endpoint.name][] field of the Endpoint where this Model has been + deployed. (Vertex AI makes this value available to your container code + as the [`AIP_ENDPOINT_ID` environment + variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + + * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. 
+ (Vertex AI makes this value available to your container code + as the [`AIP_DEPLOYED_MODEL_ID` environment + variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + immutable: true + - name: healthRoute + type: String + description: |- + HTTP path on the container to send health checks to. Vertex AI + intermittently sends GET requests to this path on the container's IP + address and port to check that the container is healthy. Read more about + [health + checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#health). + + For example, if you set this field to `/bar`, then Vertex AI + intermittently sends a GET request to the `/bar` path on the port of your + container specified by the first value of this `ModelContainerSpec`'s + ports field. + + If you don't specify this field, it defaults to the following value when + you deploy this Model to an Endpoint:/v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict + The placeholders in this value are replaced as follows: + + * ENDPOINT: The last segment (following `endpoints/`)of the + Endpoint.name][] field of the Endpoint where this Model has been + deployed. (Vertex AI makes this value available to your container code + as the [`AIP_ENDPOINT_ID` environment + variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + + * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. + (Vertex AI makes this value available to your container code as the + [`AIP_DEPLOYED_MODEL_ID` environment + variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + immutable: true + - name: deploymentTimeout + type: String + description: |- + Deployment timeout. + Limit for deployment timeout is 2 hours. 
+ immutable: true + - name: startupProbe + type: NestedObject + description: |- + Probe describes a health check to be performed against a container to + determine whether it is alive or ready to receive traffic. + properties: + - name: exec + type: NestedObject + description: ExecAction specifies a command to execute. + properties: + - name: command + type: Array + description: |- + Command is the command line to execute inside the container, the working + directory for the command is root ('/') in the container's filesystem. + The command is simply exec'd, it is not run inside a shell, so + traditional shell instructions ('|', etc) won't work. To use a shell, you + need to explicitly call out to that shell. Exit status of 0 is treated as + live/healthy and non-zero is unhealthy. + item_type: + type: String + - name: httpGet + type: NestedObject + description: HttpGetAction describes an action based on HTTP Get requests. + properties: + - name: path + type: String + description: Path to access on the HTTP server. + - name: port + type: Integer + description: |- + Number of the port to access on the container. + Number must be in the range 1 to 65535. + - name: host + type: String + description: |- + Host name to connect to, defaults to the model serving container's IP. + You probably want to set "Host" in httpHeaders instead. + - name: scheme + type: String + description: |- + Scheme to use for connecting to the host. + Defaults to HTTP. Acceptable values are "HTTP" or "HTTPS". + - name: httpHeaders + type: Array + description: + Custom headers to set in the request. HTTP allows repeated + headers. + item_type: + type: NestedObject + properties: + - name: value + type: String + description: The header field value + - name: name + type: String + description: |- + The header field name. + This will be canonicalized upon output, so case-variant names will be + understood as the same header. 
+ - name: grpc + type: NestedObject + description: GrpcAction checks the health of a container using a gRPC service. + properties: + - name: port + type: Integer + description: + Port number of the gRPC service. Number must be in the range + 1 to 65535. + - name: service + type: String + description: |- + Service is the name of the service to place in the gRPC + HealthCheckRequest. See + https://github.com/grpc/grpc/blob/master/doc/health-checking.md. + + If this is not specified, the default behavior is defined by gRPC. + - name: tcpSocket + type: NestedObject + description: |- + TcpSocketAction probes the health of a container by opening a TCP socket + connection. + properties: + - name: port + type: Integer + description: |- + Number of the port to access on the container. + Number must be in the range 1 to 65535. + - name: host + type: String + description: |- + Optional: Host name to connect to, defaults to the model serving + container's IP. + - name: timeoutSeconds + type: Integer + description: |- + Number of seconds after which the probe times out. Defaults to 1 second. + Minimum value is 1. Must be greater or equal to period_seconds. + + Maps to Kubernetes probe argument 'timeoutSeconds'. + - name: successThreshold + type: Integer + description: |- + Number of consecutive successes before the probe is considered successful. + Defaults to 1. Minimum value is 1. + + Maps to Kubernetes probe argument 'successThreshold'. + - name: initialDelaySeconds + type: Integer + description: |- + Number of seconds to wait before starting the probe. Defaults to 0. + Minimum value is 0. + + Maps to Kubernetes probe argument 'initialDelaySeconds'. + - name: periodSeconds + type: Integer + description: |- + How often (in seconds) to perform the probe. Default to 10 seconds. + Minimum value is 1. Must be less than timeout_seconds. + + Maps to Kubernetes probe argument 'periodSeconds'. 
+ - name: failureThreshold + type: Integer + description: |- + Number of consecutive failures before the probe is considered failed. + Defaults to 3. Minimum value is 1. + + Maps to Kubernetes probe argument 'failureThreshold'. + - name: healthProbe + type: NestedObject + description: |- + Probe describes a health check to be performed against a container to + determine whether it is alive or ready to receive traffic. + properties: + - name: exec + type: NestedObject + description: ExecAction specifies a command to execute. + properties: + - name: command + type: Array + description: |- + Command is the command line to execute inside the container, the working + directory for the command is root ('/') in the container's filesystem. + The command is simply exec'd, it is not run inside a shell, so + traditional shell instructions ('|', etc) won't work. To use a shell, you + need to explicitly call out to that shell. Exit status of 0 is treated as + live/healthy and non-zero is unhealthy. + item_type: + type: String + - name: httpGet + type: NestedObject + description: HttpGetAction describes an action based on HTTP Get requests. + properties: + - name: path + type: String + description: Path to access on the HTTP server. + - name: port + type: Integer + description: |- + Number of the port to access on the container. + Number must be in the range 1 to 65535. + - name: host + type: String + description: |- + Host name to connect to, defaults to the model serving container's IP. + You probably want to set "Host" in httpHeaders instead. + - name: scheme + type: String + description: |- + Scheme to use for connecting to the host. + Defaults to HTTP. Acceptable values are "HTTP" or "HTTPS". + - name: httpHeaders + type: Array + description: + Custom headers to set in the request. HTTP allows repeated + headers. + item_type: + type: NestedObject + properties: + - name: name + type: String + description: |- + The header field name. 
+ This will be canonicalized upon output, so case-variant names will be + understood as the same header. + - name: value + type: String + description: The header field value + - name: grpc + type: NestedObject + description: GrpcAction checks the health of a container using a gRPC service. + properties: + - name: port + type: Integer + description: + Port number of the gRPC service. Number must be in the range + 1 to 65535. + - name: service + type: String + description: |- + Service is the name of the service to place in the gRPC + HealthCheckRequest. See + https://github.com/grpc/grpc/blob/master/doc/health-checking.md. + + If this is not specified, the default behavior is defined by gRPC. + - name: tcpSocket + type: NestedObject + description: |- + TcpSocketAction probes the health of a container by opening a TCP socket + connection. + properties: + - name: port + type: Integer + description: |- + Number of the port to access on the container. + Number must be in the range 1 to 65535. + - name: host + type: String + description: |- + Optional: Host name to connect to, defaults to the model serving + container's IP. + - name: timeoutSeconds + type: Integer + description: |- + Number of seconds after which the probe times out. Defaults to 1 second. + Minimum value is 1. Must be greater or equal to period_seconds. + + Maps to Kubernetes probe argument 'timeoutSeconds'. + - name: successThreshold + type: Integer + description: |- + Number of consecutive successes before the probe is considered successful. + Defaults to 1. Minimum value is 1. + + Maps to Kubernetes probe argument 'successThreshold'. + - name: initialDelaySeconds + type: Integer + description: |- + Number of seconds to wait before starting the probe. Defaults to 0. + Minimum value is 0. + + Maps to Kubernetes probe argument 'initialDelaySeconds'. + - name: periodSeconds + type: Integer + description: |- + How often (in seconds) to perform the probe. Default to 10 seconds. + Minimum value is 1. 
Must be less than timeout_seconds. + + Maps to Kubernetes probe argument 'periodSeconds'. + - name: failureThreshold + type: Integer + description: |- + Number of consecutive failures before the probe is considered failed. + Defaults to 3. Minimum value is 1. + + Maps to Kubernetes probe argument 'failureThreshold'. + - name: imageUri + type: String + description: |- + URI of the Docker image to be used as the custom container for serving + predictions. This URI must identify an image in Artifact Registry or + Container Registry. Learn more about the [container publishing + requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#publishing), + including permissions requirements for the Vertex AI Service Agent. + + The container image is ingested upon ModelService.UploadModel, stored + internally, and this original path is afterwards not used. + + To learn about the requirements for the Docker image itself, see + [Custom container + requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#). + + You can use the URI to one of Vertex AI's [pre-built container images for + prediction](https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers) + in this field. + immutable: true + required: true + - name: command + type: Array + description: |- + Specifies the command that runs when the container starts. This overrides + the container's + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint). + Specify this field as an array of executable and arguments, similar to a + Docker `ENTRYPOINT`'s "exec" form, not its "shell" form. + + If you do not specify this field, then the container's `ENTRYPOINT` runs, + in conjunction with the args field or the + container's [`CMD`](https://docs.docker.com/engine/reference/builder/#cmd), + if either exists. 
If this field is not specified and the container does not + have an `ENTRYPOINT`, then refer to the Docker documentation about [how + `CMD` and `ENTRYPOINT` + interact](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). + + If you specify this field, then you can also specify the `args` field to + provide additional arguments for this command. However, if you specify this + field, then the container's `CMD` is ignored. See the + [Kubernetes documentation about how the + `command` and `args` fields interact with a container's `ENTRYPOINT` and + `CMD`](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). + + In this field, you can reference [environment variables set by Vertex + AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) + and environment variables set in the env field. + You cannot reference environment variables set in the Docker image. In + order for environment variables to be expanded, reference them by using the + following syntax:$(VARIABLE_NAME) + Note that this differs from Bash variable expansion, which does not use + parentheses. If a variable cannot be resolved, the reference in the input + string is used unchanged. To avoid variable expansion, you can escape this + syntax with `$$`; for example:$$(VARIABLE_NAME) + This field corresponds to the `command` field of the Kubernetes Containers + [v1 core + API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + immutable: true + item_type: + type: String + - name: args + type: Array + description: |- + Specifies arguments for the command that runs when the container starts. + This overrides the container's + [`CMD`](https://docs.docker.com/engine/reference/builder/#cmd). Specify + this field as an array of executable and arguments, similar to a Docker + `CMD`'s "default parameters" form. 
+ + If you don't specify this field but do specify the + command field, then the command from the + `command` field runs without any additional arguments. See the + [Kubernetes documentation about how the + `command` and `args` fields interact with a container's `ENTRYPOINT` and + `CMD`](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). + + If you don't specify this field and don't specify the `command` field, + then the container's + [`ENTRYPOINT`](https://docs.docker.com/engine/reference/builder/#cmd) and + `CMD` determine what runs based on their default behavior. See the Docker + documentation about [how `CMD` and `ENTRYPOINT` + interact](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). + + In this field, you can reference [environment variables + set by Vertex + AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) + and environment variables set in the env field. + You cannot reference environment variables set in the Docker image. In + order for environment variables to be expanded, reference them by using the + following syntax:$(VARIABLE_NAME) + Note that this differs from Bash variable expansion, which does not use + parentheses. If a variable cannot be resolved, the reference in the input + string is used unchanged. To avoid variable expansion, you can escape this + syntax with `$$`; for example:$$(VARIABLE_NAME) + This field corresponds to the `args` field of the Kubernetes Containers + [v1 core + API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + immutable: true + item_type: + type: String + - name: grpcPorts + type: Array + description: |- + List of ports to expose from the container. Vertex AI sends gRPC + prediction requests that it receives to the first port on this list. Vertex + AI also sends liveness and health checks to this port. 
+ + If you do not specify this field, gRPC requests to the container will be + disabled. + + Vertex AI does not use ports other than the first one listed. This field + corresponds to the `ports` field of the Kubernetes Containers v1 core API. + immutable: true + item_type: + type: NestedObject + properties: + - name: containerPort + type: Integer + description: |- + The number of the port to expose on the pod's IP address. + Must be a valid port number, between 1 and 65535 inclusive. + - name: sharedMemorySizeMb + type: String + description: |- + The amount of the VM memory to reserve as the shared memory for the model + in megabytes. + immutable: true + - name: livenessProbe + type: NestedObject + description: |- + Probe describes a health check to be performed against a container to + determine whether it is alive or ready to receive traffic. + properties: + - name: exec + type: NestedObject + description: ExecAction specifies a command to execute. + properties: + - name: command + type: Array + description: |- + Command is the command line to execute inside the container, the working + directory for the command is root ('/') in the container's filesystem. + The command is simply exec'd, it is not run inside a shell, so + traditional shell instructions ('|', etc) won't work. To use a shell, you + need to explicitly call out to that shell. Exit status of 0 is treated as + live/healthy and non-zero is unhealthy. + item_type: + type: String + - name: httpGet + type: NestedObject + description: HttpGetAction describes an action based on HTTP Get requests. + properties: + - name: path + type: String + description: Path to access on the HTTP server. + - name: port + type: Integer + description: |- + Number of the port to access on the container. + Number must be in the range 1 to 65535. + - name: host + type: String + description: |- + Host name to connect to, defaults to the model serving container's IP. + You probably want to set "Host" in httpHeaders instead. 
+ - name: scheme + type: String + description: |- + Scheme to use for connecting to the host. + Defaults to HTTP. Acceptable values are "HTTP" or "HTTPS". + - name: httpHeaders + type: Array + description: + Custom headers to set in the request. HTTP allows repeated + headers. + item_type: + type: NestedObject + properties: + - name: name + type: String + description: |- + The header field name. + This will be canonicalized upon output, so case-variant names will be + understood as the same header. + - name: value + type: String + description: The header field value + - name: grpc + type: NestedObject + description: GrpcAction checks the health of a container using a gRPC service. + properties: + - name: service + type: String + description: |- + Service is the name of the service to place in the gRPC + HealthCheckRequest. See + https://github.com/grpc/grpc/blob/master/doc/health-checking.md. + + If this is not specified, the default behavior is defined by gRPC. + - name: port + type: Integer + description: + Port number of the gRPC service. Number must be in the range + 1 to 65535. + - name: tcpSocket + type: NestedObject + description: |- + TcpSocketAction probes the health of a container by opening a TCP socket + connection. + properties: + - name: port + type: Integer + description: |- + Number of the port to access on the container. + Number must be in the range 1 to 65535. + - name: host + type: String + description: |- + Optional: Host name to connect to, defaults to the model serving + container's IP. + - name: timeoutSeconds + type: Integer + description: |- + Number of seconds after which the probe times out. Defaults to 1 second. + Minimum value is 1. Must be greater or equal to period_seconds. + + Maps to Kubernetes probe argument 'timeoutSeconds'. + - name: successThreshold + type: Integer + description: |- + Number of consecutive successes before the probe is considered successful. + Defaults to 1. Minimum value is 1. 
+ + Maps to Kubernetes probe argument 'successThreshold'. + - name: initialDelaySeconds + type: Integer + description: |- + Number of seconds to wait before starting the probe. Defaults to 0. + Minimum value is 0. + + Maps to Kubernetes probe argument 'initialDelaySeconds'. + - name: periodSeconds + type: Integer + description: |- + How often (in seconds) to perform the probe. Default to 10 seconds. + Minimum value is 1. Must be less than timeout_seconds. + + Maps to Kubernetes probe argument 'periodSeconds'. + - name: failureThreshold + type: Integer + description: |- + Number of consecutive failures before the probe is considered failed. + Defaults to 3. Minimum value is 1. + + Maps to Kubernetes probe argument 'failureThreshold'. + - name: env + type: Array + description: |- + List of environment variables to set in the container. After the container + starts running, code running in the container can read these environment + variables. + + Additionally, the command and + args fields can reference these variables. Later + entries in this list can also reference earlier entries. For example, the + following example sets the variable `VAR_2` to have the value `foo bar`: + + ```json + [ + { + "name": "VAR_1", + "value": "foo" + }, + { + "name": "VAR_2", + "value": "$(VAR_1) bar" + } + ] + ``` + + If you switch the order of the variables in the example, then the expansion + does not occur. + + This field corresponds to the `env` field of the Kubernetes Containers + [v1 core + API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + immutable: true + item_type: + type: NestedObject + properties: + - name: name + type: String + description: Name of the environment variable. Must be a valid C identifier. + required: true + - name: value + type: String + description: |- + Variables that reference a $(VAR_NAME) are expanded + using the previous defined environment variables in the container and + any service environment variables. 
If a variable cannot be resolved, + the reference in the input string will be unchanged. The $(VAR_NAME) + syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped + references will never be expanded, regardless of whether the variable + exists or not. + required: true + - name: acceptEula + type: Boolean + description: |- + Whether the user accepts the End User License Agreement (EULA) + for the model. + - name: huggingFaceAccessToken + type: String + description: |- + The Hugging Face read access token used to access the model + artifacts of gated models. + - name: endpointConfig + type: NestedObject + description: The endpoint config to use for the deployment. + properties: + - name: endpointDisplayName + type: String + description: |- + The user-specified display name of the endpoint. If not set, a + default name will be used. + - name: dedicatedEndpointEnabled + type: Boolean + description: |- + If true, the endpoint will be exposed through a dedicated + DNS [Endpoint.dedicated_endpoint_dns]. Your request to the dedicated DNS + will be isolated from other users' traffic and will have better + performance and reliability. Note: Once you enabled dedicated endpoint, + you won't be able to send request to the shared DNS + {region}-aiplatform.googleapis.com. The limitations will be removed soon. + - name: deployConfig + type: NestedObject + description: The deploy config to use for the deployment. + properties: + - name: systemLabels + type: KeyValuePairs + description: |- + System labels for Model Garden deployments. + These labels are managed by Google and for tracking purposes only. + - name: dedicatedResources + type: NestedObject + description: |- + A description of resources that are dedicated to a DeployedModel or + DeployedIndex, and that need a higher degree of manual configuration. + properties: + - name: machineSpec + type: NestedObject + description: Specification of a single machine. 
+ required: true + properties: + - name: reservationAffinity + type: NestedObject + description: |- + A ReservationAffinity can be used to configure a Vertex AI resource (e.g., a + DeployedModel) to draw its Compute Engine resources from a Shared + Reservation, or exclusively from on-demand capacity. + properties: + - name: reservationAffinityType + type: String + description: |- + Specifies the reservation affinity type. + Possible values: + TYPE_UNSPECIFIED + NO_RESERVATION + ANY_RESERVATION + SPECIFIC_RESERVATION + required: true + - name: key + type: String + description: |- + Corresponds to the label key of a reservation resource. To target a + SPECIFIC_RESERVATION by name, use `compute.googleapis.com/reservation-name` + as the key and specify the name of your reservation as its value. + - name: values + type: Array + description: |- + Corresponds to the label values of a reservation resource. This must be the + full resource name of the reservation or reservation block. + item_type: + type: String + - name: machineType + type: String + description: |- + The type of the machine. + + See the [list of machine types supported for + prediction](https://cloud.google.com/vertex-ai/docs/predictions/configure-compute#machine-types) + + See the [list of machine types supported for custom + training](https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types). + + For DeployedModel this field is optional, and the default + value is `n1-standard-2`. For BatchPredictionJob or as part of + WorkerPoolSpec this field is required. 
+ immutable: true + - name: acceleratorType + type: String + description: |2- + + Possible values: + ACCELERATOR_TYPE_UNSPECIFIED + NVIDIA_TESLA_K80 + NVIDIA_TESLA_P100 + NVIDIA_TESLA_V100 + NVIDIA_TESLA_P4 + NVIDIA_TESLA_T4 + NVIDIA_TESLA_A100 + NVIDIA_A100_80GB + NVIDIA_L4 + NVIDIA_H100_80GB + NVIDIA_H100_MEGA_80GB + NVIDIA_H200_141GB + NVIDIA_B200 + TPU_V2 + TPU_V3 + TPU_V4_POD + TPU_V5_LITEPOD + - name: acceleratorCount + type: Integer + description: The number of accelerators to attach to the machine. + - name: tpuTopology + type: String + description: |- + The topology of the TPUs. Corresponds to the TPU topologies available from + GKE. (Example: tpu_topology: "2x2x1"). + immutable: true + - name: multihostGpuNodeCount + type: Integer + description: The number of nodes per replica for multihost GPU deployments. + immutable: true + - name: minReplicaCount + type: Integer + description: |- + The minimum number of machine replicas that will be always deployed on. + This value must be greater than or equal to 1. + + If traffic increases, it may dynamically be deployed onto more replicas, + and as traffic decreases, some of these extra replicas may be freed. + immutable: true + required: true + - name: maxReplicaCount + type: Integer + description: |- + The maximum number of replicas that may be deployed on when the traffic + against it increases. If the requested value is too large, the deployment + will error, but if deployment succeeds then the ability to scale to that + many replicas is guaranteed (barring service outages). If traffic increases + beyond what its replicas at maximum may handle, a portion of the traffic + will be dropped. If this value is not provided, will use + min_replica_count as the default value. + + The value of this field impacts the charge against Vertex CPU and GPU + quotas. 
Specifically, you will be charged for (max_replica_count * + number of cores in the selected machine type) and (max_replica_count * + number of GPUs per replica in the selected machine type). + immutable: true + - name: requiredReplicaCount + type: Integer + description: |- + Number of required available replicas for the deployment to succeed. + This field is only needed when partial deployment/mutation is + desired. If set, the deploy/mutate operation will succeed once + available_replica_count reaches required_replica_count, and the rest of + the replicas will be retried. If not set, the default + required_replica_count will be min_replica_count. + - name: autoscalingMetricSpecs + type: Array + description: |- + The metric specifications that overrides a resource + utilization metric (CPU utilization, accelerator's duty cycle, and so on) + target value (default to 60 if not set). At most one entry is allowed per + metric. + + If machine_spec.accelerator_count is + above 0, the autoscaling will be based on both CPU utilization and + accelerator's duty cycle metrics and scale up when either metrics exceeds + its target value while scale down if both metrics are under their target + value. The default target value is 60 for both metrics. + + If machine_spec.accelerator_count is + 0, the autoscaling will be based on CPU utilization metric only with + default target value 60 if not explicitly set. + + For example, in the case of Online Prediction, if you want to override + target CPU utilization to 80, you should set + autoscaling_metric_specs.metric_name + to `aiplatform.googleapis.com/prediction/online/cpu/utilization` and + autoscaling_metric_specs.target to `80`. + immutable: true + item_type: + type: NestedObject + properties: + - name: metricName + type: String + description: |- + The resource metric name. 
+ Supported metrics: + + * For Online Prediction: + * `aiplatform.googleapis.com/prediction/online/accelerator/duty_cycle` + * `aiplatform.googleapis.com/prediction/online/cpu/utilization` + required: true + - name: target + type: Integer + description: |- + The target resource utilization in percentage (1% - 100%) for the given + metric; once the real usage deviates from the target by a certain + percentage, the machine replicas change. The default value is 60 + (representing 60%) if not provided. + - name: spot + type: Boolean + description: |- + If true, schedule the deployment workload on [spot + VMs](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms). + - name: fastTryoutEnabled + type: Boolean + description: If true, enable the QMT fast tryout feature for this model if possible. diff --git a/mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl new file mode 100644 index 000000000000..3f778965209f --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl @@ -0,0 +1,9 @@ +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}" { + publisher_model_name = "publishers/google/models/paligemma@paligemma-224-float32" + location = "us-central1" + model_config { + accept_eula = true + } +} + +data "google_project" "project" {} diff --git a/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl b/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl new file mode 100644 index 000000000000..d7d88c74ed80 --- /dev/null +++ b/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl @@ -0,0 +1,59 @@ +log.Printf("[DEBUG] Beginning post_create for Vertex AI Endpoint with Model Garden Deployment") + +// Log Terraform resource data +log.Printf("[DEBUG] Terraform Resource Data (d): ID=%s", d.Id()) +for key, 
val := range d.State().Attributes { + log.Printf("[DEBUG] d.State().Attributes[%s] = %s", key, val) +} +log.Printf("[DEBUG] d.Get(\"project\") = %v", d.Get("project")) +log.Printf("[DEBUG] d.Get(\"location\") = %v", d.Get("location")) +log.Printf("[DEBUG] d.Get(\"publisher_model_name\") = %v", d.Get("publisher_model_name")) + +// Log res structure +log.Printf("[DEBUG] Top-level keys in res:") +for k := range res { + log.Printf("[DEBUG] - %s", k) +} + +// Declare and populate opRes +var opRes map[string]interface{} +err = VertexAIOperationWaitTimeWithResponse( + config, res, &opRes, d.Get("project").(string), "Vertex AI deployModel operation", userAgent, + d.Timeout(schema.TimeoutCreate), +) +if err != nil { + d.SetId("") + return fmt.Errorf("Error waiting for deploy operation: %s", err) +} + +// Log keys in opRes +log.Printf("[DEBUG] opRes successfully retrieved. Keys:") +for k := range opRes { + log.Printf("[DEBUG] - %s", k) +} + +// Extract full endpoint resource name +endpointFull, ok := opRes["endpoint"].(string) +if !ok || endpointFull == "" { + log.Printf("[ERROR] 'endpoint' not found or empty in opRes. Full opRes: %#v", opRes) + return fmt.Errorf("Create response didn't contain 'endpoint'. Create may not have succeeded.") +} +log.Printf("[DEBUG] Extracted full endpoint from opRes: %s", endpointFull) + +// Check format and extract endpoint name without strict project name match +parts := strings.Split(endpointFull, "/") +if len(parts) != 6 || parts[0] != "projects" || parts[2] != "locations" || parts[4] != "endpoints" { + log.Printf("[ERROR] Unexpected endpoint format. 
Got: %s", endpointFull) + return fmt.Errorf("unexpected format for endpoint: %s", endpointFull) +} +endpoint := parts[5] +log.Printf("[DEBUG] Parsed endpoint ID: %s", endpoint) + +// Set Terraform fields +if err := d.Set("endpoint", endpoint); err != nil { + return fmt.Errorf("Error setting endpoint: %s", err) +} +d.SetId(endpointFull) +log.Printf("[DEBUG] Set Terraform resource ID to: %s", endpointFull) + +return nil diff --git a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go new file mode 100644 index 000000000000..c3b3505329bd --- /dev/null +++ b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go @@ -0,0 +1,80 @@ +package vertexai_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccVertexAIEndpointWithModelGardenDeployment_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: nil, // the resource does not yet support deletion + Steps: []resource.TestStep{ + { + Config: testAccVertexAIEndpointWithModelGardenDeployment_basic(context), + }, + }, + }) +} + +func testAccVertexAIEndpointWithModelGardenDeployment_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "test" { + publisher_model_name = "publishers/google/models/paligemma@paligemma-224-float32" + location = "us-central1" + model_config { + accept_eula = true + } +} +`, context) +} + +func TestAccVertexAIEndpointWithModelGardenDeployment_withConfigs(t *testing.T) { + t.Parallel() + + context := 
map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: nil, + Steps: []resource.TestStep{ + { + Config: testAccVertexAIEndpointWithModelGardenDeployment_withConfigs(context), + }, + }, + }) +} + +func testAccVertexAIEndpointWithModelGardenDeployment_withConfigs(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "test_with_configs" { + publisher_model_name = "publishers/google/models/paligemma@paligemma-224-float32" + location = "us-central1" + model_config { + accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-16" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } +} +`, context) +} From 35f8b242628dc96a24654cb9f4b300e488b9b4ae Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 16 Jul 2025 16:21:40 -0700 Subject: [PATCH 556/884] Updated ignore_read docs (#14549) --- docs/content/reference/field.md | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index 29428458cecd..1f24ec8abc06 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -130,13 +130,8 @@ on the user's configuration. If false or unset, the provider sets the field's value in the resource state based on the API response. Only use this attribute if the field cannot be read from GCP due to either API or provider constraints. 
-Nested fields currently -[do not support `ignore_read`](https://github.com/hashicorp/terraform-provider-google/issues/12410) -but can replicate the behavior by implementing a -[`custom_flatten`]({{< ref "/develop/custom-code#custom_flatten" >}}) -that always ignores the value returned by the API. [Example](https://github.com/GoogleCloudPlatform/magic-modules/blob/5923d4cb878396a04bed9beaf22a8478e8b1e6a5/mmv1/templates/terraform/custom_flatten/source_representation_instance_configuration_password.go.tmpl). -Any fields using a custom flatten also need to be added to `ignore_read_extra` -for any examples where the field is set. +`ignore_read` is currently not supported inside arrays of nested objects. See [tpg#23630](https://github.com/hashicorp/terraform-provider-google/issues/23630) +for details and workarounds. Example: YAML ``` ignore_read: true ``` -Example: Custom flatten - -```go -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return d.Get("password") -} -``` ### `default_value` Sets a client-side default value for the field. This should be used if the API has a default value that applies in all cases and is stable. 
Removing From 1e9e374c7397e234777c4b255dd2924531fc0daf Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 16 Jul 2025 17:06:56 -0700 Subject: [PATCH 557/884] tgc-revival: add google_compute_health_check (#14550) --- mmv1/products/compute/HealthCheck.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmv1/products/compute/HealthCheck.yaml b/mmv1/products/compute/HealthCheck.yaml index 5c51747e8aa0..5e7092ec02c9 100644 --- a/mmv1/products/compute/HealthCheck.yaml +++ b/mmv1/products/compute/HealthCheck.yaml @@ -51,6 +51,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: constants: 'templates/terraform/constants/health_check.tmpl' encoder: 'templates/terraform/encoders/health_check_type.tmpl' @@ -113,11 +114,13 @@ examples: min_version: 'beta' vars: health_check_name: 'grpc-with-tls-health-check' + tgc_skip_test: 'grpcTlsHealthCheck is not in CAI asset, but is required in this test.' - name: 'health_check_grpc_with_tls_full' primary_resource_id: 'grpc-with-tls-health-check' min_version: 'beta' vars: health_check_name: 'grpc-with-tls-health-check' + tgc_skip_test: 'grpcTlsHealthCheck is not in CAI asset, but is required in this test.' 
- name: 'health_check_with_logging' primary_resource_id: 'health-check-with-logging' min_version: 'beta' @@ -202,6 +205,7 @@ properties: type: String min_size: 3 max_size: 3 + is_missing_in_cai: true - name: 'unhealthyThreshold' type: Integer description: | @@ -895,6 +899,7 @@ properties: - 'grpc_health_check' - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' + is_missing_in_cai: true properties: - name: 'port' type: Integer From d72b533f650bc8a6b0581a791095c5b833dd8762 Mon Sep 17 00:00:00 2001 From: sachin purohit Date: Wed, 16 Jul 2025 17:29:19 -0700 Subject: [PATCH 558/884] Fixing hive partitioning diff (#14483) --- .../bigquery/resource_bigquery_table.go.tmpl | 71 ++++- .../bigquery/resource_bigquery_table_test.go | 270 ++++++++++++------ .../docs/r/bigquery_table.html.markdown | 2 + 3 files changed, 256 insertions(+), 87 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index 150ab98fa966..88721b0b5e40 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -1345,6 +1345,13 @@ func ResourceBigQueryTable() *schema.Resource { Description: `Whether Terraform will be prevented from destroying the instance. When the field is set to true or unset in Terraform state, a terraform apply or terraform destroy that would delete the table will fail. When the field is set to false, deleting the table is allowed.`, }, + "ignore_auto_generated_schema": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `Whether Terraform will prevent implicitly added columns in schema from showing diff.`, + }, + // TableConstraints: [Optional] Defines the primary key and foreign keys. 
"table_constraints": { Type: schema.TypeList, @@ -1594,6 +1601,49 @@ func ResourceBigQueryTable() *schema.Resource { } } +// filterLiveSchemaByConfig compares a live schema from the BQ API with a schema from +// the Terraform config. It returns a new schema containing only the fields +// that are defined in the config, effectively removing any columns that were +// auto-generated by the service (e.g., hive partitioning keys). +// +// Parameters: +// - liveSchema: The schema returned from a BigQuery API Read/Get call. This may contain extra columns. +// - configSchema: The schema built from the user's Terraform configuration (`d.Get("schema")`). This is the source of truth. +// +// Returns: +// +// A new *bigquery.TableSchema containing a filtered list of fields. +func filterLiveSchemaByConfig(liveSchema *bigquery.TableSchema, configSchema *bigquery.TableSchema) *bigquery.TableSchema { + if liveSchema == nil || configSchema == nil { + // If either schema is nil, there's nothing to compare, so return an empty schema. + return &bigquery.TableSchema{Fields: []*bigquery.TableFieldSchema{}} + } + + // 1. Create a lookup map of all column names defined in the configuration. + // This provides fast O(1) average time complexity for lookups. + configFieldsMap := make(map[string]bool) + for _, field := range configSchema.Fields { + configFieldsMap[field.Name] = true + } + + // 2. Iterate through the fields in the live schema and keep only the ones + // that exist in our configuration map. + var filteredFields []*bigquery.TableFieldSchema + for _, liveField := range liveSchema.Fields { + // If the live field's name is present in the map of configured fields... + if _, ok := configFieldsMap[liveField.Name]; ok { + // ...then it's a field we care about. Add it to our filtered list. 
+ filteredFields = append(filteredFields, liveField) + } else { + log.Printf("[DEBUG] auto-generated column `%s` dropped during Table read.", liveField.Name) + } + } + + return &bigquery.TableSchema{ + Fields: filteredFields, + } +} + func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, error) { config := meta.(*transport_tpg.Config) @@ -1983,7 +2033,17 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { } if res.Schema != nil { - schema, err := flattenSchema(res.Schema) + table, err := resourceTable(d, meta) + if err != nil { + return err + } + + schemaFiltered := res.Schema + ignore, ok := d.Get("ignore_auto_generated_schema").(bool) + if ok && ignore { + schemaFiltered = filterLiveSchemaByConfig(res.Schema, table.Schema) + } + schema, err := flattenSchema(schemaFiltered) if err != nil { return err } @@ -2070,7 +2130,7 @@ type TableReference struct { func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error { // If only client-side fields were modified, short-circuit the Update function to avoid sending an update API request. 
- clientSideFields := map[string]bool{"deletion_protection": true, "ignore_schema_changes": true, "table_metadata_view": true} + clientSideFields := map[string]bool{"deletion_protection": true, "ignore_schema_changes": true, "ignore_auto_generated_schema": true, "table_metadata_view": true} clientSideOnly := true for field := range ResourceBigQueryTable().Schema { if d.HasChange(field) && !clientSideFields[field] { @@ -2118,8 +2178,11 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error tableID: tableID, } - if err = resourceBigQueryTableColumnDrop(config, userAgent, table, tableReference, tableMetadataView); err != nil { - return err + // If we are supposed to ignore server generated schema columns, we don't need to drop them + if !d.Get("ignore_auto_generated_schema").(bool) { + if err = resourceBigQueryTableColumnDrop(config, userAgent, table, tableReference, tableMetadataView); err != nil { + return err + } } if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, tableID, table).Do(); err != nil { diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index 12928ead405e..80150016530a 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -30,7 +30,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -39,7 +39,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -70,7 +70,7 @@ func TestAccBigQueryTable_IgnoreSchemaDataPoliciesChanges(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_schema_changes"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "ignore_schema_changes"}, }, { Config: testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName2), @@ -84,7 +84,7 @@ func TestAccBigQueryTable_IgnoreSchemaDataPoliciesChanges(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_schema_changes"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "ignore_schema_changes"}, }, }, }) @@ -108,7 +108,7 @@ func TestAccBigQueryTable_TableMetadataView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "last_modified_time", "table_metadata_view"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "last_modified_time", "table_metadata_view"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -117,7 +117,7 @@ func TestAccBigQueryTable_TableMetadataView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "last_modified_time", "table_metadata_view"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "last_modified_time", "table_metadata_view"}, }, }, }) @@ -141,7 +141,7 @@ func 
TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableBasicSchema(datasetID, tableID), @@ -150,7 +150,7 @@ func TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -174,7 +174,7 @@ func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableTimePartitioningWithExpirationMs(datasetID, tableID, 2000), @@ -183,7 +183,7 @@ func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -207,7 +207,7 @@ func TestAccBigQueryTable_DropColumns(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableTimePartitioningDropColumnsUpdate(datasetID, tableID), @@ -216,7 +216,7 @@ func TestAccBigQueryTable_DropColumns(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: 
true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -242,7 +242,7 @@ func TestAccBigQueryTable_Kms(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -266,7 +266,7 @@ func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -275,7 +275,7 @@ func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -299,7 +299,7 @@ func TestAccBigQueryTable_MonthlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -308,7 +308,7 @@ func TestAccBigQueryTable_MonthlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -332,7 +332,7 @@ func 
TestAccBigQueryTable_YearlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -341,7 +341,7 @@ func TestAccBigQueryTable_YearlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -366,7 +366,7 @@ func TestAccBigQueryTable_HivePartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -391,7 +391,7 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "new-label"), @@ -400,7 +400,7 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -426,7 +426,7 @@ func 
TestAccBigQueryTable_AvroPartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -452,7 +452,7 @@ func TestAccBigQueryBigLakeManagedTable(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -520,7 +520,7 @@ func TestAccBigQueryExternalDataTable_json(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableJson(datasetID, tableID, bucketName, "UTF-16BE"), @@ -547,7 +547,7 @@ func TestAccBigQueryTable_RangePartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -571,7 +571,7 @@ func TestAccBigQueryTable_PrimaryKey(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -598,7 +598,7 @@ func TestAccBigQueryTable_ForeignKey(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, 
}) @@ -625,7 +625,7 @@ func TestAccBigQueryTable_updateTableConstraints(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableTableConstraintsUpdate(projectID, datasetID, tableID_pk, tableID_fk), @@ -634,7 +634,7 @@ func TestAccBigQueryTable_updateTableConstraints(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -658,7 +658,7 @@ func TestAccBigQueryTable_View(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -682,7 +682,7 @@ func TestAccBigQueryTable_updateView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableWithNewSqlView(datasetID, tableID), @@ -691,7 +691,7 @@ func TestAccBigQueryTable_updateView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -715,7 +715,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + 
ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description2"), @@ -724,7 +724,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -751,13 +751,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Basic(t *testing. ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableWithMatViewDailyTimePartitioning_basic(datasetID, tableID, materialized_viewID, queryNew), @@ -766,13 +766,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Basic(t *testing. 
ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -802,13 +802,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableWithMatViewDailyTimePartitioning(datasetID, tableID, materialized_viewID, enable_refresh, refresh_interval_ms, query), @@ -817,13 +817,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -850,13 +850,13 @@ func TestAccBigQueryTable_MaterializedView_NonIncremental_basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, }, }, }) @@ -1208,6 +1208,45 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_InvalidSchemas(t *testing.T }) } +func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionIDAndHivePartitioning(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + objectName := fmt.Sprintf("country_partitioned=US/tf_test_%s.csv", acctest.RandString(t, 10)) + + datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) + tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) + connectionID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) + + projectID := envvar.GetTestProjectFromEnv() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionIdAndHivePartitioning(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "schema"}, + }, + { + Config: testAccBigQueryTableFromGCSWithSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "schema"}, + }, + }, + }) +} + func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConnectionID(t *testing.T) { t.Parallel() @@ -1232,7 +1271,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConn ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableFromGCSWithSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1241,7 +1280,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConn ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", 
"ignore_auto_generated_schema"}, }, }, }) @@ -1271,7 +1310,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionId(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1280,7 +1319,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionId2(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1289,7 +1328,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1316,7 +1355,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateAllowQuotedNewlines(t ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", 
"ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableFromGCSWithSchema_UpdatAllowQuotedNewlines(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1325,7 +1364,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateAllowQuotedNewlines(t ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1353,7 +1392,7 @@ func TestAccBigQueryDataTable_bigtable(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1381,7 +1420,7 @@ func TestAccBigQueryDataTable_bigtable_options(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableFromBigtable(context), @@ -1409,7 +1448,7 @@ func TestAccBigQueryDataTable_sheet(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1433,7 +1472,7 @@ func TestAccBigQueryDataTable_jsonEquivalency(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", 
"deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTable_jsonEqModeRemoved(datasetID, tableID), @@ -1442,7 +1481,7 @@ func TestAccBigQueryDataTable_jsonEquivalency(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, }, }, }) @@ -1492,7 +1531,7 @@ func TestAccBigQueryDataTable_expandArray(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTable_arrayExpanded(datasetID, tableID), @@ -1501,7 +1540,7 @@ func TestAccBigQueryDataTable_expandArray(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, }, }, }) @@ -1525,7 +1564,7 @@ func TestAccBigQueryTable_allowDestroy(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, }, { Config: 
testAccBigQueryTable_noAllowDestroy(datasetID, tableID), @@ -1557,7 +1596,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTable_emptySchema(datasetID, tableID), @@ -1566,7 +1605,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1591,7 +1630,7 @@ func TestAccBigQueryTable_Update_SchemaWithoutPolicyTagsToWithPolicyTags(t *test ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableBasicSchemaWithPolicyTags(datasetID, tableID, projectID), @@ -1600,7 +1639,7 @@ func TestAccBigQueryTable_Update_SchemaWithoutPolicyTagsToWithPolicyTags(t *test ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1625,7 +1664,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToNoPolicyTag(t *testing.T) ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableBasicSchema(datasetID, tableID), @@ -1634,7 +1673,7 @@ func 
TestAccBigQueryTable_Update_SchemaWithPolicyTagsToNoPolicyTag(t *testing.T) ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1659,7 +1698,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTag(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableBasicSchemaWithEmptyPolicyTags(datasetID, tableID), @@ -1668,7 +1707,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTag(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1693,7 +1732,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTagNames(t *te ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableBasicSchemaWithEmptyPolicyTagNames(datasetID, tableID), @@ -1702,7 +1741,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTagNames(t *te ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1801,7 +1840,7 @@ func TestAccBigQueryTable_TableReplicationInfo_WithoutReplicationInterval(t *tes 
ResourceName: "google_bigquery_table.replica_mv", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1836,7 +1875,7 @@ func TestAccBigQueryTable_TableReplicationInfo_WithReplicationInterval(t *testin ResourceName: "google_bigquery_table.replica_mv", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1867,7 +1906,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTableWithResourceTagsUpdate(context), @@ -1876,7 +1915,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, // testAccBigQueryTableWithResourceTagsDestroy must be called at the end of this test to clear the resource tag bindings of the table before deletion. 
{ @@ -1886,7 +1925,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1914,7 +1953,7 @@ func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, { Config: testAccBigQueryTable_externalCatalogTableOptions_update(context), @@ -1923,7 +1962,7 @@ func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -1950,7 +1989,7 @@ func TestAccBigQueryTable_foreignTypeInfo(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, }, }, }) @@ -3150,17 +3189,17 @@ resource "google_bigquery_table" "test" { table_id = "%s" dataset_id = google_bigquery_dataset.test.dataset_id external_data_configuration { - connection_id = local.connection_id_reformatted + connection_id = local.connection_id_reformatted autodetect = false source_format = "PARQUET" source_uris = [ "gs://${google_storage_bucket.test.name}/*", ] - metadata_cache_mode = "%s" - hive_partitioning_options { - source_uri_prefix = "gs://${google_storage_bucket.test.name}/" - } + metadata_cache_mode = "%s" + hive_partitioning_options { + source_uri_prefix = 
"gs://${google_storage_bucket.test.name}/" + } } max_staleness = "%s" @@ -3550,6 +3589,70 @@ resource "google_bigquery_table" "test" { `, connectionID, datasetID, bucketName, objectName, tableID, maxStaleness) } +func testAccBigQueryTableFromGCSWithSchemaWithConnectionIdAndHivePartitioning(datasetID, tableID, connectionID, projectID, bucketName, objectName, content, schema string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "%s" +} +resource "google_storage_bucket" "test" { + name = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true +} +resource "google_storage_bucket_object" "test" { + name = "%s" + content = < Date: Thu, 17 Jul 2025 16:48:59 +0200 Subject: [PATCH 559/884] Add support for secure tags to (hierarchical) FirewallPolicyWithRules (#14380) Co-authored-by: Luca Prete --- .../compute/FirewallPolicyWithRules.yaml | 126 +++++++++++ ...te_firewall_policy_with_rules_full.tf.tmpl | 43 ++++ ...compute_firewall_policy_with_rules_test.go | 195 +++++++++++++----- 3 files changed, 311 insertions(+), 53 deletions(-) diff --git a/mmv1/products/compute/FirewallPolicyWithRules.yaml b/mmv1/products/compute/FirewallPolicyWithRules.yaml index 01a7e12baf82..693d15691d41 100644 --- a/mmv1/products/compute/FirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/FirewallPolicyWithRules.yaml @@ -46,6 +46,8 @@ examples: network: 'network' security_profile: 'sp' security_profile_group: 'spg' + tag_key: 'tag-key' + tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' parameters: @@ -206,6 +208,33 @@ properties: The IPs in these lists will be matched against traffic destination. item_type: type: String + - name: 'srcSecureTag' + type: Array + description: | + List of secure tag values, which should be matched at the source + of the traffic. + For INGRESS rule, if all the srcSecureTag are INEFFECTIVE, + and there is no srcIpRange, this rule will be ignored. 
+ Maximum number of source tag values allowed is 256. + api_name: srcSecureTags + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the secure tag, created with TagManager's TagValue API. + @pattern tagValues/[0-9]+ + - name: 'state' + type: Enum + description: | + [Output Only] State of the secure tag, either `EFFECTIVE` or + `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted + or its network is deleted. + output: true + enum_values: + - 'EFFECTIVE' + - 'INEFFECTIVE' - name: 'layer4Config' type: Array description: | @@ -235,6 +264,39 @@ properties: ["12345-12349"]. item_type: type: String + - name: 'targetSecureTag' + type: Array + description: | + A list of secure tags that controls which instances the firewall rule + applies to. If targetSecureTag are specified, then the + firewall rule applies only to instances in the VPC network that have one + of those EFFECTIVE secure tags, if all the target_secure_tag are in + INEFFECTIVE state, then this rule will be ignored. + targetSecureTag may not be set at the same time as + targetServiceAccounts. + If neither targetServiceAccounts nor + targetSecureTag are specified, the firewall rule applies + to all instances on the specified network. + Maximum number of target secure tags allowed is 256. + api_name: targetSecureTags + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the secure tag, created with TagManager's TagValue API. + @pattern tagValues/[0-9]+ + - name: 'state' + type: Enum + description: | + [Output Only] State of the secure tag, either `EFFECTIVE` or + `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted + or its network is deleted. 
+ output: true + enum_values: + - 'EFFECTIVE' + - 'INEFFECTIVE' - name: 'action' type: String description: | @@ -436,6 +498,70 @@ properties: output: true item_type: type: String + - name: 'srcSecureTag' + type: Array + description: | + List of secure tag values, which should be matched at the source + of the traffic. + For INGRESS rule, if all the srcSecureTag are INEFFECTIVE, + and there is no srcIpRange, this rule will be ignored. + Maximum number of source tag values allowed is 256. + api_name: srcSecureTags + output: true + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the secure tag, created with TagManager's TagValue API. + @pattern tagValues/[0-9]+ + output: true + - name: 'state' + type: Enum + description: | + [Output Only] State of the secure tag, either `EFFECTIVE` or + `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted + or its network is deleted. + output: true + enum_values: + - 'EFFECTIVE' + - 'INEFFECTIVE' + - name: 'targetSecureTag' + type: Array + description: | + A list of secure tags that controls which instances the firewall rule + applies to. If targetSecureTag are specified, then the + firewall rule applies only to instances in the VPC network that have one + of those EFFECTIVE secure tags, if all the target_secure_tag are in + INEFFECTIVE state, then this rule will be ignored. + targetSecureTag may not be set at the same time as + targetServiceAccounts. + If neither targetServiceAccounts nor + targetSecureTag are specified, the firewall rule applies + to all instances on the specified network. + Maximum number of target secure tags allowed is 256. + api_name: targetSecureTags + output: true + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + Name of the secure tag, created with TagManager's TagValue API. 
+ @pattern tagValues/[0-9]+ + output: true + - name: 'state' + type: Enum + description: | + [Output Only] State of the secure tag, either `EFFECTIVE` or + `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted + or its network is deleted. + output: true + enum_values: + - 'EFFECTIVE' + - 'INEFFECTIVE' - name: 'action' type: String description: | diff --git a/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl index 4c83c878312d..c5956b2b34a1 100644 --- a/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_firewall_policy_with_rules_full.tf.tmpl @@ -68,6 +68,32 @@ resource "google_compute_firewall_policy_with_rules" "{{$.PrimaryResourceId}}" { } } } + + rule { + description = "secure tags" + rule_name = "secure tags rule" + priority = 4000 + enable_logging = false + action = "allow" + direction = "INGRESS" + + target_secure_tag { + name = google_tags_tag_value.basic_value.id + } + + match { + src_ip_ranges = ["11.100.0.1/32"] + + src_secure_tag { + name = google_tags_tag_value.basic_value.id + } + + layer4_config { + ip_protocol = "tcp" + ports = [8080] + } + } + } } resource "google_network_security_address_group" "address_group_1" { @@ -98,3 +124,20 @@ resource "google_compute_network" "network" { name = "{{index $.Vars "network"}}" auto_create_subnetworks = false } + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + purpose = "GCE_FIREWALL" + short_name = "{{index $.Vars "tag_key"}}" + + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." 
+ parent = google_tags_tag_key.basic_key.id + short_name = "{{index $.Vars "tag_value"}}" +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go index 6c2ae3a26336..7423fee3b20f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go @@ -48,63 +48,92 @@ data "google_project" "project" { } resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules" { - short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" + short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" description = "Terraform test" - parent = "organizations/%{org_id}" + parent = "organizations/%{org_id}" rule { - description = "tcp rule" - priority = 1000 - enable_logging = true - action = "allow" - direction = "EGRESS" + description = "tcp rule" + priority = 1000 + enable_logging = true + action = "allow" + direction = "EGRESS" + target_resources = [google_compute_network.network.self_link] + match { + dest_ip_ranges = ["11.100.0.1/32"] + dest_fqdns = ["www.yyy.com", "www.zzz.com"] + dest_region_codes = ["HK", "IN"] + dest_threat_intelligences = ["iplist-search-engines-crawlers", "iplist-tor-exit-nodes"] + dest_address_groups = [google_network_security_address_group.address_group_1.id] + layer4_config { ip_protocol = "tcp" ports = [8080, 7070] } - dest_ip_ranges = ["11.100.0.1/32"] - dest_fqdns = ["www.yyy.com", "www.zzz.com"] - dest_region_codes = ["HK", "IN"] - dest_threat_intelligences = ["iplist-search-engines-crawlers", "iplist-tor-exit-nodes"] - dest_address_groups = [google_network_security_address_group.address_group_1.id] } - target_resources = [google_compute_network.network.self_link] } + rule { description = "udp rule" priority = 2000 
enable_logging = false action = "deny" direction = "INGRESS" + disabled = true + match { + src_ip_ranges = ["0.0.0.0/0"] + src_fqdns = ["www.abc.com", "www.def.com"] + src_region_codes = ["US", "CA"] + src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] + src_address_groups = [google_network_security_address_group.address_group_1.id] + layer4_config { ip_protocol = "udp" } + } + } + + rule { + description = "security profile group rule" + rule_name = "tcp rule" + priority = 3000 + enable_logging = false + action = "apply_security_profile_group" + direction = "INGRESS" + target_service_accounts = ["test@google.com"] + security_profile_group = "//networksecurity.googleapis.com/${google_network_security_security_profile_group.security_profile_group_1.id}" + tls_inspect = true + + match { src_ip_ranges = ["0.0.0.0/0"] - src_fqdns = ["www.abc.com", "www.def.com"] - src_region_codes = ["US", "CA"] - src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] - src_address_groups = [google_network_security_address_group.address_group_1.id] + + layer4_config { + ip_protocol = "tcp" + } } - disabled = true } + rule { - description = "security profile group rule" - rule_name = "tcp rule" - priority = 3000 + description = "secure tags" + rule_name = "secure tags" + priority = 4000 enable_logging = false - action = "apply_security_profile_group" + action = "allow" direction = "INGRESS" + match { + src_ip_ranges = ["0.0.0.0/0"] + + src_secure_tag { + name = google_tags_tag_value.basic_value.id + } + layer4_config { ip_protocol = "tcp" } - src_ip_ranges = ["0.0.0.0/0"] } - target_service_accounts = ["test@google.com"] - security_profile_group = "//networksecurity.googleapis.com/${google_network_security_security_profile_group.security_profile_group_1.id}" - tls_inspect = true } } @@ -126,10 +155,27 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource 
"google_network_security_security_profile" "security_profile_1" { - name = "tf-test-tf-security-profile%{random_suffix}" - type = "THREAT_PREVENTION" + name = "tf-test-tf-security-profile%{random_suffix}" + type = "THREAT_PREVENTION" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." parent = "organizations/%{org_id}" - location = "global" + purpose = "GCE_FIREWALL" + short_name = "tf-test-tagkey-%{random_suffix}" + + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." + parent = google_tags_tag_key.basic_key.id + short_name = "tf-test-tagvalue-%{random_suffix}" } resource "google_compute_network" "network" { @@ -146,9 +192,9 @@ data "google_project" "project" { } resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules" { - short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" + short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" description = "Terraform test - update" - parent = "organizations/%{org_id}" + parent = "organizations/%{org_id}" rule { description = "tcp rule - update" @@ -157,35 +203,61 @@ resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules enable_logging = false action = "deny" direction = "INGRESS" + match { + src_ip_ranges = ["11.100.0.1/32", "0.0.0.0/0"] + src_fqdns = ["www.yyy.com"] + src_region_codes = ["HK"] + src_threat_intelligences = ["iplist-search-engines-crawlers"] + layer4_config { ip_protocol = "udp" ports = [8080] } - src_ip_ranges = ["11.100.0.1/32", "0.0.0.0/0"] - src_fqdns = ["www.yyy.com"] - src_region_codes = ["HK"] - src_threat_intelligences = ["iplist-search-engines-crawlers"] } } + rule { - description = "udp rule" - priority = 3000 - enable_logging = false - action = "deny" - direction = "INGRESS" - match { - layer4_config { - ip_protocol = "all" - } - 
src_ip_ranges = ["0.0.0.0/0"] - src_fqdns = ["www.abc.com", "www.xyz.com"] - src_region_codes = ["US", "CA", "FR"] - src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] - src_address_groups = [google_network_security_address_group.address_group_1.id] + description = "udp rule" + priority = 3000 + enable_logging = false + action = "deny" + direction = "INGRESS" + disabled = false + + match { + src_ip_ranges = ["0.0.0.0/0"] + src_fqdns = ["www.abc.com", "www.xyz.com"] + src_region_codes = ["US", "CA", "FR"] + src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] + src_address_groups = [google_network_security_address_group.address_group_1.id] + + layer4_config { + ip_protocol = "all" } - disabled = false } + } + + rule { + description = "secure tags" + rule_name = "secure tags" + priority = 4000 + enable_logging = false + action = "allow" + direction = "INGRESS" + + target_secure_tag { + name = google_tags_tag_value.basic_value.id + } + + match { + src_ip_ranges = ["0.0.0.0/0"] + + layer4_config { + ip_protocol = "tcp" + } + } + } } resource "google_network_security_address_group" "address_group_1" { @@ -206,10 +278,27 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - name = "tf-test-tf-security-profile%{random_suffix}" - type = "THREAT_PREVENTION" + name = "tf-test-tf-security-profile%{random_suffix}" + type = "THREAT_PREVENTION" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_tags_tag_key" "basic_key" { + description = "For keyname resources." parent = "organizations/%{org_id}" - location = "global" + purpose = "GCE_FIREWALL" + short_name = "tf-test-tagkey-%{random_suffix}" + + purpose_data = { + organization = "auto" + } +} + +resource "google_tags_tag_value" "basic_value" { + description = "For valuename resources." 
+ parent = google_tags_tag_key.basic_key.id + short_name = "tf-test-tagvalue-%{random_suffix}" } `, context) } From f12b46327606e1961812595cb0f0cb07358493bc Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 17 Jul 2025 12:53:55 -0700 Subject: [PATCH 560/884] tgc-revival: add google_compute_network (#14545) --- mmv1/api/type.go | 2 +- mmv1/products/compute/Network.yaml | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/mmv1/api/type.go b/mmv1/api/type.go index e0581f7e3d55..8d9e85569761 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -434,7 +434,7 @@ func (t Type) Lineage() string { // This format is intended for resource metadata, to be used for connecting a Terraform // type with a corresponding API type. func (t Type) MetadataLineage() string { - if t.ParentMetadata == nil { + if t.ParentMetadata == nil || t.ParentMetadata.FlattenObject { return google.Underscore(t.Name) } diff --git a/mmv1/products/compute/Network.yaml b/mmv1/products/compute/Network.yaml index 5a9a17bb44ef..bc282210b110 100644 --- a/mmv1/products/compute/Network.yaml +++ b/mmv1/products/compute/Network.yaml @@ -36,6 +36,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true sweeper: dependencies: # - "google_network_security_intercept_endpoint_group" (beta only) @@ -176,6 +177,7 @@ properties: enum_values: - 'LEGACY' - 'STANDARD' + is_missing_in_cai: true - name: 'bgpAlwaysCompareMed' type: Boolean description: | @@ -185,6 +187,7 @@ properties: default_from_api: true update_url: 'projects/{{project}}/global/networks/{{name}}' update_verb: 'PATCH' + is_missing_in_cai: true - name: 'bgpInterRegionCost' type: Enum description: | @@ -196,6 +199,7 @@ properties: enum_values: - 'DEFAULT' - 'ADD_COST_TO_MED' + is_missing_in_cai: true - name: 'mtu' type: Integer description: | From d373ee17c81d16161aae941daa38a7f8fea698a7 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 17 Jul 2025 13:03:40 
-0700 Subject: [PATCH 561/884] Added a go.mod to declare magic modules as a go module (#14559) --- go.mod | 3 +++ main.go | 7 +++++++ 2 files changed, 10 insertions(+) create mode 100644 go.mod create mode 100644 main.go diff --git a/go.mod b/go.mod new file mode 100644 index 000000000000..2c6c03665927 --- /dev/null +++ b/go.mod @@ -0,0 +1,3 @@ +module github.com/GoogleCloudPlatform/magic-modules + +go 1.23.0 diff --git a/main.go b/main.go new file mode 100644 index 000000000000..82002367b4e8 --- /dev/null +++ b/main.go @@ -0,0 +1,7 @@ +package main + +import "fmt" + +func main() { + fmt.Println("This is currently a stub no-op function.") +} From 70af4bc1ae285987a4043d3a52b99213f76340b1 Mon Sep 17 00:00:00 2001 From: Rishita Golla Date: Thu, 17 Jul 2025 13:14:29 -0700 Subject: [PATCH 562/884] Upgrade google.golang.org/api to the latest version (#14554) --- mmv1/third_party/terraform/go.mod | 11 +-------- mmv1/third_party/terraform/go.sum | 41 ++----------------------------- 2 files changed, 3 insertions(+), 49 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 0246c5fa176b..a6c484ae6e53 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -5,7 +5,6 @@ go 1.23.0 require ( cloud.google.com/go/auth v0.16.2 cloud.google.com/go/auth/oauth2adapt v0.2.8 - cloud.google.com/go/bigquery v1.69.0 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 github.com/apparentlymart/go-cidr v1.1.0 @@ -35,7 +34,7 @@ require ( golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 golang.org/x/net v0.41.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.238.0 + google.golang.org/api v0.242.0 google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 google.golang.org/grpc v1.73.0 google.golang.org/protobuf v1.36.6 @@ -52,7 +51,6 @@ require ( cloud.google.com/go/monitoring v1.24.2 // indirect github.com/ProtonMail/go-crypto 
v1.1.3 // indirect github.com/agext/levenshtein v1.2.2 // indirect - github.com/apache/arrow/go/v15 v15.0.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -66,10 +64,8 @@ require ( github.com/go-jose/go-jose/v4 v4.0.5 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/goccy/go-json v0.10.2 // indirect github.com/golang/glog v1.2.4 // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/google/flatbuffers v23.5.26+incompatible // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect @@ -87,8 +83,6 @@ require ( github.com/hashicorp/terraform-registry-address v0.2.4 // indirect github.com/hashicorp/terraform-svchost v0.1.1 // indirect github.com/hashicorp/yamux v0.1.1 // indirect - github.com/klauspost/compress v1.16.7 // indirect - github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect @@ -98,7 +92,6 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/oklog/run v1.0.0 // indirect - github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect @@ -107,7 +100,6 @@ require ( github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/zclconf/go-cty v1.16.2 // indirect github.com/zeebo/errs v1.4.0 // indirect - github.com/zeebo/xxh3 v1.0.2 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect 
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect @@ -123,7 +115,6 @@ require ( golang.org/x/text v0.26.0 // indirect golang.org/x/time v0.12.0 // indirect golang.org/x/tools v0.33.0 // indirect - golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index ff5055ce953f..26a11f59899d 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -9,41 +9,27 @@ cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= -cloud.google.com/go/bigquery v1.69.0 h1:rZvHnjSUs5sHK3F9awiuFk2PeOaB8suqNuim21GbaTc= -cloud.google.com/go/bigquery v1.69.0/go.mod h1:TdGLquA3h/mGg+McX+GsqG9afAzTAcldMjqhdjHTLew= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= -cloud.google.com/go/datacatalog v1.26.0 h1:eFgygb3DTufTWWUB8ARk+dSuXz+aefNJXTlkWlQcWwE= -cloud.google.com/go/datacatalog v1.26.0/go.mod h1:bLN2HLBAwB3kLTFT5ZKLHVPj/weNz6bR0c7nYp0LE14= cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= 
cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= -cloud.google.com/go/storage v1.53.0 h1:gg0ERZwL17pJ+Cz3cD2qS60w1WMDnwcm5YPAIQBHUAw= -cloud.google.com/go/storage v1.53.0/go.mod h1:7/eO2a/srr9ImZW9k5uufcNahT2+fPb8w5it1i5boaA= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 h1:fYE9p3esPxA/C0rQ0AHhP0drtPXDRhaWiwg1DPqO7IU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0/go.mod h1:BnBReJLvVYx2CS/UHOgVz2BXKXD9wsQPxZug20nZhd0= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 h1:6/0iUd0xrnX7qt+mLNRwg5c0PGv8wpE8K90ryANQwMI= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= github.com/Microsoft/go-winio v0.6.2 
h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= -github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE= -github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= @@ -111,8 +97,6 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -131,8 +115,6 @@ github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/btree v1.1.3 
h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= -github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= -github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= @@ -140,8 +122,6 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= -github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= -github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -212,10 +192,6 @@ github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4 github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= -github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= 
-github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= -github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -253,8 +229,6 @@ github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= -github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -297,16 +271,10 @@ github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70 github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= -github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= -github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= github.com/zeebo/errs v1.4.0/go.mod 
h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= -github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/detectors/gcp v1.35.0 h1:bGvFt68+KTiAKFlacHW6AhA56GF2rS0bdD3aJYEnmzA= -go.opentelemetry.io/contrib/detectors/gcp v1.35.0/go.mod h1:qGWP8/+ILwMRIUf9uIVLloR1uo5ZYAslM4O6OqUi1DA= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= @@ -382,7 +350,6 @@ golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= @@ -414,12 +381,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= -gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= -gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= -google.golang.org/api v0.238.0 h1:+EldkglWIg/pWjkq97sd+XxH7PxakNYoe/rkSTbnvOs= -google.golang.org/api v0.238.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= +google.golang.org/api v0.242.0 h1:7Lnb1nfnpvbkCiZek6IXKdJ0MFuAZNAJKQfA1ws62xg= +google.golang.org/api v0.242.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= From aa8309f6db2dd99dbe310a751392ea2cdb50e230 Mon Sep 17 00:00:00 2001 From: wdarling-g Date: Thu, 17 Jul 2025 15:27:48 -0700 Subject: [PATCH 563/884] Add support for name in GcpUserAccessBinding (#14520) --- .../GcpUserAccessBinding.yaml | 6 +- ...xt_manager_gcp_user_access_binding_test.go | 93 ++++++++++++++++++- 2 files changed, 96 insertions(+), 3 deletions(-) diff --git a/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml b/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml index 18138a9a3ca7..7fcf1bbf8718 100644 --- a/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml +++ b/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml @@ -131,12 +131,16 @@ properties: - name: restrictedClientApplication type: NestedObject description: | - Optional. The application that is subject to this binding's scope. + Optional. 
The application that is subject to this binding's scope. Only one of clientId or name should be specified. properties: - name: clientId type: String description: | The OAuth client ID of the application. + - name: name + type: String + description: | + The name of the application. Example: "Cloud Console" - name: 'activeSettings' type: NestedObject description: | diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go index 3648712e92f9..1c7aee210b8f 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go @@ -41,6 +41,15 @@ func testAccAccessContextManagerGcpUserAccessBinding_basicTest(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"organization_id"}, }, + { + Config: testAccAccessContextManagerGcpUserAccessBinding_accessContextManagerGcpUserAccessBindingNamedExample(context), + }, + { + ResourceName: "google_access_context_manager_gcp_user_access_binding.gcp_user_access_binding", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"organization_id"}, + }, }, }) } @@ -92,7 +101,6 @@ resource "google_access_context_manager_gcp_user_access_binding" "gcp_user_acces google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, ] session_settings { - max_inactivity = "300s" session_length = "1800s" session_length_enabled = true session_reauth_method = "LOGIN" @@ -111,7 +119,88 @@ resource "google_access_context_manager_gcp_user_access_binding" "gcp_user_acces 
google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, ] session_settings { - max_inactivity = "300s" + session_length = "1800s" + session_length_enabled = true + session_reauth_method = "LOGIN" + use_oidc_max_age = false + } + } + dry_run_settings { + access_levels = [ + google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, + ] + } + } +} +`, context) +} + +func testAccAccessContextManagerGcpUserAccessBinding_accessContextManagerGcpUserAccessBindingNamedExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_identity_group" "group" { + display_name = "tf-test-my-identity-group%{random_suffix}" + + parent = "customers/%{cust_id}" + + group_key { + id = "tf-test-my-identity-group%{random_suffix}@%{org_domain}" + } + + labels = { + "cloudidentity.googleapis.com/groups.discussion_forum" = "" + } +} + +resource "google_access_context_manager_access_level" "tf_test_access_level_id_for_user_access_binding%{random_suffix}" { + parent = "accessPolicies/${google_access_context_manager_access_policy.access-policy.name}" + name = "accessPolicies/${google_access_context_manager_access_policy.access-policy.name}/accessLevels/tf_test_chromeos_no_lock%{random_suffix}" + title = "tf_test_chromeos_no_lock%{random_suffix}" + basic { + conditions { + device_policy { + require_screen_lock = true + os_constraints { + os_type = "DESKTOP_CHROME_OS" + } + } + regions = [ + "US", + ] + } + } +} + +resource "google_access_context_manager_access_policy" "access-policy" { + parent = "organizations/%{org_id}" + title = "my policy" +} + +resource "google_access_context_manager_gcp_user_access_binding" "gcp_user_access_binding" { + organization_id = "%{org_id}" + group_key = trimprefix(google_cloud_identity_group.group.id, "groups/") + access_levels = [ + 
google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, + ] + session_settings { + session_length = "1800s" + session_length_enabled = true + session_reauth_method = "LOGIN" + use_oidc_max_age = false + } + scoped_access_settings { + scope { + client_scope { + restricted_client_application { + name = "Cloud Console" + } + } + } + active_settings { + access_levels = [ + google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, + ] + session_settings { + max_inactivity = "400s" session_length = "1800s" session_length_enabled = true session_reauth_method = "LOGIN" From 2877ca94fbbd2d15879d8879e2ffdbd224c120ca Mon Sep 17 00:00:00 2001 From: chasevedder Date: Thu, 17 Jul 2025 15:43:28 -0700 Subject: [PATCH 564/884] =?UTF-8?q?Update=20service=20account=20creation?= =?UTF-8?q?=20to=20be=20more=20resilient=20to=20eventual=20cons=E2=80=A6?= =?UTF-8?q?=20(#14547)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../resource_google_service_account.go | 65 +++++++++++-------- 1 file changed, 38 insertions(+), 27 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index 901b1d0ca975..fd50b35de26e 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -122,54 +122,61 @@ func resourceGoogleServiceAccountCreate(d *schema.ResourceData, meta interface{} ServiceAccount: sa, } - sa, err = config.NewIamClient(userAgent).Projects.ServiceAccounts.Create("projects/"+project, r).Do() + iamClient := config.NewIamClient(userAgent) + sa, err = iamClient.Projects.ServiceAccounts.Create("projects/"+project, r).Do() if err != nil { gerr, ok := 
err.(*googleapi.Error) alreadyExists := ok && gerr.Code == 409 && d.Get("create_ignore_already_exists").(bool) if alreadyExists { - sa = &iam.ServiceAccount{ - Name: fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project), - } + fullServiceAccountName := fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + sa, saerr := iamClient.Projects.ServiceAccounts.Get(fullServiceAccountName).Do() + + if saerr != nil { + return saerr + } + + d.SetId(sa.Name) + return populateResourceData(d, sa) + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ + transport_tpg.IsNotFoundRetryableError("service account creation"), + }, + }) + + return nil } else { return fmt.Errorf("Error creating service account: %s", err) } } d.SetId(sa.Name) - - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: func() (operr error) { - _, saerr := config.NewIamClient(userAgent).Projects.ServiceAccounts.Get(d.Id()).Do() - return saerr - }, - Timeout: d.Timeout(schema.TimeoutCreate), - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ - transport_tpg.IsNotFoundRetryableError("service account creation"), - transport_tpg.IsForbiddenIamServiceAccountRetryableError("service account creation"), - }, - }) - - if err != nil { - return fmt.Errorf("Error reading service account after creation: %s", err) - } + populateResourceData(d, sa) // We poll until the resource is found due to eventual consistency issue - // on part of the api https://cloud.google.com/iam/docs/overview#consistency + // on part of the api https://cloud.google.com/iam/docs/overview#consistency. + // Wait for at least 3 successful responses in a row to ensure result is consistent. 
// IAM API returns 403 when the queried SA is not found, so we must ignore both 404 & 403 errors - err = transport_tpg.PollingWaitTime(resourceServiceAccountPollRead(d, meta), transport_tpg.PollCheckForExistenceWith403, "Creating Service Account", d.Timeout(schema.TimeoutCreate), 1) - - if err != nil { - return err - } + transport_tpg.PollingWaitTime( + resourceServiceAccountPollRead(d, meta), + transport_tpg.PollCheckForExistence, + "Creating Service Account", + d.Timeout(schema.TimeoutCreate), + 3, // Number of consecutive occurences. + ) // We can't guarantee complete consistency even after polling, // so sleep for some additional time to reduce the likelihood of // eventual consistency failures. time.Sleep(10 * time.Second) - return resourceGoogleServiceAccountRead(d, meta) + return nil } +// PollReadFunc for checking Service Account existence. +// If resourceData is not nil, it will be updated with the response. func resourceServiceAccountPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { return func() (map[string]interface{}, error) { config := meta.(*transport_tpg.Config) @@ -201,6 +208,10 @@ func resourceGoogleServiceAccountRead(d *schema.ResourceData, meta interface{}) return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Service Account %q", d.Id())) } + return populateResourceData(d, sa) +} + +func populateResourceData(d *schema.ResourceData, sa *iam.ServiceAccount) error { if err := d.Set("email", sa.Email); err != nil { return fmt.Errorf("Error setting email: %s", err) } From 745629c7a3dca30663f654865a5cc2b0d9f0639b Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Fri, 18 Jul 2025 10:14:58 -0700 Subject: [PATCH 565/884] Fix teamcity runs for modelarmor (#14566) --- .../terraform/.teamcity/components/inputs/services_beta.kt | 4 ++-- .../terraform/.teamcity/components/inputs/services_ga.kt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 27669d77eca6..e16d98f34deb 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -562,12 +562,12 @@ var ServicesListBeta = mapOf( "path" to "./google-beta/services/mlengine" ), "modelarmor" to mapOf( - "name" to "ModelArmor", + "name" to "modelarmor", "displayName" to "ModelArmor", "path" to "./google-beta/services/modelarmor" ), "modelarmorglobal" to mapOf( - "name" to "ModelArmorGlobal", + "name" to "modelarmorglobal", "displayName" to "ModelArmorGlobal", "path" to "./google-beta/services/modelarmorglobal" ), diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 774f10b326fb..2c32d877bc36 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -557,12 +557,12 @@ var ServicesListGa = mapOf( "path" to "./google/services/mlengine" ), "modelarmor" to mapOf( - "name" to "ModelArmor", + "name" to "modelarmor", "displayName" to "ModelArmor", "path" to "./google/services/modelarmor" ), "modelarmorglobal" to mapOf( - "name" to "ModelArmorGlobal", + "name" to "modelarmorglobal", "displayName" to "ModelArmorGlobal", "path" to "./google/services/modelarmorglobal" ), From 09aa5a13159bb6da6caf75d341185c6feb59a074 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 18 Jul 2025 10:41:10 -0700 Subject: [PATCH 566/884] Tweaked reassign-reviewer regex (#14565) --- .github/workflows/reassign-reviewer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reassign-reviewer.yml b/.github/workflows/reassign-reviewer.yml index c7d2a2384468..835c5f8e7802 100644 --- 
a/.github/workflows/reassign-reviewer.yml +++ b/.github/workflows/reassign-reviewer.yml @@ -22,7 +22,7 @@ jobs: uses: actions-ecosystem/action-regex-match@d50fd2e7a37d0e617aea3d7ada663bd56862b9cc # v2.0.2 with: text: ${{ github.event.comment.body }} - regex: '.*@modular-magician reassign[- ]+review[^@\n\r]*@?([a-zA-Z0-9-_]*).*' + regex: '.*@modular-magician (re)?assign[- ]review(er)? @?([a-zA-Z0-9-_]*).*' - name: Checkout Repository if: steps.read-comment.outputs.match != '' uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 From 6b7ae2d49d477a8319d28659fb9ff46c8a2433e3 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 18 Jul 2025 21:59:52 +0200 Subject: [PATCH 567/884] sql: fix nil pointer while importing `google_sql_database` and remove invalid import_format (#14428) --- mmv1/products/sql/Database.yaml | 1 - .../pre_read/sql_database_activation_policy.tmpl | 12 ++++++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/mmv1/products/sql/Database.yaml b/mmv1/products/sql/Database.yaml index e523d433662f..7a74376617a1 100644 --- a/mmv1/products/sql/Database.yaml +++ b/mmv1/products/sql/Database.yaml @@ -26,7 +26,6 @@ import_format: - '{{project}}/{{instance}}/{{name}}' - 'instances/{{instance}}/databases/{{name}}' - '{{instance}}/{{name}}' - - '{{name}}' timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl b/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl index 582504e94e11..72e5edbe2e77 100644 --- a/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl +++ b/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl @@ -1,8 +1,8 @@ instance := d.Get("instance").(string) databaseInstance, err := config.NewSqlAdminClient(userAgent).Instances.Get(project, instance).Do() - if err != nil { - return err - } - if databaseInstance.Settings.ActivationPolicy != "ALWAYS" { - return nil - } \ No newline at 
end of file +if err != nil { + return err +} +if databaseInstance.Settings != nil && databaseInstance.Settings.ActivationPolicy != "ALWAYS" { + return nil +} \ No newline at end of file From 69c959c090620cf764b22461b6b5b62717f11da1 Mon Sep 17 00:00:00 2001 From: animeshnandanwar Date: Fri, 18 Jul 2025 13:22:39 -0700 Subject: [PATCH 568/884] Update dataproc Batch and dataproc SessionTemplate resource to support authentication config. (#14534) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- mmv1/products/dataproc/Batch.yaml | 12 ++++++++++++ mmv1/products/dataproc/SessionTemplate.yaml | 12 ++++++++++++ .../examples/dataproc_batch_spark_full.tf.tmpl | 5 ++++- .../dataproc_session_templates_jupyter.tf.tmpl | 3 +++ .../dataproc_session_templates_jupyter_full.tf.tmpl | 3 +++ 5 files changed, 34 insertions(+), 1 deletion(-) diff --git a/mmv1/products/dataproc/Batch.yaml b/mmv1/products/dataproc/Batch.yaml index 4865221c64a8..c2402f6f7b1d 100644 --- a/mmv1/products/dataproc/Batch.yaml +++ b/mmv1/products/dataproc/Batch.yaml @@ -377,6 +377,18 @@ properties: Subnetwork configuration for workload execution. conflicts: - environment_config.0.execution_config.0.network_uri + - name: 'authenticationConfig' + type: NestedObject + description: | + Authentication configuration for a workload is used to set the default identity for the workload execution. + properties: + - name: userWorkloadAuthenticationType + type: Enum + description: | + Authentication type for the user workload running in containers. 
+ enum_values: + - SERVICE_ACCOUNT + - END_USER_CREDENTIALS - name: 'peripheralsConfig' type: NestedObject description: | diff --git a/mmv1/products/dataproc/SessionTemplate.yaml b/mmv1/products/dataproc/SessionTemplate.yaml index 8d1688319663..6bb9457b85c4 100644 --- a/mmv1/products/dataproc/SessionTemplate.yaml +++ b/mmv1/products/dataproc/SessionTemplate.yaml @@ -189,6 +189,18 @@ properties: type: String description: | Subnetwork configuration for workload execution. + - name: 'authenticationConfig' + type: NestedObject + description: | + Authentication configuration for a workload is used to set the default identity for the workload execution. + properties: + - name: userWorkloadAuthenticationType + type: Enum + description: | + Authentication type for the user workload running in containers. + enum_values: + - SERVICE_ACCOUNT + - END_USER_CREDENTIALS - name: 'peripheralsConfig' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/dataproc_batch_spark_full.tf.tmpl b/mmv1/templates/terraform/examples/dataproc_batch_spark_full.tf.tmpl index bd07b77f1357..e6a24e32411e 100644 --- a/mmv1/templates/terraform/examples/dataproc_batch_spark_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dataproc_batch_spark_full.tf.tmpl @@ -22,6 +22,9 @@ resource "google_dataproc_batch" "{{$.PrimaryResourceId}}" { network_uri = "default" service_account = "${data.google_project.project.number}-compute@developer.gserviceaccount.com" staging_bucket = google_storage_bucket.bucket.name + authentication_config { + user_workload_authentication_type = "SERVICE_ACCOUNT" + } } peripherals_config { metastore_service = google_dataproc_metastore_service.ms.name @@ -100,4 +103,4 @@ resource "google_dataproc_cluster" "basic" { hive_metastore_config { version = "3.1.2" } -} \ No newline at end of file +} diff --git a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl 
b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl index bd42b7948feb..77d78c01fb0e 100644 --- a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl +++ b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl @@ -12,6 +12,9 @@ resource "google_dataproc_session_template" "{{$.PrimaryResourceId}}" { subnetwork_uri = "{{index $.Vars "subnetwork_name"}}" ttl = "3600s" network_tags = ["tag1"] + authentication_config { + user_workload_authentication_type = "END_USER_CREDENTIALS" + } } } diff --git a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl index 81d7c2f4b01c..1c69c0e3fc70 100644 --- a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl @@ -23,6 +23,9 @@ resource "google_dataproc_session_template" "{{$.PrimaryResourceId}}" { subnetwork_uri = "{{index $.Vars "subnetwork_name"}}" service_account = "${data.google_project.project.number}-compute@developer.gserviceaccount.com" staging_bucket = google_storage_bucket.bucket.name + authentication_config { + user_workload_authentication_type = "SERVICE_ACCOUNT" + } } peripherals_config { metastore_service = google_dataproc_metastore_service.ms.name From 139470dd488ef0586d95f018d8e6fa6a14382cc8 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 18 Jul 2025 13:55:15 -0700 Subject: [PATCH 569/884] Fixed dos line endings (#14568) --- .../network_connectivity_hub_basic.tf.tmpl | 14 +- ...k_connectivity_hub_with_export_psc.tf.tmpl | 10 +- ...n_network_endpoint_group_appengine.tf.tmpl | 148 +++++++++--------- ...ork_endpoint_group_appengine_empty.tf.tmpl | 14 +- ...on_network_endpoint_group_cloudrun.tf.tmpl | 52 +++--- ...n_network_endpoint_group_functions.tf.tmpl | 64 ++++---- 6 files changed, 151 
insertions(+), 151 deletions(-) diff --git a/mmv1/templates/terraform/examples/network_connectivity_hub_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_hub_basic.tf.tmpl index 1bfba77d608a..604899da16e5 100644 --- a/mmv1/templates/terraform/examples/network_connectivity_hub_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_connectivity_hub_basic.tf.tmpl @@ -1,7 +1,7 @@ -resource "google_network_connectivity_hub" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "resource_name"}}" - description = "A sample hub" - labels = { - label-one = "value-one" - } -} +resource "google_network_connectivity_hub" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "resource_name"}}" + description = "A sample hub" + labels = { + label-one = "value-one" + } +} diff --git a/mmv1/templates/terraform/examples/network_connectivity_hub_with_export_psc.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_hub_with_export_psc.tf.tmpl index a07d2d0078bf..a3b587a9361c 100644 --- a/mmv1/templates/terraform/examples/network_connectivity_hub_with_export_psc.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_connectivity_hub_with_export_psc.tf.tmpl @@ -1,5 +1,5 @@ -resource "google_network_connectivity_hub" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "resource_name"}}" - description = "A sample hub with Private Service Connect transitivity is enabled" - export_psc = true -} +resource "google_network_connectivity_hub" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "resource_name"}}" + description = "A sample hub with Private Service Connect transitivity is enabled" + export_psc = true +} diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine.tf.tmpl b/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine.tf.tmpl index 6d8f1ee21a56..322de50ac4a0 100644 --- a/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine.tf.tmpl +++ 
b/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine.tf.tmpl @@ -1,75 +1,75 @@ -// App Engine Example -resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - network_endpoint_type = "SERVERLESS" - region = "us-central1" - app_engine { - service = google_app_engine_flexible_app_version.{{$.PrimaryResourceId}}.service - version = google_app_engine_flexible_app_version.{{$.PrimaryResourceId}}.version_id - } -} - -resource "google_app_engine_flexible_app_version" "{{$.PrimaryResourceId}}" { - version_id = "v1" - service = "{{index $.Vars "neg_name"}}" - runtime = "nodejs" - flexible_runtime_settings { - operating_system = "ubuntu22" - runtime_version = "20" - } - - entrypoint { - shell = "node ./app.js" - } - - deployment { - zip { - source_url = "https://storage.googleapis.com/${google_storage_bucket.{{$.PrimaryResourceId}}.name}/${google_storage_bucket_object.{{$.PrimaryResourceId}}.name}" - } - } - - liveness_check { - path = "/" - } - - readiness_check { - path = "/" - } - - env_variables = { - port = "8080" - } - - handlers { - url_regex = ".*\\/my-path\\/*" - security_level = "SECURE_ALWAYS" - login = "LOGIN_REQUIRED" - auth_fail_action = "AUTH_FAIL_ACTION_REDIRECT" - - static_files { - path = "my-other-path" - upload_path_regex = ".*\\/my-path\\/*" - } - } - - automatic_scaling { - cool_down_period = "120s" - cpu_utilization { - target_utilization = 0.5 - } - } - - delete_service_on_destroy = true -} - -resource "google_storage_bucket" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - location = "US" - uniform_bucket_level_access = true -} - -resource "google_storage_bucket_object" "{{$.PrimaryResourceId}}" { - name = "hello-world.zip" - bucket = google_storage_bucket.{{$.PrimaryResourceId}}.name - source = "./test-fixtures/hello-world.zip" +// App Engine Example +resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { + 
name = "{{index $.Vars "neg_name"}}" + network_endpoint_type = "SERVERLESS" + region = "us-central1" + app_engine { + service = google_app_engine_flexible_app_version.{{$.PrimaryResourceId}}.service + version = google_app_engine_flexible_app_version.{{$.PrimaryResourceId}}.version_id + } +} + +resource "google_app_engine_flexible_app_version" "{{$.PrimaryResourceId}}" { + version_id = "v1" + service = "{{index $.Vars "neg_name"}}" + runtime = "nodejs" + flexible_runtime_settings { + operating_system = "ubuntu22" + runtime_version = "20" + } + + entrypoint { + shell = "node ./app.js" + } + + deployment { + zip { + source_url = "https://storage.googleapis.com/${google_storage_bucket.{{$.PrimaryResourceId}}.name}/${google_storage_bucket_object.{{$.PrimaryResourceId}}.name}" + } + } + + liveness_check { + path = "/" + } + + readiness_check { + path = "/" + } + + env_variables = { + port = "8080" + } + + handlers { + url_regex = ".*\\/my-path\\/*" + security_level = "SECURE_ALWAYS" + login = "LOGIN_REQUIRED" + auth_fail_action = "AUTH_FAIL_ACTION_REDIRECT" + + static_files { + path = "my-other-path" + upload_path_regex = ".*\\/my-path\\/*" + } + } + + automatic_scaling { + cool_down_period = "120s" + cpu_utilization { + target_utilization = 0.5 + } + } + + delete_service_on_destroy = true +} + +resource "google_storage_bucket" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "neg_name"}}" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "{{$.PrimaryResourceId}}" { + name = "hello-world.zip" + bucket = google_storage_bucket.{{$.PrimaryResourceId}}.name + source = "./test-fixtures/hello-world.zip" } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.tmpl b/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.tmpl index 0a221538ac12..ef273a0fa91f 100644 --- 
a/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.tmpl +++ b/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.tmpl @@ -1,8 +1,8 @@ -// App Engine Example -resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - network_endpoint_type = "SERVERLESS" - region = "us-central1" - app_engine { - } +// App Engine Example +resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "neg_name"}}" + network_endpoint_type = "SERVERLESS" + region = "us-central1" + app_engine { + } } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_cloudrun.tf.tmpl b/mmv1/templates/terraform/examples/region_network_endpoint_group_cloudrun.tf.tmpl index e6c5c2b58715..a35755c73fb8 100644 --- a/mmv1/templates/terraform/examples/region_network_endpoint_group_cloudrun.tf.tmpl +++ b/mmv1/templates/terraform/examples/region_network_endpoint_group_cloudrun.tf.tmpl @@ -1,27 +1,27 @@ -// Cloud Run Example -resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - network_endpoint_type = "SERVERLESS" - region = "us-central1" - cloud_run { - service = google_cloud_run_service.{{$.PrimaryResourceId}}.name - } -} - -resource "google_cloud_run_service" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - location = "us-central1" - - template { - spec { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - } - } - } - - traffic { - percent = 100 - latest_revision = true - } +// Cloud Run Example +resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "neg_name"}}" + network_endpoint_type = "SERVERLESS" + region = "us-central1" + cloud_run { + service = google_cloud_run_service.{{$.PrimaryResourceId}}.name + } +} + 
+resource "google_cloud_run_service" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "neg_name"}}" + location = "us-central1" + + template { + spec { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } + } + + traffic { + percent = 100 + latest_revision = true + } } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_functions.tf.tmpl b/mmv1/templates/terraform/examples/region_network_endpoint_group_functions.tf.tmpl index f4e2932c4fb9..475cdd344829 100644 --- a/mmv1/templates/terraform/examples/region_network_endpoint_group_functions.tf.tmpl +++ b/mmv1/templates/terraform/examples/region_network_endpoint_group_functions.tf.tmpl @@ -1,33 +1,33 @@ -// Cloud Functions Example -resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - network_endpoint_type = "SERVERLESS" - region = "us-central1" - cloud_function { - function = google_cloudfunctions_function.{{$.PrimaryResourceId}}.name - } -} - -resource "google_cloudfunctions_function" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "neg_name"}}" - description = "My function" - runtime = "nodejs20" - - available_memory_mb = 128 - source_archive_bucket = google_storage_bucket.bucket.name - source_archive_object = google_storage_bucket_object.archive.name - trigger_http = true - timeout = 60 - entry_point = "helloGET" -} - -resource "google_storage_bucket" "bucket" { - name = "{{index $.Vars "bucket_name"}}" - location = "US" -} - -resource "google_storage_bucket_object" "archive" { - name = "index.zip" - bucket = google_storage_bucket.bucket.name - source = "{{index $.Vars "zip_path"}}" +// Cloud Functions Example +resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "neg_name"}}" + network_endpoint_type = "SERVERLESS" + region = "us-central1" + cloud_function { + function = 
google_cloudfunctions_function.{{$.PrimaryResourceId}}.name + } +} + +resource "google_cloudfunctions_function" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "neg_name"}}" + description = "My function" + runtime = "nodejs20" + + available_memory_mb = 128 + source_archive_bucket = google_storage_bucket.bucket.name + source_archive_object = google_storage_bucket_object.archive.name + trigger_http = true + timeout = 60 + entry_point = "helloGET" +} + +resource "google_storage_bucket" "bucket" { + name = "{{index $.Vars "bucket_name"}}" + location = "US" +} + +resource "google_storage_bucket_object" "archive" { + name = "index.zip" + bucket = google_storage_bucket.bucket.name + source = "{{index $.Vars "zip_path"}}" } \ No newline at end of file From 0f0666039376411d4c993798dc4162dd864a3bd3 Mon Sep 17 00:00:00 2001 From: Yanwei Guo Date: Fri, 18 Jul 2025 13:57:04 -0700 Subject: [PATCH 570/884] Add support for GPU redundancy to Cloud Run v2 job (#14519) --- mmv1/products/cloudrunv2/Job.yaml | 5 +++++ mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl | 1 + .../cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl | 1 + .../third_party/tgc/tests/data/example_cloud_run_v2_job.json | 3 ++- 4 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index 57ae1e01c290..a299faa44161 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -781,6 +781,11 @@ properties: description: The GPU to attach to an instance. See https://cloud.google.com/run/docs/configuring/jobs/gpu for configuring GPU. required: true + - name: 'gpuZonalRedundancyDisabled' + type: Boolean + description: True if GPU zonal redundancy is disabled on this execution. 
+ default_from_api: true + send_empty_value: true - name: 'observedGeneration' type: String description: | diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl index 5581f4bece44..a5ac9f3cac9d 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl @@ -11,6 +11,7 @@ resource "google_cloud_run_v2_job" "{{$.PrimaryResourceId}}" { node_selector { accelerator = "nvidia-l4" } + gpu_zonal_redundancy_disabled = true } } } diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index 083f4f82c6bf..d58b6d7fede3 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -1059,6 +1059,7 @@ func testAccCloudRunV2Job_cloudrunv2JobWithGpu(context map[string]interface{}) s node_selector { accelerator = "nvidia-l4" } + gpu_zonal_redundancy_disabled = true } } lifecycle { diff --git a/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json index e49031623723..a0dc9ac77308 100644 --- a/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json +++ b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json @@ -18,7 +18,8 @@ "image":"us-docker.pkg.dev/cloudrun/container/hello" } ], - "maxRetries":3 + "maxRetries":3, + "gpuZonalRedundancyDisabled":false } } } From d33e4e1f5f08cac6f0ac3dece5bbf9969a7b5ec0 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 18 Jul 2025 14:16:27 -0700 Subject: [PATCH 571/884] Concat licenses at the top level (#14571) --- LICENSE | 377 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 377 insertions(+) diff --git 
a/LICENSE b/LICENSE index ef51da2b0e8d..dcb8de7551d3 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,381 @@ +Files: tools/go-changelog/*, mmv1/third_party/terraform/* +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. 
"Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. 
Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. + +--- + +Files: *, excluding tools/go-changelog/* and mmv1/third_party/terraform/* Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ From bf6ad5178c1a525865c03075e8dd7fb03d29456a Mon Sep 17 00:00:00 2001 From: Ricardo Godoy <88061327+rickygodoy@users.noreply.github.com> Date: Fri, 18 Jul 2025 20:26:43 -0300 Subject: [PATCH 572/884] New resource: google_apigee_api_product (#14546) --- mmv1/products/apigee/ApiProduct.yaml | 423 ++++++++++++++ .../custom_import/apigee_api_product.go.tmpl | 42 ++ .../examples/apigee_api_product_basic.tf.tmpl | 45 ++ .../apigee_api_product_basic_test.tf.tmpl | 86 +++ ...apigee_api_product_with_attributes.tf.tmpl | 193 +++++++ ...e_api_product_with_attributes_test.tf.tmpl | 234 ++++++++ ..._api_product_with_legacy_operation.tf.tmpl | 68 +++ ...product_with_legacy_operation_test.tf.tmpl | 109 ++++ ...resource_apigee_api_product_update_test.go | 518 ++++++++++++++++++ 9 files changed, 1718 insertions(+) create mode 100644 mmv1/products/apigee/ApiProduct.yaml create mode 100644 mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl 
create mode 100644 mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go diff --git a/mmv1/products/apigee/ApiProduct.yaml b/mmv1/products/apigee/ApiProduct.yaml new file mode 100644 index 000000000000..9adbeef3ec14 --- /dev/null +++ b/mmv1/products/apigee/ApiProduct.yaml @@ -0,0 +1,423 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: "ApiProduct" +description: | + An `ApiProduct` in Apigee. +references: + guides: + "Creating an API product": "https://cloud.google.com/apigee/docs/api-platform/publish/what-api-product" + api: "https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.apiproducts#ApiProduct" +docs: +base_url: "{{org_id}}/apiproducts" +self_link: "{{org_id}}/apiproducts/{{name}}" +import_format: + - "{{org_id}}/apiproducts/{{name}}" + - "{{org_id}}/{{name}}" +custom_code: + custom_import: "templates/terraform/custom_import/apigee_api_product.go.tmpl" +examples: + - name: "apigee_api_product_basic" + vars: + instance_name: "my-instance" + product_name: "my-product" + exclude_test: true + - name: "apigee_api_product_basic_test" + primary_resource_id: "apigee_api_product" + test_env_vars: + org_id: "ORG_ID" + billing_account: "BILLING_ACCT" + exclude_docs: true + external_providers: ["time"] + - name: "apigee_api_product_with_legacy_operation" + vars: + instance_name: "my-instance" + product_name: "my-product" + exclude_test: true + - name: 
"apigee_api_product_with_legacy_operation_test" + primary_resource_id: "apigee_api_product" + test_env_vars: + org_id: "ORG_ID" + billing_account: "BILLING_ACCT" + exclude_docs: true + external_providers: ["time"] + - name: "apigee_api_product_with_attributes" + vars: + instance_name: "my-instance" + product_name: "my-product" + exclude_test: true + - name: "apigee_api_product_with_attributes_test" + primary_resource_id: "apigee_api_product" + test_env_vars: + org_id: "ORG_ID" + billing_account: "BILLING_ACCT" + exclude_docs: true + external_providers: ["time"] +parameters: + - name: "orgId" + type: String + description: | + The Apigee Organization associated with the Apigee API product, + in the format `organizations/{{org_name}}`. + url_param_only: true + required: true + immutable: true + +properties: + - name: "name" + type: String + description: | + Internal name of the API product. + required: true + immutable: true + validation: + regex: '^[a-z][a-z0-9._\-$ %]*$' + + - name: "displayName" + type: String + description: | + Name displayed in the UI or developer portal to developers registering for API access. + required: true + + - name: "description" + type: String + description: | + Description of the API product. Include key information about the API product that is not captured by other fields. + + - name: "approvalType" + type: Enum + description: | + Flag that specifies how API keys are approved to access the APIs defined by the API product. + Valid values are `auto` or `manual`. + enum_values: + - "auto" + - "manual" + + - name: "attributes" + type: Array + is_set: true + description: | + Array of attributes that may be used to extend the default API product profile with customer-specific metadata. You can specify a maximum of 18 attributes. + Use this property to specify the access level of the API product as either public, private, or internal. 
+ item_type: + type: NestedObject + properties: + - name: "name" + type: String + description: | + Key of the attribute. + - name: "value" + type: String + description: | + Value of the attribute. + + - name: "apiResources" + type: Array + is_set: true + description: | + Comma-separated list of API resources to be bundled in the API product. By default, the resource paths are mapped from the proxy.pathsuffix variable. + The proxy path suffix is defined as the URI fragment following the ProxyEndpoint base path. For example, if the apiResources element is defined to be /forecastrss and the base path defined for the API proxy is /weather, then only requests to /weather/forecastrss are permitted by the API product. + item_type: + type: String + + - name: "environments" + type: Array + is_set: true + description: | + Comma-separated list of environment names to which the API product is bound. Requests to environments that are not listed are rejected. + By specifying one or more environments, you can bind the resources listed in the API product to a specific environment, preventing developers from accessing those resources through API proxies deployed in another environment. + item_type: + type: String + - name: "proxies" + type: Array + is_set: true + description: | + Comma-separated list of API proxy names to which this API product is bound. By specifying API proxies, you can associate resources in the API product with specific API proxies, preventing developers from accessing those resources through other API proxies. + Apigee rejects requests to API proxies that are not listed. + item_type: + type: String + + - name: "scopes" + type: Array + description: | + Comma-separated list of OAuth scopes that are validated at runtime. Apigee validates that the scopes in any access token presented match the scopes defined in the OAuth policy associated with the API product. 
+ item_type: + type: String + + - name: "quota" + type: String + description: | + Number of request messages permitted per app by this API product for the specified quotaInterval and quotaTimeUnit. + For example, a quota of 50, for a quotaInterval of 12 and a quotaTimeUnit of hours means 50 requests are allowed every 12 hours. + + - name: "quotaInterval" + type: String + description: | + Time interval over which the number of request messages is calculated. + + - name: "quotaTimeUnit" + type: String + description: | + Time unit defined for the quotaInterval. Valid values include second, minute, hour, day, month or year. + + - name: "createdAt" + type: String + description: | + Response only. Creation time of this environment as milliseconds since epoch. + output: true + + - name: "lastModifiedAt" + type: String + description: | + Response only. Modified time of this environment as milliseconds since epoch. + output: true + + - name: "operationGroup" + type: NestedObject + description: | + Configuration used to group Apigee proxies or remote services with resources, method types, and quotas. The resource refers to the resource URI (excluding the base path). With this grouping, the API product creator is able to fine-tune and give precise control over which REST methods have access to specific resources and how many calls can be made (using the quota setting). + Note: The apiResources setting cannot be specified for both the API product and operation group; otherwise the call will fail. + properties: + - name: "operationConfigs" + type: Array + is_set: true + description: | + Required. List of operation configurations for either Apigee API proxies or other remote services that are associated with this API product. + item_type: + type: NestedObject + properties: + - name: "apiSource" + type: String + description: | + Required. Name of the API proxy or remote service with which the resources, methods, and quota are associated. 
+            - name: "operations"
+              type: Array
+              description: |
+                List of resource/method pairs for the API proxy or remote service to which quota will be applied.
+                Note: Currently, you can specify only a single resource/method pair. The call will fail if more than one resource/method pair is provided.
+              item_type:
+                type: NestedObject
+                properties:
+                  - name: "resource"
+                    type: String
+                    description: |
+                      Required. REST resource path associated with the API proxy or remote service.
+                  - name: "methods"
+                    type: Array
+                    is_set: true
+                    description: |
+                      Methods refers to the REST verbs, when none specified, all verb types are allowed.
+                    item_type:
+                      type: String
+            - name: "quota"
+              type: NestedObject
+              description: |
+                Quota parameters to be enforced for the resources, methods, and API source combination. If none are specified, quota enforcement will not be done.
+              properties:
+                - name: "limit"
+                  type: String
+                  description: |
+                    Required. Upper limit allowed for the time interval and time unit specified. Requests exceeding this limit will be rejected.
+                - name: "interval"
+                  type: String
+                  description: |
+                    Required. Time interval over which the number of request messages is calculated.
+                - name: "timeUnit"
+                  type: String
+                  description: |
+                    Time unit defined for the interval. Valid values include second, minute, hour, day, month or year. If limit and interval are valid, the default value is hour; otherwise, the default is null.
+            - name: "attributes"
+              type: Array
+              is_set: true
+              description: |
+                Custom attributes associated with the operation.
+              item_type:
+                type: NestedObject
+                properties:
+                  - name: "name"
+                    type: String
+                    description: |
+                      Key of the attribute.
+                  - name: "value"
+                    type: String
+                    description: |
+                      Value of the attribute.
+      - name: "operationConfigType"
+        type: Enum
+        description: |
+          Flag that specifies whether the configuration is for Apigee API proxy or a remote service. Valid values include proxy or remoteservice. Defaults to proxy. 
Set to proxy when Apigee API proxies are associated with the API product. Set to remoteservice when non-Apigee proxies like Istio-Envoy are associated with the API product. + enum_values: + - "proxy" + - "remoteservice" + + - name: "graphqlOperationGroup" + type: NestedObject + description: | + Configuration used to group Apigee proxies or remote services with graphQL operation name, graphQL operation type and quotas. This grouping allows us to precisely set quota for a particular combination of graphQL name and operation type for a particular proxy request. If graphQL name is not set, this would imply quota will be applied on all graphQL requests matching the operation type. + properties: + - name: "operationConfigs" + type: Array + is_set: true + description: | + List of graphQL operation configuration details associated with Apigee API proxies or remote services. Remote services are non-Apigee proxies, such as Istio-Envoy. + item_type: + type: NestedObject + properties: + - name: "apiSource" + type: String + description: | + Required. Name of the API proxy endpoint or remote service with which the GraphQL operation and quota are associated. + - name: "operations" + type: Array + is_set: true + description: | + Required. List of GraphQL name/operation type pairs for the proxy or remote service to which quota will be applied. If only operation types are specified, the quota will be applied to all GraphQL requests irrespective of the GraphQL name. + + Note: Currently, you can specify only a single GraphQLOperation. Specifying more than one will cause the operation to fail. + item_type: + type: NestedObject + properties: + - name: "operationTypes" + type: Array + is_set: true + description: | + Required. GraphQL operation types. Valid values include query or mutation. + Note: Apigee does not currently support subscription types. + item_type: + type: String + - name: "operation" + type: String + description: | + GraphQL operation name. 
The name and operation type will be used to apply quotas. If no name is specified, the quota will be applied to all GraphQL operations irrespective of their operation names in the payload. + - name: "quota" + type: NestedObject + description: | + Quota parameters to be enforced for the resources, methods, and API source combination. If none are specified, quota enforcement will not be done. + properties: + - name: "limit" + type: String + description: | + Required. Upper limit allowed for the time interval and time unit specified. Requests exceeding this limit will be rejected. + - name: "interval" + type: String + description: | + Required. Time interval over which the number of request messages is calculated. + - name: "timeUnit" + type: String + description: | + Time unit defined for the interval. Valid values include second, minute, hour, day, month or year. If limit and interval are valid, the default value is hour; otherwise, the default is null. + - name: "attributes" + type: Array + is_set: true + description: | + Custom attributes associated with the operation. + item_type: + type: NestedObject + properties: + - name: "name" + type: String + description: | + Key of the attribute. + - name: "value" + type: String + description: | + Value of the attribute. + - name: "operationConfigType" + type: Enum + description: | + Flag that specifes whether the configuration is for Apigee API proxy or a remote service. Valid values include proxy or remoteservice. Defaults to proxy. Set to proxy when Apigee API proxies are associated with the API product. Set to remoteservice when non-Apigee proxies like Istio-Envoy are associated with the API product. + enum_values: + - "proxy" + - "remoteservice" + + - name: "grpcOperationGroup" + type: NestedObject + description: | + Optional. Configuration used to group Apigee proxies with gRPC services and method names. This grouping allows us to set quota for a particular proxy with the gRPC service name and method. 
If a method name is not set, this implies quota and authorization are applied to all gRPC methods implemented by that proxy for that particular gRPC service. + properties: + - name: "operationConfigs" + type: Array + is_set: true + description: | + Required. List of operation configurations for either Apigee API proxies that are associated with this API product. + item_type: + type: NestedObject + properties: + - name: "apiSource" + type: String + description: | + Required. Name of the API proxy with which the gRPC operation and quota are associated. + - name: "methods" + type: Array + is_set: true + description: | + List of unqualified gRPC method names for the proxy to which quota will be applied. If this field is empty, the Quota will apply to all operations on the gRPC service defined on the proxy. + + Example: Given a proxy that is configured to serve com.petstore.PetService, the methods com.petstore.PetService.ListPets and com.petstore.PetService.GetPet would be specified here as simply ["ListPets", "GetPet"]. + + Note: Currently, you can specify only a single GraphQLOperation. Specifying more than one will cause the operation to fail. + item_type: + type: String + - name: "quota" + type: NestedObject + description: | + Quota parameters to be enforced for the resources, methods, and API source combination. If none are specified, quota enforcement will not be done. + properties: + - name: "limit" + type: String + description: | + Required. Upper limit allowed for the time interval and time unit specified. Requests exceeding this limit will be rejected. + - name: "interval" + type: String + description: | + Required. Time interval over which the number of request messages is calculated. + - name: "timeUnit" + type: String + description: | + Time unit defined for the interval. Valid values include second, minute, hour, day, month or year. If limit and interval are valid, the default value is hour; otherwise, the default is null. 
+ - name: "attributes" + type: Array + is_set: true + description: | + Custom attributes associated with the operation. + item_type: + type: NestedObject + properties: + - name: "name" + type: String + description: | + Key of the attribute. + - name: "value" + type: String + description: | + Value of the attribute. + - name: "service" + type: String + description: | + Required. gRPC Service name associated to be associated with the API proxy, on which quota rules can be applied upon. + + - name: "quotaCounterScope" + type: Enum + description: | + Scope of the quota decides how the quota counter gets applied and evaluate for quota violation. If the Scope is set as PROXY, then all the operations defined for the APIproduct that are associated with the same proxy will share the same quota counter set at the APIproduct level, making it a global counter at a proxy level. If the Scope is set as OPERATION, then each operations get the counter set at the API product dedicated, making it a local counter. Note that, the QuotaCounterScope applies only when an operation does not have dedicated quota set for itself. + enum_values: + - "QUOTA_COUNTER_SCOPE_UNSPECIFIED" + - "PROXY" + - "OPERATION" + + - name: "space" + type: String + immutable: true + description: | + Optional. The resource ID of the parent Space. If not set, the parent resource will be the Organization. 
diff --git a/mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl b/mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl new file mode 100644 index 000000000000..ff9c4d39157e --- /dev/null +++ b/mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl @@ -0,0 +1,42 @@ +config := meta.(*transport_tpg.Config) + +// current import_formats cannot import fields with forward slashes in their value +if err := tpgresource.ParseImportId([]string{"(?P.+)"}, d, config); err != nil { + return nil, err +} + +nameParts := strings.Split(d.Get("name").(string), "/") +if len(nameParts) == 4 { + // `organizations/{{"{{"}}org_name{{"}}"}}/apiproducts/{{"{{"}}name{{"}}"}}` + orgId := fmt.Sprintf("organizations/%s", nameParts[1]) + if err := d.Set("org_id", orgId); err != nil { + return nil, fmt.Errorf("Error setting org_id: %s", err) + } + if err := d.Set("name", nameParts[3]); err != nil { + return nil, fmt.Errorf("Error setting name: %s", err) + } +} else if len(nameParts) == 3 { + // `organizations/{{"{{"}}org_name{{"}}"}}/{{"{{"}}name{{"}}"}}` + orgId := fmt.Sprintf("organizations/%s", nameParts[1]) + if err := d.Set("org_id", orgId); err != nil { + return nil, fmt.Errorf("Error setting org_id: %s", err) + } + if err := d.Set("name", nameParts[2]); err != nil { + return nil, fmt.Errorf("Error setting name: %s", err) + } +} else { + return nil, fmt.Errorf( + "Saw %s when the name is expected to have shape %s or %s", + d.Get("name"), + "organizations/{{"{{"}}org_name{{"}}"}}/apiproducts/{{"{{"}}name{{"}}"}}", + "organizations/{{"{{"}}org_name{{"}}"}}/{{"{{"}}name{{"}}"}}") +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}org_id{{"}}"}}/apiproducts/{{"{{"}}name{{"}}"}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl 
b/mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl new file mode 100644 index 000000000000..ecd11e31de36 --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl @@ -0,0 +1,45 @@ +data "google_client_config" "current" {} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = data.google_client_config.current.project + authorized_network = google_compute_network.apigee_network.id + depends_on = [google_service_networking_connection.apigee_vpc_connection] +} + +resource "google_apigee_instance" "apigee_instance" { + name = "{{index $.Vars "instance_name"}}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} + +resource "google_apigee_api_product" "basic_api_product" { + org_id = google_apigee_organization.apigee_org.id + name = "{{index $.Vars "product_name"}}" + display_name = "My Basic API Product" + + approval_type = "auto" + + depends_on = [ + google_apigee_instance.apigee_instance + ] +} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl new file mode 100644 index 000000000000..fec8e03f9587 --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl @@ -0,0 +1,86 @@ +resource "google_project" "project" { + 
project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} + +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.apigee] +} + +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.compute] +} + +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.servicenetworking] +} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = [time_sleep.wait_120_seconds] +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + depends_on = 
[ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} + +resource "google_apigee_instance" "apigee_instance" { + name = "tf-test%{random_suffix}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} + +resource "google_apigee_api_product" "{{$.PrimaryResourceId}}" { + org_id = google_apigee_organization.apigee_org.id + name = "basic-api-product" + display_name = "My Basic API Product" + + approval_type = "auto" + + depends_on = [ + google_apigee_instance.apigee_instance + ] +} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl new file mode 100644 index 000000000000..74b64f402560 --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl @@ -0,0 +1,193 @@ +data "google_client_config" "current" {} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = data.google_client_config.current.project + authorized_network = google_compute_network.apigee_network.id + depends_on = [google_service_networking_connection.apigee_vpc_connection] +} + +resource "google_apigee_instance" "apigee_instance" { + name = "{{index $.Vars "instance_name"}}" + location = "us-central1" + org_id = 
google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} + +resource "google_apigee_api_product" "full_api_product" { + org_id = google_apigee_organization.apigee_org.id + name = "{{index $.Vars "product_name"}}" + display_name = "My full API Product" + + approval_type = "auto" + + description = "This is a sample API Product created with Terraform." + + quota = "10000" + quota_interval = "1" + quota_time_unit = "day" + quota_counter_scope = "PROXY" + + environments = ["dev", "hom"] + scopes = [ + "read:weather", + "write:reports" + ] + + attributes { + name = "access" + value = "private" + } + + attributes { + name = "custom" + value = "value" + } + + operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "anoter-proxy" + + operations { + resource = "/" + methods = ["POST", "GET"] + } + + quota { + limit = "1000" + interval = "5" + time_unit = "minute" + } + + attributes { + name = "custom" + value = "value" + } + } + + operation_configs { + api_source = "hello-world" + + operations { + resource = "/test" + methods = ["POST", "GET"] + } + + quota { + limit = "10" + interval = "30" + time_unit = "second" + } + + attributes { + name = "custom" + value = "value" + } + } + } + + graphql_operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "hello-world" + + quota { + limit = "30" + interval = "50" + time_unit = "second" + } + + operations { + operation_types = ["QUERY"] + operation = "test" + } + + attributes { + name = "custom" + value = "value" + } + } + + operation_configs { + api_source = "another-proxy" + + quota { + limit = "50000" + interval = "12" + time_unit = "hour" + } + + operations { + operation_types = ["MUTATION"] + operation = "test" + } + + attributes { + name = "custom" + value = "value" + } + } + } + + grpc_operation_group { + + operation_configs { + api_source = "another-proxy" + service = "grpc another test" + methods = ["method3", "method4"] + + quota { + 
limit = "1000000" + interval = "1" + time_unit = "month" + } + + attributes { + name = "graph" + value = "value" + } + } + + operation_configs { + api_source = "hello-world" + service = "grpc test" + methods = ["method1", "method2"] + + quota { + limit = "5" + interval = "1" + time_unit = "second" + } + + attributes { + name = "graph" + value = "value" + } + } + } + + depends_on = [ + google_apigee_instance.apigee_instance + ] +} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl new file mode 100644 index 000000000000..ac91919ad5eb --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl @@ -0,0 +1,234 @@ +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} + +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.apigee] +} + +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.compute] +} + +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.servicenetworking] +} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = 
[time_sleep.wait_120_seconds] +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} + +resource "google_apigee_instance" "apigee_instance" { + name = "tf-test%{random_suffix}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} + +resource "google_apigee_api_product" "{{$.PrimaryResourceId}}" { + org_id = google_apigee_organization.apigee_org.id + name = "full-api-product" + display_name = "My full API Product" + + approval_type = "auto" + + description = "This is a sample API Product created with Terraform." 
+ + quota = "10000" + quota_interval = "1" + quota_time_unit = "day" + quota_counter_scope = "PROXY" + + environments = ["dev", "hom"] + scopes = [ + "read:weather", + "write:reports" + ] + + attributes { + name = "access" + value = "private" + } + + attributes { + name = "custom" + value = "value" + } + + operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "anoter-proxy" + + operations { + resource = "/" + methods = ["POST", "GET"] + } + + quota { + limit = "1000" + interval = "5" + time_unit = "minute" + } + + attributes { + name = "custom" + value = "value" + } + } + + operation_configs { + api_source = "hello-world" + + operations { + resource = "/test" + methods = ["POST", "GET"] + } + + quota { + limit = "10" + interval = "30" + time_unit = "second" + } + + attributes { + name = "custom" + value = "value" + } + } + } + + graphql_operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "hello-world" + + quota { + limit = "30" + interval = "50" + time_unit = "second" + } + + operations { + operation_types = ["QUERY"] + operation = "test" + } + + attributes { + name = "custom" + value = "value" + } + } + + operation_configs { + api_source = "another-proxy" + + quota { + limit = "50000" + interval = "12" + time_unit = "hour" + } + + operations { + operation_types = ["MUTATION"] + operation = "test" + } + + attributes { + name = "custom" + value = "value" + } + } + } + + grpc_operation_group { + + operation_configs { + api_source = "another-proxy" + service = "grpc another test" + methods = ["method3", "method4"] + + quota { + limit = "1000000" + interval = "1" + time_unit = "month" + } + + attributes { + name = "graph" + value = "value" + } + } + + operation_configs { + api_source = "hello-world" + service = "grpc test" + methods = ["method1", "method2"] + + quota { + limit = "5" + interval = "1" + time_unit = "second" + } + + attributes { + name = "graph" + value = "value" + } + } + } + + 
depends_on = [ + google_apigee_instance.apigee_instance + ] +} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl new file mode 100644 index 000000000000..ba85ea349890 --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl @@ -0,0 +1,68 @@ +data "google_client_config" "current" {} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" +} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = data.google_client_config.current.project + authorized_network = google_compute_network.apigee_network.id + depends_on = [google_service_networking_connection.apigee_vpc_connection] +} + +resource "google_apigee_instance" "apigee_instance" { + name = "{{index $.Vars "instance_name"}}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} + +resource "google_apigee_api_product" "full_api_product" { + org_id = google_apigee_organization.apigee_org.id + name = "{{index $.Vars "product_name"}}" + display_name = "My full API Product" + + approval_type = "auto" + + description = "This is a sample API Product created with Terraform." 
+ + attributes { + name = "access" + value = "private" + } + + environments = ["dev", "hom"] + proxies = ["hello-world"] + api_resources = [ + "/", + "/weather/**" + ] + scopes = [ + "read:weather", + "write:reports" + ] + + quota = "10000" + quota_interval = "1" + quota_time_unit = "day" + quota_counter_scope = "PROXY" + + depends_on = [ + google_apigee_instance.apigee_instance + ] +} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl new file mode 100644 index 000000000000..4948c768becb --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl @@ -0,0 +1,109 @@ +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} + +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.apigee] +} + +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.compute] +} + +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.servicenetworking] +} + +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = [time_sleep.wait_120_seconds] 
+} + +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} + +resource "google_apigee_instance" "apigee_instance" { + name = "tf-test%{random_suffix}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} + +resource "google_apigee_api_product" "{{$.PrimaryResourceId}}" { + org_id = google_apigee_organization.apigee_org.id + name = "legacy-operation-api-product" + display_name = "My legacy operation API Product" + + approval_type = "auto" + + description = "This is a sample API Product created with Terraform." 
+ + attributes { + name = "access" + value = "private" + } + + environments = ["dev", "hom"] + proxies = ["hello-world"] + api_resources = [ + "/", + "/weather/**" + ] + scopes = [ + "read:weather", + "write:reports" + ] + + quota = "10000" + quota_interval = "1" + quota_time_unit = "day" + quota_counter_scope = "PROXY" + + depends_on = [ + google_apigee_instance.apigee_instance + ] +} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go new file mode 100644 index 000000000000..fb1cc182568d --- /dev/null +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go @@ -0,0 +1,518 @@ +package apigee_test + +import ( + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "testing" +) + +func TestAccApigeeApiProduct_apigeeApiProduct_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckApigeeApiProductDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccApigeeApiProduct_apigeeApiProduct_full(context), + }, + { + ResourceName: "google_apigee_api_product.apigee_api_product", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"org_id"}, + }, + { + Config: testAccApigeeApiProduct_apigeeApiProduct_update(context), + ConfigPlanChecks: 
resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_apigee_api_product.apigee_api_product", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_apigee_api_product.apigee_api_product", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"org_id"}, + }, + }, + }) +} + +func testAccApigeeApiProduct_apigeeApiProduct_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.apigee] +} +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.compute] +} +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.servicenetworking] +} +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = [time_sleep.wait_120_seconds] +} +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} +resource "google_service_networking_connection" 
"apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} +resource "google_apigee_instance" "apigee_instance" { + name = "tf-test%{random_suffix}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} +resource "google_apigee_api_product" "apigee_api_product" { + org_id = google_apigee_organization.apigee_org.id + name = "tf-test%{random_suffix}" + display_name = "My full API Product" + + approval_type = "auto" + + description = "This is a sample API Product created with Terraform." 
+ + quota = "10000" + quota_interval = "1" + quota_time_unit = "day" + quota_counter_scope = "PROXY" + + environments = ["dev", "hom"] + scopes = [ + "read:weather", + "write:reports" + ] + + attributes { + name = "access" + value = "private" + } + + attributes { + name = "custom" + value = "value" + } + + operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "anoter-proxy" + + operations { + resource = "/" + methods = ["POST", "GET"] + } + + quota { + limit = "1000" + interval = "5" + time_unit = "minute" + } + + attributes { + name = "custom" + value = "value" + } + } + + operation_configs { + api_source = "hello-world" + + operations { + resource = "/test" + methods = ["POST", "GET"] + } + + quota { + limit = "10" + interval = "30" + time_unit = "second" + } + + attributes { + name = "custom" + value = "value" + } + } + } + + graphql_operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "hello-world" + + quota { + limit = "30" + interval = "50" + time_unit = "second" + } + + operations { + operation_types = ["QUERY"] + operation = "test" + } + + attributes { + name = "custom" + value = "value" + } + } + + operation_configs { + api_source = "another-proxy" + + quota { + limit = "50000" + interval = "12" + time_unit = "hour" + } + + operations { + operation_types = ["MUTATION"] + operation = "test" + } + + attributes { + name = "custom" + value = "value" + } + } + } + + grpc_operation_group { + + operation_configs { + api_source = "another-proxy" + service = "grpc another test" + methods = ["method3", "method4"] + + quota { + limit = "1000000" + interval = "1" + time_unit = "month" + } + + attributes { + name = "graph" + value = "value" + } + } + + operation_configs { + api_source = "hello-world" + service = "grpc test" + methods = ["method1", "method2"] + + quota { + limit = "5" + interval = "1" + time_unit = "second" + } + + attributes { + name = "graph" + value = "value" + } + } + } + + 
depends_on = [ + google_apigee_instance.apigee_instance + ] +} +`, context) +} + +func testAccApigeeApiProduct_apigeeApiProduct_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.apigee] +} +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.compute] +} +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.servicenetworking] +} +resource "google_compute_network" "apigee_network" { + name = "apigee-network" + project = google_project.project.project_id + depends_on = [time_sleep.wait_120_seconds] +} +resource "google_compute_global_address" "apigee_range" { + name = "apigee-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} +resource 
"google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + ] +} +resource "google_apigee_instance" "apigee_instance" { + name = "tf-test%{random_suffix}" + location = "us-central1" + org_id = google_apigee_organization.apigee_org.id + peering_cidr_range = "SLASH_22" +} +resource "google_apigee_developer" "apigee_developer" { + email = "tf-test%{random_suffix}@acme.com" + first_name = "John" + last_name = "Doe" + user_name = "john.doe" + org_id = google_apigee_organization.apigee_org.id + depends_on = [ + google_apigee_instance.apigee_instance + ] +} +resource "google_apigee_api_product" "apigee_api_product" { + org_id = google_apigee_organization.apigee_org.id + name = "tf-test%{random_suffix}" + display_name = "My full API Product" + + approval_type = "auto" + + description = "This is a sample API Product created with Terraform." 
+ + quota = "5000" + quota_interval = "2" + quota_time_unit = "day" + quota_counter_scope = "PROXY" + + environments = ["dev"] + scopes = [ + "read:weather" + ] + + attributes { + name = "access" + value = "private" + } + + attributes { + name = "custom" + value = "value_changed" + } + + operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "anoter-proxy" + + operations { + resource = "/changed" + methods = ["POST", "GET", "PUT"] + } + + quota { + limit = "500" + interval = "6" + time_unit = "minute" + } + + attributes { + name = "custom" + value = "value_changed" + } + } + + operation_configs { + api_source = "hello-world" + + operations { + resource = "/test_changed" + methods = ["POST"] + } + + quota { + limit = "7" + interval = "20" + time_unit = "second" + } + + attributes { + name = "custom" + value = "value_changed" + } + } + } + + graphql_operation_group { + operation_config_type = "proxy" + + operation_configs { + api_source = "hello-world" + + quota { + limit = "20" + interval = "40" + time_unit = "second" + } + + operations { + operation_types = ["MUTATION"] + operation = "test_changed" + } + + attributes { + name = "custom" + value = "value_changed" + } + } + + operation_configs { + api_source = "another-proxy" + + quota { + limit = "5000" + interval = "10" + time_unit = "hour" + } + + operations { + operation_types = ["QUERY"] + operation = "test_changed" + } + + attributes { + name = "custom" + value = "value_changed" + } + } + } + + grpc_operation_group { + + operation_configs { + api_source = "another-proxy" + service = "grpc another test" + methods = ["method3_changed", "method4_changed"] + + quota { + limit = "10000" + interval = "10" + time_unit = "month" + } + + attributes { + name = "graph" + value = "value_changed" + } + } + + operation_configs { + api_source = "hello-world" + service = "grpc test" + methods = ["method1_changed", "method2_changed"] + + quota { + limit = "50" + interval = "5" + time_unit = 
"hour" + } + + attributes { + name = "graph" + value = "value_changed" + } + } + } + + depends_on = [ + google_apigee_instance.apigee_instance + ] +} +`, context) +} From 8c6d3669d21f06745f850f77839ab71e65781f5f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 21 Jul 2025 08:37:17 -0700 Subject: [PATCH 573/884] Fixed references to global variables in mmv1/main.go (#14573) --- mmv1/main.go | 82 +++++++++++++++++++++++++--------------------------- 1 file changed, 40 insertions(+), 42 deletions(-) diff --git a/mmv1/main.go b/mmv1/main.go index 4ca75098c8e6..ef62e7ba5f88 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -26,32 +26,30 @@ var wg sync.WaitGroup // TODO rewrite: additional flags // Example usage: --output $GOPATH/src/github.com/terraform-providers/terraform-provider-google-beta -var outputPath = flag.String("output", "", "path to output generated files to") +var outputPathFlag = flag.String("output", "", "path to output generated files to") // Example usage: --version beta -var version = flag.String("version", "", "optional version name. If specified, this version is preferred for resource generation when applicable") +var versionFlag = flag.String("version", "", "optional version name. If specified, this version is preferred for resource generation when applicable") -var overrideDirectory = flag.String("overrides", "", "directory containing yaml overrides") +var overrideDirectoryFlag = flag.String("overrides", "", "directory containing yaml overrides") -var product = flag.String("product", "", "optional product name. If specified, the resources under the specific product will be generated. Otherwise, resources under all products will be generated.") +var productFlag = flag.String("product", "", "optional product name. If specified, the resources under the specific product will be generated. Otherwise, resources under all products will be generated.") -var resourceToGenerate = flag.String("resource", "", "optional resource name. 
Limits generation to the specified resource within a particular product.") +var resourceFlag = flag.String("resource", "", "optional resource name. Limits generation to the specified resource within a particular product.") var doNotGenerateCode = flag.Bool("no-code", false, "do not generate code") var doNotGenerateDocs = flag.Bool("no-docs", false, "do not generate docs") -var forceProvider = flag.String("provider", "", "optional provider name. If specified, a non-default provider will be used.") +var providerFlag = flag.String("provider", "", "optional provider name. If specified, a non-default provider will be used.") var openapiGenerate = flag.Bool("openapi-generate", false, "Generate MMv1 YAML from openapi directory (Experimental)") -// Example usage: --yaml -var yamlMode = flag.Bool("yaml", false, "copy text over from ruby yaml to go yaml") - -var showImportDiffs = flag.Bool("show-import-diffs", false, "write go import diffs to stdout") +var showImportDiffsFlag = flag.Bool("show-import-diffs", false, "write go import diffs to stdout") func main() { + // Handle all flags in main. Other functions must not access flag values directly. 
flag.Parse() if *openapiGenerate { @@ -60,24 +58,25 @@ func main() { return } - if outputPath == nil || *outputPath == "" { + if *outputPathFlag == "" { log.Printf("No output path specified, exiting") return } - if version == nil || *version == "" { + GenerateProducts(*productFlag, *resourceFlag, *providerFlag, *versionFlag, *outputPathFlag, *overrideDirectoryFlag, !*doNotGenerateCode, !*doNotGenerateDocs, *showImportDiffsFlag) +} + +func GenerateProducts(product, resource, providerName, version, outputPath, overrideDirectory string, generateCode, generateDocs, showImportDiffs bool) { + if version == "" { log.Printf("No version specified, assuming ga") - *version = "ga" + version = "ga" } - - var generateCode = !*doNotGenerateCode - var generateDocs = !*doNotGenerateDocs var productsToGenerate []string var allProducts = false - if product == nil || *product == "" { + if product == "" { allProducts = true } else { - var productToGenerate = fmt.Sprintf("products/%s", *product) + var productToGenerate = fmt.Sprintf("products/%s", product) productsToGenerate = []string{productToGenerate} } @@ -92,26 +91,26 @@ func main() { allProductFiles = append(allProductFiles, fmt.Sprintf("products/%s", filepath.Base(dir))) } - if *overrideDirectory != "" { - log.Printf("Using override directory %s", *overrideDirectory) + if overrideDirectory != "" { + log.Printf("Using override directory %s", overrideDirectory) // Normalize override dir to a path that is relative to the magic-modules directory // This is needed for templates that concatenate pwd + override dir + path - if filepath.IsAbs(*overrideDirectory) { + if filepath.IsAbs(overrideDirectory) { wd, err := os.Getwd() if err != nil { panic(err) } - *overrideDirectory, err = filepath.Rel(wd, *overrideDirectory) - log.Printf("Override directory normalized to relative path %s", *overrideDirectory) + overrideDirectory, err = filepath.Rel(wd, overrideDirectory) + log.Printf("Override directory normalized to relative path %s", 
overrideDirectory) } - overrideFiles, err := filepath.Glob(fmt.Sprintf("%s/products/**/product.yaml", *overrideDirectory)) + overrideFiles, err := filepath.Glob(fmt.Sprintf("%s/products/**/product.yaml", overrideDirectory)) if err != nil { panic(err) } for _, filePath := range overrideFiles { - product, err := filepath.Rel(*overrideDirectory, filePath) + product, err := filepath.Rel(overrideDirectory, filePath) if err != nil { panic(err) } @@ -132,18 +131,17 @@ func main() { } startTime := time.Now() - providerName := "default (terraform)" - if *forceProvider != "" { - providerName = *forceProvider + if providerName == "" { + providerName = "default (terraform)" } - log.Printf("Generating MM output to '%s'", *outputPath) - log.Printf("Building %s version", *version) + log.Printf("Generating MM output to '%s'", outputPath) + log.Printf("Building %s version", version) log.Printf("Building %s provider", providerName) productsForVersionChannel := make(chan *api.Product, len(allProductFiles)) for _, productFile := range allProductFiles { wg.Add(1) - go GenerateProduct(productFile, productsForVersionChannel, startTime, productsToGenerate, *resourceToGenerate, *overrideDirectory, generateCode, generateDocs) + go GenerateProduct(version, providerName, productFile, outputPath, productsForVersionChannel, startTime, productsToGenerate, resource, overrideDirectory, generateCode, generateDocs) } wg.Wait() @@ -159,17 +157,17 @@ func main() { // In order to only copy/compile files once per provider this must be called outside // of the products loop. Create an MMv1 provider with an arbitrary product (the first loaded). 
- providerToGenerate := newProvider(*forceProvider, *version, productsForVersion[0], startTime) - providerToGenerate.CopyCommonFiles(*outputPath, generateCode, generateDocs) + providerToGenerate := newProvider(providerName, version, productsForVersion[0], startTime) + providerToGenerate.CopyCommonFiles(outputPath, generateCode, generateDocs) if generateCode { - providerToGenerate.CompileCommonFiles(*outputPath, productsForVersion, "") + providerToGenerate.CompileCommonFiles(outputPath, productsForVersion, "") } - provider.FixImports(*outputPath, *showImportDiffs) + provider.FixImports(outputPath, showImportDiffs) } -func GenerateProduct(productName string, productsForVersionChannel chan *api.Product, startTime time.Time, productsToGenerate []string, resourceToGenerate, overrideDirectory string, generateCode, generateDocs bool) { +func GenerateProduct(version, providerName, productName, outputPath string, productsForVersionChannel chan *api.Product, startTime time.Time, productsToGenerate []string, resourceToGenerate, overrideDirectory string, generateCode, generateDocs bool) { defer wg.Done() productYamlPath := path.Join(productName, "product.yaml") @@ -207,8 +205,8 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro var resources []*api.Resource = make([]*api.Resource, 0) - if !productApi.ExistsAtVersionOrLower(*version) { - log.Printf("%s does not have a '%s' version, skipping", productName, *version) + if !productApi.ExistsAtVersionOrLower(version) { + log.Printf("%s does not have a '%s' version, skipping", productName, version) return } @@ -236,7 +234,7 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro api.Compile(resourceYamlPath, resource, overrideDirectory) resource.SourceYamlFile = resourceYamlPath - resource.TargetVersionName = *version + resource.TargetVersionName = version resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) 
resource.SetDefault(productApi) resource.Validate() @@ -269,7 +267,7 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro api.Compile(overrideYamlPath, resource, overrideDirectory) } - resource.TargetVersionName = *version + resource.TargetVersionName = version resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() @@ -286,7 +284,7 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro productApi.Objects = resources productApi.Validate() - providerToGenerate := newProvider(*forceProvider, *version, productApi, startTime) + providerToGenerate := newProvider(providerName, version, productApi, startTime) productsForVersionChannel <- productApi if !slices.Contains(productsToGenerate, productName) { @@ -296,7 +294,7 @@ func GenerateProduct(productName string, productsForVersionChannel chan *api.Pro log.Printf("%s: Generating files", productName) - providerToGenerate.Generate(*outputPath, productName, resourceToGenerate, generateCode, generateDocs) + providerToGenerate.Generate(outputPath, productName, resourceToGenerate, generateCode, generateDocs) } func newProvider(providerName, version string, productApi *api.Product, startTime time.Time) provider.Provider { From 46ebd82dd66b14defcbd48c8f815434f2c1ddfe6 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Mon, 21 Jul 2025 16:20:52 -0400 Subject: [PATCH 574/884] Remove use of ENHANCED performance_monitoring_unit in tests for compute instances (#14557) --- ...source_compute_instance_template_test.go.tmpl | 16 ---------------- .../resource_compute_instance_test.go.tmpl | 13 ------------- ...compute_region_instance_template_test.go.tmpl | 16 ---------------- 3 files changed, 45 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl index 1a4441e1582b..778e88ba42a3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl @@ -937,10 +937,6 @@ func TestAccComputeInstanceTemplate_performanceMonitoringUnit(t *testing.T) { "instance_name": fmt.Sprintf("tf-test-instance-template-%s", acctest.RandString(t, 10)), "performance_monitoring_unit": "STANDARD", } - context_2 := map[string]interface{}{ - "instance_name": context_1["instance_name"].(string), - "performance_monitoring_unit": "ENHANCED", - } context_3 := map[string]interface{}{ "instance_name": context_1["instance_name"].(string), "performance_monitoring_unit": "ARCHITECTURAL", @@ -963,18 +959,6 @@ func TestAccComputeInstanceTemplate_performanceMonitoringUnit(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - { - Config: testAccComputeInstanceTemplate_performanceMonitoringUnit(context_2), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceTemplateExists(t, "google_compute_instance_template.foobar", &instanceTemplate), - resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ENHANCED"), - ), - }, - { - ResourceName: "google_compute_instance_template.foobar", - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccComputeInstanceTemplate_performanceMonitoringUnit(context_3), Check: resource.ComposeTestCheckFunc( diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index 7b7a4c336b04..b3a76dd0f26f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -1916,10 +1916,6 @@ func TestAccComputeInstance_performanceMonitoringUnit(t *testing.T) { "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), "performance_monitoring_unit": "STANDARD", } - context_2 := map[string]interface{}{ - "instance_name": context_1["instance_name"].(string), - "performance_monitoring_unit": "ENHANCED", - } context_3 := map[string]interface{}{ "instance_name": context_1["instance_name"].(string), "performance_monitoring_unit": "ARCHITECTURAL", @@ -1939,15 +1935,6 @@ func TestAccComputeInstance_performanceMonitoringUnit(t *testing.T) { ), }, computeInstanceImportStep("us-central1-a", context_1["instance_name"].(string), []string{"allow_stopping_for_update"}), - { - Config: testAccComputeInstance_performanceMonitoringUnit(context_2), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ENHANCED"), - ), - }, - computeInstanceImportStep("us-central1-a", context_2["instance_name"].(string), []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_performanceMonitoringUnit(context_3), Check: resource.ComposeTestCheckFunc( diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl index 989eb0c2b802..eb7f8681125d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl @@ -809,10 +809,6 @@ func TestAccComputeRegionInstanceTemplate_performanceMonitoringUnit(t *testing.T "instance_name": 
fmt.Sprintf("tf-test-instance-template-%s", acctest.RandString(t, 10)), "performance_monitoring_unit": "STANDARD", } - context_2 := map[string]interface{}{ - "instance_name": context_1["instance_name"].(string), - "performance_monitoring_unit": "ENHANCED", - } context_3 := map[string]interface{}{ "instance_name": context_1["instance_name"].(string), "performance_monitoring_unit": "ARCHITECTURAL", @@ -835,18 +831,6 @@ func TestAccComputeRegionInstanceTemplate_performanceMonitoringUnit(t *testing.T ImportState: true, ImportStateVerify: true, }, - { - Config: testAccComputeRegionInstanceTemplate_performanceMonitoringUnit(context_2), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeRegionInstanceTemplateExists(t, "google_compute_region_instance_template.foobar", &instanceTemplate), - resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ENHANCED"), - ), - }, - { - ResourceName: "google_compute_region_instance_template.foobar", - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccComputeRegionInstanceTemplate_performanceMonitoringUnit(context_3), Check: resource.ComposeTestCheckFunc( From 2b0b0acae29734ca937c7feac869e2f5b2fb8841 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 21 Jul 2025 14:02:12 -0700 Subject: [PATCH 575/884] tgc-revival: support google_compute_global_address (#14562) --- mmv1/api/product.go | 2 +- mmv1/api/resource.go | 48 ++++++++++++------- mmv1/products/cloudfunctions2/Function.yaml | 1 + mmv1/products/compute/GlobalAddress.yaml | 1 + mmv1/products/compute/RegionAutoscaler.yaml | 1 + mmv1/provider/terraform_tgc_next.go | 30 ++++++------ .../cai2hcl/resource_converters.go.tmpl | 2 +- .../tgc_next/services/resource.go.tmpl | 2 +- .../tfplan2cai/resource_converter.go.tmpl | 2 +- 9 files changed, 53 insertions(+), 36 deletions(-) diff --git a/mmv1/api/product.go b/mmv1/api/product.go index c373ed15856a..4d6140c72e0c 100644 --- 
a/mmv1/api/product.go +++ b/mmv1/api/product.go @@ -58,7 +58,7 @@ type Product struct { // base URL. Specific to defining the resource as a CAI asset. CaiBaseUrl string - // ApiResourceType of resources that already have an AssetType constant defined in the product. + // CaiResourceType of resources that already have an AssetType constant defined in the product. ResourcesWithCaiAssetType map[string]struct{} // A function reference designed for the rare case where you diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 30839e260a4d..cd4a3a5add43 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -226,16 +226,6 @@ type Resource struct { // If true, resource is not importable ExcludeImport bool `yaml:"exclude_import,omitempty"` - // If true, exclude resource from Terraform Validator - // (i.e. terraform-provider-conversion) - ExcludeTgc bool `yaml:"exclude_tgc,omitempty"` - - // If true, include resource in the new package of TGC (terraform-provider-conversion) - IncludeInTGCNext bool `yaml:"include_in_tgc_next_DO_NOT_USE,omitempty"` - - // Name of the hcl resource block used in TGC - TgcHclBlockName string `yaml:"tgc_hcl_block_name,omitempty"` - // If true, skip sweeper generation for this resource ExcludeSweeper bool `yaml:"exclude_sweeper,omitempty"` @@ -362,6 +352,30 @@ type Resource struct { ImportPath string `yaml:"-"` SourceYamlFile string `yaml:"-"` + + // ==================== + // TGC + // ==================== + TGCResource `yaml:",inline"` +} + +type TGCResource struct { + // If true, exclude resource from Terraform Validator + // (i.e. terraform-provider-conversion) + ExcludeTgc bool `yaml:"exclude_tgc,omitempty"` + + // If true, include resource in the new package of TGC (terraform-provider-conversion) + IncludeInTGCNext bool `yaml:"include_in_tgc_next_DO_NOT_USE,omitempty"` + + // Name of the hcl resource block used in TGC + TgcHclBlockName string `yaml:"tgc_hcl_block_name,omitempty"` + + // The resource kind in CAI. 
+ // If this is not set, then :name is used instead. + // For example: compute.googleapis.com/Address has Address for CaiResourceKind, + // and compute.googleapis.com/GlobalAddress has GlobalAddress for CaiResourceKind. + // But they have the same api resource type: address + CaiResourceKind string `yaml:"cai_resource_kind,omitempty"` } func (r *Resource) UnmarshalYAML(unmarshal func(any) error) error { @@ -1846,8 +1860,8 @@ func (r Resource) CaiAssetType() string { baseURL := r.CaiProductBaseUrl() productBackendName := r.CaiProductBackendName(baseURL) assetName := r.Name - if r.ApiResourceTypeKind != "" { - assetName = r.ApiResourceTypeKind + if r.CaiResourceKind != "" { + assetName = r.CaiResourceKind } return fmt.Sprintf("%s.googleapis.com/%s", productBackendName, assetName) } @@ -1859,10 +1873,10 @@ func (r Resource) DefineAssetTypeForResourceInProduct() bool { if r.ProductMetadata.ResourcesWithCaiAssetType == nil { r.ProductMetadata.ResourcesWithCaiAssetType = make(map[string]struct{}, 1) } - if _, alreadyDefined := r.ProductMetadata.ResourcesWithCaiAssetType[r.ApiResourceType()]; alreadyDefined { + if _, alreadyDefined := r.ProductMetadata.ResourcesWithCaiAssetType[r.CaiResourceType()]; alreadyDefined { return false } - r.ProductMetadata.ResourcesWithCaiAssetType[r.ApiResourceType()] = struct{}{} + r.ProductMetadata.ResourcesWithCaiAssetType[r.CaiResourceType()] = struct{}{} return true } @@ -2060,9 +2074,9 @@ func (r Resource) ReadPropertiesForTgc() []*Type { // Rarely, it is the API "resource type kind". // For example, the API resource type of "google_compute_autoscaler" is "ComputeAutoscalerAssetType". // The API resource type of "google_compute_region_autoscaler" is also "ComputeAutoscalerAssetType". 
-func (r Resource) ApiResourceType() string { - if r.ApiResourceTypeKind != "" { - return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.ApiResourceTypeKind) +func (r Resource) CaiResourceType() string { + if r.CaiResourceKind != "" { + return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.CaiResourceKind) } return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.Name) diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 51c8b30033eb..937da02deb11 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -14,6 +14,7 @@ --- name: 'function' api_resource_type_kind: Function +cai_resource_kind: Function description: | A Cloud Function that contains user computation executed in response to an event. references: diff --git a/mmv1/products/compute/GlobalAddress.yaml b/mmv1/products/compute/GlobalAddress.yaml index 0f130cd27b86..9f0a273d5ea4 100644 --- a/mmv1/products/compute/GlobalAddress.yaml +++ b/mmv1/products/compute/GlobalAddress.yaml @@ -38,6 +38,7 @@ async: result: resource_inside_response: false collection_url_key: 'items' +include_in_tgc_next_DO_NOT_USE: true custom_code: pre_create: 'templates/terraform/pre_create/compute_global_address.go.tmpl' post_create: 'templates/terraform/post_create/labels.tmpl' diff --git a/mmv1/products/compute/RegionAutoscaler.yaml b/mmv1/products/compute/RegionAutoscaler.yaml index b475020433d7..2663c9da37b6 100644 --- a/mmv1/products/compute/RegionAutoscaler.yaml +++ b/mmv1/products/compute/RegionAutoscaler.yaml @@ -14,6 +14,7 @@ --- name: 'RegionAutoscaler' api_resource_type_kind: Autoscaler +cai_resource_kind: Autoscaler api_variant_patterns: - 'projects/{project}/regions/{region}/autoscalers/{autoscaler}' kind: 'compute#autoscaler' diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 8475124d31bf..8fd165bfd61a 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ 
b/mmv1/provider/terraform_tgc_next.go @@ -39,9 +39,9 @@ type TerraformGoogleConversionNext struct { ResourcesForVersion []ResourceIdentifier - // Multiple Terraform resources can share the same API resource type. + // Multiple Terraform resources can share the same CAI resource type. // For example, "google_compute_region_autoscaler" and "google_region_autoscaler" - ResourcesGroupedByApiResourceType map[string][]ResourceIdentifier + ResourcesByCaiResourceType map[string][]ResourceIdentifier TargetVersionName string @@ -61,11 +61,11 @@ type ResourceIdentifier struct { func NewTerraformGoogleConversionNext(product *api.Product, versionName string, startTime time.Time) TerraformGoogleConversionNext { t := TerraformGoogleConversionNext{ - Product: product, - TargetVersionName: versionName, - Version: *product.VersionObjOrClosest(versionName), - StartTime: startTime, - ResourcesGroupedByApiResourceType: make(map[string][]ResourceIdentifier), + Product: product, + TargetVersionName: versionName, + Version: *product.VersionObjOrClosest(versionName), + StartTime: startTime, + ResourcesByCaiResourceType: make(map[string][]ResourceIdentifier), } t.Product.SetPropertiesBasedOnVersion(&t.Version) @@ -321,7 +321,7 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target // The variable resources_for_version is used to generate resources in file // mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl func (tgc *TerraformGoogleConversionNext) generateResourcesForVersion(products []*api.Product) { - resourcesGroupedByApiResourceType := make(map[string][]ResourceIdentifier) + resourcesByCaiResourceType := make(map[string][]ResourceIdentifier) for _, productDefinition := range products { service := strings.ToLower(productDefinition.Name) @@ -344,23 +344,23 @@ func (tgc *TerraformGoogleConversionNext) generateResourcesForVersion(products [ } tgc.ResourcesForVersion = append(tgc.ResourcesForVersion, resourceIdentifier) - apiResourceType := 
fmt.Sprintf("%s.%s", service, object.ApiResourceType()) - if _, ok := resourcesGroupedByApiResourceType[apiResourceType]; !ok { - resourcesGroupedByApiResourceType[apiResourceType] = make([]ResourceIdentifier, 0) + caiResourceType := fmt.Sprintf("%s.%s", service, object.CaiResourceType()) + if _, ok := resourcesByCaiResourceType[caiResourceType]; !ok { + resourcesByCaiResourceType[caiResourceType] = make([]ResourceIdentifier, 0) } - resourcesGroupedByApiResourceType[apiResourceType] = append(resourcesGroupedByApiResourceType[apiResourceType], resourceIdentifier) + resourcesByCaiResourceType[caiResourceType] = append(resourcesByCaiResourceType[caiResourceType], resourceIdentifier) } } - for apiResourceType, resources := range resourcesGroupedByApiResourceType { + for caiResourceType, resources := range resourcesByCaiResourceType { // If no other Terraform resources share the API resource type, override the alias name as "Default" if len(resources) == 1 { for _, resourceIdentifier := range resources { resourceIdentifier.AliasName = "Default" - tgc.ResourcesGroupedByApiResourceType[apiResourceType] = []ResourceIdentifier{resourceIdentifier} + tgc.ResourcesByCaiResourceType[caiResourceType] = []ResourceIdentifier{resourceIdentifier} } } else { - tgc.ResourcesGroupedByApiResourceType[apiResourceType] = resources + tgc.ResourcesByCaiResourceType[caiResourceType] = resources } } } diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index 11a9ca891360..6338ae48945c 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -51,7 +51,7 @@ var ConverterMap = map[string]map[string]models.Cai2hclConverter{ }, // ####### END handwritten resources ########### - {{- range $resourceType, $resources := $.ResourcesGroupedByApiResourceType }} + {{- range $resourceType, $resources := $.ResourcesByCaiResourceType}} {{ 
$resourceType }}AssetType: { {{- range $object := $resources }} "{{ $object.AliasName }}": {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Cai2hclConverter(provider), diff --git a/mmv1/templates/tgc_next/services/resource.go.tmpl b/mmv1/templates/tgc_next/services/resource.go.tmpl index 343957bc0b4d..46312301b458 100644 --- a/mmv1/templates/tgc_next/services/resource.go.tmpl +++ b/mmv1/templates/tgc_next/services/resource.go.tmpl @@ -28,7 +28,7 @@ import ( ) {{ if $.DefineAssetTypeForResourceInProduct -}} -const {{ $.ApiResourceType -}}AssetType string = "{{ $.CaiAssetType }}" +const {{ $.CaiResourceType -}}AssetType string = "{{ $.CaiAssetType }}" {{- end }} const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 05ed8ce32418..d58f8edbd8b4 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -65,7 +65,7 @@ func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, conf return []caiasset.Asset{ { Name: name, - Type: {{ $.ApiResourceType -}}AssetType, + Type: {{ $.CaiResourceType -}}AssetType, Resource: &caiasset.AssetResource{ Version: "{{ $apiVersion }}", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", From ff10588710b8dcbc7c489dc1a9ae9bbfe2beaff1 Mon Sep 17 00:00:00 2001 From: Calvin Liu Date: Mon, 21 Jul 2025 14:19:20 -0700 Subject: [PATCH 576/884] use `location` instead of `region` when populating location field when calling getCertificateAuthority (#14536) --- .../terraform/decoders/memorystore_instance.go.tmpl | 2 +- .../memorystore/resource_memorystore_instance_test.go | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl 
b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl index ee96e78e4c3c..68ce254147ec 100644 --- a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl @@ -106,7 +106,7 @@ // Only instances with SERVER_AUTHENTICATION mode have certificate authority set if v, ok := res["transitEncryptionMode"].(string); ok && v=="SERVER_AUTHENTICATION" { - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}MemorystoreBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}region{{"}}"}}/instances/{{"{{"}}instance_id{{"}}"}}/certificateAuthority") + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}MemorystoreBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/instances/{{"{{"}}instance_id{{"}}"}}/certificateAuthority") if err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go index b664074aeded..12b182cfdea1 100644 --- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go @@ -1459,6 +1459,8 @@ func TestAccMemorystoreInstance_memorystoreInstanceTlsEnabled(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), + // Until https://github.com/hashicorp/terraform-provider-google/issues/23619 is fixed, use regions other than us-central1 to prevent issues like https://github.com/hashicorp/terraform-provider-google/issues/23543 + "location": "us-east1", } acctest.VcrTest(t, resource.TestCase{ @@ -1489,7 +1491,7 @@ resource "google_memorystore_instance" "instance-tls" { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } - location = "us-central1" + location = "%{location}" 
deletion_protection_enabled = false maintenance_policy { weekly_maintenance_window { @@ -1510,7 +1512,7 @@ resource "google_memorystore_instance" "instance-tls" { resource "google_network_connectivity_service_connection_policy" "default" { name = "tf-test-my-policy%{random_suffix}" - location = "us-central1" + location = "%{location}" service_class = "gcp-memorystore" description = "my basic service connection policy" network = google_compute_network.producer_net.id @@ -1522,7 +1524,7 @@ resource "google_network_connectivity_service_connection_policy" "default" { resource "google_compute_subnetwork" "producer_subnet" { name = "tf-test-my-subnet%{random_suffix}" ip_cidr_range = "10.0.0.248/29" - region = "us-central1" + region = "%{location}" network = google_compute_network.producer_net.id } From 741571bc4df66c9f2cc35bc0ba72aa0a27213a18 Mon Sep 17 00:00:00 2001 From: himanikh Date: Mon, 21 Jul 2025 14:30:16 -0700 Subject: [PATCH 577/884] Revise the deprecation message for discovery_endpoints in Memorystore Instance resource (#14570) --- mmv1/products/memorystore/Instance.yaml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 7cc491f6eaea..627a5c9dfaf3 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -287,9 +287,14 @@ properties: - name: 'discoveryEndpoints' type: Array description: - "Output only. Endpoints clients can connect to the instance through. - Currently only one\ndiscovery endpoint is supported. " - deprecation_message: '`discovery_endpoints` is deprecated Use `endpoints` instead.' + "Deprecated. Output only. Endpoints clients can connect to the instance through." + deprecation_message: + This field is deprecated. 
As a result it will not be populated + if the connections are created using `desired_auto_created_endpoints` + parameter or `google_memorystore_instance_desired_user_created_endpoints` + resource. Instead of this parameter, for discovery, use + `endpoints.connections.pscConnection` and `endpoints.connections.pscAutoConnection` + with `connectionType` CONNECTION_TYPE_DISCOVERY. output: true item_type: type: NestedObject From 675304b598a1608912119a7101f75616a5b6040d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 21 Jul 2025 17:46:42 -0700 Subject: [PATCH 578/884] tgc-revival: add google_compute_image (#14591) --- mmv1/api/resource.go | 28 +++++++++++-------- mmv1/products/cloudfunctions2/Function.yaml | 1 - mmv1/products/compute/GlobalAddress.yaml | 1 + mmv1/products/compute/Image.yaml | 1 + mmv1/products/compute/RegionAutoscaler.yaml | 1 - .../tgc_next/test/assert_test_files.go | 2 +- 6 files changed, 19 insertions(+), 15 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index cd4a3a5add43..5eef07eac22a 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1859,11 +1859,7 @@ func (r Resource) CaiProductBackendName(caiProductBaseUrl string) string { func (r Resource) CaiAssetType() string { baseURL := r.CaiProductBaseUrl() productBackendName := r.CaiProductBackendName(baseURL) - assetName := r.Name - if r.CaiResourceKind != "" { - assetName = r.CaiResourceKind - } - return fmt.Sprintf("%s.googleapis.com/%s", productBackendName, assetName) + return fmt.Sprintf("%s.googleapis.com/%s", productBackendName, r.CaiResourceName()) } // DefineAssetTypeForResourceInProduct marks the AssetType constant for this resource as defined. @@ -2070,16 +2066,24 @@ func (r Resource) ReadPropertiesForTgc() []*Type { }) } -// The API resource type of the resource. Normally, it is the resource name. -// Rarely, it is the API "resource type kind". -// For example, the API resource type of "google_compute_autoscaler" is "ComputeAutoscalerAssetType". 
-// The API resource type of "google_compute_region_autoscaler" is also "ComputeAutoscalerAssetType". +// For example, the CAI resource type with product of "google_compute_autoscaler" is "ComputeAutoscalerAssetType". +// The CAI resource type with product of "google_compute_region_autoscaler" is also "ComputeAutoscalerAssetType". func (r Resource) CaiResourceType() string { + return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.CaiResourceName()) +} + +// The API resource type of the resource. Normally, it is the resource name. +// Rarely, it is the API "resource type kind" or CAI "resource kind" +// For example, the CAI resource type of "google_compute_autoscaler" is "Autoscaler". +// The CAI resource type of "google_compute_region_autoscaler" is also "Autoscaler". +func (r Resource) CaiResourceName() string { if r.CaiResourceKind != "" { - return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.CaiResourceKind) + return r.CaiResourceKind } - - return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.Name) + if r.ApiResourceTypeKind != "" { + return r.ApiResourceTypeKind + } + return r.Name } func (r Resource) IsTgcCompiler() bool { diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 937da02deb11..51c8b30033eb 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -14,7 +14,6 @@ --- name: 'function' api_resource_type_kind: Function -cai_resource_kind: Function description: | A Cloud Function that contains user computation executed in response to an event. 
references: diff --git a/mmv1/products/compute/GlobalAddress.yaml b/mmv1/products/compute/GlobalAddress.yaml index 9f0a273d5ea4..33ed7872f069 100644 --- a/mmv1/products/compute/GlobalAddress.yaml +++ b/mmv1/products/compute/GlobalAddress.yaml @@ -14,6 +14,7 @@ --- name: 'GlobalAddress' api_resource_type_kind: Address +cai_resource_kind: GlobalAddress kind: 'compute#address' description: | Represents a Global Address resource. Global addresses are used for diff --git a/mmv1/products/compute/Image.yaml b/mmv1/products/compute/Image.yaml index ef66b209418f..40b33a1081f2 100644 --- a/mmv1/products/compute/Image.yaml +++ b/mmv1/products/compute/Image.yaml @@ -56,6 +56,7 @@ iam_policy: parent_resource_attribute: 'image' iam_conditions_request_type: 'QUERY_PARAM' example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'image_basic' diff --git a/mmv1/products/compute/RegionAutoscaler.yaml b/mmv1/products/compute/RegionAutoscaler.yaml index 2663c9da37b6..b475020433d7 100644 --- a/mmv1/products/compute/RegionAutoscaler.yaml +++ b/mmv1/products/compute/RegionAutoscaler.yaml @@ -14,7 +14,6 @@ --- name: 'RegionAutoscaler' api_resource_type_kind: Autoscaler -cai_resource_kind: Autoscaler api_variant_patterns: - 'projects/{project}/regions/{region}/autoscalers/{autoscaler}' kind: 'compute#autoscaler' diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 9d14cd8107de..8f252a3987c8 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -198,7 +198,7 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData if diff := cmp.Diff( asset.Resource, roundtripAsset.Resource, - cmpopts.IgnoreFields(caiasset.AssetResource{}, "Version", "Data", "DiscoveryDocumentURI"), + cmpopts.IgnoreFields(caiasset.AssetResource{}, "Version", "Data", 
"Location", "DiscoveryDocumentURI"), // Consider DiscoveryDocumentURI equal if they have the same number of path segments when split by "/". cmp.FilterPath(func(p cmp.Path) bool { return p.Last().String() == ".DiscoveryDocumentURI" From 18d1c9897344d7dea68755faf396bdbc5ffb22e4 Mon Sep 17 00:00:00 2001 From: Naga Bodepudi Date: Tue, 22 Jul 2025 12:02:14 -0400 Subject: [PATCH 579/884] Fix TestAccDNSManagedZone_dnsManagedZoneCloudLoggingExample (#14499) --- mmv1/products/dns/ManagedZone.yaml | 3 +++ .../terraform/examples/dns_managed_zone_cloud_logging.tf.tmpl | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/mmv1/products/dns/ManagedZone.yaml b/mmv1/products/dns/ManagedZone.yaml index 08ba42d0df10..f33cc1a52ad5 100644 --- a/mmv1/products/dns/ManagedZone.yaml +++ b/mmv1/products/dns/ManagedZone.yaml @@ -116,6 +116,9 @@ examples: primary_resource_id: 'cloud-logging-enabled-zone' vars: zone_name: 'cloud-logging-enabled-zone' + dns_name: 'services.example.com.' + test_vars_overrides: + 'dns_name': '"services.example.com-" + acctest.RandString(t, 10) + "."' virtual_fields: - name: 'force_destroy' description: 'Set this true to delete all records in the zone.' diff --git a/mmv1/templates/terraform/examples/dns_managed_zone_cloud_logging.tf.tmpl b/mmv1/templates/terraform/examples/dns_managed_zone_cloud_logging.tf.tmpl index abdde5de1116..07aa3051da25 100644 --- a/mmv1/templates/terraform/examples/dns_managed_zone_cloud_logging.tf.tmpl +++ b/mmv1/templates/terraform/examples/dns_managed_zone_cloud_logging.tf.tmpl @@ -1,6 +1,6 @@ resource "google_dns_managed_zone" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "zone_name"}}" - dns_name = "services.example.com." 
+ dns_name = "{{index $.Vars "dns_name"}}" description = "Example cloud logging enabled DNS zone" labels = { foo = "bar" From b244fae53fbdb0ad329b18af2c24c85daa252c59 Mon Sep 17 00:00:00 2001 From: nimish-khurana <221070136+nimish-khurana@users.noreply.github.com> Date: Tue, 22 Jul 2025 22:11:54 +0530 Subject: [PATCH 580/884] feat: Add resource google_oracle_database_odb_network (#14576) Co-authored-by: Scott Suarez --- mmv1/products/oracledatabase/OdbNetwork.yaml | 116 ++++++++++++++++++ .../oracledatabase_odbnetwork.tf.tmpl | 15 +++ .../oracledatabase_odbnetwork.go.tmpl | 3 + 3 files changed, 134 insertions(+) create mode 100644 mmv1/products/oracledatabase/OdbNetwork.yaml create mode 100644 mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl diff --git a/mmv1/products/oracledatabase/OdbNetwork.yaml b/mmv1/products/oracledatabase/OdbNetwork.yaml new file mode 100644 index 000000000000..28dc955f5b92 --- /dev/null +++ b/mmv1/products/oracledatabase/OdbNetwork.yaml @@ -0,0 +1,116 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'OdbNetwork' +description: An OdbNetwork resource which represents a private network providing connectivity between OracleDatabase resources and Google Cloud VPC network. 
+references: + guides: + 'OracleDatabase@Google Cloud': https://cloud.google.com/oracle/database/docs/overview' +base_url: 'projects/{{project}}/locations/{{location}}/odbNetworks' +immutable: true +self_link: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odb_network_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/odbNetworks?odbNetworkId={{odb_network_id}}' +id_format: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odb_network_id}}' +import_format: + - 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odb_network_id}}' +custom_code: + pre_delete: 'templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl' +examples: + - name: oracledatabase_odbnetwork + primary_resource_id: my-odbnetwork + vars: + project: my-project + odb_network_id: my-odbnetwork + deletion_protection: 'true' + ignore_read_extra: + - 'deletion_protection' + test_vars_overrides: + deletion_protection: 'false' + project: '"oci-terraform-testing-prod"' + odb_network_id: 'fmt.Sprintf("tf-test-odbnetwork-%s", acctest.RandString(t, 10))' +virtual_fields: + - name: 'deletion_protection' + type: Boolean + default_value: true + description: 'Whether or not to allow Terraform to destroy the instance. + Unless this field is set to false in Terraform state, a terraform destroy + or terraform apply that would delete the instance will fail.' +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + base_url: '{{op_id}}' + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: T2RiTmV0d29yaw== +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. 
+ immutable: true + url_param_only: true + required: true + - name: odbNetworkId + type: String + description: |- + The ID of the OdbNetwork to create. This value is restricted + to (^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$) and must be a maximum of 63 + characters in length. The value must start with a letter and end with + a letter or a number. + immutable: true + url_param_only: true + required: true +properties: + - name: createTime + type: String + description: The date and time that the OdbNetwork was created. + output: true + - name: entitlementId + type: String + description: The ID of the subscription entitlement associated with the OdbNetwork. + output: true + - name: labels + type: KeyValueLabels + description: Labels or tags associated with the resource. + - name: name + type: String + description: |- + Identifier. The name of the OdbNetwork resource in the following format: + projects/{project}/locations/{region}/odbNetworks/{odb_network} + output: true + - name: network + type: String + description: |- + The name of the VPC network in the following format: + projects/{project}/global/networks/{network} + required: true + - name: state + type: String + description: |- + State of the ODB Network. 
+ Possible values: + PROVISIONING + AVAILABLE + TERMINATING + FAILED + output: true diff --git a/mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl new file mode 100644 index 000000000000..86d8150f5ba7 --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl @@ -0,0 +1,15 @@ +resource "google_oracle_database_odb_network" "{{$.PrimaryResourceId}}"{ + odb_network_id = "{{index $.Vars "odb_network_id"}}" + location = "us-west3" + project = "{{index $.Vars "project"}}" + network = data.google_compute_network.default.id + labels = { + terraform_created = "true" + } + deletion_protection = "{{index $.Vars "deletion_protection"}}" +} + +data "google_compute_network" "default" { + name = "new" + project = "{{index $.Vars "project"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl b/mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl new file mode 100644 index 000000000000..3d3d974e8b47 --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl @@ -0,0 +1,3 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy google_oracle_database_odb_network resource with id : %q without setting deletion_protection=false and running `terraform apply`", d.Id()) +} From 2ad81ba0529c614321954a74e39c0a7efe04dff9 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Tue, 22 Jul 2025 09:57:33 -0700 Subject: [PATCH 581/884] Reservation Affinity for workbench instances (#13328) --- mmv1/products/workbench/Instance.yaml | 36 +++++++++++++++++++ .../workbench_instance_basic_gpu.tf.tmpl | 28 +++++++++++++++ .../examples/workbench_instance_full.tf.tmpl | 27 ++++++++++++++ 3 files changed, 91 insertions(+) diff --git a/mmv1/products/workbench/Instance.yaml b/mmv1/products/workbench/Instance.yaml index 
6ae1ddb53636..491ac1c0ae72 100644 --- a/mmv1/products/workbench/Instance.yaml +++ b/mmv1/products/workbench/Instance.yaml @@ -77,6 +77,7 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'workbench-instance' + reservation_name: 'wbi-reservation' ignore_read_extra: - 'gce_setup.0.vm_image' - name: 'workbench_instance_labels_stopped' @@ -97,6 +98,7 @@ examples: vars: instance_name: 'workbench-instance' network_name: 'wbi-test-default' + reservation_name: 'wbi-reservation' key_name: 'my-crypto-key' test_env_vars: project_id: 'PROJECT_NAME' @@ -461,6 +463,40 @@ properties: Defines the type of technology used by the confidential instance. enum_values: - 'SEV' + - name: 'reservationAffinity' + type: NestedObject + immutable: true + default_from_api: true + description: | + Reservations that this instance can consume from. + properties: + - name: 'consumeReservationType' + type: Enum + immutable: true + default_from_api: true + description: | + Specifies the type of reservation from which this instance can consume resources: + RESERVATION_ANY (default), RESERVATION_SPECIFIC, or RESERVATION_NONE. + enum_values: + - 'RESERVATION_NONE' + - 'RESERVATION_ANY' + - 'RESERVATION_SPECIFIC' + - name: 'key' + immutable: true + description: | + Corresponds to the label key of a reservation resource. To target a + RESERVATION_SPECIFIC by name, use compute.googleapis.com/reservation-name + as the key and specify the name of your reservation as its value. + - name: 'values' + type: Array + immutable: true + item_type: + type: String + description: | + Corresponds to the label values of a reservation resource. This can be + either a name to a reservation in the same project or + "projects/different-project/reservations/some-reservation-name" + to target a shared reservation in the same zone but in a different project. 
- name: 'proxyUri' type: String description: | diff --git a/mmv1/templates/terraform/examples/workbench_instance_basic_gpu.tf.tmpl b/mmv1/templates/terraform/examples/workbench_instance_basic_gpu.tf.tmpl index 4f2bff3f8d61..8dfdd0073d84 100644 --- a/mmv1/templates/terraform/examples/workbench_instance_basic_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/workbench_instance_basic_gpu.tf.tmpl @@ -1,3 +1,23 @@ +resource "google_compute_reservation" "gpu_reservation" { + name = "{{index $.Vars "reservation_name"}}" + zone = "us-central1-a" + + specific_reservation { + count = 1 + + instance_properties { + machine_type = "n1-standard-1" + + guest_accelerators { + accelerator_type = "nvidia-tesla-t4" + accelerator_count = 1 + } + } + } + + specific_reservation_required = false +} + resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "instance_name"}}" location = "us-central1-a" @@ -11,5 +31,13 @@ resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { project = "cloud-notebooks-managed" family = "workbench-instances" } + reservation_affinity { + consume_reservation_type = "RESERVATION_ANY" + } } + + depends_on = [ + google_compute_reservation.gpu_reservation + ] + } diff --git a/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl index 07df662cf2e0..00bf66d19580 100644 --- a/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/workbench_instance_full.tf.tmpl @@ -22,6 +22,26 @@ resource "google_service_account_iam_binding" "act_as_permission" { ] } +resource "google_compute_reservation" "gpu_reservation" { + name = "{{index $.Vars "reservation_name"}}" + zone = "us-central1-a" + + specific_reservation { + count = 1 + + instance_properties { + machine_type = "n1-standard-4" + + guest_accelerators { + accelerator_type = "nvidia-tesla-t4" + accelerator_count = 1 + } + } + } + + 
specific_reservation_required = true +} + resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "instance_name"}}" location = "us-central1-a" @@ -73,6 +93,12 @@ resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { serial-port-logging-enable = "false" } + reservation_affinity { + consume_reservation_type = "RESERVATION_SPECIFIC" + key = "compute.googleapis.com/reservation-name" + values = [google_compute_reservation.gpu_reservation.name] + } + enable_ip_forwarding = true tags = ["abc", "def"] @@ -96,5 +122,6 @@ resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { google_compute_subnetwork.my_subnetwork, google_compute_address.static, google_service_account_iam_binding.act_as_permission, + google_compute_reservation.gpu_reservation ] } From 976992ef2911d9d483db77b551d6e7310529694d Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Tue, 22 Jul 2025 11:22:58 -0700 Subject: [PATCH 582/884] tgc-revival: support google_blockchain_node_engine_blockchain_nodes (#14561) --- mmv1/api/resource.go | 14 +++++++++ .../blockchainnodeengine/BlockchainNodes.yaml | 1 + .../cai2hcl/resource_converter.go.tmpl | 10 ++---- .../pkg/cai2hcl/converters/utils/utils.go | 31 +++++++++++++++++++ .../tgc_next/test/assert_test_files.go | 7 +++++ 5 files changed, 56 insertions(+), 7 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 5eef07eac22a..8370cc211bef 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -2066,6 +2066,20 @@ func (r Resource) ReadPropertiesForTgc() []*Type { }) } +// OutputFieldSetStr returns a Go-syntax string representation of a set +// containing all the output properties for a resource. +// The property names are converted to snake_case. +// This is useful for generating code that requires a map literal of field names. 
+func (r Resource) OutputFieldSetStr() string { + fieldNames := make(map[string]struct{}) + for _, tp := range r.AllUserProperties() { + if tp.Output { + fieldNames[google.Underscore(tp.Name)] = struct{}{} + } + } + return fmt.Sprintf("%#v", fieldNames) +} + // For example, the CAI resource type with product of "google_compute_autoscaler" is "ComputeAutoscalerAssetType". // The CAI resource type with product of "google_compute_region_autoscaler" is also "ComputeAutoscalerAssetType". func (r Resource) CaiResourceType() string { diff --git a/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml b/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml index 43afc298cec6..472c5ae0d6c0 100644 --- a/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml +++ b/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml @@ -30,6 +30,7 @@ timeouts: insert_minutes: 45 update_minutes: 20 delete_minutes: 35 +include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: actions: ['create', 'delete', 'update'] diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 468f6e954430..3c3d7ab2d33c 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -109,13 +109,9 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass } {{ end}} -{{ range $param := $.UserParameters }} -{{/* Attempt to parse all parameters from asset name. */}} - hclData["{{ underscore $param.Name }}"] = utils.ParseFieldValue(asset.Name, "{{ underscore (plural $param.Name) }}") -{{ end }} -{{- if $.HasProject -}} - hclData["project"] = utils.ParseFieldValue(asset.Name, "projects") -{{- end}} +{{/* Attempt to parse all self-link parameters from asset name. 
*/}} + outputFields := {{ $.OutputFieldSetStr }} + utils.ParseUrlParamValuesFromAssetName(asset.Name, "{{ $.CaiAssetNameTemplate $productBackendName }}", outputFields, hclData) {{ range $prop := $.ReadPropertiesForTgc }} {{ if $prop.FlattenObject -}} if flattenedProp := flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config); flattenedProp != nil { diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go index 2226456fd24e..f8947e065da4 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go @@ -22,6 +22,37 @@ func ParseFieldValue(url string, name string) string { return "" } +/* + ParseUrlParamValuesFromAssetName uses CaiAssetNameTemplate to parse hclData from assetName, filtering out all outputFields + +template: //bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}} +assetName: //bigquery.googleapis.com/projects/my-project/datasets/my-dataset +hclData: [project:my-project dataset_id:my-dataset] +*/ +func ParseUrlParamValuesFromAssetName(assetName, template string, outputFields map[string]struct{}, hclData map[string]any) { + fragments := strings.Split(template, "/") + if len(fragments) < 2 { + // We need a field and a prefix. 
+ return + } + fields := make(map[string]string) // keys are prefixes in URI, values are names of fields + for ix, item := range fragments[1:] { + if trimmed, ok := strings.CutPrefix(item, "{{"); ok { + if trimmed, ok = strings.CutSuffix(trimmed, "}}"); ok { + fields[fragments[ix]] = trimmed // ix is relative to the subslice + } + } + } + fragments = strings.Split(assetName, "/") + for ix, item := range fragments[:len(fragments)-1] { + if fieldName, ok := fields[item]; ok { + if _, isOutput := outputFields[fieldName]; !isOutput { + hclData[fieldName] = fragments[ix+1] + } + } + } +} + // DecodeJSON decodes the map object into the target struct. func DecodeJSON(data map[string]interface{}, v interface{}) error { b, err := json.Marshal(data) diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 8f252a3987c8..f3c238028e06 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -207,6 +207,13 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData parts2 := strings.Split(y, "/") return len(parts1) == len(parts2) })), + cmp.FilterPath(func(p cmp.Path) bool { + return p.Last().String() == ".DiscoveryName" + }, cmp.Comparer(func(x, y string) bool { + xParts := strings.Split(x, "/") + yParts := strings.Split(y, "/") + return xParts[len(xParts)-1] == yParts[len(yParts)-1] + })), ); diff != "" { return fmt.Errorf("differences found between exported asset and roundtrip asset (-want +got):\n%s", diff) } From ce186968fbcb6e413e5eed6138abc08bc2676bc7 Mon Sep 17 00:00:00 2001 From: Jay Zhou Date: Tue, 22 Jul 2025 15:25:28 -0400 Subject: [PATCH 583/884] Remove cloudrun add-on from resource_container_cluster_test (#14588) --- .../resource_container_cluster_test.go.tmpl | 55 ------------------- 1 file changed, 55 deletions(-) diff --git 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 994dee9c2f2a..2ed5d5c63f1d 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -242,15 +242,6 @@ func TestAccContainerCluster_withAddons(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, }, - { - Config: testAccContainerCluster_withInternalLoadBalancer(pid, clusterName, networkName, subnetworkName), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, - }, }, }) } @@ -6877,9 +6868,6 @@ resource "google_container_cluster" "primary" { gcp_filestore_csi_driver_config { enabled = false } - cloudrun_config { - disabled = true - } dns_cache_config { enabled = false } @@ -6952,9 +6940,6 @@ resource "google_container_cluster" "primary" { gcp_filestore_csi_driver_config { enabled = true } - cloudrun_config { - disabled = false - } dns_cache_config { enabled = true } @@ -7003,46 +6988,6 @@ resource "google_container_cluster" "primary" { `, projectID, clusterName, networkName, subnetworkName) } -func testAccContainerCluster_withInternalLoadBalancer(projectID string, clusterName, networkName, subnetworkName string) string { - return fmt.Sprintf(` -data "google_project" "project" { - project_id = "%s" -} - -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - - min_master_version = "latest" - - workload_identity_config { - workload_pool = "${data.google_project.project.project_id}.svc.id.goog" - } - - addons_config { - http_load_balancing { - disabled = false - } - horizontal_pod_autoscaling 
{ - disabled = false - } - network_policy_config { - disabled = false - } - cloudrun_config { - disabled = false - load_balancer_type = "LOAD_BALANCER_TYPE_INTERNAL" - } - } - network = "%s" - subnetwork = "%s" - - deletion_protection = false -} -`, projectID, clusterName, networkName, subnetworkName) -} - func testAccContainerCluster_withNotificationConfig(clusterName, topic, networkName, subnetworkName string) string { return fmt.Sprintf(` From 83a7b7d3d13ffee7637d30e8c151a5938f973924 Mon Sep 17 00:00:00 2001 From: animeshnandanwar Date: Tue, 22 Jul 2025 13:42:51 -0700 Subject: [PATCH 584/884] Added `IdleTtl` field for dataproc session template resource. (#14585) --- mmv1/products/dataproc/SessionTemplate.yaml | 8 ++++++++ .../examples/dataproc_session_templates_jupyter.tf.tmpl | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mmv1/products/dataproc/SessionTemplate.yaml b/mmv1/products/dataproc/SessionTemplate.yaml index 6bb9457b85c4..06f77418bb16 100644 --- a/mmv1/products/dataproc/SessionTemplate.yaml +++ b/mmv1/products/dataproc/SessionTemplate.yaml @@ -165,6 +165,14 @@ properties: type: String description: | The Cloud KMS key to use for encryption. + - name: 'idleTtl' + type: String + description: | + The duration to keep the session alive while it's idling. + Exceeding this threshold causes the session to terminate. Minimum value is 10 minutes; maximum value is 14 day. + Defaults to 1 hour if not set. If both ttl and idleTtl are specified for an interactive session, the conditions + are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has + been exceeded, whichever occurs first. 
- name: 'ttl' type: String description: | diff --git a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl index 77d78c01fb0e..8061eb325b6a 100644 --- a/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl +++ b/mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl @@ -10,7 +10,7 @@ resource "google_dataproc_session_template" "{{$.PrimaryResourceId}}" { environment_config { execution_config { subnetwork_uri = "{{index $.Vars "subnetwork_name"}}" - ttl = "3600s" + idle_ttl = "3600s" network_tags = ["tag1"] authentication_config { user_workload_authentication_type = "END_USER_CREDENTIALS" From 9ea8b807fb8bfab4ffbf8888b7561ee068954e8b Mon Sep 17 00:00:00 2001 From: FilipKubawskiOkta Date: Wed, 23 Jul 2025 00:16:34 +0200 Subject: [PATCH 585/884] Update docs for `google_compute_forwarding_rule` (#14593) --- mmv1/products/compute/ForwardingRule.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/mmv1/products/compute/ForwardingRule.yaml b/mmv1/products/compute/ForwardingRule.yaml index 7975ee959dc8..d6f3cdb556bc 100644 --- a/mmv1/products/compute/ForwardingRule.yaml +++ b/mmv1/products/compute/ForwardingRule.yaml @@ -506,9 +506,6 @@ properties: The forwarded traffic must be of a type appropriate to the target object. * For load balancers, see the "Target" column in [Port specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications). - * For Private Service Connect forwarding rules that forward traffic to Google APIs, provide the name of a supported Google API bundle: - * `vpc-sc` - [ APIs that support VPC Service Controls](https://cloud.google.com/vpc-service-controls/docs/supported-products). - * `all-apis` - [All supported Google APIs](https://cloud.google.com/vpc/docs/private-service-connect#supported-apis). 
For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service attachment. update_url: 'projects/{{project}}/regions/{{region}}/forwardingRules/{{name}}/setTarget' From 8acad60da82a666c6e0bc8beaf0984796813022a Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Tue, 22 Jul 2025 15:30:52 -0700 Subject: [PATCH 586/884] WBI EUC (#14548) --- mmv1/products/workbench/Instance.yaml | 13 ++++++++++ .../constants/workbench_instance.go.tmpl | 26 ++++++++++++++++++- .../examples/workbench_instance_euc.tf.tmpl | 16 ++++++++++++ 3 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl diff --git a/mmv1/products/workbench/Instance.yaml b/mmv1/products/workbench/Instance.yaml index 491ac1c0ae72..bfdf2ff10181 100644 --- a/mmv1/products/workbench/Instance.yaml +++ b/mmv1/products/workbench/Instance.yaml @@ -115,6 +115,15 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'workbench-instance' + - name: 'workbench_instance_euc' + primary_resource_id: 'instance' + primary_resource_name: 'fmt.Sprintf("tf-test-workbench-instance%s", context["random_suffix"])' + region_override: 'us-west1-a' + vars: + instance_name: 'workbench-instance' + test_env_vars: + project_id: 'PROJECT_NAME' + project_number: 'PROJECT_NUMBER' virtual_fields: - name: 'desired_state' description: | @@ -612,3 +621,7 @@ properties: description: | Flag that specifies that a notebook can be accessed with third party identity provider. + - name: 'enableManagedEuc' + type: Boolean + description: | + Flag to enable managed end user credentials for the instance. 
diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index c04436ed140a..09e63d4fc722 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -24,7 +24,22 @@ func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceD var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ - "serial-port-logging-enable", + "serial-port-logging-enable", +} + +var WorkbenchInstanceEUCProvidedAdditionalMetadata = []string{ + "enable-oslogin", + "disable-ssh", + "ssh-keys", + "block-project-ssh-keys", + "post-startup-script", + "post-startup-script-behavior", + "startup-script", + "startup-script-url", + "gce-container-declaration", + "gce-software-declaration", + "serial-port-enable", + "euc-enabled", } var WorkbenchInstanceProvidedMetadata = []string{ @@ -45,6 +60,7 @@ var WorkbenchInstanceProvidedMetadata = []string{ "dataproc-region", "dataproc-service-account", "disable-check-xsrf", + "enable-euc", "framework", "generate-diagnostics-bucket", "generate-diagnostics-file", @@ -96,6 +112,14 @@ func WorkbenchInstanceMetadataDiffSuppress(k, old, new string, d *schema.Resourc return true } } + + if d.Get("enable_managed_euc").(bool){ + for _, metadata := range WorkbenchInstanceEUCProvidedAdditionalMetadata { + if key == metadata { + return true + } + } + } for _, metadata := range WorkbenchInstanceSettableUnmodifiableDefaultMetadata { if strings.Contains(k, metadata) && new == "" { diff --git a/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl b/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl new file mode 100644 index 000000000000..dabb6da89522 --- /dev/null +++ b/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl @@ -0,0 +1,16 @@ +resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars 
"instance_name"}}" + location = "us-central1-a" + + gce_setup { + machine_type = "e2-standard-4" + + metadata = { + terraform = "true" + } + } + + instance_owners = ["example@example.com"] + + enable_managed_euc = "true" +} From 0a7f36d7051ebe93cfc8f79658126411a153b889 Mon Sep 17 00:00:00 2001 From: nimish-khurana Date: Wed, 23 Jul 2025 20:16:44 +0530 Subject: [PATCH 587/884] feat: add OdbNetwork support in OracleDatabase AutonomousDatabase (#14586) --- .../oracledatabase/AutonomousDatabase.yaml | 48 ++++++++++++++++++- ...ase_autonomous_database_odbnetwork.tf.tmpl | 17 +++++++ ...abase_autonomous_database_publicip.tf.tmpl | 16 +++++++ 3 files changed, 79 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl diff --git a/mmv1/products/oracledatabase/AutonomousDatabase.yaml b/mmv1/products/oracledatabase/AutonomousDatabase.yaml index d22aacbbe4bc..3fb1691b9ddc 100644 --- a/mmv1/products/oracledatabase/AutonomousDatabase.yaml +++ b/mmv1/products/oracledatabase/AutonomousDatabase.yaml @@ -73,6 +73,36 @@ examples: deletion_protection: 'false' database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' endpoint_name: 'fmt.Sprintf("tftestendpoint%s", acctest.RandString(t, 10))' + - name: 'oracledatabase_autonomous_database_odbnetwork' + primary_resource_id: 'myADB' + vars: + project: 'my-project' + autonomous_database_id: 'my-instance' + database_name: 'mydatabase' + odb_network: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork' + odb_subnet: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork/odbSubnets/my-odbsubnet' + deletion_protection: 'true' + ignore_read_extra: + - 'deletion_protection' + test_vars_overrides: + deletion_protection: 'false' + project: '"oci-terraform-testing-prod"' + database_name: 
'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' + odb_network: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork"' + odb_subnet: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork/odbSubnets/tf-test-permanent-client-odbsubnet"' + - name: 'oracledatabase_autonomous_database_publicip' + primary_resource_id: 'myADB' + vars: + project: 'my-project' + autonomous_database_id: 'my-instance' + database_name: 'mydatabase' + deletion_protection: 'true' + ignore_read_extra: + - 'deletion_protection' + test_vars_overrides: + deletion_protection: 'false' + project: '"oci-terraform-testing-prod"' + database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' virtual_fields: - name: 'deletion_protection' type: Boolean @@ -623,11 +653,25 @@ properties: type: String description: "The name of the VPC network used by the Autonomous Database.\nFormat: projects/{project}/global/networks/{network} " - required: true + required: false - name: 'cidr' type: String description: 'The subnet CIDR range for the Autonmous Database. ' - required: true + required: false + - name: odbNetwork + type: String + description: |- + The name of the OdbNetwork associated with the Autonomous Database. + Format: + projects/{project}/locations/{location}/odbNetworks/{odb_network} + It is optional but if specified, this should match the parent ODBNetwork of + the odb_subnet and backup_odb_subnet. + - name: odbSubnet + type: String + description: |- + The name of the OdbSubnet associated with the Autonomous Database for + IP allocation. Format: + projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} - name: 'createTime' type: String description: 'The date and time that the Autonomous Database was created. 
' diff --git a/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl new file mode 100644 index 000000000000..0dbb6742838a --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_oracle_database_autonomous_database" "{{$.PrimaryResourceId}}"{ + autonomous_database_id = "{{index $.Vars "autonomous_database_id"}}" + location = "europe-west2" + project = "{{index $.Vars "project"}}" + database = "{{index $.Vars "database_name"}}" + admin_password = "123Abpassword" + odb_network = "{{index $.Vars "odb_network"}}" + odb_subnet = "{{index $.Vars "odb_subnet"}}" + properties { + compute_count = "2" + data_storage_size_tb="1" + db_version = "19c" + db_workload = "OLTP" + license_type = "LICENSE_INCLUDED" + } + deletion_protection = "{{index $.Vars "deletion_protection"}}" +} diff --git a/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl new file mode 100644 index 000000000000..a40d91c11b19 --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl @@ -0,0 +1,16 @@ +resource "google_oracle_database_autonomous_database" "{{$.PrimaryResourceId}}"{ + autonomous_database_id = "{{index $.Vars "autonomous_database_id"}}" + location = "europe-west2" + project = "{{index $.Vars "project"}}" + database = "{{index $.Vars "database_name"}}" + admin_password = "123Abpassword" + properties { + compute_count = "2" + data_storage_size_tb="1" + db_version = "19c" + db_workload = "OLTP" + license_type = "LICENSE_INCLUDED" + mtls_connection_required = "true" + } + deletion_protection = "{{index $.Vars "deletion_protection"}}" +} From 86ab0ea4e6eb2854c8c8d2cca0c18e3f6c9350e6 Mon Sep 17 00:00:00 2001 From: Sebastian 
Kaliszewski Date: Wed, 23 Jul 2025 16:55:05 +0200 Subject: [PATCH 588/884] Autoallocation options support for networkconnectivity InternalRange (#14526) --- .../networkconnectivity/InternalRange.yaml | 34 +++++++++++++++++++ ...ternal_ranges_allocation_algoritms.tf.tmpl | 18 ++++++++++ ...llocation_algoritms_random_first_n.tf.tmpl | 19 +++++++++++ 3 files changed, 71 insertions(+) create mode 100644 mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl diff --git a/mmv1/products/networkconnectivity/InternalRange.yaml b/mmv1/products/networkconnectivity/InternalRange.yaml index b9bb9d6ffc67..fff15a04e858 100644 --- a/mmv1/products/networkconnectivity/InternalRange.yaml +++ b/mmv1/products/networkconnectivity/InternalRange.yaml @@ -71,6 +71,16 @@ examples: internal_range_name: 'migration' network_name: 'internal-ranges' source_subnet_name: 'source-subnet' + - name: 'network_connectivity_internal_ranges_allocation_algoritms' + primary_resource_id: 'default' + vars: + internal_range_name: 'allocation-algorithms' + network_name: 'internal-ranges' + - name: 'network_connectivity_internal_ranges_allocation_algoritms_random_first_n' + primary_resource_id: 'default' + vars: + internal_range_name: 'allocation-algorithms-random-first-n' + network_name: 'internal-ranges' parameters: properties: - name: 'name' @@ -141,6 +151,30 @@ properties: Only IPv4 CIDR ranges are supported. item_type: type: String + - name: 'allocationOptions' + type: NestedObject + description: | + Options for automatically allocating a free range with a size given by prefixLength. + immutable: true + properties: + - name: 'allocationStrategy' + type: Enum + enum_values: + - 'RANDOM' + - 'FIRST_AVAILABLE' + - 'RANDOM_FIRST_N_AVAILABLE' + - 'FIRST_SMALLEST_FITTING' + description: | + Optional. 
Sets the strategy used to automatically find a free range of a size given by prefixLength. Can be set only when trying to create a reservation that automatically finds the free range to reserve. + immutable: true + - name: 'firstAvailableRangesLookupSize' + type: Integer + description: | + Must be set when allocation_strategy is RANDOM_FIRST_N_AVAILABLE, otherwise must remain unset. Defines the size of the set of free ranges from which RANDOM_FIRST_N_AVAILABLE strategy randomy selects one, + in other words it sets the N in the RANDOM_FIRST_N_AVAILABLE. + validation: + function: 'validation.IntAtLeast(1)' + immutable: true - name: 'users' type: Array description: | diff --git a/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl new file mode 100644 index 000000000000..d5229d6fc50a --- /dev/null +++ b/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl @@ -0,0 +1,18 @@ +resource "google_network_connectivity_internal_range" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "internal_range_name"}}" + network = google_compute_network.default.id + usage = "FOR_VPC" + peering = "FOR_SELF" + prefix_length = 24 + target_cidr_range = [ + "192.16.0.0/16" + ] + allocation_options { + allocation_strategy = "FIRST_SMALLEST_FITTING" + } +} + +resource "google_compute_network" "default" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} diff --git a/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl new file mode 100644 index 000000000000..65312d4ad6f6 --- /dev/null +++ 
b/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl @@ -0,0 +1,19 @@ +resource "google_network_connectivity_internal_range" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "internal_range_name"}}" + network = google_compute_network.default.id + usage = "FOR_VPC" + peering = "FOR_SELF" + prefix_length = 24 + target_cidr_range = [ + "192.16.0.0/16" + ] + allocation_options { + allocation_strategy = "RANDOM_FIRST_N_AVAILABLE" + first_available_ranges_lookup_size = 20 + } +} + +resource "google_compute_network" "default" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} From 8d01619fdc88fe2ba0d9bab6165abb3fca745131 Mon Sep 17 00:00:00 2001 From: nimish-khurana Date: Wed, 23 Jul 2025 21:13:21 +0530 Subject: [PATCH 589/884] feat: add OdbNetwork support in OracleDatabase CloudVmCluster resource (#14587) --- .../oracledatabase/CloudVmCluster.yaml | 50 +++++++++++++++++-- ...atabase_cloud_vmcluster_odbnetwork.tf.tmpl | 34 +++++++++++++ 2 files changed, 81 insertions(+), 3 deletions(-) create mode 100644 mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl diff --git a/mmv1/products/oracledatabase/CloudVmCluster.yaml b/mmv1/products/oracledatabase/CloudVmCluster.yaml index da816972400a..ced187994a37 100644 --- a/mmv1/products/oracledatabase/CloudVmCluster.yaml +++ b/mmv1/products/oracledatabase/CloudVmCluster.yaml @@ -65,6 +65,30 @@ examples: # See: https://github.com/hashicorp/terraform-provider-google/issues/20599 cloud_vm_cluster_id: 'fmt.Sprintf("ofake-tf-test-vmcluster-basic-%s", acctest.RandString(t, 10))' cloud_exadata_infrastructure_id: 'fmt.Sprintf("ofake-tf-test-exadata-for-vmcluster-basic-%s", acctest.RandString(t, 10))' + - name: 'oracledatabase_cloud_vmcluster_odbnetwork' + primary_resource_id: 'my_vmcluster' + vars: + project: 'my-project' + cloud_vm_cluster_id: 'my-instance' + cloud_exadata_infrastructure_id: 'my-exadata' + 
odb_network: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork' + odb_subnet: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork/odbSubnets/my-odbsubnet' + backup_odb_subnet: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork/odbSubnets/my-backup-odbsubnet' + deletion_protection: 'true' + ignore_read_extra: + - 'deletion_protection' + test_vars_overrides: + deletion_protection: 'false' + project: '"oci-terraform-testing-prod"' + # ofake- prefix is needed to create a dummy resource for testing purposes only + # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 + # As a result these resources are not sweepable + # See: https://github.com/hashicorp/terraform-provider-google/issues/20599 + cloud_vm_cluster_id: 'fmt.Sprintf("ofake-tf-test-vmcluster-odbnetwork-%s", acctest.RandString(t, 10))' + cloud_exadata_infrastructure_id: 'fmt.Sprintf("ofake-tf-test-exadata-for-vmcluster-odbnetwork-%s", acctest.RandString(t, 10))' + odb_network: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork"' + odb_subnet: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork/odbSubnets/tf-test-permanent-client-odbsubnet"' + backup_odb_subnet: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork/odbSubnets/tf-test-permanent-backup-odbsubnet"' - name: 'oracledatabase_cloud_vmcluster_full' primary_resource_id: 'my_vmcluster' vars: @@ -290,12 +314,32 @@ properties: - name: 'cidr' type: String description: 'Network settings. CIDR to use for cluster IP allocation. ' - required: true + required: false - name: 'backupSubnetCidr' type: String description: 'CIDR range of the backup subnet. 
' - required: true + required: false - name: 'network' type: String description: "The name of the VPC network.\nFormat: projects/{project}/global/networks/{network} " - required: true + required: false + - name: odbNetwork + type: String + description: |- + The name of the OdbNetwork associated with the VM Cluster. + Format: + projects/{project}/locations/{location}/odbNetworks/{odb_network} + It is optional but if specified, this should match the parent ODBNetwork of + the odb_subnet and backup_odb_subnet. + - name: odbSubnet + type: String + description: |- + The name of the OdbSubnet associated with the VM Cluster for + IP allocation. Format: + projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} + - name: backupOdbSubnet + type: String + description: |- + The name of the backup OdbSubnet associated with the VM Cluster. + Format: + projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} diff --git a/mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl new file mode 100644 index 000000000000..15685ba5c365 --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl @@ -0,0 +1,34 @@ +resource "google_oracle_database_cloud_vm_cluster" "{{$.PrimaryResourceId}}"{ + cloud_vm_cluster_id = "{{index $.Vars "cloud_vm_cluster_id"}}" + display_name = "{{index $.Vars "cloud_vm_cluster_id"}} displayname" + location = "europe-west2" + project = "{{index $.Vars "project"}}" + exadata_infrastructure = google_oracle_database_cloud_exadata_infrastructure.cloudExadataInfrastructures.id + odb_network = "{{index $.Vars "odb_network"}}" + odb_subnet = "{{index $.Vars "odb_subnet"}}" + backup_odb_subnet = "{{index $.Vars "backup_odb_subnet"}}" + properties { + license_type = "LICENSE_INCLUDED" + ssh_public_keys = ["ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQCz1X2744t+6vRLmE5u6nHi6/QWh8bQDgHmd+OIxRQIGA/IWUtCs2FnaCNZcqvZkaeyjk5v0lTA/n+9jvO42Ipib53athrfVG8gRt8fzPL66C6ZqHq+6zZophhrCdfJh/0G4x9xJh5gdMprlaCR1P8yAaVvhBQSKGc4SiIkyMNBcHJ5YTtMQMTfxaB4G1sHZ6SDAY9a6Cq/zNjDwfPapWLsiP4mRhE5SSjJX6l6EYbkm0JeLQg+AbJiNEPvrvDp1wtTxzlPJtIivthmLMThFxK7+DkrYFuLvN5AHUdo9KTDLvHtDCvV70r8v0gafsrKkM/OE9Jtzoo0e1N/5K/ZdyFRbAkFT4QSF3nwpbmBWLf2Evg//YyEuxnz4CwPqFST2mucnrCCGCVWp1vnHZ0y30nM35njLOmWdRDFy5l27pKUTwLp02y3UYiiZyP7d3/u5pKiN4vC27VuvzprSdJxWoAvluOiDeRh+/oeQDowxoT/Oop8DzB9uJmjktXw8jyMW2+Rpg+ENQqeNgF1OGlEzypaWiRskEFlkpLb4v/s3ZDYkL1oW0Nv/J8LTjTOTEaYt2Udjoe9x2xWiGnQixhdChWuG+MaoWffzUgx1tsVj/DBXijR5DjkPkrA1GA98zd3q8GKEaAdcDenJjHhNYSd4+rE9pIsnYn7fo5X/tFfcQH1XQ== nobody@google.com"] + cpu_core_count = "4" + gi_version = "19.0.0.0" + hostname_prefix = "hostname1" + } + + deletion_protection = "{{index $.Vars "deletion_protection"}}" +} + +resource "google_oracle_database_cloud_exadata_infrastructure" "cloudExadataInfrastructures"{ + cloud_exadata_infrastructure_id = "{{index $.Vars "cloud_exadata_infrastructure_id"}}" + display_name = "{{index $.Vars "cloud_exadata_infrastructure_id"}} displayname" + location = "europe-west2" + project = "{{index $.Vars "project"}}" + properties { + shape = "Exadata.X9M" + compute_count= "2" + storage_count= "3" + } + + deletion_protection = "{{index $.Vars "deletion_protection"}}" +} + From 495fc6c752f20810cfc2b20bf33a0a3d047e7287 Mon Sep 17 00:00:00 2001 From: shivangd <30335782+Gorlami96@users.noreply.github.com> Date: Wed, 23 Jul 2025 22:09:03 +0530 Subject: [PATCH 590/884] Router tags at creation (#14527) --- mmv1/products/compute/Router.yaml | 16 ++++ ...o => resource_compute_router_test.go.tmpl} | 75 +++++++++++++++++++ 2 files changed, 91 insertions(+) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_test.go => resource_compute_router_test.go.tmpl} (81%) diff --git a/mmv1/products/compute/Router.yaml b/mmv1/products/compute/Router.yaml index 
d762c7a40409..904a3edff560 100644 --- a/mmv1/products/compute/Router.yaml +++ b/mmv1/products/compute/Router.yaml @@ -233,3 +233,19 @@ properties: description: | Value of the key used for MD5 authentication. required: true + - name: 'params' + type: NestedObject + min_version: 'beta' + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the router. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. + api_name: resourceManagerTags + ignore_read: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl similarity index 81% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl index 876e8ed61194..af98e66adbae 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl @@ -211,6 +211,46 @@ func TestAccComputeRouter_addAndUpdateIdentifierRangeBgp(t *testing.T) { }) } + +{{- if ne $.TargetVersionName "ga" }} +func TestAccComputeRouter_resourceManagerTags(t *testing.T) { + t.Parallel() + org := envvar.GetTestOrgFromEnv(t) + + suffixName := acctest.RandString(t, 10) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-routers-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-routers-tagvalue", sharedTagkey, org) + routerName := 
fmt.Sprintf("tf-test-router-resource-manager-tags-%s", suffixName) + networkName := fmt.Sprintf("tf-test-network-resource-manager-tags-%s-net", suffixName) + subnetName := fmt.Sprintf("tf-test-subnet-resource-manager-tags-%s-subnet", suffixName) + context := map[string]interface{}{ + "network_name": networkName, + "subnet_name": subnetName, + "router_name": routerName, + "tag_key_id": tagKeyResult["name"], + "tag_value_id": tagValueResult["name"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeRouterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRouter_resourceManagerTags(context), + }, + { + ResourceName: "google_compute_router.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} +{{- end }} + func testAccComputeRouterBasic(routerName, resourceRegion string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -387,3 +427,38 @@ resource "google_compute_router" "foobar" { } `, routerName, routerName) } + + +{{- if ne $.TargetVersionName "ga" }} +func testAccComputeRouter_resourceManagerTags(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_compute_network" "foobar" { + provider = google-beta + name = "%{network_name}" + auto_create_subnetworks = false + } + + resource "google_compute_subnetwork" "foobar" { + provider = google-beta + name = "%{subnet_name}" + network = google_compute_network.foobar.self_link + ip_cidr_range = "10.0.0.0/16" + } + + resource "google_compute_router" "foobar" { + provider = google-beta + name = "%{router_name}" + region = google_compute_subnetwork.foobar.region + network = google_compute_network.foobar.name + bgp { + asn = 4294967294 + } + params { + resource_manager_tags = { + "%{tag_key_id}" = "%{tag_value_id}" + } + } + } + 
`, context) +} +{{- end }} From 29b8cadc6dd2822bd42fe5b741d992424ce235f6 Mon Sep 17 00:00:00 2001 From: wj-chen Date: Wed, 23 Jul 2025 09:40:23 -0700 Subject: [PATCH 591/884] Add a message for the change in default value for view.use_legacy_sql in google_bigquery_table (#14574) --- .../terraform/website/docs/r/bigquery_table.html.markdown | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index b54c6bef3213..b92ced3e90e2 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -426,6 +426,8 @@ The following arguments are supported: * `use_legacy_sql` - (Optional) Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL. + -> **Note**: Starting in provider version `7.0.0`, no default value is + provided for this field unless explicitly set in the configuration. 
The `materialized_view` block supports: From ae0d4a552b10e43b19a58d91b35c23a4c6915eb4 Mon Sep 17 00:00:00 2001 From: jialei-chen <147877028+jialei-chen@users.noreply.github.com> Date: Wed, 23 Jul 2025 09:41:42 -0700 Subject: [PATCH 592/884] Add a resource google_discovery_engine_recommendation_engine (#14538) --- .../discoveryengine/RecommendationEngine.yaml | 212 ++++++++++++++++++ ...tion_engine_hardcode_solution_type.go.tmpl | 3 + ...ngine_recommendationengine_generic.tf.tmpl | 20 ++ ...yengine_recommendationengine_media.tf.tmpl | 30 +++ ...overy_engine_recommendation_engine_test.go | 115 ++++++++++ 5 files changed, 380 insertions(+) create mode 100644 mmv1/products/discoveryengine/RecommendationEngine.yaml create mode 100644 mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl create mode 100644 mmv1/templates/terraform/examples/discoveryengine_recommendationengine_generic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/discoveryengine_recommendationengine_media.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_recommendation_engine_test.go diff --git a/mmv1/products/discoveryengine/RecommendationEngine.yaml b/mmv1/products/discoveryengine/RecommendationEngine.yaml new file mode 100644 index 000000000000..a28b51213529 --- /dev/null +++ b/mmv1/products/discoveryengine/RecommendationEngine.yaml @@ -0,0 +1,212 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'RecommendationEngine' +api_resource_type_kind: Engine +description: | + Vertex AI Search recommendation apps. +references: + guides: + 'Create a Recommendation Engine': 'https://cloud.google.com/generative-ai-app-builder/docs/create-generic-recommendations-app' + api: 'https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1/projects.locations.collections.engines' +base_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines?engineId={{engine_id}}' +update_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' +update_verb: 'PATCH' +update_mask: true +delete_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' +import_format: + - 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: false +async: + actions: ['create', 'delete'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: true +custom_code: + encoder: 'templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl' +examples: + - name: 'discoveryengine_recommendationengine_generic' + primary_resource_id: 'generic' + vars: + engine_id: 'recommendation-engine-id' + data_store_id: 'recommendation-datastore-id' + - name: 'discoveryengine_recommendationengine_media' + primary_resource_id: 'media' + vars: + engine_id: 'recommendation-engine-id' + data_store_id: 'recommendation-datastore-id' +parameters: + - name: 
'engineId' + type: String + description: | + Unique ID to use for Recommendation Engine. + url_param_only: true + required: true + immutable: true + - name: 'location' + type: String + description: | + The geographic location where the data store should reside. The value can + only be one of "global", "us" and "eu". + url_param_only: true + required: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The unique full resource name of the recommendation engine. Values are of the format + `projects/{project}/locations/{location}/collections/{collection}/engines/{engine_id}`. + This field must be a UTF-8 encoded string with a length limit of 1024 characters. + output: true + - name: 'displayName' + type: String + description: | + Required. The display name of the engine. Should be human readable. UTF-8 encoded string with limit of 1024 characters. + required: true + - name: 'createTime' + type: Time + description: | + Timestamp the Engine was created at. + output: true + - name: 'updateTime' + type: Time + description: | + Timestamp the Engine was last updated. + output: true + - name: 'dataStoreIds' + type: Array + description: | + The data stores associated with this engine. For SOLUTION_TYPE_RECOMMENDATION type of engines, they can only associate with at most one data store. + required: true + item_type: + type: String + - name: 'industryVertical' + type: Enum + description: | + The industry vertical that the engine registers. The restriction of the Engine industry vertical is based on DataStore: If unspecified, default to GENERIC. Vertical on Engine has to match vertical of the DataStore liniked to the engine. + immutable: true + ignore_read: true + default_value: "GENERIC" + enum_values: + - 'GENERIC' + - 'MEDIA' + - name: 'mediaRecommendationEngineConfig' + type: NestedObject + description: | + Configurations for a Media Recommendation Engine. 
Only applicable on the data stores + with SOLUTION_TYPE_RECOMMENDATION solution type and MEDIA industry vertical. + properties: + - name: 'type' + type: String + description: | + The type of engine. e.g., `recommended-for-you`. + This field together with MediaRecommendationEngineConfig.optimizationObjective describes + engine metadata to use to control engine training and serving. + Currently supported values: `recommended-for-you`, `others-you-may-like`, + `more-like-this`, `most-popular-items`. + - name: 'optimizationObjective' + type: String + description: | + The optimization objective. e.g., `cvr`. + This field together with MediaRecommendationEngineConfig.type describes + engine metadata to use to control engine training and serving. + Currently supported values: `ctr`, `cvr`. + If not specified, we choose default based on engine type. Default depends on type of recommendation: + `recommended-for-you` => `ctr` + `others-you-may-like` => `ctr` + - name: 'optimizationObjectiveConfig' + type: NestedObject + description: | + Name and value of the custom threshold for cvr optimization_objective. + For target_field `watch-time`, target_field_value must be an integer + value indicating the media progress time in seconds between (0, 86400] + (excludes 0, includes 86400) (e.g., 90). + For target_field `watch-percentage`, the target_field_value must be a + valid float value between (0, 1.0] (excludes 0, includes 1.0) (e.g., 0.5). + properties: + - name: 'targetField' + type: String + description: | + The name of the field to target. Currently supported values: `watch-percentage`, `watch-time`. + - name: 'targetFieldValueFloat' + type: Double + description: | + The threshold to be applied to the target (e.g., 0.5). + - name: 'trainingState' + type: Enum + description: | + The training state that the engine is in (e.g. `TRAINING` or `PAUSED`). 
+ Since part of the cost of running the service + is frequency of training - this can be used to determine when to train + engine in order to control cost. If not specified: the default value for + `CreateEngine` method is `TRAINING`. The default value for + `UpdateEngine` method is to keep the state the same as before. + enum_values: + - 'PAUSED' + - 'TRAINING' + - name: 'engineFeaturesConfig' + type: NestedObject + description: | + More feature configs of the selected engine type. + exactly_one_of: + - recommended_for_you_config + - most_popular_config + properties: + - name: 'recommendedForYouConfig' + type: NestedObject + description: | + Additional feature configurations for creating a `recommended-for-you` engine. + properties: + - name: 'contextEventType' + type: String + description: | + The type of event with which the engine is queried at prediction time. + If set to `generic`, only `view-item`, `media-play`,and + `media-complete` will be used as `context-event` in engine training. If + set to `view-home-page`, `view-home-page` will also be used as + `context-events` in addition to `view-item`, `media-play`, and + `media-complete`. Currently supported for the `recommended-for-you` + engine. Currently supported values: `view-home-page`, `generic`. + - name: 'mostPopularConfig' + type: NestedObject + description: | + Feature configurations that are required for creating a Most Popular engine. + properties: + - name: 'timeWindowDays' + type: Integer + description: | + The time window of which the engine is queried at training and + prediction time. Positive integers only. The value translates to the + last X days of events. Currently required for the `most-popular-items` + engine. + - name: 'commonConfig' + type: NestedObject + description: | + Common config spec that specifies the metadata of the engine. 
+ immutable: true + ignore_read: true + properties: + - name: 'companyName' + type: String + description: | + The name of the company, business or entity that is associated with the engine. Setting this may help improve LLM related features.cd diff --git a/mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl b/mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl new file mode 100644 index 000000000000..84bb8d3e49b6 --- /dev/null +++ b/mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl @@ -0,0 +1,3 @@ +// hard code solutionType to "SOLUTION_TYPE_RECOMMENDATION" for recommendation engine resource +obj["solutionType"] = "SOLUTION_TYPE_RECOMMENDATION" +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/discoveryengine_recommendationengine_generic.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_recommendationengine_generic.tf.tmpl new file mode 100644 index 000000000000..8411a6d63a81 --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_recommendationengine_generic.tf.tmpl @@ -0,0 +1,20 @@ +resource "google_discovery_engine_data_store" "generic" { + location = "global" + data_store_id = "{{index $.Vars "data_store_id"}}" + display_name = "tf-test-structured-datastore" + industry_vertical = "GENERIC" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_RECOMMENDATION"] + create_advanced_site_search = false + skip_default_schema_creation = false +} +resource "google_discovery_engine_recommendation_engine" "generic" { + engine_id = "{{index $.Vars "engine_id"}}" + location = google_discovery_engine_data_store.generic.location + display_name = "Example Recommendation Engine" + data_store_ids = [google_discovery_engine_data_store.generic.data_store_id] + industry_vertical = "GENERIC" + common_config { + company_name = "test-company" + } +} 
diff --git a/mmv1/templates/terraform/examples/discoveryengine_recommendationengine_media.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_recommendationengine_media.tf.tmpl new file mode 100644 index 000000000000..637f4984229c --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_recommendationengine_media.tf.tmpl @@ -0,0 +1,30 @@ +resource "google_discovery_engine_data_store" "media" { + location = "global" + data_store_id = "{{index $.Vars "data_store_id"}}" + display_name = "tf-test-structured-datastore" + industry_vertical = "MEDIA" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_RECOMMENDATION"] + create_advanced_site_search = false + skip_default_schema_creation = false +} +resource "google_discovery_engine_recommendation_engine" "media" { + engine_id = "{{index $.Vars "engine_id"}}" + location = google_discovery_engine_data_store.media.location + display_name = "Example Media Recommendation Engine" + data_store_ids = [google_discovery_engine_data_store.media.data_store_id] + industry_vertical = "MEDIA" + media_recommendation_engine_config { + type = "recommended-for-you" + optimization_objective = "ctr" + training_state = "PAUSED" + engine_features_config { + recommended_for_you_config { + context_event_type = "generic" + } + } + } + common_config { + company_name = "test-company" + } +} diff --git a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_recommendation_engine_test.go b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_recommendation_engine_test.go new file mode 100644 index 000000000000..746755a49910 --- /dev/null +++ b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_recommendation_engine_test.go @@ -0,0 +1,115 @@ +package discoveryengine_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func 
TestAccDiscoveryEngineRecommendationEngine_discoveryengineRecommendationengineMediaExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDiscoveryEngineRecommendationEngine_discoveryengineRecommendationengineMediaExample(context), + }, + { + ResourceName: "google_discovery_engine_recommendation_engine.media", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"common_config", "engine_id", "industry_vertical", "location"}, + }, + { + Config: testAccDiscoveryEngineRecommendationEngine_discoveryengineRecommendationengineMediaExample(context), + }, + { + ResourceName: "google_discovery_engine_recommendation_engine.media", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"common_config", "engine_id", "industry_vertical", "location"}, + }, + }, + }) +} + +func testAccDiscoveryEngineRecommendationEngine_discoveryengineRecommendationengineMediaExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_discovery_engine_data_store" "media" { + location = "global" + data_store_id = "tf-test-recommendation-datastore-id%{random_suffix}" + display_name = "tf-test-structured-datastore" + industry_vertical = "MEDIA" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_RECOMMENDATION"] + create_advanced_site_search = false + skip_default_schema_creation = false +} +resource "google_discovery_engine_recommendation_engine" "media" { + engine_id = "tf-test-recommendation-engine-id%{random_suffix}" + location = google_discovery_engine_data_store.media.location + display_name = "Example Media Recommendation Engine" + data_store_ids = 
[google_discovery_engine_data_store.media.data_store_id] + industry_vertical = "MEDIA" + media_recommendation_engine_config { + type = "recommended-for-you" + optimization_objective = "ctr" + training_state = "PAUSED" + engine_features_config { + recommended_for_you_config { + context_event_type = "generic" + } + } + } + common_config { + company_name = "test-company" + } +} +`, context) +} + +func testAccDiscoveryEngineRecommendationEngine_discoveryengineRecommendationengineMediaExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_discovery_engine_data_store" "media" { + location = "global" + data_store_id = "tf-test-recommendation-datastore-id%{random_suffix}" + display_name = "tf-test-structured-datastore" + industry_vertical = "MEDIA" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_RECOMMENDATION"] + create_advanced_site_search = false + skip_default_schema_creation = false +} +resource "google_discovery_engine_recommendation_engine" "media" { + engine_id = "tf-test-recommendation-engine-id%{random_suffix}" + location = google_discovery_engine_data_store.media.location + display_name = "Example Media Recommendation Engine" + data_store_ids = [google_discovery_engine_data_store.media.data_store_id] + industry_vertical = "MEDIA" + media_recommendation_engine_config { + type = "recommended-for-you" + optimization_objective = "cvr" + optimization_objective_config { + target_field = "watch-percentage" + target_field_value_float = 0.5 + } + training_state = "PAUSED" + engine_features_config { + recommended_for_you_config { + context_event_type = "generic" + } + } + } + common_config { + company_name = "test-company" + } +} +`, context) +} From 08698d305b13f176f511a628a4cc02c1a71ca231 Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Wed, 23 Jul 2025 14:14:31 -0400 Subject: [PATCH 593/884] Add deletion policy to Secure Source Manager repository (#14532) --- .../securesourcemanager/BranchRule.yaml | 6 
------ .../securesourcemanager/Instance.yaml | 5 +++-- .../securesourcemanager/Repository.yaml | 20 +++++++++++++------ ...e_source_manager_branch_rule_basic.tf.tmpl | 5 ++--- ...ce_manager_branch_rule_with_fields.tf.tmpl | 5 ++--- ...re_source_manager_repository_basic.tf.tmpl | 4 +--- ..._manager_repository_initial_config.tf.tmpl | 4 +--- ...curesourcemanager_deletion_policy.go.tmpl} | 2 +- 8 files changed, 24 insertions(+), 27 deletions(-) rename mmv1/templates/terraform/pre_delete/{securesourcemanager_instance.go.tmpl => securesourcemanager_deletion_policy.go.tmpl} (71%) diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml index ddb1dc08e4ac..fd7d5be807d7 100644 --- a/mmv1/products/securesourcemanager/BranchRule.yaml +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -47,13 +47,10 @@ examples: branch_rule_id: 'my-basic-branchrule' repository_id: 'my-basic-repository' instance_id: 'my-basic-instance' - prevent_destroy: 'true' deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' 'deletion_policy': '"DELETE"' - name: 'secure_source_manager_branch_rule_with_fields' primary_resource_id: 'default' @@ -61,13 +58,10 @@ examples: branch_rule_id: 'my-initial-branchrule' repository_id: 'my-initial-repository' instance_id: 'my-initial-instance' - prevent_destroy: 'true' deletion_policy: '"PREVENT"' test_vars_overrides: - 'prevent_destroy': 'false' 'deletion_policy': '"DELETE"' oics_vars_overrides: - 'prevent_destroy': 'false' 'deletion_policy': '"DELETE"' parameters: - name: 'branch_rule_id' diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index c97176c5f4ab..6e97fd0a575d 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -52,7 +52,7 @@ iam_policy: - 
'projects/{{project}}/locations/{{location}}/instances/{{instance_id}}' - '{{instance_id}}' custom_code: - pre_delete: 'templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl' examples: - name: 'secure_source_manager_instance_basic' primary_resource_id: 'default' @@ -171,7 +171,8 @@ virtual_fields: description: | The deletion policy for the instance. Setting `ABANDON` allows the resource to be abandoned, rather than deleted. Setting `DELETE` deletes the resource - and all its contents. Setting `PREVENT` prevents the resource from being deleted. + and all its contents. Setting `PREVENT` prevents the resource from accidental + deletion by erroring out during plan. Default is `DELETE`. Possible values are: * DELETE * PREVENT diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index bb0871c329f4..908bf7a46a59 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -47,6 +47,7 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}' - '{{repository_id}}' custom_code: + pre_delete: 'templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl' examples: - name: 'secure_source_manager_repository_basic' primary_resource_id: 'default' @@ -55,13 +56,10 @@ examples: repository_id: 'my-repository' instance_id: 'my-instance' deletion_policy: '"PREVENT"' - prevent_destroy: 'true' test_vars_overrides: 'deletion_policy': '"DELETE"' - 'prevent_destroy': 'false' oics_vars_overrides: 'deletion_policy': '"DELETE"' - 'prevent_destroy': 'false' - name: 'secure_source_manager_repository_initial_config' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-repository%s", context["random_suffix"])' @@ -69,13 +67,10 @@ examples: repository_id: 'my-repository' instance_id: 'my-instance' 
deletion_policy: '"PREVENT"' - prevent_destroy: 'true' test_vars_overrides: 'deletion_policy': '"DELETE"' - 'prevent_destroy': 'false' oics_vars_overrides: 'deletion_policy': '"DELETE"' - 'prevent_destroy': 'false' parameters: - name: 'location' type: String @@ -91,6 +86,19 @@ parameters: The ID for the Repository. url_param_only: true required: true +virtual_fields: + - name: 'deletion_policy' + type: String + description: | + The deletion policy for the repository. Setting `ABANDON` allows the resource + to be abandoned, rather than deleted. Setting `DELETE` deletes the resource + and all its contents. Setting `PREVENT` prevents the resource from accidental deletion + by erroring out during plan. + Default is `DELETE`. Possible values are: + * DELETE + * PREVENT + * ABANDON + default_value: 'DELETE' properties: - name: 'name' type: String diff --git a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl index cb795c3967d2..c2e83a420d05 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl @@ -10,10 +10,9 @@ resource "google_secure_source_manager_repository" "repository" { repository_id = "{{index $.Vars "repository_id"}}" location = google_secure_source_manager_instance.instance.location instance = google_secure_source_manager_instance.instance.name + # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } resource "google_secure_source_manager_branch_rule" "basic" { diff --git a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl index 3a6ccabc0d30..61f21168d1bc 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl @@ -10,10 +10,9 @@ resource "google_secure_source_manager_repository" "repository" { repository_id = "{{index $.Vars "repository_id"}}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } resource "google_secure_source_manager_branch_rule" "default" { diff --git a/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl index eb8f3a138416..7e7caa18bc54 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_repository_basic.tf.tmpl @@ -12,7 +12,5 @@ resource "google_secure_source_manager_repository" "{{$.PrimaryResourceId}}" { instance = google_secure_source_manager_instance.instance.name # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } diff --git a/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl index ea07d9afa4c6..ce8e522f75ad 100644 --- a/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl +++ b/mmv1/templates/terraform/examples/secure_source_manager_repository_initial_config.tf.tmpl @@ -20,7 +20,5 @@ resource "google_secure_source_manager_repository" "{{$.PrimaryResourceId}}" { } # Prevent accidental deletions. - lifecycle { - prevent_destroy = "{{index $.Vars "prevent_destroy"}}" - } + deletion_policy = "{{index $.Vars "deletion_policy"}}" } diff --git a/mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl b/mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl similarity index 71% rename from mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl rename to mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl index 6548d8c63d6b..26f84c8f949b 100644 --- a/mmv1/templates/terraform/pre_delete/securesourcemanager_instance.go.tmpl +++ b/mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl @@ -3,5 +3,5 @@ deletionPolicy := d.Get("deletion_policy"); if deletionPolicy == "ABANDON" { return nil; } else if deletionPolicy == "PREVENT" { - return fmt.Errorf(`cannot destroy instance without setting deletion_policy="DELETE"`) + return fmt.Errorf(`cannot destroy resource without setting deletion_policy="DELETE"`) } \ No newline at end of file From 2a8ed563698df4a9e72a59416c00ca47322de5a7 Mon Sep 17 00:00:00 2001 From: nimish-khurana Date: Thu, 24 Jul 2025 00:18:43 +0530 Subject: [PATCH 594/884] Feature/add resource google oracle database odb subnet (#14577) --- 
mmv1/products/oracledatabase/OdbSubnet.yaml | 126 ++++++++++++++++++ .../examples/oracledatabase_odbsubnet.tf.tmpl | 12 ++ .../oracledatabase_odbsubnet.go.tmpl | 3 + 3 files changed, 141 insertions(+) create mode 100644 mmv1/products/oracledatabase/OdbSubnet.yaml create mode 100644 mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl diff --git a/mmv1/products/oracledatabase/OdbSubnet.yaml b/mmv1/products/oracledatabase/OdbSubnet.yaml new file mode 100644 index 000000000000..ca3b6d28d5bb --- /dev/null +++ b/mmv1/products/oracledatabase/OdbSubnet.yaml @@ -0,0 +1,126 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'OdbSubnet' +description: 'An OdbSubnet resource which represents a subnet under an OdbNetwork.' 
+references: + guides: + 'OracleDatabase@Google Cloud': https://cloud.google.com/oracle/database/docs/overview' +base_url: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets' +immutable: true +self_link: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets/{{odb_subnet_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets?odbSubnetId={{odb_subnet_id}}' +id_format: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets/{{odb_subnet_id}}' +import_format: + - 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets/{{odb_subnet_id}}' +custom_code: + pre_delete: 'templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl' +examples: + - name: oracledatabase_odbsubnet + primary_resource_id: my-odbsubnet + vars: + project: my-project + odb_network_id: my-odbnetwork + odb_subnet_id: my-odbsubnet + deletion_protection: 'true' + ignore_read_extra: + - 'deletion_protection' + test_vars_overrides: + deletion_protection: 'false' + project: '"oci-terraform-testing-prod"' + odb_network_id: '"tf-test-permanent-odbnetwork"' + odb_subnet_id: 'fmt.Sprintf("tf-test-odbsubnet-%s", acctest.RandString(t, 10))' +virtual_fields: + - name: 'deletion_protection' + type: Boolean + default_value: true + description: 'Whether or not to allow Terraform to destroy the instance. + Unless this field is set to false in Terraform state, a terraform destroy + or terraform apply that would delete the instance will fail.' +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 90 + update_minutes: 90 + delete_minutes: 90 + base_url: '{{op_id}}' + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: T2RiU3VibmV0 +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. 
It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: odbnetwork + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: odbSubnetId + type: String + description: |- + The ID of the OdbSubnet to create. This value is restricted + to (^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$) and must be a maximum of 63 + characters in length. The value must start with a letter and end with + a letter or a number. + immutable: true + url_param_only: true + required: true +properties: + - name: cidrRange + type: String + description: The CIDR range of the subnet. + required: true + - name: createTime + type: String + description: The date and time that the OdbNetwork was created. + output: true + - name: labels + type: KeyValueLabels + description: Labels or tags associated with the resource. + - name: name + type: String + description: |- + Identifier. The name of the OdbSubnet resource in the following format: + projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} + output: true + - name: purpose + type: String + description: |- + Purpose of the subnet. + Possible values: + CLIENT_SUBNET + BACKUP_SUBNET + required: true + - name: state + type: String + description: |- + State of the ODB Subnet. 
+ Possible values: + PROVISIONING + AVAILABLE + TERMINATING + FAILED + output: true diff --git a/mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl new file mode 100644 index 000000000000..4a7fe1751189 --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_oracle_database_odb_subnet" "{{$.PrimaryResourceId}}"{ + odb_subnet_id = "{{index $.Vars "odb_subnet_id"}}" + location = "europe-west2" + project = "{{index $.Vars "project"}}" + odbnetwork = "{{index $.Vars "odb_network_id"}}" + cidr_range = "10.1.1.0/24" + purpose = "CLIENT_SUBNET" + labels = { + terraform_created = "true" + } + deletion_protection = "{{index $.Vars "deletion_protection"}}" +} diff --git a/mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl b/mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl new file mode 100644 index 000000000000..b81a3712b592 --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl @@ -0,0 +1,3 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy google_oracle_database_odb_subnet resource with id : %q without setting deletion_protection=false and running `terraform apply`", d.Id()) +} From 449e0b9f2a73fce948312f6e1613eda8a4416528 Mon Sep 17 00:00:00 2001 From: chenir0219 Date: Wed, 23 Jul 2025 21:52:41 +0000 Subject: [PATCH 595/884] add support for updating iops, throughput, accessmode in google_compute_region_disk (#14609) --- mmv1/products/compute/RegionDisk.yaml | 5 +++++ .../compute/resource_compute_region_disk_test.go.tmpl | 7 +++++++ 2 files changed, 12 insertions(+) diff --git a/mmv1/products/compute/RegionDisk.yaml b/mmv1/products/compute/RegionDisk.yaml index e4a6f499c1de..d68a4b4a6c82 100644 --- a/mmv1/products/compute/RegionDisk.yaml +++ b/mmv1/products/compute/RegionDisk.yaml @@ -61,6 +61,7 @@ iam_policy: custom_code: 
encoder: 'templates/terraform/encoders/disk.tmpl' decoder: 'templates/terraform/decoders/disk.tmpl' + update_encoder: 'templates/terraform/update_encoder/hyper_disk.go.tmpl' pre_delete: 'templates/terraform/pre_delete/detach_disk.tmpl' custom_diff: - 'customdiff.ForceNewIfChange("size", IsDiskShrinkage)' @@ -402,12 +403,16 @@ properties: that the disk can handle. Values must be between 10,000 and 120,000. For more details, see the Extreme persistent disk [documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). default_from_api: true + update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=provisionedIops' + update_verb: 'PATCH' - name: 'provisionedThroughput' type: Integer description: | Indicates how much throughput to provision for the disk. This sets the number of throughput mb per second that the disk can handle. Values must be greater than or equal to 1. default_from_api: true + update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=provisionedThroughput' + update_verb: 'PATCH' virtual_fields: - name: 'create_snapshot_before_destroy' type: Boolean diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl index 8f3d5d54d835..7f9acb4fc8dd 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl @@ -12,6 +12,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-testing/plancheck" {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/compute/v1" @@ -89,6 +90,12 @@ func TestAccComputeRegionDisk_hyperdisk(t *testing.T) { }, { Config: 
testAccComputeRegionDisk_hyperdiskUpdated(diskName, "name"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + // Check that the update is done in-place + plancheck.ExpectResourceAction("google_compute_region_disk.regiondisk", plancheck.ResourceActionUpdate), + }, + }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "access_mode", "READ_WRITE_SINGLE"), resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "provisioned_iops", "20000"), From 8c156514f34ad58fab298797f62157dc0446f89f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 23 Jul 2025 15:56:23 -0700 Subject: [PATCH 596/884] Removing stubs (#14613) --- go.mod | 3 --- main.go | 7 ------- 2 files changed, 10 deletions(-) delete mode 100644 go.mod delete mode 100644 main.go diff --git a/go.mod b/go.mod deleted file mode 100644 index 2c6c03665927..000000000000 --- a/go.mod +++ /dev/null @@ -1,3 +0,0 @@ -module github.com/GoogleCloudPlatform/magic-modules - -go 1.23.0 diff --git a/main.go b/main.go deleted file mode 100644 index 82002367b4e8..000000000000 --- a/main.go +++ /dev/null @@ -1,7 +0,0 @@ -package main - -import "fmt" - -func main() { - fmt.Println("This is currently a stub no-op function.") -} From 577a685f67f698670cad2929c1f821eea942de7e Mon Sep 17 00:00:00 2001 From: Robert Teller <31879487+r-teller@users.noreply.github.com> Date: Thu, 24 Jul 2025 09:26:26 -0700 Subject: [PATCH 597/884] Fixed incorrect immutable flag on linked_router_appliance_instances instances (#14578) --- mmv1/products/networkconnectivity/Spoke.yaml | 3 -- ...esource_network_connectivity_spoke_test.go | 51 ++++++++++++++++++- 2 files changed, 50 insertions(+), 4 deletions(-) diff --git a/mmv1/products/networkconnectivity/Spoke.yaml b/mmv1/products/networkconnectivity/Spoke.yaml index 831d45f6f387..205ca6da5a8e 100644 --- a/mmv1/products/networkconnectivity/Spoke.yaml +++ 
b/mmv1/products/networkconnectivity/Spoke.yaml @@ -226,7 +226,6 @@ properties: type: Array description: The list of router appliance instances required: true - immutable: true item_type: description: The list of router appliance instances type: NestedObject @@ -234,13 +233,11 @@ properties: - name: 'virtualMachine' type: String description: The URI of the virtual machine resource - immutable: true required: true diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - name: 'ipAddress' type: String description: The IP address on the VM to use for peering. - immutable: true required: true - name: 'siteToSiteDataTransfer' type: Boolean diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go index 9c28e20a50f9..ac8597ae627d 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" ) @@ -114,6 +115,11 @@ func TestAccNetworkConnectivitySpoke_RouterApplianceHandWritten(t *testing.T) { }, { Config: testAccNetworkConnectivitySpoke_RouterApplianceHandWrittenUpdate1(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_network_connectivity_spoke.primary", plancheck.ResourceActionUpdate), + }, + }, }, { ResourceName: "google_network_connectivity_spoke.primary", @@ -356,6 +362,27 @@ resource "google_compute_instance" "router-instance1" { } } +resource "google_compute_instance" "router-instance2" 
{ + name = "tf-test-router-instance2%{random_suffix}" + machine_type = "e2-medium" + can_ip_forward = true + zone = "%{zone}" + + boot_disk { + initialize_params { + image = "projects/debian-cloud/global/images/debian-10-buster-v20210817" + } + } + + network_interface { + subnetwork = google_compute_subnetwork.subnetwork.name + network_ip = "10.0.0.3" + access_config { + network_tier = "PREMIUM" + } + } +} + resource "google_network_connectivity_hub" "basic_hub" { name = "tf-test-hub%{random_suffix}" description = "A sample hub" @@ -419,6 +446,27 @@ resource "google_compute_instance" "router-instance1" { } } +resource "google_compute_instance" "router-instance2" { + name = "tf-test-router-instance2%{random_suffix}" + machine_type = "e2-medium" + can_ip_forward = true + zone = "%{zone}" + + boot_disk { + initialize_params { + image = "projects/debian-cloud/global/images/debian-10-buster-v20210817" + } + } + + network_interface { + subnetwork = google_compute_subnetwork.subnetwork.name + network_ip = "10.0.0.3" + access_config { + network_tier = "PREMIUM" + } + } +} + resource "google_network_connectivity_hub" "basic_hub" { name = "tf-test-hub%{random_suffix}" description = "A sample hub" @@ -440,6 +488,7 @@ resource "google_network_connectivity_spoke" "primary" { virtual_machine = google_compute_instance.router-instance1.self_link ip_address = "10.0.0.2" } + include_import_ranges = ["ALL_IPV4_RANGES"] site_to_site_data_transfer = true } } @@ -516,7 +565,7 @@ resource "google_network_connectivity_spoke" "primary" { location = "%{region}" description = "An UPDATED sample spoke with two linked routher appliance instances" labels = { - label-two = "value-two" + label-two = "value-three" } hub = google_network_connectivity_hub.basic_hub.id linked_router_appliance_instances { From f2e730e77c51e76788a85221b031e1271490174c Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Thu, 24 Jul 2025 23:27:46 +0530 Subject: [PATCH 598/884] Adding Tags support for Regional Secrets 
(#14314) --- .../secretmanagerregional/RegionalSecret.yaml | 8 +++ ...rce_secret_manager_regional_secret_test.go | 58 +++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/mmv1/products/secretmanagerregional/RegionalSecret.yaml b/mmv1/products/secretmanagerregional/RegionalSecret.yaml index 40caeaac73d1..09641d016359 100644 --- a/mmv1/products/secretmanagerregional/RegionalSecret.yaml +++ b/mmv1/products/secretmanagerregional/RegionalSecret.yaml @@ -224,6 +224,14 @@ properties: For secret with versionDestroyTtl>0, version destruction doesn't happen immediately on calling destroy instead the version goes to a disabled state and the actual destruction happens after this TTL expires. It must be atleast 24h. + - name: 'tags' + type: KeyValuePairs + description: | + A map of resource manager tags. + Resource manager tag keys and values have the same definition as resource manager tags. + Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_value_id}. 
+ immutable: true + ignore_read: true virtual_fields: - name: 'deletion_protection' description: | diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go index c925d26a357b..bba70b014d66 100644 --- a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -584,6 +585,38 @@ func TestAccSecretManagerRegionalRegionalSecret_deletionprotection(t *testing.T) }) } +func TestAccSecretManagerRegionalRegionalSecret_tags(t *testing.T) { + t.Parallel() + + tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "secretmanager_regional_regionalsecret-tagkey", map[string]interface{}{}) + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org": envvar.GetTestOrgFromEnv(t), + "tagKey": tagKey, + "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "secretmanager_regional_regionalsecret-tagvalue", tagKey), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerRegionalRegionalSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerRegionalSecretTags(context), + }, + { + ResourceName: "google_secret_manager_regional_secret.regional-secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels", 
"deletion_protection", "tags"}, + }, + { + Config: testAccSecretManagerRegionalSecretTagsDeletionProtection(context), + }, + }, + }) +} + func testAccSecretManagerRegionalSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_regional_secret" "regional-secret-basic" { @@ -1370,3 +1403,28 @@ resource "google_secret_manager_regional_secret" "regional-secret-deletion-prote } `, context) } + +func testAccSecretManagerRegionalSecretTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_regional_secret" "regional-secret-basic" { + secret_id = "tf-test-reg-secret-%{random_suffix}" + location = "us-central1" + tags = { + "%{org}/%{tagKey}" = "%{tagValue}" + } +} +`, context) +} + +func testAccSecretManagerRegionalSecretTagsDeletionProtection(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_regional_secret" "regional-secret-basic" { + secret_id = "tf-test-reg-secret-%{random_suffix}" + location = "us-central1" + tags = { + "%{org}/%{tagKey}" = "%{tagValue}" + } + deletion_protection = false +} +`, context) +} From c879104805c3f173ec73911318e25ba78935bd98 Mon Sep 17 00:00:00 2001 From: sameer-google Date: Thu, 24 Jul 2025 23:34:02 +0530 Subject: [PATCH 599/884] Storage Insights Datasets terraform support (#14564) --- .../storageinsights/DatasetConfig.yaml | 290 +++++++++++++++ ...e_insights_dataset_config_excludes.tf.tmpl | 20 ++ ...e_insights_dataset_config_includes.tf.tmpl | 25 ++ .../storage_insights_dataset_config.go.tmpl | 33 ++ .../storage_insights_dataset_config.go.tmpl | 116 ++++++ ...ce_storage_insights_dataset_config_test.go | 331 ++++++++++++++++++ 6 files changed, 815 insertions(+) create mode 100644 mmv1/products/storageinsights/DatasetConfig.yaml create mode 100644 mmv1/templates/terraform/examples/storage_insights_dataset_config_excludes.tf.tmpl create mode 100644 
mmv1/templates/terraform/examples/storage_insights_dataset_config_includes.tf.tmpl create mode 100644 mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl create mode 100644 mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl create mode 100644 mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go diff --git a/mmv1/products/storageinsights/DatasetConfig.yaml b/mmv1/products/storageinsights/DatasetConfig.yaml new file mode 100644 index 000000000000..382395fc32dd --- /dev/null +++ b/mmv1/products/storageinsights/DatasetConfig.yaml @@ -0,0 +1,290 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'DatasetConfig' +description: | + Represents a Storage Insights DatasetConfig. 
+references: + guides: + 'Official Documentation': 'https://cloud.google.com/storage/docs/insights/datasets' + api: 'https://cloud.google.com/storage/docs/insights/reference/rest/v1/projects.locations.datasetConfigs' + + +base_url: 'projects/{{project}}/locations/{{location}}/datasetConfigs' +self_link: 'projects/{{project}}/locations/{{location}}/datasetConfigs/{{dataset_config_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/datasetConfigs?datasetConfigId={{dataset_config_id}}' + +custom_code: + post_create: templates/terraform/post_create/storage_insights_dataset_config.go.tmpl + pre_update: templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl + +update_verb: 'PATCH' + +# Constructing updateMask in pre_update due to API exceptions. +update_mask: false + +# If true, code for handling long-running operations is generated along with +# the resource. If false, that code is not generated. +# write operations of dataset config is an LRO. +autogen_async: true +async: !ruby/object:Api::OpAsync + operation: !ruby/object:Api::OpAsync::Operation + base_url: '{{op_id}}' + +import_format: + - 'projects/{{project}}/locations/{{location}}/datasetConfigs/{{dataset_config_id}}' + +examples: + - name: 'storage_insights_dataset_config_includes' + primary_resource_id: 'config_includes' + vars: + dataset_config_id: 'my_config_includes' + - name: 'storage_insights_dataset_config_excludes' + primary_resource_id: 'config_excludes' + vars: + dataset_config_id: 'my_config_excludes' + +virtual_fields: + - name: 'link_dataset' + type: Boolean + default_value: false + description: | + A boolean terraform only flag to link/unlink dataset. + + Setting this field to true while creation will automatically link the created dataset as an additional functionality. + -> **Note** A dataset config resource can only be destroyed once it is unlinked, + so users must set this field to false to unlink the dataset and destroy the dataset config resource. 
+ +parameters: + - name: 'location' + type: String + required: true + immutable: true + url_param_only: true + description: | + The location of the DatasetConfig. + - name: 'datasetConfigId' + type: String + required: true + immutable: true + url_param_only: true + description: | + The user-defined ID of the DatasetConfig + +properties: + - name: 'name' + type: String + description: | + The full canonical resource name of the DatasetConfig (e.g., projects/P/locations/L/datasetConfigs/ID). + output: true + - name: 'createTime' + type: String + description: | + The UTC time at which the DatasetConfig was created. This is auto-populated. + output: true + - name: 'updateTime' + type: String + description: | + The UTC time at which the DatasetConfig was updated. This is auto-populated. + output: true + - name: 'uid' + type: String + description: | + System generated unique identifier for the resource. + output: true + - name: 'organizationNumber' + type: String + description: | + Organization resource ID that the source projects should belong to. + Projects that do not belong to the provided organization are not considered when creating the dataset. + default_from_api: true + immutable: true + - name: 'includeNewlyCreatedBuckets' + type: Boolean + description: | + If set to true, the request includes all the newly created buckets in the dataset that meet the inclusion and exclusion rules. + - name: 'retentionPeriodDays' + type: Integer + description: | + Number of days of history that must be retained. + required: true + - name: 'link' + type: NestedObject + description: | + Details of the linked DatasetConfig. + output: true + properties: + - name: 'dataset' + type: String + output: true + description: | + Dataset name for the linked DatasetConfig. + - name: 'linked' + type: Boolean + output: true + description: | + State of the linked DatasetConfig. + - name: 'identity' + type: NestedObject + description: | + Identity used by DatasetConfig. 
+ immutable: true + required: true + properties: + - name: 'name' + type: String + output: true + description: | + Name of the identity. + - name: 'type' + type: Enum + required: true + description: | + Type of identity to use for the DatasetConfig. + enum_values: + - 'IDENTITY_TYPE_PER_CONFIG' + - 'IDENTITY_TYPE_PER_PROJECT' + - name: 'datasetConfigState' + type: Enum + description: | + State of the DatasetConfig. + output: true + enum_values: + - 'CONFIG_STATE_UNSPECIFIED' + - 'CONFIG_STATE_ACTIVE' + - 'CONFIG_STATE_VERIFICATION_IN_PROGRESS' + - 'CONFIG_STATE_CREATED' + - 'CONFIG_STATE_PROCESSING' + - name: 'description' + type: String + description: | + An optional user-provided description for the dataset configuration with a maximum length of 256 characters. + - name: 'sourceProjects' + type: NestedObject + description: | + Defines the options for providing source projects for the DatasetConfig. + properties: + - name: 'projectNumbers' + type: Array + description: | + The list of project numbers to include in the DatasetConfig. + item_type: + type: String + exactly_one_of: + - 'source_projects' + - 'source_folders' + - 'organization_scope' + - name: 'sourceFolders' + type: NestedObject + description: | + Defines the options for providing source folders for the DatasetConfig. + properties: + - name: 'folderNumbers' + type: Array + description: | + The list of folder numbers to include in the DatasetConfig. + item_type: + type: String + exactly_one_of: + - 'source_projects' + - 'source_folders' + - 'organization_scope' + - name: 'organizationScope' + type: Boolean + description: | + Defines the options for providing a source organization for the DatasetConfig. + exactly_one_of: + - 'source_projects' + - 'source_folders' + - 'organization_scope' + - name: 'includeCloudStorageLocations' + type: NestedObject + description: | + Defines the options for including cloud storage locations for the DatasetConfig. 
+ properties: + - name: locations + type: Array + required: true + description: | + The list of cloud storage locations to include in the DatasetConfig. + item_type: + type: String + conflicts: + - 'exclude_cloud_storage_locations' + - name: 'excludeCloudStorageLocations' + type: NestedObject + description: | + Defines the options for excluding cloud storage locations for the DatasetConfig. + properties: + - name: locations + type: Array + required: true + description: | + The list of cloud storage locations to exclude in the DatasetConfig. + item_type: + type: String + conflicts: + - 'include_cloud_storage_locations' + - name: 'includeCloudStorageBuckets' + type: NestedObject + description: | + Defines the options for including cloud storage buckets for the DatasetConfig. + properties: + - name: cloudStorageBuckets + type: Array + required: true + description: | + The list of cloud storage buckets/bucket prefix regexes to include in the DatasetConfig. + item_type: + type: NestedObject + properties: + - name: bucketName + type: String + description: | + The list of cloud storage bucket names to include in the DatasetConfig. + Exactly one of the bucket_name and bucket_prefix_regex should be specified. + - name: bucketPrefixRegex + type: String + description: | + The list of regex patterns for bucket names matching the regex. + Regex should follow the syntax specified in google/re2 on GitHub. + Exactly one of the bucket_name and bucket_prefix_regex should be specified. + conflicts: + - 'exclude_cloud_storage_buckets' + - name: 'excludeCloudStorageBuckets' + type: NestedObject + description: | + Defined the options for excluding cloud storage buckets for the DatasetConfig. + properties: + - name: cloudStorageBuckets + type: Array + required: true + description: | + The list of cloud storage buckets/bucket prefix regexes to exclude in the DatasetConfig. 
+ item_type: + type: NestedObject + properties: + - name: bucketName + type: String + description: | + The list of cloud storage bucket names to exclude in the DatasetConfig. + Exactly one of the bucket_name and bucket_prefix_regex should be specified. + - name: bucketPrefixRegex + type: String + description: | + The list of regex patterns for bucket names matching the regex. + Regex should follow the syntax specified in google/re2 on GitHub. + Exactly one of the bucket_name and bucket_prefix_regex should be specified. + conflicts: + - 'include_cloud_storage_buckets' diff --git a/mmv1/templates/terraform/examples/storage_insights_dataset_config_excludes.tf.tmpl b/mmv1/templates/terraform/examples/storage_insights_dataset_config_excludes.tf.tmpl new file mode 100644 index 000000000000..18777c4b5a21 --- /dev/null +++ b/mmv1/templates/terraform/examples/storage_insights_dataset_config_excludes.tf.tmpl @@ -0,0 +1,20 @@ +resource "google_storage_insights_dataset_config" "{{$.PrimaryResourceId}}" { + location = "us-central1" + dataset_config_id = "{{$.Vars.dataset_config_id}}" + retention_period_days = 1 + organization_scope = true + identity { + type = "IDENTITY_TYPE_PER_PROJECT" + } + exclude_cloud_storage_locations { + locations = ["us-east1"] + } + exclude_cloud_storage_buckets { + cloud_storage_buckets { + bucket_name = "sample-bucket" + } + cloud_storage_buckets { + bucket_name = "sample-regex" + } + } +} diff --git a/mmv1/templates/terraform/examples/storage_insights_dataset_config_includes.tf.tmpl b/mmv1/templates/terraform/examples/storage_insights_dataset_config_includes.tf.tmpl new file mode 100644 index 000000000000..e22a8eb05308 --- /dev/null +++ b/mmv1/templates/terraform/examples/storage_insights_dataset_config_includes.tf.tmpl @@ -0,0 +1,25 @@ +resource "google_storage_insights_dataset_config" "{{$.PrimaryResourceId}}" { + location = "us-central1" + dataset_config_id = "{{$.Vars.dataset_config_id}}" + retention_period_days = 1 + source_projects { + 
project_numbers = ["123", "456", "789"] + } + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } + description = "Sample Description" + link_dataset = false + include_newly_created_buckets = true + include_cloud_storage_locations { + locations = ["us-east1"] + } + include_cloud_storage_buckets { + cloud_storage_buckets { + bucket_name = "sample-bucket" + } + cloud_storage_buckets { + bucket_name = "sample-regex" + } + } +} diff --git a/mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl b/mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl new file mode 100644 index 000000000000..2570b9e6df72 --- /dev/null +++ b/mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl @@ -0,0 +1,33 @@ +if d.Get("link_dataset") == true { + + linkUrl := strings.Replace(url, "?datasetConfigId=", "/", 1) + ":linkDataset" + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: linkUrl, + UserAgent: userAgent, + Timeout: d.Timeout(schema.TimeoutUpdate), + Headers: headers, + }) + + if err != nil { + return fmt.Errorf("Error Linking DatasetConfig %q: %s", d.Id(), err) + } else { + log.Printf("[DEBUG] Finished Linking DatasetConfig %q: %#v", d.Id(), res) + } + + err = StorageInsightsOperationWaitTime( + config, res, project, "Linking DatasetConfig", userAgent, + d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return err + } +} diff --git a/mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl b/mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl new file mode 100644 index 000000000000..456ef222d86d --- /dev/null +++ b/mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl @@ -0,0 +1,116 @@ 
+updateMask := []string{} + +if d.HasChange("include_newly_created_buckets") { + updateMask = append(updateMask, "includeNewlyCreatedBuckets") +} + +if d.HasChange("retention_period_days") { + updateMask = append(updateMask, "retentionPeriodDays") +} + +if d.HasChange("description") { + updateMask = append(updateMask, "description") +} + +if d.HasChange("include_cloud_storage_locations") { + _, new_storage_locations := d.GetChange("include_cloud_storage_locations") + if new_locations, ok := new_storage_locations.([]interface{}); ok && len(new_locations) > 0 { + updateMask = append(updateMask, "includeCloudStorageLocations") + } +} + +if d.HasChange("exclude_cloud_storage_locations") { + _, new_storage_locations := d.GetChange("exclude_cloud_storage_locations") + if new_locations, ok := new_storage_locations.([]interface{}); ok && len(new_locations) > 0 { + updateMask = append(updateMask, "excludeCloudStorageLocations") + } +} + +if d.HasChange("include_cloud_storage_buckets") { + _, new_storage_buckets := d.GetChange("include_cloud_storage_buckets") + if new_buckets, ok := new_storage_buckets.([]interface{}); ok && len(new_buckets) > 0 { + updateMask = append(updateMask, "includeCloudStorageBuckets") + } +} + +if d.HasChange("exclude_cloud_storage_buckets") { + _, new_storage_buckets := d.GetChange("exclude_cloud_storage_buckets") + if new_buckets, ok := new_storage_buckets.([]interface{}); ok && len(new_buckets) > 0 { + updateMask = append(updateMask, "excludeCloudStorageBuckets") + } +} + +if d.HasChange("source_projects") { + _, new_source_projects := d.GetChange("source_projects") + if new_project_numbers, ok := new_source_projects.([]interface{}); ok && len(new_project_numbers) > 0 { + updateMask = append(updateMask, "sourceProjects") + } +} + +if d.HasChange("source_folders") { + _, new_source_folders := d.GetChange("source_folders") + if new_folder_numbers, ok := new_source_folders.([]interface{}); ok && len(new_folder_numbers) > 0 { + updateMask = 
append(updateMask, "sourceFolders") + } +} + +if d.HasChange("organization_scope") { + _, new_organization_scope := d.GetChange("organization_scope") + if new_organization_scope == true { + updateMask = append(updateMask, "organizationScope") + } +} + + +// Link or Unlink a dataset if required +if d.HasChange("link_dataset") { + _, new_link_dataset := d.GetChange("link_dataset") + linkAPIEndPoint := "linkDataset" + if new_link_dataset == false { + linkAPIEndPoint = "unlinkDataset" + } + + linkUrl := fmt.Sprintf("%s:%s", url, linkAPIEndPoint) + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: linkUrl, + UserAgent: userAgent, + Timeout: d.Timeout(schema.TimeoutUpdate), + Headers: headers, + }) + + if err != nil { + return fmt.Errorf("Error %v DatasetConfig %q: %s", linkAPIEndPoint, d.Id(), err) + } else { + log.Printf("[DEBUG] Finished %s DatasetConfig %q: %#v", linkAPIEndPoint, d.Id(), res) + } + + err = StorageInsightsOperationWaitTime( + config, res, project, "Linking/Unlinking DatasetConfig", userAgent, + d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return err + } +} + + +// if updateMask is empty we are not updating anything so skip the post +if len(updateMask) == 0 { + return resourceStorageInsightsDatasetConfigRead(d, meta) +} + +// updateMask is a URL parameter but not present in the schema, so ReplaceVars +// won't set it +url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) +if err != nil { + return err +} diff --git a/mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go b/mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go new file mode 
100644 index 000000000000..48194cdc1d38 --- /dev/null +++ b/mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go @@ -0,0 +1,331 @@ +package storageinsights_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccStorageInsightsDatasetConfigExample_update_scope(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageInsightsDatasetConfigExample_update_project(context), + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_org(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_folder(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_org(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_project(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_folder(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_project(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + }, + }) +} + +func TestAccStorageInsightsDatasetConfigExample_update_filters(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageInsightsDatasetConfigExample_full_filters(context), + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + { + Config: testAccStorageInsightsDatasetConfigExample_update_filters(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, + }, + }, + }) +} + +func TestAccStorageInsightsDatasetConfigExample_update_link(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "project_id": envvar.GetTestProjectFromEnv(), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageInsightsDatasetConfigExample_full_link(context), + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location", "link_dataset"}, + }, + { + Config: 
testAccStorageInsightsDatasetConfigExample_update_unlink(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_storage_insights_dataset_config.config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"dataset_config_id", "location", "link_dataset"}, + }, + }, + }) +} + +func testAccStorageInsightsDatasetConfigExample_update_project(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" + dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + source_projects { + project_numbers = ["123", "456"] + } + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } +} +`, context) +} + +func testAccStorageInsightsDatasetConfigExample_update_folder(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" + dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + source_folders { + folder_numbers = ["123", "456"] + } + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } +} +`, context) +} + +func testAccStorageInsightsDatasetConfigExample_update_org(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" + dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + organization_scope = true + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } +} +`, context) +} + +func testAccStorageInsightsDatasetConfigExample_full_link(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" 
+ dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + organization_scope = true + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } + link_dataset = true + organization_number = "%{org_id}" + project = "%{project_id}" +} +`, context) +} + +func testAccStorageInsightsDatasetConfigExample_update_unlink(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" + dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + organization_scope = true + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } + link_dataset = false + project = "%{project_id}" +} +`, context) +} + +func testAccStorageInsightsDatasetConfigExample_full_filters(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" + dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + organization_scope = true + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } + description = "A sample description for dataset" + include_newly_created_buckets = true + include_cloud_storage_locations { + locations = ["us-east1", "europe-west2"] + } + exclude_cloud_storage_buckets { + cloud_storage_buckets { + bucket_name = "gs://sample-bucket1/" + } + cloud_storage_buckets { + bucket_prefix_regex = "gs://sample*/" + } + } +} +`, context) +} + +func testAccStorageInsightsDatasetConfigExample_update_filters(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_insights_dataset_config" "config" { + location = "us-central1" + dataset_config_id = "tf_test_my_config%{random_suffix}" + retention_period_days = 1 + organization_scope = true + identity { + type = "IDENTITY_TYPE_PER_CONFIG" + } + include_newly_created_buckets = false + exclude_cloud_storage_locations { + locations = ["us-east1", "europe-west2"] + 
} + include_cloud_storage_buckets { + cloud_storage_buckets { + bucket_name = "gs://sample-bucket1/" + } + cloud_storage_buckets { + bucket_prefix_regex = "gs://sample*/" + } + } +} +`, context) +} From 8a0d34c31a1a6e0d5d49487706e637b664c6a82d Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Thu, 24 Jul 2025 11:32:20 -0700 Subject: [PATCH 600/884] enabled the beta provider for `google_workbench_instance` (#14614) --- mmv1/products/workbench/product.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/products/workbench/product.yaml b/mmv1/products/workbench/product.yaml index f453551ea553..1502414d84d3 100644 --- a/mmv1/products/workbench/product.yaml +++ b/mmv1/products/workbench/product.yaml @@ -15,6 +15,8 @@ name: 'Workbench' display_name: 'Vertex AI Workbench' versions: + - name: 'beta' + base_url: 'https://notebooks.googleapis.com/v2/' - name: 'ga' base_url: 'https://notebooks.googleapis.com/v2/' scopes: From c4fa1597d0989587a385f91bb6ef45fbd2a52d4e Mon Sep 17 00:00:00 2001 From: sameer-google Date: Fri, 25 Jul 2025 00:18:49 +0530 Subject: [PATCH 601/884] Storage Insights Datasets Datasource (#14618) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ..._source_storage_insights_dataset_config.go | 41 +++++++++++++++++++ ...rage_insights_dataset_config.html.markdown | 36 ++++++++++++++++ 3 files changed, 78 insertions(+) create mode 100644 mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go create mode 100644 mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index a63f4b38e2c6..07830118fe48 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -254,6 +254,7 @@ var 
handwrittenDatasources = map[string]*schema.Resource{ "google_storage_control_folder_intelligence_config": storagecontrol.DataSourceGoogleStorageControlFolderIntelligenceConfig(), "google_storage_control_organization_intelligence_config": storagecontrol.DataSourceGoogleStorageControlOrganizationIntelligenceConfig(), "google_storage_control_project_intelligence_config": storagecontrol.DataSourceGoogleStorageControlProjectIntelligenceConfig(), + "google_storage_insights_dataset_config": storageinsights.DataSourceGoogleStorageInsightsDatasetConfig(), "google_storage_object_signed_url": storage.DataSourceGoogleSignedUrl(), "google_storage_project_service_account": storage.DataSourceGoogleStorageProjectServiceAccount(), "google_storage_transfer_project_service_account": storagetransfer.DataSourceGoogleStorageTransferProjectServiceAccount(), diff --git a/mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go b/mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go new file mode 100644 index 000000000000..b8a916b47a25 --- /dev/null +++ b/mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go @@ -0,0 +1,41 @@ +package storageinsights + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleStorageInsightsDatasetConfig() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageInsightsDatasetConfig().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "location", "dataset_config_id") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleStorageInsightsDatasetConfigRead, + Schema: dsSchema, + } +} + +func 
dataSourceGoogleStorageInsightsDatasetConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/datasetConfigs/{{dataset_config_id}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceStorageInsightsDatasetConfigRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git a/mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown b/mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown new file mode 100644 index 000000000000..e22e8716ab69 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown @@ -0,0 +1,36 @@ +--- +subcategory: "Cloud Storage Insights" +description: |- + Represents a Storage Insights DatasetConfig. +--- + +# google_storage_insights_dataset_config + +Use this data source to get information about a Storage Insights Dataset Config resource. +See [the official documentation](https://cloud.google.com/storage/docs/insights/datasets) +and +[API](https://cloud.google.com/storage/docs/insights/reference/rest/v1/projects.locations.datasetConfigs). + + +## Example Usage + +```hcl +data "google_storage_insights_dataset_config" "sample-config" { + project = "sample_project" + location = "sample_location" + dataset_config_id = "sample_dataset_config_id" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `project` - (Optional) The name of the GCP project in which dataset config exists. Can be configured through config as well. +* `location` - (Required) The location of the Dataset Config. 
+* `dataset_config_id` - (Required) The user-defined ID of the DatasetConfig + + +## Attributes Reference + +See [google_storage_insights_dataset_config](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/storage_insights_dataset_config#argument-reference) resource for details of the available attributes. From 93194a0e85346d333a1f171dbe434c696cb1b43d Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Thu, 24 Jul 2025 16:07:10 -0400 Subject: [PATCH 602/884] fixing permadiff create_without_validation (#14543) --- .../datastream/ConnectionProfile.yaml | 5 +++- .../datastream_connection_profile.go.tmpl | 23 +++++++++++++++++++ .../datastream_connection_profile.go.tmpl | 18 +++++++++++++++ ...urce_datastream_connection_profile_test.go | 8 +++---- 4 files changed, 49 insertions(+), 5 deletions(-) create mode 100644 mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl create mode 100644 mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl diff --git a/mmv1/products/datastream/ConnectionProfile.yaml b/mmv1/products/datastream/ConnectionProfile.yaml index 24e8cb23f246..99184fdd44d8 100644 --- a/mmv1/products/datastream/ConnectionProfile.yaml +++ b/mmv1/products/datastream/ConnectionProfile.yaml @@ -23,7 +23,7 @@ docs: id_format: 'projects/{{project}}/locations/{{location}}/connectionProfiles/{{connection_profile_id}}' base_url: 'projects/{{project}}/locations/{{location}}/connectionProfiles' self_link: 'projects/{{project}}/locations/{{location}}/connectionProfiles/{{connection_profile_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/connectionProfiles?connectionProfileId={{connection_profile_id}}&force={{create_without_validation}}' +create_url: 'projects/{{project}}/locations/{{location}}/connectionProfiles?connectionProfileId={{connection_profile_id}}' update_verb: 'PATCH' update_mask: true import_format: @@ -40,6 +40,8 @@ async: result: resource_inside_response: true custom_code: 
+ constants: 'templates/terraform/constants/datastream_connection_profile.go.tmpl' + pre_create: 'templates/terraform/pre_create/datastream_connection_profile.go.tmpl' examples: - name: 'datastream_connection_profile_basic' primary_resource_id: 'default' @@ -122,6 +124,7 @@ parameters: required: false immutable: true default_value: false + diff_suppress_func: 'resourceDataStreamStreamCreateWithoutValidationDiffSuppress' - name: 'location' type: String description: | diff --git a/mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl b/mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl new file mode 100644 index 000000000000..e28149a702c4 --- /dev/null +++ b/mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl @@ -0,0 +1,23 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} + +func resourceDataStreamStreamCreateWithoutValidationDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + // If the old value was "false" and the new value is now unset (empty string), + // return true to suppress the diff. + if (old == "" && new == "false") || (old == "false" && new == "") { + return true + } + + // Otherwise, do not suppress the diff. 
+ return false +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl b/mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl new file mode 100644 index 000000000000..dee1facac030 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl @@ -0,0 +1,18 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} + +if d.Get("create_without_validation").(bool) { + url, err = transport_tpg.AddQueryParams(url, map[string]string{"force": "true"}) +} else { + url, err = transport_tpg.AddQueryParams(url, map[string]string{"force": "false"}) +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go b/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go index d29028d83f53..ed33c1f14748 100644 --- a/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go +++ b/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go @@ -36,7 +36,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_profile_id", "location"}, + ImportStateVerifyIgnore: 
[]string{"create_without_validation", "connection_profile_id", "location"}, }, { Config: testAccDatastreamConnectionProfile_update2(context, true), @@ -45,7 +45,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_profile_id", "location", "postgresql_profile.0.password"}, + ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location", "postgresql_profile.0.password"}, }, { // Disable prevent_destroy @@ -58,7 +58,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.mysql_con_profile", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_profile_id", "location", "mysql_profile.0.password"}, + ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location", "mysql_profile.0.password"}, }, { // run once more to update the password. 
it should update it in-place @@ -68,7 +68,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.mysql_con_profile", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_profile_id", "location", "mysql_profile.0.password"}, + ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location", "mysql_profile.0.password"}, }, { // Disable prevent_destroy From c36c0303d05a0558c15f70fedb1504cd3dd27782 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 24 Jul 2025 13:54:24 -0700 Subject: [PATCH 603/884] tgc-revival: add google_pubsub_topic (#14605) --- mmv1/products/pubsub/Topic.yaml | 6 +++++- mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl | 10 ++++++++++ .../tgc_next/tfplan2cai/resource_converter.go.tmpl | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index 8524e5e1b0da..ea2d2f7ce35b 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -52,9 +52,10 @@ iam_policy: custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_topic.tmpl' + tgc_encoder: 'templates/tgc_next/encoders/pubsub_topic.go.tmpl' error_retry_predicates: - - 'transport_tpg.PubsubTopicProjectNotReady' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'pubsub_topic_basic' primary_resource_id: 'example' @@ -150,10 +151,12 @@ properties: operations on this topic and subscribe operations on any subscription attached to this topic in any region that is not in `allowedPersistenceRegions`. required: false + is_missing_in_cai: true - name: 'schemaSettings' type: NestedObject description: | Settings for validating messages published against a schema. 
+ is_missing_in_cai: true properties: - name: 'schema' type: String @@ -182,6 +185,7 @@ properties: set, message retention is controlled by settings on individual subscriptions. The rotation period has the format of a decimal number, followed by the letter `s` (seconds). Cannot be more than 31 days or less than 10 minutes. + is_missing_in_cai: true - name: 'ingestionDataSourceSettings' type: NestedObject description: | diff --git a/mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl b/mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl new file mode 100644 index 000000000000..68fc32a4e971 --- /dev/null +++ b/mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl @@ -0,0 +1,10 @@ +config := meta.(*transport_tpg.Config) + +nameProp, err := expandPubsubTopicName(d.Get("name"), d, config) +if err != nil { + return nil, err +} else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp +} + +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index d58f8edbd8b4..9347e52fe96c 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -59,7 +59,7 @@ func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, conf } if obj, err := Get{{ $.ResourceName -}}CaiObject(d, config); err == nil { location, _ := tpgresource.GetLocation(d, config) - if location == "" && strings.Contains(name, "/global/") { + if location == "" { location = "global" } return []caiasset.Asset{ From 9ff76c887a35b268b981a8f8172413bd48ff92a5 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 24 Jul 2025 15:20:45 -0700 Subject: [PATCH 604/884] Removed stale TODO assignments (#14631) --- .ci/magician/cmd/check_cassettes.go | 2 +- 
.../cmd/create_test_failure_ticket.go | 2 +- .ci/magician/cmd/generate_comment.go | 2 +- mmv1/api/type.go | 2 +- mmv1/openapi_generate/parser.go | 2 +- .../apigee/SecurityMonitoringCondition.yaml | 4 +-- .../appengine/FlexibleAppVersion.yaml | 4 +-- .../appengine/StandardAppVersion.yaml | 4 +-- .../products/artifactregistry/Repository.yaml | 6 ++-- mmv1/products/bigquery/Job.yaml | 4 +-- mmv1/products/bigquery/Table.yaml | 2 +- .../binaryauthorization/Attestor.yaml | 4 +-- mmv1/products/compute/Address.yaml | 2 +- mmv1/products/compute/Disk.yaml | 4 +-- mmv1/products/compute/DiskType.yaml | 4 +-- mmv1/products/compute/Firewall.yaml | 2 +- mmv1/products/compute/ForwardingRule.yaml | 2 +- .../compute/GlobalForwardingRule.yaml | 2 +- mmv1/products/compute/Image.yaml | 4 +-- mmv1/products/compute/Instance.yaml | 10 +++--- .../compute/InstanceGroupManager.yaml | 2 +- mmv1/products/compute/RegionAutoscaler.yaml | 2 +- mmv1/products/compute/RegionDisk.yaml | 4 +-- .../compute/RegionInstanceGroupManager.yaml | 2 +- mmv1/products/compute/Router.yaml | 2 +- mmv1/products/compute/ServiceAttachment.yaml | 4 +-- mmv1/products/compute/Snapshot.yaml | 2 +- mmv1/products/dns/ManagedZone.yaml | 4 +-- mmv1/products/firestore/Index.yaml | 6 ++-- mmv1/products/gkebackup/BackupPlan.yaml | 6 ++-- .../networksecurity/AddressGroup.yaml | 2 +- mmv1/products/osconfig/GuestPolicies.yaml | 32 +++++++++---------- mmv1/products/vertexai/Endpoint.yaml | 2 +- .../workstations/WorkstationCluster.yaml | 2 +- .../workstations/WorkstationConfig.yaml | 4 +-- mmv1/provider/provider.go | 2 +- .../base_configs/iam_test_file.go.tmpl | 2 +- .../terraform/update_encoder/ssl_policy.tmpl | 4 +-- .../acctest/bootstrap_test_utils.go.tmpl | 2 +- .../fwresource/framework_location.go | 4 +-- .../resource_bigtable_instance_test.go | 2 +- .../bigtable/resource_bigtable_table.go | 2 +- .../resource_composer_environment_test.go | 22 ++++++------- .../resource_dataproc_job_test.go.tmpl | 4 +-- 
.../resource_eventarc_message_bus_test.go | 2 +- .../resource_runtimeconfig_variable.go.tmpl | 2 +- .../storage/iam_storage_bucket_test.go | 2 +- .../iam_storage_managed_folder_test.go | 2 +- .../resource_storage_object_acl_test.go | 2 +- .../transport/error_retry_predicates.go | 2 +- tpgtools/property.go | 2 +- tpgtools/sample.go | 2 +- tpgtools/templates/resource.go.tmpl | 2 +- tpgtools/templates/serialization.go.tmpl | 4 +-- 54 files changed, 105 insertions(+), 105 deletions(-) diff --git a/.ci/magician/cmd/check_cassettes.go b/.ci/magician/cmd/check_cassettes.go index 74b5ec3ce3d6..bbf94d800933 100644 --- a/.ci/magician/cmd/check_cassettes.go +++ b/.ci/magician/cmd/check_cassettes.go @@ -130,7 +130,7 @@ func execCheckCassettes(commit string, vt *vcr.Tester, ctlr *source.Controller) return fmt.Errorf("error uploading logs: %w", err) } fmt.Println(len(result.FailedTests), " failed tests: ", result.FailedTests) - // TODO(trodge) report these failures to bigquery + // TODO report these failures to bigquery fmt.Println(len(result.PassedTests), " passed tests: ", result.PassedTests) fmt.Println(len(result.SkippedTests), " skipped tests: ", result.SkippedTests) diff --git a/.ci/magician/cmd/create_test_failure_ticket.go b/.ci/magician/cmd/create_test_failure_ticket.go index 7576ace9d918..41b4716794cb 100644 --- a/.ci/magician/cmd/create_test_failure_ticket.go +++ b/.ci/magician/cmd/create_test_failure_ticket.go @@ -521,7 +521,7 @@ func init() { } var ( - // TODO(shuyama1): add all mismatch resource names + // TODO: add all mismatch resource names resourceNameConverter = map[string]string{ "google_iam3_projects_policy_binding": "google_iam_projects_policy_binding", "google_iam3_organizations_policy_binding": "google_iam_organizations_policy_binding", diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index 5504e66aeeb0..976435500c75 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ 
-180,7 +180,7 @@ func listGCEnvironmentVariables() string { func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, projectId, commitSha string, gh GithubClient, rnr ExecRunner, ctlr *source.Controller) error { errors := map[string][]string{"Other": []string{}} - // TODO(ScottSuarez) - temporary fix to ensure the label is removed. + // TODO - temporary fix to ensure the label is removed. // Once we migrate to the new trigger there is an explicit task // for this and this line can be removed. gh.RemoveLabel(fmt.Sprint(prNumber), "awaiting-approval") diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 8d9e85569761..bd0f4c7fa0d6 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -1225,7 +1225,7 @@ func (t *Type) ProviderOnly() bool { // fields still need to be included, ie: // flattenedField > newParent > renameMe should be passed to this function as // flattened_field.0.new_parent.0.im_renamed -// TODO(emilymye): Change format of input for +// TODO: Change format of input for // exactly_one_of/at_least_one_of/etc to use camelcase, MM properities and // convert to snake in this method func (t *Type) GetPropertySchemaPath(schemaPath string) string { diff --git a/mmv1/openapi_generate/parser.go b/mmv1/openapi_generate/parser.go index b604d5503de4..f9ae4b845cb7 100644 --- a/mmv1/openapi_generate/parser.go +++ b/mmv1/openapi_generate/parser.go @@ -163,7 +163,7 @@ func buildProduct(filePath, output string, root *openapi3.T, header []byte) stri apiVersion := &product.Version{} apiVersion.BaseUrl = fmt.Sprintf("%s/%s/", server, version) - // TODO(slevenick) figure out how to tell the API version + // TODO figure out how to tell the API version apiVersion.Name = "ga" apiProduct.Versions = []*product.Version{apiVersion} diff --git a/mmv1/products/apigee/SecurityMonitoringCondition.yaml b/mmv1/products/apigee/SecurityMonitoringCondition.yaml index fb4d5a459c6e..6960f7be563f 100644 --- a/mmv1/products/apigee/SecurityMonitoringCondition.yaml +++ 
b/mmv1/products/apigee/SecurityMonitoringCondition.yaml @@ -82,12 +82,12 @@ properties: type: NestedObject exactly_one_of: - include_all_resources - # TODO(hashicorp/terraform-provider-google#22581): add this block back + test once deployment is supported + # TODO: hashicorp/terraform-provider-google#22581 add this block back + test once deployment is supported # - include properties: [] send_empty_value: true allow_empty_object: true - # TODO(hashicorp/terraform-provider-google#22581): add this block back + test once deployment is supported + # TODO: hashicorp/terraform-provider-google#22581 add this block back + test once deployment is supported # - name: 'include' # type: NestedObject # properties: diff --git a/mmv1/products/appengine/FlexibleAppVersion.yaml b/mmv1/products/appengine/FlexibleAppVersion.yaml index 07f1cacf6a04..28c28cf7b31a 100644 --- a/mmv1/products/appengine/FlexibleAppVersion.yaml +++ b/mmv1/products/appengine/FlexibleAppVersion.yaml @@ -322,7 +322,7 @@ properties: - 'REDIRECT_HTTP_RESPONSE_CODE_307' - name: 'script' type: NestedObject - # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set description: | Executes a script to handle the requests that match this URL pattern. Only the auto value is supported for Node.js in the App Engine standard environment, for example "script:" "auto". @@ -334,7 +334,7 @@ properties: required: true - name: 'staticFiles' type: NestedObject - # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set description: | Files served directly to the user for a given URL, such as images, CSS stylesheets, or JavaScript source files. Static file handlers describe which files in the application directory are static files, and which URLs serve them. 
diff --git a/mmv1/products/appengine/StandardAppVersion.yaml b/mmv1/products/appengine/StandardAppVersion.yaml index c4a8a296c0e8..d3d740b1e6c9 100644 --- a/mmv1/products/appengine/StandardAppVersion.yaml +++ b/mmv1/products/appengine/StandardAppVersion.yaml @@ -183,7 +183,7 @@ properties: - 'REDIRECT_HTTP_RESPONSE_CODE_307' - name: 'script' type: NestedObject - # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set description: | Executes a script to handle the requests that match this URL pattern. Only the auto value is supported for Node.js in the App Engine standard environment, for example "script:" "auto". @@ -195,7 +195,7 @@ properties: required: true - name: 'staticFiles' type: NestedObject - # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set description: | Files served directly to the user for a given URL, such as images, CSS stylesheets, or JavaScript source files. Static file handlers describe which files in the application directory are static files, and which URLs serve them. properties: diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index 645916cc635a..3b9ffebd2bff 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -42,7 +42,7 @@ async: iam_policy: method_name_separator: ':' allowed_iam_role: 'roles/artifactregistry.reader' - # TODO (camthornton): Change to repository_id in 4.0 + # TODO: Change to repository_id in 4.0 parent_resource_attribute: 'repository' base_url: 'projects/{{project}}/locations/{{location}}/repositories/{{name}}' example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' @@ -389,7 +389,7 @@ properties: type: NestedObject description: |- Policy condition for matching versions. 
- # TODO (jrsb): exactly_one_of: condition, mostRecentVersions + # TODO: exactly_one_of: condition, mostRecentVersions properties: - name: 'tagState' type: Enum @@ -435,7 +435,7 @@ properties: description: |- Policy condition for retaining a minimum number of versions. May only be specified with a Keep action. - # TODO (jrsb): exactly_one_of: condition, mostRecentVersions + # TODO: exactly_one_of: condition, mostRecentVersions properties: - name: 'packageNamePrefixes' type: Array diff --git a/mmv1/products/bigquery/Job.yaml b/mmv1/products/bigquery/Job.yaml index b389e34b97df..7f6cb7b330f1 100644 --- a/mmv1/products/bigquery/Job.yaml +++ b/mmv1/products/bigquery/Job.yaml @@ -239,13 +239,13 @@ properties: properties: - name: 'resourceUri' type: String - # TODO (mbang): exactly_one_of: resourceUri, inlineCode + # TODO: exactly_one_of: resourceUri, inlineCode description: 'A code resource to load from a Google Cloud Storage URI (gs://bucket/path).' - name: 'inlineCode' type: String - # TODO (mbang): exactly_one_of: resourceUri, inlineCode + # TODO: exactly_one_of: resourceUri, inlineCode description: | An inline resource that contains code for a user-defined function (UDF). Providing a inline code resource is equivalent to providing a URI for a file containing the same code. 
diff --git a/mmv1/products/bigquery/Table.yaml b/mmv1/products/bigquery/Table.yaml index 402ee2e93e1b..b7a0d5ba0271 100644 --- a/mmv1/products/bigquery/Table.yaml +++ b/mmv1/products/bigquery/Table.yaml @@ -45,7 +45,7 @@ examples: dataset_id: 'dataset_id' table_id: 'table_id' parameters: - # TODO(alexstephen): Remove once we have support for placing + # TODO: Remove once we have support for placing # nested object fields in URL - name: 'dataset' type: String diff --git a/mmv1/products/binaryauthorization/Attestor.yaml b/mmv1/products/binaryauthorization/Attestor.yaml index b5bc05f30bdb..d0862a41eb03 100644 --- a/mmv1/products/binaryauthorization/Attestor.yaml +++ b/mmv1/products/binaryauthorization/Attestor.yaml @@ -114,7 +114,7 @@ properties: default_from_api: true - name: 'asciiArmoredPgpPublicKey' type: String - # TODO (mbang): Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set + # TODO: Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set description: | ASCII-armored representation of a PGP public key, as the entire output by the command @@ -127,7 +127,7 @@ properties: be overwritten by the API-calculated ID. - name: 'pkixPublicKey' type: NestedObject - # TODO (mbang): Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set + # TODO: Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set description: | A raw PKIX SubjectPublicKeyInfo format public key. 
diff --git a/mmv1/products/compute/Address.yaml b/mmv1/products/compute/Address.yaml index 7107c7d747aa..f25c8cf09da9 100644 --- a/mmv1/products/compute/Address.yaml +++ b/mmv1/products/compute/Address.yaml @@ -80,7 +80,7 @@ examples: address_name: 'my-internal-address' # It is almost identical to internal_with_gce_endpoint exclude_docs: true - # TODO(rileykarson): Remove this example when instance is supported + # TODO: Remove this example when instance is supported - name: 'instance_with_ip' primary_resource_id: 'static' vars: diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index 63a4be3a624c..a31d894097f9 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -130,7 +130,7 @@ properties: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true - # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + # TODO Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeySelfLink' type: String description: | @@ -240,7 +240,7 @@ properties: description: | Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. - # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + # TODO Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeySelfLink' type: String description: | diff --git a/mmv1/products/compute/DiskType.yaml b/mmv1/products/compute/DiskType.yaml index a9dfe0d19ab9..964b7df6923a 100644 --- a/mmv1/products/compute/DiskType.yaml +++ b/mmv1/products/compute/DiskType.yaml @@ -14,7 +14,7 @@ --- name: 'DiskType' kind: 'compute#diskType' -# TODO(nelsonjr): Search all documentation for references of using URL (like +# TODO: Search all documentation for references of using URL (like # the description below) and replace with the proper reference to the # corresponding type. 
description: | @@ -22,7 +22,7 @@ description: | of disk to use, such as a pd-ssd, pd-balanced or pd-standard. To reference a disk type, use the disk type's full or partial URL. exclude: true -# TODO(nelsonjr): Temporarily make DiskType virtual so no tests gets +# TODO: Temporarily make DiskType virtual so no tests gets # triggered for create. Implement support for read only objects, and delete # the virtual tag # | readonly: true diff --git a/mmv1/products/compute/Firewall.yaml b/mmv1/products/compute/Firewall.yaml index 32c7b1ff2b0b..30ce8906f8e9 100644 --- a/mmv1/products/compute/Firewall.yaml +++ b/mmv1/products/compute/Firewall.yaml @@ -73,7 +73,7 @@ examples: project: 'PROJECT_NAME' parameters: properties: - # TODO(nelsonjr): [nice to have] Make the format here simpler to use, in + # TODO: [nice to have] Make the format here simpler to use, in # the form of # 22/tcp, [12345-23456]/tcp. It requires a conversion # function to the # final JSON format expected by the API for this # proposal to work. diff --git a/mmv1/products/compute/ForwardingRule.yaml b/mmv1/products/compute/ForwardingRule.yaml index d6f3cdb556bc..ceaab26c4bf4 100644 --- a/mmv1/products/compute/ForwardingRule.yaml +++ b/mmv1/products/compute/ForwardingRule.yaml @@ -413,7 +413,7 @@ properties: For Private Service Connect forwarding rules that forward traffic to Google APIs, a network must be provided. default_from_api: true - # TODO(nelsonjr): When implementing new types enable converting the + # TODO: When implementing new types enable converting the # manifest input from a single value to a range of form NN-NN. The API # accepts a single value, e.g. '80', but the API stores and returns # '80-80'. This causes idempotency false positive. 
diff --git a/mmv1/products/compute/GlobalForwardingRule.yaml b/mmv1/products/compute/GlobalForwardingRule.yaml index e3f4bd9c967b..dcf27c10af18 100644 --- a/mmv1/products/compute/GlobalForwardingRule.yaml +++ b/mmv1/products/compute/GlobalForwardingRule.yaml @@ -413,7 +413,7 @@ properties: For Private Service Connect forwarding rules that forward traffic to Google APIs, a network must be provided. default_from_api: true - # TODO(nelsonjr): When implementing new types enable converting the + # TODO: When implementing new types enable converting the # manifest input from a single value to a range of form NN-NN. The API # accepts a single value, e.g. '80', but the API stores and returns # '80-80'. This causes idempotency false positive. diff --git a/mmv1/products/compute/Image.yaml b/mmv1/products/compute/Image.yaml index 40b33a1081f2..6571ab51ff4b 100644 --- a/mmv1/products/compute/Image.yaml +++ b/mmv1/products/compute/Image.yaml @@ -106,7 +106,7 @@ properties: type: Integer description: | Size of the image when restored onto a persistent disk (in GB). - # TODO(alexstephen): Build family support. + # TODO: Build family support. # Families use a different API default_from_api: true - name: 'family' @@ -241,7 +241,7 @@ properties: This is provided by the client when the disk image is created. api_name: sha1Checksum diff_suppress_func: 'tpgresource.Base64DiffSuppress' - # TODO(alexstephen): Figure out cross-module ResourceRefs + # TODO: Figure out cross-module ResourceRefs - name: 'source' type: String description: | diff --git a/mmv1/products/compute/Instance.yaml b/mmv1/products/compute/Instance.yaml index fc8fd5e82cf6..9e3bcca3b078 100644 --- a/mmv1/products/compute/Instance.yaml +++ b/mmv1/products/compute/Instance.yaml @@ -247,7 +247,7 @@ properties: - 'SCSI' - 'NVME' # Ignoring kind - It's a constant and we don't need it. 
- # TODO(alexstephen): Place in licenses - it's a Array of + # TODO: Place in licenses - it's a Array of # ResourceRefs - name: 'mode' type: Enum @@ -298,7 +298,7 @@ properties: description: | The number of the guest accelerator cards exposed to this instance. - # TODO(alexstephen): Change to ResourceRef once AcceleratorType is + # TODO: Change to ResourceRef once AcceleratorType is # created. - name: 'acceleratorType' type: String @@ -334,7 +334,7 @@ properties: Labels to apply to this instance. A list of key->value pairs. update_url: 'projects/{{project}}/zones/{{zone}}/instances/{{name}}/setLabels' update_verb: 'POST' - # TODO(nelsonjr): Implement updating metadata *after* resource is created. + # TODO: Implement updating metadata *after* resource is created. # Expose instance 'metadata' as a simple name/value pair hash. However the API # defines metadata as a NestedObject with the following layout: @@ -365,7 +365,7 @@ properties: description: 'A reference to a machine type which defines VM kind.' update_url: 'projects/{{project}}/zones/{{zone}}/instances/{{name}}/setMachineType' update_verb: 'POST' - # TODO(alexstephen): Add metadata + # TODO: Add metadata custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'MachineType' imports: 'selfLink' @@ -670,7 +670,7 @@ properties: by the setTags method. Each tag within the list must comply with RFC1035. properties: - # TODO(alexstephen) Investigate bytes type + # TODO Investigate bytes type - name: 'fingerprint' type: String description: | diff --git a/mmv1/products/compute/InstanceGroupManager.yaml b/mmv1/products/compute/InstanceGroupManager.yaml index fe416170ccec..4720d32165c5 100644 --- a/mmv1/products/compute/InstanceGroupManager.yaml +++ b/mmv1/products/compute/InstanceGroupManager.yaml @@ -174,7 +174,7 @@ properties: The name of the managed instance group. The name must be 1-63 characters long, and comply with RFC1035. 
required: true - # TODO(nelsonjr): Make namedPorts a NameValue(name[string], port[integer]) + # TODO: Make namedPorts a NameValue(name[string], port[integer]) - name: 'namedPorts' type: Array description: diff --git a/mmv1/products/compute/RegionAutoscaler.yaml b/mmv1/products/compute/RegionAutoscaler.yaml index b475020433d7..fc6895c803b1 100644 --- a/mmv1/products/compute/RegionAutoscaler.yaml +++ b/mmv1/products/compute/RegionAutoscaler.yaml @@ -396,7 +396,7 @@ properties: A description of a scaling schedule. - name: 'target' type: String - # TODO(#303): resourceref once RegionIGM exists + # TODO: #303 resourceref once RegionIGM exists # resource: 'RegionInstanceGroupManager' # imports: 'selfLink' description: | diff --git a/mmv1/products/compute/RegionDisk.yaml b/mmv1/products/compute/RegionDisk.yaml index d68a4b4a6c82..d4d219944771 100644 --- a/mmv1/products/compute/RegionDisk.yaml +++ b/mmv1/products/compute/RegionDisk.yaml @@ -152,7 +152,7 @@ properties: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true - # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + # TODO Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeyName' type: String description: | @@ -170,7 +170,7 @@ properties: description: | Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. - # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + # TODO Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeyName' type: String description: | diff --git a/mmv1/products/compute/RegionInstanceGroupManager.yaml b/mmv1/products/compute/RegionInstanceGroupManager.yaml index 71b3e7cf6387..5037bc480c1c 100644 --- a/mmv1/products/compute/RegionInstanceGroupManager.yaml +++ b/mmv1/products/compute/RegionInstanceGroupManager.yaml @@ -179,7 +179,7 @@ properties: The name of the managed instance group. 
The name must be 1-63 characters long, and comply with RFC1035. required: true - # TODO(nelsonjr): Make namedPorts a NameValue(name[string], port[integer]) + # TODO: Make namedPorts a NameValue(name[string], port[integer]) - name: 'namedPorts' type: Array description: diff --git a/mmv1/products/compute/Router.yaml b/mmv1/products/compute/Router.yaml index 904a3edff560..846c992203d5 100644 --- a/mmv1/products/compute/Router.yaml +++ b/mmv1/products/compute/Router.yaml @@ -159,7 +159,7 @@ properties: This enum field has the one valid value: ALL_SUBNETS send_empty_value: true - # TODO(#324): enum? + # TODO: #324 enum? item_type: type: String - name: 'advertisedIpRanges' diff --git a/mmv1/products/compute/ServiceAttachment.yaml b/mmv1/products/compute/ServiceAttachment.yaml index 24c11fef7796..9ebe513041ed 100644 --- a/mmv1/products/compute/ServiceAttachment.yaml +++ b/mmv1/products/compute/ServiceAttachment.yaml @@ -230,13 +230,13 @@ properties: properties: - name: 'projectIdOrNum' type: String - # TODO (laurensknoll): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | A project that is allowed to connect to this service attachment. Only one of project_id_or_num and network_url may be set. - name: 'networkUrl' type: String - # TODO (laurensknoll): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | The network that is allowed to connect to this service attachment. Only one of project_id_or_num and network_url may be set. 
diff --git a/mmv1/products/compute/Snapshot.yaml b/mmv1/products/compute/Snapshot.yaml index 088b93bb4c86..4fafb7962670 100644 --- a/mmv1/products/compute/Snapshot.yaml +++ b/mmv1/products/compute/Snapshot.yaml @@ -136,7 +136,7 @@ parameters: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true - # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + # TODO Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeySelfLink' type: String description: | diff --git a/mmv1/products/dns/ManagedZone.yaml b/mmv1/products/dns/ManagedZone.yaml index f33cc1a52ad5..fc5960ab94cc 100644 --- a/mmv1/products/dns/ManagedZone.yaml +++ b/mmv1/products/dns/ManagedZone.yaml @@ -317,7 +317,7 @@ properties: item_type: type: NestedObject properties: - # TODO(drebes): Make 'networkUrl' a ResourceRef once cross-module references + # TODO: Make 'networkUrl' a ResourceRef once cross-module references # are possible. - name: 'networkUrl' type: String @@ -384,7 +384,7 @@ properties: description: 'The network with which to peer.' required: true properties: - # TODO(drebes): Make 'networkUrl' a ResourceRef once cross-module references + # TODO: Make 'networkUrl' a ResourceRef once cross-module references # are possible. - name: 'networkUrl' type: String diff --git a/mmv1/products/firestore/Index.yaml b/mmv1/products/firestore/Index.yaml index 438ebf4ec651..571e67499892 100644 --- a/mmv1/products/firestore/Index.yaml +++ b/mmv1/products/firestore/Index.yaml @@ -172,7 +172,7 @@ properties: Name of the field. - name: 'order' type: Enum - # TODO (mbang): Exactly one of order, arrayConfig, or vectorConfig must be set + # TODO: Exactly one of order, arrayConfig, or vectorConfig must be set description: | Indicates that this field supports ordering by the specified order or comparing using =, <, <=, >, >=. Only one of `order`, `arrayConfig`, and `vectorConfig` can be specified. 
@@ -181,7 +181,7 @@ properties: - 'DESCENDING' - name: 'arrayConfig' type: Enum - # TODO (mbang): Exactly one of order, arrayConfig, or vectorConfig must be set + # TODO: Exactly one of order, arrayConfig, or vectorConfig must be set description: | Indicates that this field supports operations on arrayValues. Only one of `order`, `arrayConfig`, and `vectorConfig` can be specified. @@ -189,7 +189,7 @@ properties: - 'CONTAINS' - name: 'vectorConfig' type: NestedObject - # TODO (mbang): Exactly one of order, arrayConfig, or vectorConfig must be set + # TODO: Exactly one of order, arrayConfig, or vectorConfig must be set description: | Indicates that this field supports vector search operations. Only one of `order`, `arrayConfig`, and `vectorConfig` can be specified. Vector Fields should come after the field path `__name__`. diff --git a/mmv1/products/gkebackup/BackupPlan.yaml b/mmv1/products/gkebackup/BackupPlan.yaml index 9910c9df2d31..6f5208e93e32 100644 --- a/mmv1/products/gkebackup/BackupPlan.yaml +++ b/mmv1/products/gkebackup/BackupPlan.yaml @@ -318,7 +318,7 @@ properties: function: 'verify.ValidateDuration()' - name: 'singleOccurrenceDate' type: NestedObject - # TODO (cmfeng): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | No recurrence. The exclusion window occurs only once and on this date in UTC. Only one of singleOccurrenceDate, daily and daysOfWeek may be set. @@ -337,14 +337,14 @@ properties: Day of a month. 
- name: 'daily' type: Boolean - # TODO (cmfeng): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | The exclusion window occurs every day if set to "True". Specifying this field to "False" is an error. Only one of singleOccurrenceDate, daily and daysOfWeek may be set. - name: 'daysOfWeek' type: NestedObject - # TODO (cmfeng): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | The exclusion window occurs on these days of each week in UTC. Only one of singleOccurrenceDate, daily and daysOfWeek may be set. diff --git a/mmv1/products/networksecurity/AddressGroup.yaml b/mmv1/products/networksecurity/AddressGroup.yaml index b6ef870b8faf..f1454643f4e1 100644 --- a/mmv1/products/networksecurity/AddressGroup.yaml +++ b/mmv1/products/networksecurity/AddressGroup.yaml @@ -15,7 +15,7 @@ name: 'AddressGroup' description: | AddressGroup is a resource that specifies how a collection of IP/DNS used in Firewall Policy. - # TODO(diogoesteves): change the url to GA once it is available. + # TODO: change the url to GA once it is available. references: guides: 'Use AddressGroups': 'https://cloud.google.com/vpc/docs/use-address-groups-firewall-policies' diff --git a/mmv1/products/osconfig/GuestPolicies.yaml b/mmv1/products/osconfig/GuestPolicies.yaml index e7dc14eeba65..bd37400aa888 100644 --- a/mmv1/products/osconfig/GuestPolicies.yaml +++ b/mmv1/products/osconfig/GuestPolicies.yaml @@ -254,7 +254,7 @@ properties: description: | An Apt Repository. 
min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'archiveType' type: Enum @@ -296,7 +296,7 @@ properties: description: | A Yum Repository. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'id' type: String @@ -328,7 +328,7 @@ properties: description: | A Zypper Repository. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'id' type: String @@ -360,7 +360,7 @@ properties: description: | A Goo Repository. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'name' type: String @@ -424,7 +424,7 @@ properties: description: | A generic remote artifact. min_version: 'beta' - # TODO (mbang): add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'uri' type: String @@ -443,7 +443,7 @@ properties: description: | A Google Cloud Storage artifact. 
min_version: 'beta' - # TODO (mbang): add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'bucket' type: String @@ -477,7 +477,7 @@ properties: description: | Copies a file onto the instance. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -514,7 +514,7 @@ properties: description: | Extracts an archive into the specified directory. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -546,7 +546,7 @@ properties: description: | Installs an MSI file. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -575,7 +575,7 @@ properties: description: | Installs a deb file via dpkg. 
min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -588,7 +588,7 @@ properties: description: | Installs an rpm file via the rpm utility. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -601,7 +601,7 @@ properties: description: | Executes an artifact or local file. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'args' type: Array @@ -621,19 +621,19 @@ properties: description: | The id of the relevant artifact in the recipe. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) - name: 'localPath' type: String description: | The absolute path of the file on the local filesystem. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) - name: 'scriptRun' type: NestedObject description: | Runs commands in a shell. 
min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'script' type: String @@ -672,7 +672,7 @@ properties: description: | Copies a file onto the instance. min_version: 'beta' - # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String diff --git a/mmv1/products/vertexai/Endpoint.yaml b/mmv1/products/vertexai/Endpoint.yaml index dd9f87c030c2..30046d585e5d 100644 --- a/mmv1/products/vertexai/Endpoint.yaml +++ b/mmv1/products/vertexai/Endpoint.yaml @@ -139,7 +139,7 @@ properties: training](https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types). For DeployedModel this field is optional, and the default value is `n1-standard-2`. For BatchPredictionJob or as part - of WorkerPoolSpec this field is required. TODO(rsurowka): + of WorkerPoolSpec this field is required. TODO: Try to better unify the required vs optional.' output: true - name: 'acceleratorType' diff --git a/mmv1/products/workstations/WorkstationCluster.yaml b/mmv1/products/workstations/WorkstationCluster.yaml index bc8f1a43429f..4cd7b9dbdefa 100644 --- a/mmv1/products/workstations/WorkstationCluster.yaml +++ b/mmv1/products/workstations/WorkstationCluster.yaml @@ -85,7 +85,7 @@ parameters: The location where the workstation cluster should reside. min_version: 'beta' url_param_only: true - # TODO(esu): Change to required, as it's not possible for this field to be omitted on the API side. + # TODO: Change to required, as it's not possible for this field to be omitted on the API side. 
immutable: true properties: - name: 'name' diff --git a/mmv1/products/workstations/WorkstationConfig.yaml b/mmv1/products/workstations/WorkstationConfig.yaml index 33371b537c7d..da390ecea1e5 100644 --- a/mmv1/products/workstations/WorkstationConfig.yaml +++ b/mmv1/products/workstations/WorkstationConfig.yaml @@ -340,7 +340,7 @@ properties: properties: - name: 'enableConfidentialCompute' type: Boolean - # TODO(esu): Change this to required in next breaking release. + # TODO: Change this to required in next breaking release. description: | Whether the instance has confidential compute enabled. min_version: 'beta' @@ -490,7 +490,7 @@ properties: description: | Name of the snapshot to use as the source for the disk. This can be the snapshot's `self_link`, `id`, or a string in the format of `projects/{project}/global/snapshots/{snapshot}`. If set, `sizeGb` and `fsType` must be empty. Can only be updated if it has an existing value. min_version: 'beta' - # TODO(esu): Add conflicting fields once complex lists are supported. + # TODO: Add conflicting fields once complex lists are supported. - name: 'ephemeralDirectories' type: Array description: | diff --git a/mmv1/provider/provider.go b/mmv1/provider/provider.go index 89f1fe550fc2..7da908c8aa5e 100644 --- a/mmv1/provider/provider.go +++ b/mmv1/provider/provider.go @@ -24,7 +24,7 @@ const RESOURCE_DIRECTORY_BETA = "google-beta" const RESOURCE_DIRECTORY_PRIVATE = "google-private" const RESOURCE_DIRECTORY_TGC = "pkg" -// # TODO(nelsonjr): Review all object interfaces and move to private methods +// # TODO: Review all object interfaces and move to private methods // # that should not be exposed outside the object hierarchy. 
func ProviderName(t Provider) string { return reflect.TypeOf(t).Name() diff --git a/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl b/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl index d0060704b8f8..4bb4d9d3a9a6 100644 --- a/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl +++ b/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl @@ -353,7 +353,7 @@ func TestAcc{{ $.ResourceName }}IamPolicyGenerated_withCondition(t *testing.T) { { Config: testAcc{{ $.ResourceName }}IamPolicy_withConditionGenerated(context), Check: resource.ComposeAggregateTestCheckFunc( - // TODO(SarahFrench) - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged + // TODO - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged // resource.TestCheckResourceAttr("data.google_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttr("{{ $.IamTerraformName }}_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttrWith("data.google_iam_policy.foo", "policy_data", tpgresource.CheckGoogleIamPolicy), diff --git a/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl b/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl index 3dd77710418a..7ecda8395307 100644 --- a/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl +++ b/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl @@ -10,10 +10,10 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -// TODO(https://github.com/GoogleCloudPlatform/magic-modules/issues/184): Handle fingerprint consistently +// TODO: https://github.com/GoogleCloudPlatform/magic-modules/issues/184 Handle fingerprint consistently obj["fingerprint"] = d.Get("fingerprint") -// TODO(https://github.com/GoogleCloudPlatform/magic-modules/issues/183): Can we generalize this +// TODO: https://github.com/GoogleCloudPlatform/magic-modules/issues/183 Can we generalize this // Send a null fields if customFeatures is empty. if v, ok := obj["customFeatures"]; ok && len(v.([]interface{})) == 0 { obj["customFeatures"] = nil diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index 510fea090811..9df43a962d27 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -385,7 +385,7 @@ func BootstrapKMSKeyWithPurposeInLocationAndName(t *testing.T, purpose, location t.Fatalf("Unable to bootstrap KMS key. CryptoKey is nil!") } - // TODO(b/372305432): Use the pagination properly. + // TODO: b/372305432 Use the pagination properly. 
ckvResp, err := kmsClient.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.List(keyName).Do() if err != nil { t.Fatalf("Unable to list cryptoKeyVersions: %v", err) diff --git a/mmv1/third_party/terraform/fwresource/framework_location.go b/mmv1/third_party/terraform/fwresource/framework_location.go index 565aa9089420..8e227a83524c 100644 --- a/mmv1/third_party/terraform/fwresource/framework_location.go +++ b/mmv1/third_party/terraform/fwresource/framework_location.go @@ -70,7 +70,7 @@ func (ld *LocationDescription) GetLocation() (types.String, error) { } func (ld *LocationDescription) GetRegion() (types.String, error) { - // TODO(SarahFrench): Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 + // TODO: Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 // For all checks in this function body // Region from resource config @@ -104,7 +104,7 @@ func (ld *LocationDescription) GetRegion() (types.String, error) { } func (ld *LocationDescription) GetZone() (types.String, error) { - // TODO(SarahFrench): Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 + // TODO: Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 // For all checks in this function body if !ld.ResourceZone.IsNull() && !ld.ResourceZone.IsUnknown() && !ld.ResourceZone.Equal(types.StringValue("")) { diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go index ecc9023c9e45..9ff69fb1f1ec 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go @@ -196,7 +196,7 @@ func TestAccBigtableInstance_kms(t *testing.T) { ImportStateVerify: true, 
ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back }, - // TODO(kevinsi4508): Verify that the instance can be recreated due to `kms_key_name` change. + // TODO: Verify that the instance can be recreated due to `kms_key_name` change. { Config: testAccBigtableInstance_kms(pid, instanceName, kms2.CryptoKey.Name, 3), PlanOnly: true, diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index a426976f3dae..66df55fb8b2c 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -682,7 +682,7 @@ func FlattenColumnFamily(families []bigtable.FamilyInfo) ([]map[string]interface return result, nil } -// TODO(rileykarson): Fix the stored import format after rebasing 3.0.0 +// TODO: Fix the stored import format after rebasing 3.0.0 func resourceBigtableTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go index 643d31d0224b..d392131b742d 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go @@ -116,7 +116,7 @@ func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
+ // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -150,7 +150,7 @@ func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -221,7 +221,7 @@ func TestAccComposerEnvironment_ComposerV2(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -258,7 +258,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -295,7 +295,7 @@ func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, @@ -329,7 +329,7 @@ func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -366,7 +366,7 @@ func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -403,7 +403,7 @@ func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -437,7 +437,7 @@ func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, ExpectNonEmptyPlan: false, @@ -471,7 +471,7 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, @@ -508,7 +508,7 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl index 3617979e9f3c..776dfac39de1 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl @@ -26,7 +26,7 @@ type jobTestField struct { gcp_attr interface{} } -// TODO (mbang): Test `ExactlyOneOf` here +// TODO: Test `ExactlyOneOf` here // func TestAccDataprocJob_failForMissingJobConfig(t *testing.T) { // t.Parallel() @@ -645,7 +645,7 @@ func matchError(attr, tf interface{}, gcp interface{}) string { return fmt.Sprintf("Cluster has mismatched %s.\nTF State: %+v\nGCP State: %+v", attr, tf, gcp) } -// TODO (mbang): Test `ExactlyOneOf` here +// TODO: Test `ExactlyOneOf` here // func testAccDataprocJob_missingJobConf() string { // return ` // resource "google_dataproc_job" "missing_config" { diff --git 
a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go index 0b0eb8bcde18..4697db07ef60 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go @@ -583,7 +583,7 @@ resource "google_eventarc_enrollment" "primary" { annotations = { updated_test_annotation = "updated-test-eventarc-annotation" } - # TODO(tommyreddad) As of time of writing, enrollments can't be updated + # TODO As of time of writing, enrollments can't be updated # if their pipeline has been deleted. So use this workaround until the # underlying issue in the Eventarc API is fixed. depends_on = [google_eventarc_pipeline.pipeline] diff --git a/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl index 78fb3b12229f..52227b380ecb 100644 --- a/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl +++ b/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl @@ -204,7 +204,7 @@ func newRuntimeconfigVariableFromResourceData(d *schema.ResourceData, project st text := d.Get("text") value := d.Get("value") - // TODO(selmanj) here we assume it's a simple name, not a full name. Should probably support full name as well + // TODO here we assume it's a simple name, not a full name. 
Should probably support full name as well parent = d.Get("parent").(string) name := d.Get("name").(string) diff --git a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go index 25017a6ffdf2..9b72d3e2f99e 100644 --- a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go @@ -311,7 +311,7 @@ func TestAccStorageBucket_iamPolicyGeneratedWithCondition(t *testing.T) { { Config: testAccStorageBucket_withConditionIamPolicy(context), Check: resource.ComposeAggregateTestCheckFunc( - // TODO(SarahFrench) - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged + // TODO - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged // resource.TestCheckResourceAttr("data.google_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttr("google_storage_bucket_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttrWith("data.google_iam_policy.foo", "policy_data", tpgresource.CheckGoogleIamPolicy), diff --git a/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go b/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go index befb1adf2177..98f1628a9881 100644 --- a/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go +++ b/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go @@ -309,7 +309,7 @@ func TestAccStorageManagedFolderIamPolicyGenerated_withCondition(t *testing.T) { { Config: testAccStorageManagedFolderIamPolicy_withConditionGenerated(context), Check: resource.ComposeAggregateTestCheckFunc( - // TODO(SarahFrench) - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged + // TODO - uncomment once 
https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged // resource.TestCheckResourceAttr("data.google_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttr("google_storage_managed_folder_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttrWith("data.google_iam_policy.foo", "policy_data", tpgresource.CheckGoogleIamPolicy), diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go index e23f5903bb0b..851ea03fba5e 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go @@ -337,7 +337,7 @@ func TestAccStorageObjectAcl_noOwner(t *testing.T) { t.Errorf("error writing file: %v", err) } - // TODO (mbang) we can leave this one using the SDK provider as we need to overwrite the configure function, + // TODO we can leave this one using the SDK provider as we need to overwrite the configure function, // which we can't do in the plugin-framework version of the provider. When this resource does get updated to // use plugin-framework, best I can guess we'll want to do something similar to NewFrameworkTestProvider where // we have a nested production version of the provider, we re-write configure to call the production version and diff --git a/mmv1/third_party/terraform/transport/error_retry_predicates.go b/mmv1/third_party/terraform/transport/error_retry_predicates.go index 801d63214fdd..842ec063c799 100644 --- a/mmv1/third_party/terraform/transport/error_retry_predicates.go +++ b/mmv1/third_party/terraform/transport/error_retry_predicates.go @@ -483,7 +483,7 @@ func PubsubTopicProjectNotReady(err error) (bool, string) { } // Retry on comon googleapi error codes for retryable errors. 
-// TODO(#5609): This may not need to be applied globally - figure out +// TODO: #5609 This may not need to be applied globally - figure out // what retryable error codes apply to which API. func isCommonRetryableErrorCode(err error) (bool, string) { gerr, ok := err.(*googleapi.Error) diff --git a/tpgtools/property.go b/tpgtools/property.go index 21b4341b7965..aca248781c04 100644 --- a/tpgtools/property.go +++ b/tpgtools/property.go @@ -603,7 +603,7 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher if v, ok := v.Extension["x-dcl-conflicts"].([]interface{}); ok { // NOTE: DCL not label x-dcl-conflicts for reused types - // TODO(shuya): handle nested field when b/213503595 got fixed + // TODO: handle nested field when b/213503595 got fixed if parent == nil { for _, ci := range v { diff --git a/tpgtools/sample.go b/tpgtools/sample.go index 4bf0c058c509..561963db0867 100644 --- a/tpgtools/sample.go +++ b/tpgtools/sample.go @@ -337,7 +337,7 @@ func (s *Sample) EnumerateWithUpdateSamples() []Sample { for i, update := range s.Updates { newSample := *s primaryResource := update.Resource - // TODO(magic-modules-eng): Consume new dependency list. + // TODO: Consume new dependency list. newSample.PrimaryResource = &primaryResource if !newSample.isNativeHCL() { var newDeps []Dependency diff --git a/tpgtools/templates/resource.go.tmpl b/tpgtools/templates/resource.go.tmpl index e508882710dc..ddced3024eea 100644 --- a/tpgtools/templates/resource.go.tmpl +++ b/tpgtools/templates/resource.go.tmpl @@ -264,7 +264,7 @@ func resource{{$.PathType}}Create(d *schema.ResourceData, meta interface{}) erro {{ end }} {{- if $.UseTerraformID }} -{{/* TODO(magic-modules-eng): When the DCL can correctly handle IDs for regional/global splits, all resources +{{/* TODO: When the DCL can correctly handle IDs for regional/global splits, all resources should be converted to use the DCL's ID method, so normalization can be uniform. 
*/}} id, err := {{ $.IDFunction }}(d, config, "{{$.ID}}") {{- else }} diff --git a/tpgtools/templates/serialization.go.tmpl b/tpgtools/templates/serialization.go.tmpl index c1c46b76f030..f903d1d8ff2c 100644 --- a/tpgtools/templates/serialization.go.tmpl +++ b/tpgtools/templates/serialization.go.tmpl @@ -172,7 +172,7 @@ func {{ $res.TitleCaseFullName }}{{$version.SerializationSuffix}}AsHCL(r {{$res. {{- end }} } {{- else if eq $field.Type.String "TypeMap" }} - {{- /* TODO(magic-modules-eng): Implement maps with non-string values */}} + {{- /* TODO: Implement maps with non-string values */}} outputConfig += "{{ if not $field.Collapsed }}\t{{end}}{{$field.Name}} = {" keys{{$field.PackageName}} := []string{} // golang range goes over maps in an arbitrary order- we've gotta order the @@ -238,7 +238,7 @@ func convert{{$res.TitleCaseFullName}}{{$version.SerializationSuffix}}{{$v.Packa {{- end }} } {{- else if eq $field.Type.String "TypeMap" }} - {{- /* TODO(magic-modules-eng): Implement maps with non-string values */}} + {{- /* TODO: Implement maps with non-string values */}} outputConfig += "{{ if not $field.Collapsed }}\t{{end}}{{$field.Name}} = {" keys{{$field.PackageName}} := []string{} // golang range goes over maps in an arbitrary order- we've gotta order the From eb520d3b16cb0154850bd9320957c88e41407f43 Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Thu, 24 Jul 2025 16:10:50 -0700 Subject: [PATCH 605/884] `teamcity`: add release_tests subproject (#14628) --- .../builds/build_configuration_per_package.kt | 14 ++-- .../builds/build_configuration_sweepers.kt | 5 +- .../build_configuration_vcr_recording.kt | 3 +- .../components/builds/build_parameters.kt | 5 +- .../.teamcity/components/constants.kt | 4 +- .../projects/google_beta_subproject.kt | 4 ++ .../projects/reused/nightly_tests.kt | 3 + .../projects/reused/weekly_diff_tests.kt | 72 +++++++++++++++++++ .../terraform/.teamcity/tests/test_utils.kt | 1 + 
.../.teamcity/tests/weekly_diff_project.kt | 50 +++++++++++++ 10 files changed, 148 insertions(+), 13 deletions(-) create mode 100644 mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt create mode 100644 mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt index 703181eb9e90..f652b2dabcc9 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt @@ -20,7 +20,7 @@ import replaceCharsId // BuildConfigurationsForPackages accepts a map containing details of multiple packages in a provider and returns a list of build configurations for them all. // Intended to be used in projects where we're testing all packages, e.g. 
the nightly test projects -fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc"): List { +fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): List { val list = ArrayList() // Create build configurations for all packages, except sweeper @@ -28,7 +28,7 @@ fun BuildConfigurationsForPackages(packages: Map>, p val path: String = info.getValue("path").toString() val displayName: String = info.getValue("displayName").toString() - val pkg = PackageDetails(packageName, displayName, providerName, parentProjectName) + val pkg = PackageDetails(packageName, displayName, providerName, parentProjectName, releaseDiffTest) val buildConfig = pkg.buildConfiguration(path, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix) list.add(buildConfig) } @@ -38,12 +38,12 @@ fun BuildConfigurationsForPackages(packages: Map>, p // BuildConfigurationForSinglePackage accepts details of a single package in a provider and returns a build configuration for it // Intended to be used in short-lived projects where we're testing specific packages, e.g. 
feature branch testing -fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc"): BuildType{ - val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName) +fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): BuildType{ + val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName, releaseDiffTest) return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix) } -class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String) { +class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String, private val releaseDiffTest: String) { // buildConfiguration returns a BuildType for a service package // For BuildType docs, see https://teamcity.jetbrains.com/app/dsl-documentation/root/build-type/index.html @@ -91,7 +91,7 @@ class PackageDetails(private val packageName: String, private val displayName: S params { configureGoogleSpecificTestParameters(environmentVariables) - acceptanceTestBuildParams(parallelism, testPrefix, testTimeout) + acceptanceTestBuildParams(parallelism, testPrefix, testTimeout, releaseDiffTest) terraformLoggingParameters(environmentVariables, providerName) terraformCoreBinaryTesting() terraformShouldPanicForSchemaErrors() @@ -124,4 +124,4 @@ class PackageDetails(private val 
packageName: String, private val displayName: S var id = "%s_%s_PACKAGE_%s".format(this.parentProjectName, this.providerName, this.packageName) return replaceCharsId(id) } -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt index 275cb9fb86b5..0c93a9183c94 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt @@ -61,6 +61,7 @@ class SweeperDetails(private val sweeperName: String, private val parentProjectN // These hardcoded values affect the sweeper CLI command's behaviour val testPrefix = "TestAcc" val testTimeout = "12" + val releaseDiffTest = "false" return BuildType { @@ -97,7 +98,7 @@ class SweeperDetails(private val sweeperName: String, private val parentProjectN params { configureGoogleSpecificTestParameters(environmentVariables) - acceptanceTestBuildParams(parallelism, testPrefix, testTimeout) + acceptanceTestBuildParams(parallelism, testPrefix, testTimeout, releaseDiffTest) sweeperParameters(sweeperRegions, sweeperRun) terraformLoggingParameters(environmentVariables, providerName) terraformCoreBinaryTesting() @@ -131,4 +132,4 @@ class SweeperDetails(private val sweeperName: String, private val parentProjectN var id = "%s_%s".format(this.parentProjectName, this.sweeperName) return replaceCharsId(id) } -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt index 92eef200790c..b1531bfefd28 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt +++ 
b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt @@ -27,6 +27,7 @@ class VcrDetails(private val providerName: String, private val buildId: String, val testTimeout = "12" val parallelism = DefaultParallelism val buildTimeout: Int = DefaultBuildTimeoutDuration + val releaseDiffTest = "false" // Path is just ./google(-beta) here, whereas nightly test builds use paths like ./google/something/specific // This helps VCR testing builds to run tests across multiple packages @@ -70,7 +71,7 @@ class VcrDetails(private val providerName: String, private val buildId: String, params { configureGoogleSpecificTestParameters(environmentVariables) vcrEnvironmentVariables(environmentVariables, providerName) - acceptanceTestBuildParams(parallelism, testPrefix, testTimeout) + acceptanceTestBuildParams(parallelism, testPrefix, testTimeout, releaseDiffTest) terraformLoggingParameters(environmentVariables, providerName) terraformCoreBinaryTesting() terraformShouldPanicForSchemaErrors() diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt index 4acd6411e05d..4a2af9b93e15 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt @@ -205,11 +205,12 @@ fun ParametrizedWithType.configureGoogleSpecificTestParameters(config: AccTestCo // ParametrizedWithType.acceptanceTestBuildParams sets build params that affect how commands to run // acceptance tests are templated -fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: String, timeout: String) { +fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: String, timeout: String, releaseDiffTest: String) { hiddenVariable("env.TF_ACC", "1", "Set to a value to run the Acceptance Tests") text("PARALLELISM", "%d".format(parallelism)) 
text("TEST_PREFIX", prefix) text("TIMEOUT", timeout) + text("RELEASE_DIFF", "true") } // ParametrizedWithType.sweeperParameters sets build parameters that affect how sweepers are run @@ -301,4 +302,4 @@ fun ParametrizedWithType.hiddenVariable(name: String, value: String, description fun ParametrizedWithType.hiddenPasswordVariable(name: String, value: String, description: String) { password(name, value, "", description, ParameterDisplay.HIDDEN) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/constants.kt b/mmv1/third_party/terraform/.teamcity/components/constants.kt index c2025eebab99..1323bb41f9ab 100644 --- a/mmv1/third_party/terraform/.teamcity/components/constants.kt +++ b/mmv1/third_party/terraform/.teamcity/components/constants.kt @@ -8,6 +8,7 @@ // Provider name that matches the name in the Registry const val ProviderNameGa = "google" const val ProviderNameBeta = "google-beta" +const val ProviderNameBetaDiffTest = "google-beta-diff-test" // specifies the default hour (UTC) at which tests should be triggered, if enabled const val DefaultStartHour = 4 @@ -42,10 +43,11 @@ const val ServiceSweeperCronName = "$ServiceSweeperName - Cron" const val ServiceSweeperManualName = "$ServiceSweeperName - Manual" const val ProjectSweeperName = "Project Sweeper" const val NightlyTestsProjectId = "NightlyTests" +const val WeeklyDiffTestsProjectId = "WeeklyDiffTests" const val MMUpstreamProjectId = "MMUpstreamTests" const val VcrRecordingProjectId = "VCRRecording" // Artifact rules controls which artifacts are uploaded to S3 // https://www.jetbrains.com/help/teamcity/2024.07/configuring-general-settings.html#Artifact+Paths // The value below lacks a file extension, to allow upload of individual .txt files or a single .tar.gz file -const val ArtifactRules = "%teamcity.build.checkoutDir%/debug*" +const val ArtifactRules = "%teamcity.build.checkoutDir%/debug*" \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index a80aadd6d889..75eabec8a70c 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -12,6 +12,7 @@ import builds.* import jetbrains.buildServer.configs.kotlin.Project import projects.reused.mmUpstream import projects.reused.nightlyTests +import projects.reused.weeklyDiffTests import projects.reused.vcrRecording import replaceCharsId import vcs_roots.HashiCorpVCSRootBeta @@ -41,6 +42,9 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { // This is only present for the Beta provider, as only TPGB VCR recordings are used. subProject(vcrRecording(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, ModularMagicianVCSRootBeta, vcrConfig)) + // Beta Diff Test project that uses hashicorp/terraform-provider-google-beta-diff-test + subProject(weeklyDiffTests(betaId + "_DIFF_TEST", ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek = "SAT", nightlyTestsEnabled = false))) + params { readOnlySettings() } diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt index 90fa2d49947d..b742a8c1899e 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt @@ -10,6 +10,7 @@ package projects.reused import NightlyTestsProjectId import ProviderNameBeta import ProviderNameGa +import ProviderNameBetaDiffTest import ServiceSweeperName import SharedResourceNameBeta import SharedResourceNameGa @@ -32,6 +33,7 @@ fun nightlyTests(parentProject:String, providerName: String, vcsRoot: GitVcsRoot 
when(providerName) { ProviderNameGa -> sharedResources = arrayListOf(SharedResourceNameGa) ProviderNameBeta -> sharedResources = arrayListOf(SharedResourceNameBeta) + ProviderNameBetaDiffTest -> sharedResources = arrayListOf(SharedResourceNameBeta) else -> throw Exception("Provider name not supplied when generating a nightly test subproject") } @@ -48,6 +50,7 @@ fun nightlyTests(parentProject:String, providerName: String, vcsRoot: GitVcsRoot when(providerName) { ProviderNameGa -> sweepersList = SweepersListGa ProviderNameBeta -> sweepersList = SweepersListBeta + ProviderNameBetaDiffTest -> sweepersList = SweepersListBeta else -> throw Exception("Provider name not supplied when generating a nightly test subproject") } val serviceSweeperConfig = BuildConfigurationForServiceSweeper(providerName, ServiceSweeperName, sweepersList, projectId, vcsRoot, sharedResources, config) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt new file mode 100644 index 000000000000..ab3119eeb340 --- /dev/null +++ b/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt @@ -0,0 +1,72 @@ +/* + * Copyright (c) HashiCorp, Inc. + * SPDX-License-Identifier: MPL-2.0 + */ + +// This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. Any changes to this file in the downstream will be overwritten. 
+ +package projects.reused + +import NightlyTestsProjectId +import ProviderNameGa +import ProviderNameBeta +import ServiceSweeperName +import SharedResourceNameBeta +import SharedResourceNameGa +import builds.* +import generated.SweepersListBeta +import generated.SweepersListGa +import jetbrains.buildServer.configs.kotlin.Project +import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot +import replaceCharsId + +fun weeklyDiffTests(parentProject:String, providerName: String, vcsRoot: GitVcsRoot, config: AccTestConfiguration, cron: NightlyTriggerConfiguration): Project { + + var projectId = "${parentProject}_${NightlyTestsProjectId}" + projectId = replaceCharsId(projectId) + + // Weekly diff test projects run all acceptance tests on a weekly schedule + // Here we ensure the project uses the appropriate Shared Resource to ensure no clashes between builds and/or sweepers + var sharedResources: ArrayList + when(providerName) { + ProviderNameGa -> sharedResources = arrayListOf(SharedResourceNameGa) + ProviderNameBeta -> sharedResources = arrayListOf(SharedResourceNameBeta) + else -> throw Exception("Provider name not supplied when generating a weekly diff test subproject") + } + + // Create build configs to run acceptance tests for each package defined in packages.kt and services.kt files + // and add cron trigger to them all + val allPackages = getAllPackageInProviderVersion(providerName) + val packageBuildConfigs = BuildConfigurationsForPackages(allPackages, providerName, projectId, vcsRoot, sharedResources, config, releaseDiffTest = "true") + packageBuildConfigs.forEach { buildConfiguration -> + buildConfiguration.addTrigger(cron) + } + + // Create build config for sweeping the weekly diff test project + var sweepersList: Map> + when(providerName) { + ProviderNameGa -> sweepersList = SweepersListGa + ProviderNameBeta -> sweepersList = SweepersListBeta + else -> throw Exception("Provider name not supplied when generating a weekly diff test subproject") + } + val serviceSweeperConfig =
BuildConfigurationForServiceSweeper(providerName, ServiceSweeperName, sweepersList, projectId, vcsRoot, sharedResources, config) + val sweeperCron = cron.clone() + sweeperCron.startHour += 5 // Ensure triggered after the package test builds are triggered + serviceSweeperConfig.addTrigger(sweeperCron) + + return Project { + id(projectId) + name = "Weekly Diff Tests" + description = "A project connected to the hashicorp/terraform-provider-${providerName} repository, where scheduled weekly diff tests run and users can trigger ad-hoc builds" + + // Register build configs in the project + packageBuildConfigs.forEach { buildConfiguration -> + buildType(buildConfiguration) + } + buildType(serviceSweeperConfig) + + params{ + configureGoogleSpecificTestParameters(config) + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt b/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt index e2e481679b89..02d35308545d 100644 --- a/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt +++ b/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt @@ -15,6 +15,7 @@ import org.junit.Assert.fail const val gaProjectName = "Google" const val betaProjectName = "Google Beta" const val nightlyTestsProjectName = "Nightly Tests" +const val weeklyDiffTestsProjectName = "Weekly Diff Tests" const val mmUpstreamProjectName = "Upstream MM Testing" const val projectSweeperProjectName = "Project Sweeper" diff --git a/mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt b/mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt new file mode 100644 index 000000000000..0d411ea5352a --- /dev/null +++ b/mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt @@ -0,0 +1,50 @@ +/* + * Copyright (c) HashiCorp, Inc. + * SPDX-License-Identifier: MPL-2.0 + */ + +// This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. 
Any changes to this file in the downstream will be overwritten. + +package tests + +import jetbrains.buildServer.configs.kotlin.triggers.ScheduleTrigger +import org.junit.Assert.assertTrue +import org.junit.Test +import projects.googleCloudRootProject + +class WeeklyDiffTestProjectsTests { + @Test + fun allBuildsShouldHaveTrigger() { + val root = googleCloudRootProject(testContextParameters()) + + // Find GA weekly diff test project + // var gaNightlyTestProject = getNestedProjectFromRoot(root, gaProjectName, weeklyDiffTestsProjectName) + + // Find Beta weekly diff test project + var betaWeeklyDiffTestProject = getNestedProjectFromRoot(root, betaProjectName, weeklyDiffTestsProjectName) + + // Make assertions about builds in the Beta weekly diff test project + (betaWeeklyDiffTestProject.buildTypes).forEach{bt -> + assertTrue("Build configuration `${bt.name}` should contain at least one trigger", bt.triggers.items.isNotEmpty()) + // Look for at least one CRON trigger + var found: Boolean = false + lateinit var schedulingTrigger: ScheduleTrigger + for (item in bt.triggers.items){ + if (item.type == "schedulingTrigger") { + schedulingTrigger = item as ScheduleTrigger + found = true + break + } + } + + assertTrue("Build configuration `${bt.name}` should contain a CRON/'schedulingTrigger' trigger", found) + + // Check that weekly diff test is being run on the nightly-test branch + var isNightlyTestBranch: Boolean = false + if (schedulingTrigger.branchFilter == "+:refs/heads/nightly-test"){ + isNightlyTestBranch = true + } + assertTrue("Build configuration `${bt.name}` is using the nightly-test branch filter;", isNightlyTestBranch) + } + } +} From 9570a4cbc49edb81eb399f317a297129154b9757 Mon Sep 17 00:00:00 2001 From: jacek-izykowski Date: Fri, 25 Jul 2025 01:17:12 +0200 Subject: [PATCH 606/884] Cloud Composer - handle "empty changes" to recovery_config (#14604) --- .../services/composer/resource_composer_environment.go.tmpl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl b/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl index d37923fa8bb2..bbdbcb9e3368 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl @@ -1528,7 +1528,9 @@ func resourceComposerEnvironmentUpdate(d *schema.ResourceData, meta interface{}) patchObj.Config.RecoveryConfig = config.RecoveryConfig } err = resourceComposerEnvironmentPatchField("config.RecoveryConfig.ScheduledSnapshotsConfig", userAgent, patchObj, d, tfConfig) - if err != nil { + // Empty ScheduledSnapshotsConfig and config with scheduled snapshots explicitly disabled (and nothing else configured) represent in fact the same configuration. + // If applying a change fails specifically because it does not bring any actual modification, this error should be silently ignored. 
+ if err != nil && !strings.Contains(err.Error(), "No change in configuration."){ return err } } @@ -3140,4 +3142,4 @@ func gscBucketNameDiffSuppress(_, old, new string, _ *schema.ResourceData) bool return true } return false -} \ No newline at end of file +} From 5ceb67aa5ab21a19772580405a837d64ee224a58 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 24 Jul 2025 16:53:54 -0700 Subject: [PATCH 607/884] Make space optional in reassign-reviewer regex (#14634) --- .github/workflows/reassign-reviewer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reassign-reviewer.yml b/.github/workflows/reassign-reviewer.yml index 835c5f8e7802..e27cd2148200 100644 --- a/.github/workflows/reassign-reviewer.yml +++ b/.github/workflows/reassign-reviewer.yml @@ -22,7 +22,7 @@ jobs: uses: actions-ecosystem/action-regex-match@d50fd2e7a37d0e617aea3d7ada663bd56862b9cc # v2.0.2 with: text: ${{ github.event.comment.body }} - regex: '.*@modular-magician (re)?assign[- ]review(er)? @?([a-zA-Z0-9-_]*).*' + regex: '.*@modular-magician (re)?assign[- ]review(er)? 
?@?([a-zA-Z0-9-_]*).*' - name: Checkout Repository if: steps.read-comment.outputs.match != '' uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 From 6ed57e7115dfc47942f40db3324599e177d9844b Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Fri, 25 Jul 2025 12:55:06 +0000 Subject: [PATCH 608/884] fix: (storage) remove client side bucket name validation as same validations being performed on API (#14582) --- mmv1/products/storage/Bucket.yaml | 2 - .../storage/resource_storage_bucket.go.tmpl | 2 - .../resource_storage_bucket_600_migration.go | 20 ++++---- .../terraform/verify/validation.go | 48 ------------------- .../terraform/verify/validation_test.go | 40 ---------------- .../docs/r/storage_bucket.html.markdown | 2 +- 6 files changed, 9 insertions(+), 105 deletions(-) diff --git a/mmv1/products/storage/Bucket.yaml b/mmv1/products/storage/Bucket.yaml index f4bef03a27bc..d7496ddbfa44 100644 --- a/mmv1/products/storage/Bucket.yaml +++ b/mmv1/products/storage/Bucket.yaml @@ -392,8 +392,6 @@ properties: - name: 'name' type: String description: 'The name of the bucket' - validation: - function: 'verify.ValidateGCSName' - name: 'owner' type: NestedObject description: | diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index 761a06ffb0ad..796af9a55efd 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -15,7 +15,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-provider-google/google/verify" "github.com/gammazero/workerpool" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -72,7 +71,6 @@ func ResourceStorageBucket() *schema.Resource { Required: true, 
ForceNew: true, Description: `The name of the bucket.`, - ValidateFunc: verify.ValidateGCSName, }, "encryption": { diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index 39ff367d6f56..3f953333d804 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -8,8 +8,6 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - - "github.com/hashicorp/terraform-provider-google/google/verify" ) func resourceStorageBucketV1() *schema.Resource { @@ -24,11 +22,10 @@ func resourceStorageBucketV1() *schema.Resource { Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - ValidateFunc: verify.ValidateGCSName, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, }, "encryption": { @@ -544,11 +541,10 @@ func resourceStorageBucketV2() *schema.Resource { }, Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - ValidateFunc: verify.ValidateGCSName, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, }, "encryption": { diff --git a/mmv1/third_party/terraform/verify/validation.go b/mmv1/third_party/terraform/verify/validation.go index 25c0d280d4c9..5f4393137f69 100644 --- a/mmv1/third_party/terraform/verify/validation.go +++ b/mmv1/third_party/terraform/verify/validation.go @@ -74,16 +74,6 @@ var ( Rfc6996Asn32BitMin = int64(4200000000) Rfc6996Asn32BitMax = int64(4294967294) GcpRouterPartnerAsn = int64(16550) - - // Format of GCS Bucket Name - // 
https://cloud.google.com/storage/docs/naming-buckets - GCSNameValidChars = "^[a-z0-9_.-]*$" - GCSNameStartEndChars = "^[a-z|0-9].*[a-z|0-9]$" - GCSNameLength = "^.{3,222}" - GCSNameLengthSplit = "^.{1,63}$" - GCSNameCidr = "^[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$" - GCSNameGoogPrefix = "^goog.*$" - GCSNameContainsGoogle = "^.*google.*$" ) var Rfc1918Networks = []string{ @@ -99,44 +89,6 @@ func ValidateGCEName(v interface{}, k string) (ws []string, errors []error) { return ValidateRegexp(re)(v, k) } -// validateGCSName ensures the name of a gcs bucket matches the requirements for GCS Buckets -// https://cloud.google.com/storage/docs/naming-buckets -func ValidateGCSName(v interface{}, k string) (ws []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(GCSNameValidChars).MatchString(value) { - errors = append(errors, fmt.Errorf("%q name value can only contain lowercase letters, numeric characters, dashes (-), underscores (_), and dots (.)", value)) - } - - if !regexp.MustCompile(GCSNameStartEndChars).MatchString(value) { - errors = append(errors, fmt.Errorf("%q name value must start and end with a number or letter", value)) - } - - if !regexp.MustCompile(GCSNameLength).MatchString(value) { - errors = append(errors, fmt.Errorf("%q name value must contain 3-63 characters. Names containing dots can contain up to 222 characters, but each dot-separated component can be no longer than 63 characters", value)) - } - - for _, str := range strings.Split(value, ".") { - if !regexp.MustCompile(GCSNameLengthSplit).MatchString(str) { - errors = append(errors, fmt.Errorf("%q name value must contain 3-63 characters. 
Names containing dots can contain up to 222 characters, but each dot-separated component can be no longer than 63 characters", value)) - } - } - - if regexp.MustCompile(GCSNameCidr).MatchString(value) { - errors = append(errors, fmt.Errorf("%q name value cannot be represented as an IP address in dotted-decimal notation (for example, 192.168.5.4)", value)) - } - - if regexp.MustCompile(GCSNameGoogPrefix).MatchString(value) { - errors = append(errors, fmt.Errorf("%q name value cannot begin with the \"goog\" prefix", value)) - } - - if regexp.MustCompile(GCSNameContainsGoogle).MatchString(strings.ReplaceAll(value, "0", "o")) { - errors = append(errors, fmt.Errorf("%q name value cannot contain \"google\" or close misspellings, such as \"g00gle\"", value)) - } - - return -} - // Ensure that the BGP ASN value of Cloud Router is a valid value as per RFC6996 or a value of 16550 func ValidateRFC6996Asn(v interface{}, k string) (ws []string, errors []error) { value := int64(v.(int)) diff --git a/mmv1/third_party/terraform/verify/validation_test.go b/mmv1/third_party/terraform/verify/validation_test.go index 19555861fb64..3de72257ea74 100644 --- a/mmv1/third_party/terraform/verify/validation_test.go +++ b/mmv1/third_party/terraform/verify/validation_test.go @@ -323,43 +323,3 @@ func TestValidateIAMCustomRoleIDRegex(t *testing.T) { t.Errorf("Failed to validate IAMCustomRole IDs: %v", es) } } - -func TestValidateGCSName(t *testing.T) { - x := []StringValidationTestCase{ - // No errors - {TestName: "basic", Value: "foobar"}, - {TestName: "has number", Value: "foobar1"}, - {TestName: "all numbers", Value: "12345"}, - {TestName: "all _", Value: "foo_bar_baz"}, - {TestName: "all -", Value: "foo-bar-baz"}, - {TestName: "begins with number", Value: "1foo-bar_baz"}, - {TestName: "ends with number", Value: "foo-bar_baz1"}, - {TestName: "almost an ip", Value: "192.168.5.foo"}, - {TestName: "has _", Value: "foo-bar_baz"}, - {TestName: "--", Value: "foo--bar"}, - {TestName: "__", Value: 
"foo__bar"}, - {TestName: "-goog", Value: "foo-goog"}, - {TestName: ".goog", Value: "foo.goog"}, - - // With errors - {TestName: "invalid char $", Value: "foo$bar", ExpectError: true}, - {TestName: "has uppercase", Value: "fooBar", ExpectError: true}, - {TestName: "begins with -", Value: "-foobar", ExpectError: true}, - {TestName: "ends with -", Value: "foobar-", ExpectError: true}, - {TestName: "begins with _", Value: "_foobar", ExpectError: true}, - {TestName: "ends with _", Value: "foobar_", ExpectError: true}, - {TestName: "less than 3 chars", Value: "fo", ExpectError: true}, - {TestName: "..", Value: "foo..bar", ExpectError: true}, - {TestName: "greater than 63 chars with no .", Value: "my-really-long-bucket-name-with-invalid-that-does-not-contain-a-period", ExpectError: true}, - {TestName: "greater than 63 chars between .", Value: "my.really-long-bucket-name-with-invalid-that-does-contain-a-period-but.is-too-long", ExpectError: true}, - {TestName: "has goog prefix", Value: "goog-foobar", ExpectError: true}, - {TestName: "almost an ip", Value: "192.168.5.1", ExpectError: true}, - {TestName: "contains google", Value: "foobar-google", ExpectError: true}, - {TestName: "contains close misspelling of google", Value: "foo-go0gle-bar", ExpectError: true}, - } - - es := TestStringValidationCases(x, ValidateGCSName) - if len(es) > 0 { - t.Errorf("Failed to validate GCS names: %v", es) - } -} diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index 9450a3ea945d..88a1f361665c 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -121,7 +121,7 @@ resource "google_storage_bucket" "hns-enabled" { The following arguments are supported: -* `name` - (Required) The name of the bucket. +* `name` - (Required) The name of the bucket. 
Bucket names must be in lowercase and no more than 63 characters long. You can find the complete list of bucket naming rules [here](https://cloud.google.com/storage/docs/buckets#naming). * `location` - (Required) The [GCS location](https://cloud.google.com/storage/docs/bucket-locations). From 70b2a26153f4d8444f33988863dcb53579f1b7cb Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 25 Jul 2025 17:32:10 +0200 Subject: [PATCH 609/884] new-resource: `google_apigee_security_action` (#14317) --- mmv1/products/apigee/SecurityAction.yaml | 246 +++++++ .../apigee_security_action_basic.tf.tmpl | 70 ++ .../resource_apigee_security_action_test.go | 611 ++++++++++++++++++ 3 files changed, 927 insertions(+) create mode 100644 mmv1/products/apigee/SecurityAction.yaml create mode 100644 mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go diff --git a/mmv1/products/apigee/SecurityAction.yaml b/mmv1/products/apigee/SecurityAction.yaml new file mode 100644 index 000000000000..c4bb992c1713 --- /dev/null +++ b/mmv1/products/apigee/SecurityAction.yaml @@ -0,0 +1,246 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'SecurityAction' +description: | + A SecurityAction is rule that can be enforced at an environment level. 
+ The result is one of: - A denied API call - An explicitly allowed API call + - A flagged API call (HTTP headers added before the target receives it) + At least one condition is required to create a SecurityAction. +references: + guides: + 'Creating security actions': 'https://cloud.google.com/apigee/docs/api-security/security-actions-api#create-security-actions' + api: 'https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.environments.securityActions/create' +docs: +base_url: 'organizations/{{org_id}}/environments/{{env_id}}/securityActions' +self_link: 'organizations/{{org_id}}/environments/{{env_id}}/securityActions/{{security_action_id}}' +create_url: 'organizations/{{org_id}}/environments/{{env_id}}/securityActions?securityActionId={{security_action_id}}' +immutable: true +import_format: + - 'organizations/{{org_id}}/environments/{{env_id}}/securityActions/{{security_action_id}}' +examples: + - name: 'apigee_security_action_basic' + vars: + network_name: 'my-network' + global_address_name: 'my-address' + environment_name: 'my-environment' + security_action_id: 'my-security-action' + primary_resource_id: 'apigee_security_action' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + exclude_test: true +parameters: + - name: 'orgId' + type: String + description: | + The organization that this security action applies to. + url_param_only: true + required: true + immutable: true + - name: 'envId' + type: String + description: | + The Apigee environment that this security action applies to. + required: true + immutable: true + url_param_only: true + - name: 'securityActionId' + type: String + description: | + The ID to use for the SecurityAction, which will become the final component of the action's resource name. + This value should be 0-61 characters, and valid format is (^a-z?$). 
+ required: true + immutable: true + url_param_only: true +properties: + - name: 'description' + type: String + description: | + An optional user provided description of the SecurityAction. + - name: 'state' + type: Enum + description: | + Only an ENABLED SecurityAction is enforced. An ENABLED SecurityAction past its expiration time will not be enforced. + required: true + enum_values: + - 'ENABLED' + - 'DISABLED' + - name: 'createTime' + type: String + description: | + The create time for this SecurityAction. + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. + Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". + output: true + - name: 'updateTime' + type: String + description: | + The update time for this SecurityAction. This reflects when this SecurityAction changed states. + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. + Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". + output: true + - name: 'apiProxies' + type: Array + description: | + If unset, this would apply to all proxies in the environment. + If set, this action is enforced only if at least one proxy in the repeated + list is deployed at the time of enforcement. If set, several restrictions are enforced on SecurityActions. + There can be at most 100 enabled actions with proxies set in an env. + Several other restrictions apply on conditions and are detailed later. + item_type: + type: String + - name: 'conditionConfig' + type: NestedObject + required: true + description: | + A valid SecurityAction must contain at least one condition. + properties: + - name: 'ipAddressRanges' + type: Array + description: | + A list of IP addresses. This could be either IPv4 or IPv6. Limited to 100 per action. 
+ item_type: + type: String + - name: 'botReasons' + type: Array + description: | + A list of Bot Reasons. Current options: Flooder, Brute Guessor, Static Content Scraper, + OAuth Abuser, Robot Abuser, TorListRule, Advanced Anomaly Detection, Advanced API Scraper, + Search Engine Crawlers, Public Clouds, Public Cloud AWS, Public Cloud Azure, and Public Cloud Google. + item_type: + type: String + - name: 'httpMethods' + type: Array + description: | + Act only on particular HTTP methods. E.g. A read-only API can block POST/PUT/DELETE methods. + Accepted values are: GET, HEAD, POST, PUT, DELETE, CONNECT, OPTIONS, TRACE and PATCH. + item_type: + type: String + - name: 'apiKeys' + type: Array + description: | + A list of API keys. Limit 1000 per action. + item_type: + type: String + - name: 'accessTokens' + type: Array + description: | + A list of accessTokens. Limit 1000 per action. + item_type: + type: String + - name: 'apiProducts' + type: Array + description: | + A list of API Products. Limit 1000 per action. + item_type: + type: String + - name: 'developerApps' + type: Array + description: | + A list of developer apps. Limit 1000 per action. + item_type: + type: String + - name: 'developers' + type: Array + description: | + A list of developers. Limit 1000 per action. + item_type: + type: String + - name: 'userAgents' + type: Array + description: | + A list of user agents to deny. We look for exact matches. Limit 50 per action. + item_type: + type: String + - name: 'regionCodes' + type: Array + description: | + A list of countries/region codes to act on, e.g. US. This follows https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2. + item_type: + type: String + - name: 'asns' + type: Array + description: | + A list of ASN numbers to act on, e.g. 23. https://en.wikipedia.org/wiki/Autonomous_system_(Internet) + This uses int64 instead of uint32 because of https://linter.aip.dev/141/forbidden-types. 
+ item_type: + type: String + - name: 'allow' + type: NestedObject + description: | + Allow a request through if it matches this SecurityAction. + exactly_one_of: + - 'allow' + - 'deny' + - 'flag' + # empty object with no properties, see: https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.environments.securityActions#Allow + allow_empty_object: true + send_empty_value: true + properties: [] + - name: 'deny' + type: NestedObject + description: | + Deny a request through if it matches this SecurityAction. + exactly_one_of: + - 'allow' + - 'deny' + - 'flag' + properties: + - name: 'responseCode' + type: Integer + description: | + The HTTP response code if the Action = DENY. + - name: 'flag' + type: NestedObject + description: | + Flag a request through if it matches this SecurityAction. + exactly_one_of: + - 'allow' + - 'deny' + - 'flag' + properties: + - name: 'headers' + type: Array + description: | + A list of HTTP headers to be sent to the target in case of a FLAG SecurityAction. + Limit 5 headers per SecurityAction. + At least one is mandatory. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + The header name to be sent to the target. + - name: 'value' + type: String + description: | + The header value to be sent to the target. + - name: 'expireTime' + type: String + description: | + The expiration for this SecurityAction. + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 + fractional digits. Offsets other than "Z" are also accepted. + Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". + conflicts: + - 'ttl' + - name: 'ttl' + type: String + description: | + The TTL for this SecurityAction. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". 
+ conflicts: + - 'expireTime' diff --git a/mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl new file mode 100644 index 000000000000..842ffa7f4da8 --- /dev/null +++ b/mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl @@ -0,0 +1,70 @@ +data "google_client_config" "current" {} + +resource "google_compute_network" "apigee_network" { + name = "{{index $.Vars "network_name"}}" +} + +resource "google_compute_global_address" "apigee_range" { + name ="{{index $.Vars "global_address_name"}}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = data.google_client_config.current.project + authorized_network = google_compute_network.apigee_network.id + depends_on = [google_service_networking_connection.apigee_vpc_connection] +} + +resource "google_apigee_environment" "env" { + name = "{{index $.Vars "environment_name"}}" + description = "Apigee Environment" + display_name = "environment-1" + org_id = google_apigee_organization.apigee_org.id +} + +resource "google_apigee_addons_config" "apigee_org_security_addons_config" { + org = google_apigee_organization.apigee_org.name + addons_config { + api_security_config { + enabled = true + } + } +} + +resource "google_apigee_security_action" "{{$.PrimaryResourceId}}" { + security_action_id = "{{index $.Vars "security_action_id"}}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" 
+ + condition_config { + ip_address_ranges = [ + "100.0.220.1", + "200.0.0.1", + ] + + bot_reasons = [ + "Flooder", + "Public Cloud Azure", + "Public Cloud AWS", + ] + } + + allow {} + + expire_time = "2025-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go new file mode 100644 index 000000000000..c49f19b29c06 --- /dev/null +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go @@ -0,0 +1,611 @@ +package apigee_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func testAccCheckApigeeSecurityActionDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_apigee_security_action" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{env_id}}/securityActions/{{security_action_id}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return 
fmt.Errorf("ApigeeSecurityAction still exists at %s", url) + } + } + + return nil + } +} + +func TestAccApigeeSecurityAction_apigeeSecurityActionFull(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckApigeeSecurityActionDestroyProducer(t), + /* allow, deny and flag are mutually exclusive, so we test them in sequence */ + /* also all conditions except ip_address_ranges and bot_reasons seem to be mutually exclusive, so we test them in sequence */ + Steps: []resource.TestStep{ + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullAllow(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullDeny(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullHttpMethods(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullFlag(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullApiKeys(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullAccessTokens(context), + }, + { + ResourceName: 
"google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullApiProducts(context), + }, + + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullDeveloperApps(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullDevelopers(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullUserAgents(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullRegionCodes(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullAsns(context), + }, + { + ResourceName: "google_apigee_security_action.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccApigeeSecurityAction_apigeeSecurityActionFullTTL(context), + ExpectNonEmptyPlan: true, // ttl change enforces recreation of the resource + }, + }, + }) +} + +func testAccApigeeSecurityAction_apigeeBase(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_client_config" "current" {} + +resource "google_compute_network" "apigee_network" { + name = "tf-test-network-%{random_suffix}" +} + +resource "google_compute_global_address" "apigee_range" { + name = "tf-test-address-%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id 
+} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] +} + +resource "google_apigee_organization" "apigee_org" { + analytics_region = "us-central1" + project_id = data.google_client_config.current.project + authorized_network = google_compute_network.apigee_network.id + depends_on = [google_service_networking_connection.apigee_vpc_connection] +} + +resource "google_apigee_environment" "env" { + name = "tf-test-env-%{random_suffix}" + description = "Apigee Environment" + display_name = "environment-1" + org_id = google_apigee_organization.apigee_org.id +} + +resource "google_apigee_addons_config" "apigee_org_security_addons_config" { + org = google_apigee_organization.apigee_org.name + addons_config { + api_security_config { + enabled = true + } + } +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullAllow(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + ip_address_ranges = [ + "100.0.220.1", + "200.0.0.1", + ] + + bot_reasons = [ + "Flooder", + "Public Cloud Azure", + "Public Cloud AWS", + ] + } + + allow {} + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullFlag(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + 
security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + ip_address_ranges = [ + "100.0.220.1", + "200.0.0.1", + ] + + bot_reasons = [ + "Flooder", + "Public Cloud Azure", + "Public Cloud AWS", + ] + } + + flag { + headers { + name = "X-Flag-Header" + value = "flag-value" + } + headers { + name = "X-Flag-Header-2" + value = "flag-value-2" + } + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullDeny(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + ip_address_ranges = [ + "100.0.220.1", + "200.0.0.1", + ] + + bot_reasons = [ + "Flooder", + "Public Cloud Azure", + "Public Cloud AWS", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullHttpMethods(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + http_methods = [ + "GET", + "POST", + "PUT", + ] 
+ } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullApiKeys(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + api_keys = [ + "foo-key", + "bar-key", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullAccessTokens(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + access_tokens = [ + "foo-token", + "bar-token", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullApiProducts(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = 
google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + api_products = [ + "foo-product", + "bar-product", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullDeveloperApps(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + developer_apps = [ + "foo-app", + "bar-app", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullDevelopers(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + developers = [ + "foo-developer", + "bar-developer", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullUserAgents(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource 
"google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + user_agents = [ + "Mozilla/5.0", + "curl/7.64.1", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullRegionCodes(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + region_codes = [ + "US", + "CA", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullAsns(context map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + asns = [ + "23", + "42", + ] + } + + deny { + response_code = 403 + } + + expire_time = "2032-12-31T23:59:59Z" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} + +func testAccApigeeSecurityAction_apigeeSecurityActionFullTTL(context 
map[string]interface{}) string { + return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` +resource "google_apigee_security_action" "default" { + security_action_id = "tf-test-%{random_suffix}" + org_id = google_apigee_organization.apigee_org.name + env_id = google_apigee_environment.env.name + description = "Apigee Security Action" + state = "ENABLED" + + condition_config { + asns = [ + "23", + "42", + ] + } + + deny { + response_code = 403 + } + + ttl = "3600s" + depends_on = [ + google_apigee_addons_config.apigee_org_security_addons_config + ] +} +`, context) +} From 38985d4bfbc34ee1b733e541ea541d9622b5a6c4 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 25 Jul 2025 09:09:56 -0700 Subject: [PATCH 610/884] clarified internal setup (not internal only) (#14632) --- .ci/infra/terraform/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/infra/terraform/README.md b/.ci/infra/terraform/README.md index fac7f9e8e323..bc93932c0749 100644 --- a/.ci/infra/terraform/README.md +++ b/.ci/infra/terraform/README.md @@ -12,8 +12,8 @@ Prerequisites: - A BeyondCorp subscription on the organization After applying this configuration: -- (Internal only) Enable stubbed calls for GKE MultiCloud resources -- (Internal only) Verify ownership of `hashicorptest.com` for new service account +- (Internal setup) Enable stubbed calls for GKE MultiCloud resources +- (Internal setup) Verify ownership of `hashicorptest.com` for new service account - Enable Media CDN - Enable Access Boundary permissions - Enable BigQuery Table IAM conditions From 924875f5d0492abeffd656a641c5e066a1543ad5 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 25 Jul 2025 10:35:39 -0700 Subject: [PATCH 611/884] Removed some more stale TODO assignments and standardized on TODO instead of NOTE (#14644) --- .../services/resourcemanager/resource_google_project.go | 2 +- ...esource_google_service_networking_peered_dns_domain.go | 2 +- 
.../resource_service_networking_connection.go | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go index a7cce1993af7..b87981203290 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go @@ -774,7 +774,7 @@ func doEnableServicesRequest(services []string, project, billingProject, userAge // Handle errors that are retryable at call time for serviceusage // Specifically, errors in https://cloud.google.com/service-usage/docs/reference/rest/v1/services/batchEnable#response-body // Errors in operations are handled separately. -// NOTE(rileykarson): This should probably be turned into a retry predicate +// TODO: This should probably be turned into a retry predicate func handleServiceUsageRetryablePreconditionError(err error) error { if err == nil { return nil diff --git a/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go b/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go index 9c93f55eb655..534c4be65432 100644 --- a/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go +++ b/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go @@ -240,7 +240,7 @@ func resourceGoogleServiceNetworkingPeeredDNSDomainDelete(d *schema.ResourceData return nil } -// NOTE(deviavir): An out of band aspect of this API is that it uses a unique formatting of network +// TODO: An out of band aspect of this API is that it uses a unique formatting of network // different from the standard self_link URI. 
It requires a call to the resource manager to get the project // number for the current project. func getProjectNumber(d *schema.ResourceData, config *transport_tpg.Config, project, userAgent string) (string, error) { diff --git a/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go b/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go index d3e5b76c0b7d..dfac78f3392a 100644 --- a/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go +++ b/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go @@ -41,7 +41,7 @@ func ResourceServiceNetworkingConnection() *schema.Resource { DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `Name of VPC network connected with service producers using VPC peering.`, }, - // NOTE(craigatgoogle): This field is weird, it's required to make the Insert/List calls as a parameter + // TODO: This field is weird, it's required to make the Insert/List calls as a parameter // named "parent", however it's also defined in the response as an output field called "peering", which // uses "-" as a delimiter instead of ".". To alleviate user confusion I've opted to model the gcloud // CLI's approach, calling the field "service" and accepting the same format as the CLI with the "." @@ -341,7 +341,7 @@ func resourceServiceNetworkingConnectionImportState(d *schema.ResourceData, meta return []*schema.ResourceData{d}, nil } -// NOTE(craigatgoogle): The Connection resource in this API doesn't have an Id field, so inorder +// TODO: The Connection resource in this API doesn't have an Id field, so inorder // to support the Read method, we create an Id using the tuple(Network, Service). 
type connectionId struct { Network string @@ -379,7 +379,7 @@ func parseConnectionId(id string) (*connectionId, error) { }, nil } -// NOTE(craigatgoogle): An out of band aspect of this API is that it uses a unique formatting of network +// TODO: An out of band aspect of this API is that it uses a unique formatting of network // different from the standard self_link URI. It requires a call to the resource manager to get the project // number for the current project. func RetrieveServiceNetworkingNetworkName(d *schema.ResourceData, config *transport_tpg.Config, network, userAgent string) (string, error) { @@ -422,7 +422,7 @@ func RetrieveServiceNetworkingNetworkName(d *schema.ResourceData, config *transp const parentServicePattern = "^services/.+$" -// NOTE(craigatgoogle): An out of band aspect of this API is that it requires the service name to be +// TODO: An out of band aspect of this API is that it requires the service name to be // formatted as "services/" func formatParentService(service string) string { r := regexp.MustCompile(parentServicePattern) From cd66345bcedca4bf8dc6fbbf513e88d316f91ad9 Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Fri, 25 Jul 2025 13:48:06 -0400 Subject: [PATCH 612/884] Remove extra double quote in securesourcemanager docs (#14642) --- mmv1/products/securesourcemanager/BranchRule.yaml | 4 ++-- mmv1/products/securesourcemanager/Instance.yaml | 12 ++++++------ mmv1/products/securesourcemanager/Repository.yaml | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml index fd7d5be807d7..4eb27bf14d76 100644 --- a/mmv1/products/securesourcemanager/BranchRule.yaml +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -47,7 +47,7 @@ examples: branch_rule_id: 'my-basic-branchrule' repository_id: 'my-basic-repository' instance_id: 'my-basic-instance' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' 
test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: @@ -58,7 +58,7 @@ examples: branch_rule_id: 'my-initial-branchrule' repository_id: 'my-initial-repository' instance_id: 'my-initial-instance' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index 6e97fd0a575d..c154cda42263 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -59,7 +59,7 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: @@ -72,7 +72,7 @@ examples: vars: instance_id: 'my-instance' kms_key_name: 'my-key' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' 'kms_key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-secure-source-manager-key1").CryptoKey.Name' @@ -87,7 +87,7 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: @@ -110,7 +110,7 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: @@ -130,7 +130,7 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: @@ -143,7 +143,7 @@ examples: primary_resource_name: 
'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index 908bf7a46a59..d9d15ff238b5 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -55,7 +55,7 @@ examples: vars: repository_id: 'my-repository' instance_id: 'my-instance' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: @@ -66,7 +66,7 @@ examples: vars: repository_id: 'my-repository' instance_id: 'my-instance' - deletion_policy: '"PREVENT"' + deletion_policy: 'PREVENT' test_vars_overrides: 'deletion_policy': '"DELETE"' oics_vars_overrides: From 2c573ca8af18310080bf5f859b8936cc2d0c13ae Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Fri, 25 Jul 2025 13:20:24 -0500 Subject: [PATCH 613/884] google_iap_settings: use billing project instead of GetProject (#14630) --- .../terraform/custom_delete/clear_iap_settings.go.tmpl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl b/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl index fd49e8f716df..937c6a9e1429 100644 --- a/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl +++ b/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl @@ -3,9 +3,9 @@ if err != nil { return err } -project, err := tpgresource.GetProject(d, config) -if err != nil { - return fmt.Errorf("Error fetching project for Settings: %s", err) +billingProject := "" +if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp } headers := make(http.Header) @@ -17,7 +17,7 @@ log.Printf("[DEBUG] Updating 
Settings %q: %#v", d.Id(), obj) res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "PATCH", - Project: project, + Project: billingProject, RawURL: url, UserAgent: userAgent, Body: obj, From f1be5bddf4718587356998ca58787297c92f1d3a Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Fri, 25 Jul 2025 16:32:44 -0400 Subject: [PATCH 614/884] teamcity configs branch for weekly diff tests (#14607) --- scripts/main.go | 125 ++++++++++++++++++++++++++++++++++++++++++++ scripts/teamcity.go | 123 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 248 insertions(+) create mode 100644 scripts/main.go create mode 100644 scripts/teamcity.go diff --git a/scripts/main.go b/scripts/main.go new file mode 100644 index 000000000000..344b8905f608 --- /dev/null +++ b/scripts/main.go @@ -0,0 +1,125 @@ +package main + +import ( + "bufio" + "bytes" + "flag" + "fmt" + "io" + "os" + "os/exec" + "strings" + "time" +) + +func usage() string { + return `Usage: + teamcity-go-test -test [-parallelism n] [-timeout t] + + Test names must be listed one per line on stdin. 
+` +} + +func main() { + testBinary := flag.String("test", "", "executable containing the tests to run") + parallelism := flag.Int("parallelism", 1, "number of tests to execute in parallel") + timeout := flag.String("timeout", "", "an optional per-test timeout") + flag.Parse() + + if testBinary == nil || *testBinary == "" { + fmt.Fprint(os.Stderr, usage()) + os.Exit(1) + } + + if _, err := os.Stat(*testBinary); err != nil { + fmt.Fprintf(os.Stderr, "Cannot find binary: %s\n", *testBinary) + os.Exit(1) + } + + testNames := make([]string, 0, 0) + stdInReader := bufio.NewReader(os.Stdin) + + for { + line, err := stdInReader.ReadString('\n') + if err != nil { + if err == io.EOF { + if strings.TrimSpace(line) != "" { + testNames = append(testNames, line) + } + break + } + fmt.Fprintf(os.Stderr, "error reading stdin: %s", err) + os.Exit(1) + } + + if strings.TrimSpace(line) != "" { + testNames = append(testNames, line) + } + } + + testQueue := make(chan string) + messages := make(chan string) + completed := make(chan struct{}) + + for i := 0; i < *parallelism; i++ { + go runWorker(testQueue, messages, completed, *testBinary, *timeout) + } + + go func() { + for _, testName := range testNames { + testQueue <- strings.TrimSpace(testName) + } + }() + + resultsCount := 0 + for { + select { + case message := <-messages: + fmt.Printf("%s", message) + case <-completed: + resultsCount++ + } + + if resultsCount == len(testNames) { + break + } + } +} + +func runWorker(inputQueue <-chan string, messages chan<- string, done chan<- struct{}, binaryName, timeout string) { + for { + select { + case testName := <-inputQueue: + test := NewTeamCityTest(testName) + //messages <- fmt.Sprintf("%s", test.FormatStartNotice()) + runTest(test, binaryName, timeout) + messages <- test.FormatTestOutput() + done <- struct{}{} + } + } +} + +func runTest(test *TeamCityTest, binaryName, timeout string) { + var out bytes.Buffer + var errOut bytes.Buffer + + test.Started = time.Now() + + args := []string{ 
+ "-test.v", + "-test.run", + fmt.Sprintf("^%s$", test.Name), + } + if timeout != "" { + args = append(args, "-test.timeout") + args = append(args, timeout) + } + + cmd := exec.Command(binaryName, args...) + cmd.Stdout = &out + cmd.Stderr = &errOut + // Not sure what to do with errors here other than report them out to the runner. + cmd.Run() + + test.ParseTestRunnerOutput(out.String(), errOut.String()) +} diff --git a/scripts/teamcity.go b/scripts/teamcity.go new file mode 100644 index 000000000000..4e169e4ceffd --- /dev/null +++ b/scripts/teamcity.go @@ -0,0 +1,123 @@ +package main + +import ( + "bytes" + "fmt" + "regexp" + "strings" + "time" +) + +const ( + TeamCityTimestampFormat = "2006-01-02T15:04:05.000" + TeamCityTestStarted = "##teamcity[testStarted timestamp='%s' name='%s']\n" + TeamCityTestFailed = "##teamcity[testFailed timestamp='%s' name='%s']\n" + TeamCityTestFinished = "##teamcity[testFinished timestamp='%s' name='%s']\n" + TeamCityTestFailedRace = "##teamcity[testFailed timestamp='%s' name='%s' message='Race detected!']\n" + TeamCityTestIgnored = "##teamcity[testIgnored timestamp='%s' name='%s']\n" + TeamCityTestFailedPanic = "##teamcity[testFailed timestamp='%s' name='%s' message='Test ended in panic.']\n" + TeamCityTestDiffFailed = "##teamcity[testDiffFailed timestamp='%s' name='%s']\n" + TeamCityTestStdOut = "##teamcity[testStdOut name='%s' out='%s']\n" + TeamCityTestStdErr = "##teamcity[testStdErr name='%s' out='%s']\n" +) + +var ( + end = regexp.MustCompile(`--- (PASS|SKIP|FAIL):\s+([a-zA-Z_]\S*) \(([\.\d]+)\)`) + diff = regexp.MustCompile(`\[Diff\] (.*)`) + paniced = regexp.MustCompile(`panic:\s+(.*)\s+\[recovered\]\n`) + //suite = regexp.MustCompile("^(ok|FAIL)\\s+([^\\s]+)\\s+([\\.\\d]+)s") + race = regexp.MustCompile("^WARNING: DATA RACE") +) + +type TeamCityTest struct { + Name, Output, ErrOutput, Duration string + Race, Fail, Skip, Pass, Diff bool + Started time.Time +} + +func NewTeamCityTest(testName string) *TeamCityTest { + return 
&TeamCityTest{ + Name: testName, + } +} + +func (test *TeamCityTest) ParseTestRunnerOutput(testOutput string, errOutput string) { + hasDataRace := race.MatchString(testOutput) + test.Race = hasDataRace + + resultDiff := diff.FindStringSubmatch(testOutput) + if resultDiff != nil { + test.Diff = true + } else { + resultLines := end.FindStringSubmatch(testOutput) + if resultLines != nil { + switch resultLines[1] { + case "PASS": + test.Pass = true + case "SKIP": + test.Skip = true + case "FAIL": + test.Fail = true + } + test.Duration = resultLines[3] + } + } + test.Output = testOutput + test.ErrOutput = errOutput +} + +func (test *TeamCityTest) FormatTestOutput() string { + now := time.Now().Format(TeamCityTimestampFormat) + + var output bytes.Buffer + + output.WriteString(fmt.Sprintf(TeamCityTestStarted, test.Started.Format(TeamCityTimestampFormat), test.Name)) + + output.WriteString(fmt.Sprintf(TeamCityTestStdOut, test.Name, escapeOutput(test.Output))) + output.WriteString(fmt.Sprintf(TeamCityTestStdErr, test.Name, escapeOutput(test.ErrOutput))) + + if test.Diff { + output.WriteString(fmt.Sprintf(TeamCityTestDiffFailed, now, test.Name)) + // have to fail so that teamcity catches failure correctly + output.WriteString(fmt.Sprintf(TeamCityTestFailedPanic, now, test.Name)) + output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + return output.String() + + } + + if test.Fail { + output.WriteString(fmt.Sprintf(TeamCityTestFailed, now, test.Name)) + output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + return output.String() + } + + if test.Race { + output.WriteString(fmt.Sprintf(TeamCityTestFailedRace, now, test.Name)) + output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + return output.String() + } + + if test.Skip { + output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) + return output.String() + } + + if test.Pass { + output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + return 
output.String() + } + + // test passes if no diff, even if failure (failure artifacts will be in regular_failure_file.log) + output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + + return output.String() +} + +func escapeOutput(outputLines string) string { + newOutput := strings.Replace(outputLines, "|", "||", -1) + newOutput = strings.Replace(newOutput, "\n", "|n", -1) + newOutput = strings.Replace(newOutput, "'", "|'", -1) + newOutput = strings.Replace(newOutput, "]", "|]", -1) + newOutput = strings.Replace(newOutput, "[", "|[", -1) + return newOutput +} From 899b9f4ff19956d8ba683d6d6c13b4521628f86c Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 25 Jul 2025 14:12:21 -0700 Subject: [PATCH 615/884] Fixed regex to not capture re or er (#14651) --- .github/workflows/reassign-reviewer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reassign-reviewer.yml b/.github/workflows/reassign-reviewer.yml index e27cd2148200..9da2f2dcbe01 100644 --- a/.github/workflows/reassign-reviewer.yml +++ b/.github/workflows/reassign-reviewer.yml @@ -22,7 +22,7 @@ jobs: uses: actions-ecosystem/action-regex-match@d50fd2e7a37d0e617aea3d7ada663bd56862b9cc # v2.0.2 with: text: ${{ github.event.comment.body }} - regex: '.*@modular-magician (re)?assign[- ]review(er)? ?@?([a-zA-Z0-9-_]*).*' + regex: '.*@modular-magician (?:re)?assign[- ]review(?:er)? 
?@?([a-zA-Z0-9-_]*).*' - name: Checkout Repository if: steps.read-comment.outputs.match != '' uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 From b66fd0d5ae2d7293f2af6fa1b5b8ad7356ca752d Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Fri, 25 Jul 2025 14:18:39 -0700 Subject: [PATCH 616/884] tgc-revival: make test data fall back to most recent run (#14645) --- mmv1/third_party/tgc_next/test/setup.go | 117 ++++++++++++++++++------ 1 file changed, 87 insertions(+), 30 deletions(-) diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index 3b4032d0cc66..c37282bde32c 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -27,6 +27,11 @@ type CaiData struct { CaiAsset caiasset.Asset `json:"cai_asset,omitempty"` } +type NightlyRun struct { + MetadataByTest map[string]TgcMetadataPayload + Date time.Time +} + type TgcMetadataPayload struct { TestName string `json:"test_name"` RawConfig string `json:"raw_config"` @@ -45,49 +50,59 @@ type Resource struct { Attributes map[string]struct{} `json:"attributes"` } +const ( + ymdFormat = "2006-01-02" + maxRetries = 30 +) + var ( - TestsMetadata = make(map[string]TgcMetadataPayload) + TestsMetadata = make([]NightlyRun, maxRetries) setupDone = false ) -func ReadTestsDataFromGcs() (map[string]TgcMetadataPayload, error) { +func ReadTestsDataFromGcs() ([]NightlyRun, error) { if !setupDone { bucketName := "cai_assets_metadata" currentDate := time.Now() + ctx := context.Background() + client, err := storage.NewClient(ctx) + if err != nil { + return nil, fmt.Errorf("storage.NewClient: %v", err) + } + defer client.Close() - for len(TestsMetadata) == 0 { - objectName := fmt.Sprintf("nightly_tests/%s/nightly_tests_meta.json", currentDate.Format("2006-01-02")) - log.Printf("Read object %s from the bucket %s", objectName, bucketName) - - ctx := context.Background() - client, err := storage.NewClient(ctx) - if err != nil { - 
return nil, fmt.Errorf("storage.NewClient: %v", err) - } - defer client.Close() - - currentDate = currentDate.AddDate(0, 0, -1) + bucket := client.Bucket(bucketName) - rc, err := client.Bucket(bucketName).Object(objectName).NewReader(ctx) + var allErrs error + retries := 0 + for i := 0; i < len(TestsMetadata); i++ { + metadata, err := readTestsDataFromGCSForRun(ctx, currentDate, bucketName, bucket) if err != nil { - if err == storage.ErrObjectNotExist { - log.Printf("Object '%s' in bucket '%s' does NOT exist.\n", objectName, bucketName) - continue + if allErrs == nil { + allErrs = fmt.Errorf("reading tests data from gcs: %v", err) } else { - return nil, fmt.Errorf("Object(%q).NewReader: %v", objectName, err) + allErrs = fmt.Errorf("%v, %v", allErrs, err) } } - defer rc.Close() - - data, err := io.ReadAll(rc) - if err != nil { - return nil, fmt.Errorf("io.ReadAll: %v", err) + if metadata == nil { + // Keep looking until we find a date with metadata. + i-- + retries++ + if retries > maxRetries { + // Stop looking when we find maxRetries dates with no metadata. 
+ return nil, fmt.Errorf("too many retries, %v", allErrs) + } + } else { + TestsMetadata[i] = NightlyRun{ + MetadataByTest: metadata, + Date: currentDate, + } } + currentDate = currentDate.AddDate(0, 0, -1) + } - err = json.Unmarshal(data, &TestsMetadata) - if err != nil { - return nil, fmt.Errorf("json.Unmarshal: %v", err) - } + if allErrs != nil { + return nil, allErrs } if os.Getenv("WRITE_FILES") != "" { @@ -98,6 +113,35 @@ func ReadTestsDataFromGcs() (map[string]TgcMetadataPayload, error) { return TestsMetadata, nil } +func readTestsDataFromGCSForRun(ctx context.Context, currentDate time.Time, bucketName string, bucket *storage.BucketHandle) (map[string]TgcMetadataPayload, error) { + metadata := make(map[string]TgcMetadataPayload) + objectName := fmt.Sprintf("nightly_tests/%s/nightly_tests_meta.json", currentDate.Format(ymdFormat)) + log.Printf("Read object %s from the bucket %s", objectName, bucketName) + + rc, err := bucket.Object(objectName).NewReader(ctx) + if err != nil { + if err == storage.ErrObjectNotExist { + log.Printf("Object '%s' in bucket '%s' does NOT exist.\n", objectName, bucketName) + return nil, nil + } else { + return nil, fmt.Errorf("Object(%q).NewReader: %v", objectName, err) + } + } + defer rc.Close() + + data, err := io.ReadAll(rc) + if err != nil { + return nil, fmt.Errorf("io.ReadAll: %v", err) + } + + err = json.Unmarshal(data, &metadata) + if err != nil { + return nil, fmt.Errorf("json.Unmarshal: %v", err) + } + + return metadata, nil +} + func prepareTestData(testName string) (map[string]ResourceTestData, string, error) { var err error cacheMutex.Lock() @@ -107,8 +151,21 @@ func prepareTestData(testName string) (map[string]ResourceTestData, string, erro return nil, "", err } - testMetadata := TestsMetadata[testName] - resourceMetadata := testMetadata.ResourceMetadata + var testMetadata TgcMetadataPayload + var resourceMetadata map[string]*ResourceMetadata + for _, run := range TestsMetadata { + var ok bool + testMetadata, ok = 
run.MetadataByTest[testName] + if ok { + log.Printf("Found metadata for %s from run on %s", testName, run.Date.Format(ymdFormat)) + resourceMetadata = testMetadata.ResourceMetadata + if len(resourceMetadata) > 0 { + break + } + } + log.Printf("Missing metadata for %s from run on %s, looking at previous run", testName, run.Date.Format(ymdFormat)) + } + if len(resourceMetadata) == 0 { log.Printf("Data of test is unavailable: %s", testName) return nil, "", nil From 84c0a7fa32de0671e0147dbf7f4ca7f76499d686 Mon Sep 17 00:00:00 2001 From: Rishita Golla Date: Fri, 25 Jul 2025 15:05:07 -0700 Subject: [PATCH 617/884] Add support for Lustre CSI Driver on GKE (#14435) --- .../resource_container_cluster.go.tmpl | 47 +++++++++++++++++++ .../resource_container_cluster_test.go.tmpl | 9 +++- .../docs/r/container_cluster.html.markdown | 9 ++++ 3 files changed, 64 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 97550b1d5f17..4e746a25c57a 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -97,6 +97,7 @@ var ( "addons_config.0.stateful_ha_config", "addons_config.0.ray_operator_config", "addons_config.0.parallelstore_csi_driver_config", + "addons_config.0.lustre_csi_driver_config", {{- if ne $.TargetVersionName "ga" }} "addons_config.0.istio_config", "addons_config.0.kalm_config", @@ -491,6 +492,29 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, + "lustre_csi_driver_config": { + Type: schema.TypeList, + Optional: true, + Computed: true, + AtLeastOneOf: addonsConfigKeys, + MaxItems: 1, + Description: `Configuration for the Lustre CSI driver. 
Defaults to disabled; set enabled = true to enable.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `Whether the Lustre CSI driver is enabled for this cluster.`, + }, + "enable_legacy_lustre_port": { + Type: schema.TypeBool, + Optional: true, + Description: `If set to true, the Lustre CSI driver will initialize LNet (the virtual network layer for Lustre kernel module) using port 6988. + This flag is required to workaround a port conflict with the gke-metadata-server on GKE nodes.`, + }, + }, + }, + }, {{- if ne $.TargetVersionName "ga" }} "istio_config": { Type: schema.TypeList, @@ -5290,6 +5314,20 @@ func expandClusterAddonsConfig(configured interface{}) *container.AddonsConfig { } } + if v, ok := config["lustre_csi_driver_config"]; ok && len(v.([]interface{})) > 0 { + lustreConfig := v.([]interface{})[0].(map[string]interface{}) + ac.LustreCsiDriverConfig = &container.LustreCsiDriverConfig{ + Enabled: lustreConfig["enabled"].(bool), + ForceSendFields: []string{"Enabled"}, + } + + // Check for enable_legacy_lustre_port + if val, ok := lustreConfig["enable_legacy_lustre_port"]; ok { + ac.LustreCsiDriverConfig.EnableLegacyLustrePort = val.(bool) + ac.LustreCsiDriverConfig.ForceSendFields = append(ac.LustreCsiDriverConfig.ForceSendFields, "EnableLegacyLustrePort") + } + } + {{ if ne $.TargetVersionName `ga` -}} if v, ok := config["istio_config"]; ok && len(v.([]interface{})) > 0 { addon := v.([]interface{})[0].(map[string]interface{}) @@ -6719,6 +6757,15 @@ func flattenClusterAddonsConfig(c *container.AddonsConfig) []map[string]interfac }, } } + if c.LustreCsiDriverConfig != nil { + lustreConfig := c.LustreCsiDriverConfig + result["lustre_csi_driver_config"] = []map[string]interface{}{ + { + "enabled": lustreConfig.Enabled, + "enable_legacy_lustre_port":lustreConfig.EnableLegacyLustrePort, + }, + } + } {{ if ne $.TargetVersionName `ga` -}} if c.IstioConfig != nil { diff --git 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 2ed5d5c63f1d..e2af6d09209c 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -6892,6 +6892,9 @@ resource "google_container_cluster" "primary" { parallelstore_csi_driver_config { enabled = false } + lustre_csi_driver_config { + enabled = false + } {{- if ne $.TargetVersionName "ga" }} istio_config { disabled = true @@ -6967,8 +6970,12 @@ resource "google_container_cluster" "primary" { enabled = true } } - parallelstore_csi_driver_config { + parallelstore_csi_driver_config { + enabled = true + } + lustre_csi_driver_config { enabled = true + enable_legacy_lustre_port=true } {{- if ne $.TargetVersionName "ga" }} istio_config { diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index ae6f9fedaf35..631d8d6a20af 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -513,6 +513,15 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle It is enabled by default for Autopilot clusters with version 1.29 or later; set `enabled = true` to enable it explicitly. See [Enable the Parallelstore CSI driver](https://cloud.google.com/kubernetes-engine/docs/how-to/persistent-volumes/parallelstore-csi-new-volume#enable) for more information. +* `lustre_csi_driver_config` - (Optional) The status of the Lustre CSI driver addon, + which allows the usage of a Lustre instances as volumes. + It is disabled by default for Standard clusters; set `enabled = true` to enable. 
+ It is disabled by default for Autopilot clusters; set `enabled = true` to enable. + Lustre CSI Driver Config has optional subfield + `enable_legacy_lustre_port` which allows the Lustre CSI driver to initialize LNet (the virtual networklayer for Lustre kernel module) using port 6988. + This flag is required to workaround a port conflict with the gke-metadata-server on GKE nodes. + See [Enable Lustre CSI driver](https://cloud.google.com/kubernetes-engine/docs/how-to/persistent-volumes/lustre-csi-driver-new-volume) for more information. + This example `addons_config` disables two addons: ```hcl From 98082d25758820898bb3d61155b2a3708f02607d Mon Sep 17 00:00:00 2001 From: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Date: Fri, 25 Jul 2025 15:09:47 -0700 Subject: [PATCH 618/884] fix tests for worker pool (#14372) --- .../terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl | 1 + .../resource_cloud_run_v2_worker_pool_test.go.tmpl | 6 ++---- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl index 6e6031c10048..fe3c2dab66e7 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl @@ -18,5 +18,6 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { node_selector { accelerator = "nvidia-l4" } + gpu_zonal_redundancy_disabled = true } } \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl index 9c65929807b8..0a1b772455c5 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl @@ -561,12 +561,11 @@ resource "google_cloud_run_v2_worker_pool" "default" { description = "description creating" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" annotations = { generated-by = "magic-modules" } scaling { - scaling_mode = "MANUAL" manual_instance_count = 5 } @@ -601,9 +600,8 @@ resource "google_cloud_run_v2_worker_pool" "default" { } client = "client-1" client_version = "client-version-1" - launch_stage = "ALPHA" + launch_stage = "BETA" scaling { - scaling_mode = "MANUAL" manual_instance_count = 2 } template { From 1ce0b2a7286805decace89454f4bef9d844d890d Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Sat, 26 Jul 2025 03:44:58 +0530 Subject: [PATCH 619/884] AnalyticsHub Marketplace Changes (#14556) --- .../bigqueryanalyticshub/Listing.yaml | 38 ++++++++++ .../ListingSubscription.yaml | 16 +++++ ...y_analyticshub_listing_marketplace.tf.tmpl | 27 ++++++++ .../bigquery_analytics_hub_listing.go.tmpl | 4 ++ ...rce_bigquery_analytics_hub_listing_test.go | 69 +++++++++++++++++++ 5 files changed, 154 insertions(+) create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_marketplace.tf.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index ccec2c620280..77edec0e437a 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -41,6 +41,7 @@ iam_policy: - '{{listing_id}}' custom_code: pre_update: 'templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl' + pre_delete: 'templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl' # Skipping the sweeper due to the non-standard base_url exclude_sweeper: true examples: @@ -97,6 +98,21 @@ examples: 
dataset_id: 'tf_test_dataset' routine_id: 'tf_test_routine' desc: 'Example for listing with routine' + - name: 'bigquery_analyticshub_listing_marketplace' + primary_resource_id: 'listing' + primary_resource_name: 'fmt.Sprintf("tf_test_my_data_exchange%s", context["random_suffix"]), fmt.Sprintf("tf_test_my_listing%s", context["random_suffix"])' + region_override: 'us' + vars: + data_exchange_id: 'my_data_exchange' + listing_id: 'my_listing' + desc: 'example data exchange' + ignore_read_extra: + - 'delete_commercial' +virtual_fields: + - name: 'delete_commercial' + type: Boolean + description: |- + If the listing is commercial then this field must be set to true, otherwise a failure is thrown. This acts as a safety guard to avoid deleting commercial listings accidentally. parameters: properties: - name: 'name' @@ -265,3 +281,25 @@ properties: type: Boolean description: If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. Once enabled, this setting cannot be turned off. + - name: 'commercialInfo' + type: NestedObject + description: | + Commercial info contains the information about the commercial data products associated with the listing. + output: true + properties: + - name: 'cloudMarketplace' + type: NestedObject + description: Details of the Marketplace Data Product associated with the Listing. + output: true + properties: + - name: 'service' + type: String + description: | + Resource name of the commercial service associated with the Marketplace Data Product. e.g. example.com + output: true + - name: 'commercialState' + type: String + description: | + Commercial state of the Marketplace Data Product. 
+ Possible values: COMMERCIAL_STATE_UNSPECIFIED, ONBOARDING, ACTIVE + output: true diff --git a/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml b/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml index 3deabba27755..f0d7aa963e6c 100644 --- a/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml +++ b/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml @@ -198,3 +198,19 @@ properties: type: Boolean description: 'Output only. By default, false. If true, the Subscriber agreed to the email sharing mandate that is enabled for Listing.' output: true + - name: 'commercialInfo' + type: NestedObject + description: | + Commercial info metadata for this subscription. This is set if this is a commercial subscription i.e. if this subscription was created from subscribing to a commercial listing. + output: true + properties: + - name: 'cloudMarketplace' + type: NestedObject + description: Cloud Marketplace commercial metadata for this subscription. + output: true + properties: + - name: 'order' + type: String + description: | + Resource name of the Marketplace Order. 
+ output: true diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_marketplace.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_marketplace.tf.tmpl new file mode 100644 index 000000000000..fe475b47a1db --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_listing_marketplace.tf.tmpl @@ -0,0 +1,27 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "desc"}}" +} + +resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.{{$.PrimaryResourceId}}.data_exchange_id + listing_id = "{{index $.Vars "listing_id"}}" + display_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "desc"}}" + delete_commercial = true + + bigquery_dataset { + dataset = google_bigquery_dataset.{{$.PrimaryResourceId}}.id + } + +} + +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { + dataset_id = "{{index $.Vars "listing_id"}}" + friendly_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "desc"}}" + location = "US" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl b/mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl new file mode 100644 index 000000000000..8da26b393417 --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl @@ -0,0 +1,4 @@ +deleteCommercial := d.Get("delete_commercial") +if deleteCommercial != nil { + url = url + "?deleteCommercial=" + fmt.Sprintf("%v", deleteCommercial) +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go 
b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go index 89eb9d2cffc0..a7345dd04720 100644 --- a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go @@ -130,3 +130,72 @@ resource "google_bigquery_analytics_hub_listing" "listing_pubsub" { } `, updatedContext) } + +func TestAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingMarketplaceUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigqueryAnalyticsHubListingDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingMarketplaceExample(context), + }, + { + ResourceName: "google_bigquery_analytics_hub_listing.listing", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"data_exchange_id", "listing_id", "location", "delete_commercial"}, + }, + { + Config: testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingMarketplaceUpdate(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_bigquery_analytics_hub_listing.listing", "delete_commercial", "false"), + ), + }, + { + ResourceName: "google_bigquery_analytics_hub_listing.listing", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"data_exchange_id", "listing_id", "location", "delete_commercial"}, + }, + }, + }) +} + +func testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubListingMarketplaceUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource 
"google_bigquery_analytics_hub_data_exchange" "listing" { + location = "US" + data_exchange_id = "tf_test_my_data_exchange%{random_suffix}" + display_name = "tf_test_my_data_exchange%{random_suffix}" + description = "example data exchange%{random_suffix}" +} + +resource "google_bigquery_analytics_hub_listing" "listing" { + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.listing.data_exchange_id + listing_id = "tf_test_my_listing%{random_suffix}" + display_name = "tf_test_my_listing%{random_suffix}" + description = "example data exchange%{random_suffix}" + delete_commercial = false + + bigquery_dataset { + dataset = google_bigquery_dataset.listing.id + } + +} + +resource "google_bigquery_dataset" "listing" { + dataset_id = "tf_test_my_listing%{random_suffix}" + friendly_name = "tf_test_my_listing%{random_suffix}" + description = "example data exchange%{random_suffix}" + location = "US" +} +`, context) +} From 54845fb2fba75038679c886c944d97f3edb6b46e Mon Sep 17 00:00:00 2001 From: jialei-chen <147877028+jialei-chen@users.noreply.github.com> Date: Mon, 28 Jul 2025 10:05:32 -0700 Subject: [PATCH 620/884] Add a new resource CmekConfig. 
(#13419) --- mmv1/products/discoveryengine/CmekConfig.yaml | 139 ++++++++++++++++++ ...discoveryengine_cmekconfig_default.tf.tmpl | 5 + .../discoveryengine_cmekconfig_kmskey.go.tmpl | 4 + ...ource_discovery_engine_cmek_config_test.go | 77 ++++++++++ 4 files changed, 225 insertions(+) create mode 100644 mmv1/products/discoveryengine/CmekConfig.yaml create mode 100644 mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl create mode 100644 mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl create mode 100644 mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go diff --git a/mmv1/products/discoveryengine/CmekConfig.yaml b/mmv1/products/discoveryengine/CmekConfig.yaml new file mode 100644 index 000000000000..bac218339859 --- /dev/null +++ b/mmv1/products/discoveryengine/CmekConfig.yaml @@ -0,0 +1,139 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'CmekConfig' +description: | + CmekConfig represents configurations used to enable CMEK data encryption with + Cloud KMS keys. +references: + guides: + api: 'https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1/projects.locations.cmekConfigs' +base_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs' +self_link: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}' +# Update API is also used as create API. 
+create_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}?&setDefault={{set_default}}' +create_verb: 'PATCH' +update_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}?&setDefault={{set_default}}' +update_verb: 'PATCH' +delete_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}' +import_format: + - 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}' +timeouts: + insert_minutes: 60 + update_minutes: 60 + delete_minutes: 60 +autogen_async: false +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + timeouts: + insert_minutes: 60 + update_minutes: 60 + delete_minutes: 60 + result: + resource_inside_response: true +custom_code: + update_encoder: 'templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl' +examples: + - name: 'discoveryengine_cmekconfig_default' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf_test_cmek_config%s", context["random_suffix"])' + vars: + cmek_config_id: 'cmek-config-id' + kms_key_name: 'kms-key-name' + test_vars_overrides: + kms_key_name: 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us", "tftest-shared-key-5").CryptoKey.Name' + ignore_read_extra: + - 'project' +parameters: + - name: 'location' + type: String + description: | + The geographic location where the CMEK config should reside. The value can + only be one of "us" and "eu". + url_param_only: true + required: true + immutable: true + - name: 'cmekConfigId' + type: String + description: | + The unique id of the cmek config. + url_param_only: true + immutable: true + - name: 'setDefault' + type: Boolean + description: | + Set the following CmekConfig as the default to be used for child resources + if one is not specified. The default value is true. 
+ url_param_only: true + default_value: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The unique full resource name of the cmek config. Values are of the format + `projects/{project}/locations/{location}/cmekConfigs/{cmek_config_id}`. + This field must be a UTF-8 encoded string with a length limit of 1024 + characters. + output: true + - name: 'kmsKey' + type: String + description: | + KMS key resource name which will be used to encrypt resources + `projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{keyId}`. + required: true + immutable: true + - name: 'kmsKeyVersion' + type: String + description: | + KMS key version resource name which will be used to encrypt resources + `/cryptoKeyVersions/{keyVersion}`. + output: true + - name: 'state' + type: String + description: | + The state of the CmekConfig. + output: true + - name: 'isDefault' + type: Boolean + description: | + The default CmekConfig for the Customer. + output: true + - name: 'lastRotationTimestampMicros' + type: Integer + description: | + The timestamp of the last key rotation. + output: true + - name: 'singleRegionKeys' + type: Array + description: | + Single-regional CMEKs that are required for some VAIS features. + item_type: + type: NestedObject + properties: + - name: 'kmsKey' + type: String + description: | + Single-regional kms key resource name which will be used to encrypt + resources + `projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{keyId}`. + required: true + - name: 'notebooklmState' + type: String + description: | + Whether the NotebookLM Corpus is ready to be used. 
+ output: true diff --git a/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl new file mode 100644 index 000000000000..f5868ffe5670 --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl @@ -0,0 +1,5 @@ +resource "google_discovery_engine_cmek_config" "default" { + location = "us" + cmek_config_id = "{{index $.Vars "cmek_config_id"}}" + kms_key = "{{index $.Vars "kms_key_name"}}" +} diff --git a/mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl b/mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl new file mode 100644 index 000000000000..d9cbfab2b092 --- /dev/null +++ b/mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl @@ -0,0 +1,4 @@ +// Always force-send `kms_key` value. This field is immutable and required. +// In update operation, the immutable value of this field is ignored and not found, generating generating error: "Field \"kms_key_name\" is a required field, but no value is found." 
+obj["kmsKey"] = d.Get("kms_key") +return obj, nil \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go new file mode 100644 index 000000000000..b11b8378e5a5 --- /dev/null +++ b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go @@ -0,0 +1,77 @@ +package discoveryengine_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDiscoveryEngineCmekConfig_discoveryengineCmekconfigDefaultExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us", "tftest-shared-key-4").CryptoKey.Name, + "single_region_kms_key_name1": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tftest-shared-key-us-east1").CryptoKey.Name, + "single_region_kms_key_name2": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tftest-shared-key-us-central1").CryptoKey.Name, + "single_region_kms_key_name3": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-west1", "tftest-shared-key-us-west1").CryptoKey.Name, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDiscoveryEngineCmekConfig_discoveryengineCmekconfigDefaultExample_basic(context), + }, + { + ResourceName: "google_discovery_engine_cmek_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cmek_config_id", "location", 
"project", "set_default"}, + }, + { + Config: testAccDiscoveryEngineCmekConfig_discoveryengineCmekconfigDefaultExample_update(context), + }, + { + ResourceName: "google_discovery_engine_cmek_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cmek_config_id", "location", "project", "set_default"}, + }, + }, + }) +} + +func testAccDiscoveryEngineCmekConfig_discoveryengineCmekconfigDefaultExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_discovery_engine_cmek_config" "default" { + location = "us" + cmek_config_id = "tf-test-cmek-config-id%{random_suffix}" + kms_key = "%{kms_key_name}" + set_default = false +} +`, context) +} + +func testAccDiscoveryEngineCmekConfig_discoveryengineCmekconfigDefaultExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_discovery_engine_cmek_config" "default" { + location = "us" + cmek_config_id = "tf-test-cmek-config-id%{random_suffix}" + kms_key = "%{kms_key_name}" + set_default = false + single_region_keys { + kms_key = "%{single_region_kms_key_name1}" + } + single_region_keys { + kms_key = "%{single_region_kms_key_name2}" + } + single_region_keys { + kms_key = "%{single_region_kms_key_name3}" + } + +} +`, context) +} From 3a5447dd3a19e3856d1e395af6a2754ff046113b Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Mon, 28 Jul 2025 10:07:26 -0700 Subject: [PATCH 621/884] bump sdk/v2 + mux + plugin-go versions to latest (#14654) --- mmv1/third_party/terraform/go.mod | 24 +++++----- mmv1/third_party/terraform/go.sum | 73 +++++++++++++++---------------- 2 files changed, 48 insertions(+), 49 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index a6c484ae6e53..780f605cd7e4 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -15,16 +15,16 @@ require ( 
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/hashicorp/errwrap v1.0.0 github.com/hashicorp/go-cleanhttp v0.5.2 - github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 + github.com/hashicorp/go-cty v1.5.0 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-version v1.7.0 - github.com/hashicorp/terraform-json v0.24.0 - github.com/hashicorp/terraform-plugin-framework v1.13.0 + github.com/hashicorp/terraform-json v0.25.0 + github.com/hashicorp/terraform-plugin-framework v1.15.0 github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 - github.com/hashicorp/terraform-plugin-go v0.26.0 + github.com/hashicorp/terraform-plugin-go v0.28.0 github.com/hashicorp/terraform-plugin-log v0.9.0 - github.com/hashicorp/terraform-plugin-mux v0.17.0 - github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 + github.com/hashicorp/terraform-plugin-mux v0.20.0 + github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 github.com/hashicorp/terraform-plugin-testing v1.5.1 github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/hashstructure v1.1.0 @@ -49,12 +49,12 @@ require ( cloud.google.com/go/iam v1.5.2 // indirect cloud.google.com/go/longrunning v0.6.7 // indirect cloud.google.com/go/monitoring v1.24.2 // indirect - github.com/ProtonMail/go-crypto v1.1.3 // indirect + github.com/ProtonMail/go-crypto v1.1.6 // indirect github.com/agext/levenshtein v1.2.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/cloudflare/circl v1.3.7 // indirect + github.com/cloudflare/circl v1.6.0 // indirect github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect @@ -73,14 +73,14 @@ require ( github.com/googleapis/gax-go/v2 v2.14.2 // indirect github.com/hashicorp/go-checkpoint 
v0.5.0 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect - github.com/hashicorp/go-plugin v1.6.2 // indirect + github.com/hashicorp/go-plugin v1.6.3 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect - github.com/hashicorp/hc-install v0.9.1 // indirect + github.com/hashicorp/hc-install v0.9.2 // indirect github.com/hashicorp/hcl/v2 v2.23.0 // indirect github.com/hashicorp/logutils v1.0.0 // indirect - github.com/hashicorp/terraform-exec v0.22.0 // indirect - github.com/hashicorp/terraform-registry-address v0.2.4 // indirect + github.com/hashicorp/terraform-exec v0.23.0 // indirect + github.com/hashicorp/terraform-registry-address v0.2.5 // indirect github.com/hashicorp/terraform-svchost v0.1.1 // indirect github.com/hashicorp/yamux v0.1.1 // indirect github.com/kylelemons/godebug v1.1.0 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 26a11f59899d..90f85eadf8b5 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -26,8 +26,8 @@ github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:va github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= -github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= +github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod 
h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= @@ -43,14 +43,14 @@ github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= -github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= +github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk= +github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/creachadair/staticfile v0.1.2/go.mod h1:a3qySzCIXEprDGxk6tSxSI+dBBdLzqeBOMhZ+o2d3pM= -github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= -github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= +github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -81,10 +81,10 @@ github.com/gammazero/workerpool 
v0.0.0-20181230203049-86a96b5d5d92 h1:EipXK6U05I github.com/gammazero/workerpool v0.0.0-20181230203049-86a96b5d5d92/go.mod h1:w9RqFVO2BM3xwWEcAB8Fwp0OviTBBEiRmSBDfbXnd3w= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= -github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cjio8= -github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM= -github.com/go-git/go-git/v5 v5.13.0 h1:vLn5wlGIh/X78El6r3Jr+30W16Blk0CTcxTYcYPWi5E= -github.com/go-git/go-git/v5 v5.13.0/go.mod h1:Wjo7/JyVKtQgUNdXYXIepzWfJQkUEIGvkvVkiXRR/zw= +github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= +github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= +github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60= +github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k= github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= @@ -102,8 +102,8 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= 
+github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -139,14 +139,14 @@ github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuD github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= -github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= +github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g0= +github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog= -github.com/hashicorp/go-plugin v1.6.2/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= +github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= +github.com/hashicorp/go-plugin v1.6.3/go.mod 
h1:MRobyh+Wc/nYy1V4KAXUiYfzxoYhs7V1mlH1Z7iY2h0= github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= @@ -154,32 +154,32 @@ github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/C github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.9.1 h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ= -github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0= +github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24= +github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I= github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64= -github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ= -github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= -github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= -github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= -github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod 
h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= +github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I= +github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= +github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ= +github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= +github.com/hashicorp/terraform-plugin-framework v1.15.0 h1:LQ2rsOfmDLxcn5EeIwdXFtr03FVsNktbbBci8cOKdb4= +github.com/hashicorp/terraform-plugin-framework v1.15.0/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 h1:LYz4bXh3t7bTEydXOmPDPupRRnA480B/9+jV8yZvxBA= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0/go.mod h1:+BVERsnfdlhYR2YkXMBtPnmn9UsL19U3qUtSZ+Y/5MY= -github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M= -github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY= +github.com/hashicorp/terraform-plugin-go v0.28.0 h1:zJmu2UDwhVN0J+J20RE5huiF3XXlTYVIleaevHZgKPA= +github.com/hashicorp/terraform-plugin-go v0.28.0/go.mod h1:FDa2Bb3uumkTGSkTFpWSOwWJDwA7bf3vdP3ltLDTH6o= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= -github.com/hashicorp/terraform-plugin-mux v0.17.0 h1:/J3vv3Ps2ISkbLPiZOLspFcIZ0v5ycUXCEQScudGCCw= -github.com/hashicorp/terraform-plugin-mux v0.17.0/go.mod h1:yWuM9U1Jg8DryNfvCp+lH70WcYv6D8aooQxxxIzFDsE= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 h1:7/iejAPyCRBhqAg3jOx+4UcAhY0A+Sg8B+0+d/GxSfM= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0/go.mod h1:TiQwXAjFrgBf5tg5rvBRz8/ubPULpU0HjSaVi5UoJf8= +github.com/hashicorp/terraform-plugin-mux v0.20.0 
h1:3QpBnI9uCuL0Yy2Rq/kR9cOdmOFNhw88A2GoZtk5aXM= +github.com/hashicorp/terraform-plugin-mux v0.20.0/go.mod h1:wSIZwJjSYk86NOTX3fKUlThMT4EAV1XpBHz9SAvjQr4= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 h1:NFPMacTrY/IdcIcnUB+7hsore1ZaRWU9cnB6jFoBnIM= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0/go.mod h1:QYmYnLfsosrxjCnGY1p9c7Zj6n9thnEE+7RObeYs3fA= github.com/hashicorp/terraform-plugin-testing v1.5.1 h1:T4aQh9JAhmWo4+t1A7x+rnxAJHCDIYW9kXyo4sVO92c= github.com/hashicorp/terraform-plugin-testing v1.5.1/go.mod h1:dg8clO6K59rZ8w9EshBmDp1CxTIPu3yA4iaDpX1h5u0= -github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA= -github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU= +github.com/hashicorp/terraform-registry-address v0.2.5 h1:2GTftHqmUhVOeuu9CW3kwDkRe4pcBDq0uuK5VJngU1M= +github.com/hashicorp/terraform-registry-address v0.2.5/go.mod h1:PpzXWINwB5kuVS5CA7m1+eO2f1jKb5ZDIxrOPfpnGkg= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= @@ -229,8 +229,8 @@ github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= -github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= +github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= +github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/errors 
v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= @@ -244,8 +244,8 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= -github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= +github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= +github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -358,7 +358,6 @@ golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuX golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= From 808c26035370f8b005d09b5078e8093596d122b1 Mon Sep 
17 00:00:00 2001 From: Jesse DeJong Date: Mon, 28 Jul 2025 15:05:45 -0400 Subject: [PATCH 622/884] Update Managed Kafka Cluster resource to support mTLS (#14413) --- mmv1/products/managedkafka/Cluster.yaml | 36 +++++++++++ .../managedkafka_cluster_mtls.tf.tmpl | 36 +++++++++++ .../resource_managed_kafka_cluster_test.go | 60 +++++++++++++++++++ 3 files changed, 132 insertions(+) create mode 100644 mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl diff --git a/mmv1/products/managedkafka/Cluster.yaml b/mmv1/products/managedkafka/Cluster.yaml index a6b23a066ccd..1cede7806e0e 100644 --- a/mmv1/products/managedkafka/Cluster.yaml +++ b/mmv1/products/managedkafka/Cluster.yaml @@ -44,6 +44,11 @@ examples: cluster_id: 'my-cluster' key_name: 'example-key' keyring_name: 'example-keyring' + - name: 'managedkafka_cluster_mtls' + primary_resource_id: 'example' + vars: + cluster_id: 'my-cluster' + ca_pool_id: 'my-ca-pool' - name: 'managedkafka_cluster_cmek' + primary_resource_id: 'example' + min_version: 'beta' @@ -151,3 +156,34 @@ properties: type: String description: "The current state of the cluster. Possible values: `STATE_UNSPECIFIED`, `CREATING`, `ACTIVE`, `DELETING`." output: true + - name: 'tlsConfig' + type: NestedObject + default_from_api: true + description: "TLS configuration for the Kafka cluster. This is used to configure mTLS authentication. To clear out a TLS configuration that has been previously set, please explicitly add an empty `tls_config` block." + properties: + - name: 'trustConfig' + type: NestedObject + allow_empty_object: true + description: "The configuration of the broker truststore. If specified, clients can use mTLS for authentication." + properties: + - name: 'casConfigs' + type: Array + description: "Configuration for the Google Certificate Authority Service. To support mTLS, you must specify at least one `cas_configs` block. A maximum of 10 CA pools can be specified.
Additional CA pools may be specified with additional `cas_configs` blocks." + item_type: + type: NestedObject + properties: + - name: 'caPool' + type: String + description: "The name of the CA pool to pull CA certificates from. The CA pool does not need + to be in the same project or location as the Kafka cluster. Must be in the format `projects/PROJECT_ID/locations/LOCATION/caPools/CA_POOL_ID`." + required: true + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: 'sslPrincipalMappingRules' + type: String + description: "The rules for mapping mTLS certificate Distinguished Names (DNs) to + shortened principal names for Kafka ACLs. This field corresponds exactly + to the ssl.principal.mapping.rules broker config and matches the format + and syntax defined in the Apache Kafka documentation. Setting or + modifying this field will trigger a rolling restart of the Kafka + brokers to apply the change. An empty string means that the default + Kafka behavior is used. Example: `RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT`" diff --git a/mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl b/mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl new file mode 100644 index 000000000000..b40d254da964 --- /dev/null +++ b/mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl @@ -0,0 +1,36 @@ +resource "google_managed_kafka_cluster" "{{$.PrimaryResourceId}}" { + cluster_id = "{{index $.Vars "cluster_id"}}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } + tls_config { + trust_config { + cas_configs { + ca_pool = google_privateca_ca_pool.ca_pool.id + } + } + ssl_principal_mapping_rules = "RULE:pattern/replacement/L,DEFAULT" + } +} + +resource "google_privateca_ca_pool" "ca_pool" { + name = "{{index
$.Vars "ca_pool_id"}}" + location = "us-central1" + tier = "ENTERPRISE" + publishing_options { + publish_ca_cert = true + publish_crl = true + } +} + +data "google_project" "project" { +} diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go index 472a90c4bad8..29e641cebda1 100644 --- a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go +++ b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go @@ -37,6 +37,15 @@ func TestAccManagedKafkaCluster_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"cluster_id", "labels", "location", "terraform_labels"}, }, + { + Config: testAccManagedKafkaCluster_updateTlsConfigToEmpty(context), + }, + { + ResourceName: "google_managed_kafka_cluster.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster_id", "labels", "location", "terraform_labels"}, + }, }, }) } @@ -89,6 +98,57 @@ resource "google_managed_kafka_cluster" "example" { rebalance_config { mode = "AUTO_REBALANCE_ON_SCALE_UP" } + tls_config { + trust_config { + cas_configs { + ca_pool = google_privateca_ca_pool.ca_pool.id + } + } + ssl_principal_mapping_rules = "RULE:pattern/replacement/L,DEFAULT" + } + labels = { + key = "new-value" + } +} + +resource "google_privateca_ca_pool" "ca_pool" { + name = "tf-test-pool-%{random_suffix}" + location = "us-central1" + tier = "ENTERPRISE" + publishing_options { + publish_ca_cert = true + publish_crl = true + } +} + +data "google_project" "project" { +} +`, context) +} + +func testAccManagedKafkaCluster_updateTlsConfigToEmpty(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + 
vcpu_count = 4 + memory_bytes = 4512135122 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } + rebalance_config { + mode = "AUTO_REBALANCE_ON_SCALE_UP" + } + tls_config { + trust_config { + } + } labels = { key = "new-value" } From e51ccd68dd929fc88d87126a33ed7d2162258754 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 28 Jul 2025 13:06:49 -0700 Subject: [PATCH 623/884] Revert "Add support for GPU redundancy to Cloud Run v2 job" (#14669) --- mmv1/products/cloudrunv2/Job.yaml | 5 ----- mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl | 1 - .../cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl | 1 - .../third_party/tgc/tests/data/example_cloud_run_v2_job.json | 3 +-- 4 files changed, 1 insertion(+), 9 deletions(-) diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index a299faa44161..57ae1e01c290 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -781,11 +781,6 @@ properties: description: The GPU to attach to an instance. See https://cloud.google.com/run/docs/configuring/jobs/gpu for configuring GPU. required: true - - name: 'gpuZonalRedundancyDisabled' - type: Boolean - description: True if GPU zonal redundancy is disabled on this execution. 
- default_from_api: true - send_empty_value: true - name: 'observedGeneration' type: String description: | diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl index a5ac9f3cac9d..5581f4bece44 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl @@ -11,7 +11,6 @@ resource "google_cloud_run_v2_job" "{{$.PrimaryResourceId}}" { node_selector { accelerator = "nvidia-l4" } - gpu_zonal_redundancy_disabled = true } } } diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index d58b6d7fede3..083f4f82c6bf 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -1059,7 +1059,6 @@ func testAccCloudRunV2Job_cloudrunv2JobWithGpu(context map[string]interface{}) s node_selector { accelerator = "nvidia-l4" } - gpu_zonal_redundancy_disabled = true } } lifecycle { diff --git a/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json index a0dc9ac77308..e49031623723 100644 --- a/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json +++ b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_job.json @@ -18,8 +18,7 @@ "image":"us-docker.pkg.dev/cloudrun/container/hello" } ], - "maxRetries":3, - "gpuZonalRedundancyDisabled":false + "maxRetries":3 } } } From 83c5d0350c5a6ccc292f8e4671b7f2c51f18a52e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Mon, 28 Jul 2025 22:07:54 +0200 Subject: [PATCH 624/884] Add data source for retrieving Artifact Registry Docker images (#14657) --- 
.../provider/provider_mmv1_resources.go.tmpl | 3 +- ..._source_artifact_registry_docker_images.go | 191 ++++++++++++++++++ ...ce_artifact_registry_docker_images_test.go | 42 ++++ ...ifact_registry_docker_images.html.markdown | 56 +++++ 4 files changed, 291 insertions(+), 1 deletion(-) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 07830118fe48..7a9e57dad24b 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -27,7 +27,8 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_alloydb_supported_database_flags": alloydb.DataSourceAlloydbSupportedDatabaseFlags(), "google_alloydb_instance": alloydb.DataSourceAlloydbDatabaseInstance(), "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), - "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), + "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), + "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), "google_apphub_discovered_workload": apphub.DataSourceApphubDiscoveredWorkload(), "google_app_engine_default_service_account": appengine.DataSourceGoogleAppEngineDefaultServiceAccount(), diff --git 
a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go new file mode 100644 index 000000000000..b6c8f3dbba7c --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go @@ -0,0 +1,191 @@ +package artifactregistry + +import ( + "fmt" + "net/http" + "net/url" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceArtifactRegistryDockerImages() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArtifactRegistryDockerImagesRead, + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + }, + "docker_images": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "image_name": { + Type: schema.TypeString, + Computed: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "self_link": { + Type: schema.TypeString, + Computed: true, + }, + "tags": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "image_size_bytes": { + Type: schema.TypeString, + Computed: true, + }, + "media_type": { + Type: schema.TypeString, + Computed: true, + }, + "upload_time": { + Type: schema.TypeString, + Computed: true, + }, + "build_time": { + Type: schema.TypeString, + Computed: true, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceArtifactRegistryDockerImagesRead(d *schema.ResourceData, meta interface{}) error { + 
config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") + if err != nil { + return fmt.Errorf("Error setting Artifact Registry base path: %s", err) + } + + resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/dockerImages")) + if err != nil { + return fmt.Errorf("Error setting resource path: %s", err) + } + + urlRequest := basePath + resourcePath + + headers := make(http.Header) + dockerImages := make([]map[string]interface{}, 0) + pageToken := "" + + for { + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + q := u.Query() + if pageToken != "" { + q.Set("pageToken", pageToken) + } + u.RawQuery = q.Encode() + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: u.String(), + UserAgent: userAgent, + Headers: headers, + }) + + if err != nil { + return fmt.Errorf("Error listing Artifact Registry Docker images: %s", err) + } + + if items, ok := res["dockerImages"].([]interface{}); ok { + for _, item := range items { + image := item.(map[string]interface{}) + + name, ok := image["name"].(string) + if !ok { + return fmt.Errorf("Error getting Artifact Registry Docker image name: %s", err) + } + + lastComponent := name[strings.LastIndex(name, "/")+1:] + imageName := strings.SplitN(strings.Split(lastComponent, "@")[0], ":", 2)[0] + + var tags []string + if rawTags, ok := image["tags"].([]interface{}); ok { + for _, tag := range rawTags { + if tagStr, ok := tag.(string); ok { + tags = append(tags, tagStr) + } + } + } + + getString := func(m map[string]interface{}, key string) string { + if v, ok := 
m[key].(string); ok { + return v + } + return "" + } + + dockerImages = append(dockerImages, map[string]interface{}{ + "image_name": imageName, + "name": name, + "self_link": getString(image, "uri"), + "tags": tags, + "image_size_bytes": getString(image, "imageSizeBytes"), + "media_type": getString(image, "mediaType"), + "upload_time": getString(image, "uploadTime"), + "build_time": getString(image, "buildTime"), + "update_time": getString(image, "updateTime"), + }) + } + } + + if nextToken, ok := res["nextPageToken"].(string); ok && nextToken != "" { + pageToken = nextToken + } else { + break + } + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + + if err := d.Set("docker_images", dockerImages); err != nil { + return fmt.Errorf("Error setting Artifact Registry Docker images: %s", err) + } + + d.SetId(resourcePath) + + return nil +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go new file mode 100644 index 000000000000..1a110653faa5 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go @@ -0,0 +1,42 @@ +package artifactregistry_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryDockerImages_basic(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryDockerImagesConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", 
"project"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "location"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "repository_id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "docker_images.0.image_name"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "docker_images.0.name"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "docker_images.0.self_link"), + ), + }, + }, + }) +} + +// Test the data source against the public AR repos +// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container +// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io +// Currently, gcr.io does not provide a imageSizeBytes or buildTime field in the JSON response +const testAccDataSourceArtifactRegistryDockerImagesConfig = ` +data "google_artifact_registry_docker_images" "this" { + project = "go-containerregistry" + location = "us" + repository_id = "gcr.io" +} +` diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown new file mode 100644 index 000000000000..df1bd121b74e --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about Docker images within a Google Artifact Registry repository. +--- + +# google_artifact_registry_docker_images + +Get information about Artifact Registry Docker images. +See [the official documentation](https://cloud.google.com/artifact-registry/docs/docker) +and [API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.dockerImages/list). 
+ +## Example Usage + +```hcl +data "google_artifact_registry_docker_images" "my_images" { + location = "us-central1" + repository_id = "example-repo" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` - (Required) The location of the Artifact Registry repository. + +* `repository_id` - (Required) The last part of the repository name to fetch from. + +* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +The following attributes are exported: + +* `docker_images` - A list of all retrieved Artifact Registry Docker images. Structure is [defined below](#nested_docker_images). + +The `docker_images` block supports: + +* `name` - The fully qualified name of the fetched image. This name has the form: `projects/{{project}}/locations/{{location}}/repository/{{repository_id}}/dockerImages/{{docker_image}}`. For example, `projects/test-project/locations/us-west4/repositories/test-repo/dockerImages/nginx@sha256:e9954c1fc875017be1c3e36eca16be2d9e9bccc4bf072163515467d6a823c7cf` + +* `image_name` - Extracted short name of the image (last part of `name`, without tag or digest). For example, from `.../nginx@sha256:...` → `nginx`. + +* `self_link` - The URI to access the image. For example, `us-west4-docker.pkg.dev/test-project/test-repo/nginx@sha256:e9954c1fc875017be1c3e36eca16be2d9e9bccc4bf072163515467d6a823c7cf` + +* `tags` - A list of all tags associated with the image. + +* `image_size_bytes` - Calculated size of the image in bytes. + +* `media_type` - Media type of this image, e.g. `application/vnd.docker.distribution.manifest.v2+json`. + +* `upload_time` - The time, as a RFC 3339 string, the image was uploaded. For example, `2014-10-02T15:01:23.045123456Z`. + +* `build_time` - The time, as a RFC 3339 string, this image was built. + +* `update_time` - The time, as a RFC 3339 string, this image was updated. 
From a74fa584581912b6a8d957f27400e7ecf9e49c8b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 28 Jul 2025 13:54:04 -0700 Subject: [PATCH 625/884] upgrade provider version for tgc (#14661) --- mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl | 1 + .../tgc_next/provider/provider_mmv1_resources.go.tmpl | 4 +++- mmv1/third_party/tgc/tests/source/environment_test.go | 2 +- mmv1/third_party/tgc_next/go.mod | 2 +- mmv1/third_party/tgc_next/go.sum | 4 ++-- 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index 3c3d7ab2d33c..f9efdc3e73fc 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -38,6 +38,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" ) diff --git a/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl index 4aeecef0a663..d6b30a5209fd 100644 --- a/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl @@ -3,7 +3,9 @@ package provider import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/compute" + {{- range $object := $.ResourcesForVersion }} + "github.com/hashicorp/terraform-provider-google/google/services/{{ $object.ServiceName }}" + {{- 
end }} "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" ) diff --git a/mmv1/third_party/tgc/tests/source/environment_test.go b/mmv1/third_party/tgc/tests/source/environment_test.go index 16b97af0d3ea..2db8f002ca65 100644 --- a/mmv1/third_party/tgc/tests/source/environment_test.go +++ b/mmv1/third_party/tgc/tests/source/environment_test.go @@ -18,7 +18,7 @@ const ( defaultOrganizationDomain = "meep.test.com" defaultOrganizationTarget = "13579" defaultProject = "foobar" - defaultProviderVersion = "5.5.0" // if dev override is enabled, the provider version is ignored in terraform execution + defaultProviderVersion = "6.14.0" // if dev override is enabled, the provider version is ignored in terraform execution defaultRegion = "us-central1" defaultServiceAccount = "meep@foobar.iam.gserviceaccount.com" ) diff --git a/mmv1/third_party/tgc_next/go.mod b/mmv1/third_party/tgc_next/go.mod index 1041133f8920..ce14728d8dbb 100644 --- a/mmv1/third_party/tgc_next/go.mod +++ b/mmv1/third_party/tgc_next/go.mod @@ -14,7 +14,7 @@ require ( github.com/hashicorp/hcl/v2 v2.23.0 github.com/hashicorp/terraform-json v0.24.0 github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 - github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250515195612-fa096fe771b5 + github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250728173411-5cb5742bc083 github.com/mitchellh/go-homedir v1.1.0 github.com/pkg/errors v0.9.1 github.com/stretchr/testify v1.10.0 diff --git a/mmv1/third_party/tgc_next/go.sum b/mmv1/third_party/tgc_next/go.sum index 3dcd2b906557..2b625c7e5997 100644 --- a/mmv1/third_party/tgc_next/go.sum +++ b/mmv1/third_party/tgc_next/go.sum @@ -176,8 +176,8 @@ github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 h1:7/iejAPyCRBhqAg3jOx+4UcA github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0/go.mod h1:TiQwXAjFrgBf5tg5rvBRz8/ubPULpU0HjSaVi5UoJf8= github.com/hashicorp/terraform-plugin-testing v1.5.1 
h1:T4aQh9JAhmWo4+t1A7x+rnxAJHCDIYW9kXyo4sVO92c= github.com/hashicorp/terraform-plugin-testing v1.5.1/go.mod h1:dg8clO6K59rZ8w9EshBmDp1CxTIPu3yA4iaDpX1h5u0= -github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250515195612-fa096fe771b5 h1:PY+Jbw367pf9I5cfUQhhDla9pmoysVMQZMWljyqL1aw= -github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250515195612-fa096fe771b5/go.mod h1:5a610wnrgp3L1ejcgWda9y9WBVJ1QbcjZSX7OChO2uU= +github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250728173411-5cb5742bc083 h1:23TrEMAu7jpigg52dSymKmxVNFPwQc4z/pPLEI7PdgA= +github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250728173411-5cb5742bc083/go.mod h1:E6QxtUznA+Ul5ek4hxqjGU3VrSWx/NWaqIpFNdUouu4= github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA= github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= From 504edc437613507cb3ff44753d0cb5552ad1b00c Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 29 Jul 2025 00:52:48 +0200 Subject: [PATCH 626/884] compute: fix documentation naming conflict, added `network_name` to data source `google_compute_subnetworks` and deprecated `network_self_link` (#14583) --- .../compute/data_source_google_compute_subnetworks.go | 7 +++++++ .../website/docs/d/compute_subnetworks.html.markdown | 1 + 2 files changed, 8 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go index 117928d98c5c..1c4444fdb391 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go @@ -52,6 +52,12 @@ func DataSourceGoogleComputeSubnetworks() *schema.Resource { 
"network_self_link": { Type: schema.TypeString, Computed: true, + // TODO: remove in next major release (7.0.0) also from docs and implementation below + Deprecated: "Use `network_name` instead. This field will be removed in a future major release.", + }, + "network_name": { + Type: schema.TypeString, + Computed: true, }, "private_ip_google_access": { Type: schema.TypeBool, @@ -101,6 +107,7 @@ func dataSourceGoogleComputeSubnetworksRead(d *schema.ResourceData, meta interfa "name": subnet.Name, "network_self_link": filepath.Base(subnet.Network), "network": subnet.Network, + "network_name": filepath.Base(subnet.Network), "private_ip_google_access": subnet.PrivateIpGoogleAccess, "self_link": subnet.SelfLink, }) diff --git a/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown index f62f951c366f..ca4749a31078 100644 --- a/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown @@ -42,6 +42,7 @@ The following arguments are supported: * `ip_cidr_range` - The IP address range represented as a CIDR block. * `name` - The name of the subnetwork. * `network` - The self link of the parent network. +* `network_self_link` - (Deprecated) The name of the parent network computed from `network` attribute. (deprecated and will be removed in a future major release. Use `network_name` instead.) * `network_name` - The name of the parent network computed from `network` attribute. * `private_ip_google_access` - Whether the VMs in the subnet can access Google services without assigned external IP addresses. * `self_link` - The self link of the subnetwork. 
From c694d05d499253deb1e8267926d086d1cceefdf3 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 28 Jul 2025 16:30:56 -0700 Subject: [PATCH 627/884] tgc-revival: lower maxRetries to 3 when fetching test metadata (#14672) --- mmv1/third_party/tgc_next/test/setup.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index c37282bde32c..b6af12bf6cc6 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -52,7 +52,7 @@ type Resource struct { const ( ymdFormat = "2006-01-02" - maxRetries = 30 + maxRetries = 3 ) var ( From 5e4819cf3c177e79c7f7a4e7b7e64585990114b3 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Tue, 29 Jul 2025 18:29:43 +0200 Subject: [PATCH 628/884] Add Labels field to StoragePool (#14346) --- mmv1/products/compute/StoragePool.yaml | 4 ++++ .../terraform/examples/compute_storage_pool_basic.tf.tmpl | 7 +++++++ .../terraform/examples/compute_storage_pool_full.tf.tmpl | 7 +++++++ 3 files changed, 18 insertions(+) diff --git a/mmv1/products/compute/StoragePool.yaml b/mmv1/products/compute/StoragePool.yaml index 80d869b02969..c6e457c51a60 100644 --- a/mmv1/products/compute/StoragePool.yaml +++ b/mmv1/products/compute/StoragePool.yaml @@ -277,6 +277,10 @@ properties: - "STANDARD" - "ADVANCED" default_from_api: true + - name: "labels" + type: KeyValueLabels + description: | + Labels to apply to this storage pool. These can be later modified by the setLabels method. 
virtual_fields: - name: "deletion_protection" type: Boolean diff --git a/mmv1/templates/terraform/examples/compute_storage_pool_basic.tf.tmpl b/mmv1/templates/terraform/examples/compute_storage_pool_basic.tf.tmpl index 8468bd6d9049..d3de7653249d 100644 --- a/mmv1/templates/terraform/examples/compute_storage_pool_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_storage_pool_basic.tf.tmpl @@ -9,6 +9,13 @@ resource "google_compute_storage_pool" "{{$.PrimaryResourceId}}" { zone = "us-central1-a" + labels = { + environment = "test" + purpose = "storage-pool-testing" + team = "infrastructure" + cost_center = "engineering" + } + deletion_protection = false } diff --git a/mmv1/templates/terraform/examples/compute_storage_pool_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_storage_pool_full.tf.tmpl index b0d0c365572d..c3df417c089d 100644 --- a/mmv1/templates/terraform/examples/compute_storage_pool_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_storage_pool_full.tf.tmpl @@ -12,6 +12,13 @@ resource "google_compute_storage_pool" "{{$.PrimaryResourceId}}" { storage_pool_type = data.google_compute_storage_pool_types.balanced.self_link + labels = { + environment = "test" + purpose = "storage-pool-testing" + team = "infrastructure" + cost_center = "engineering" + } + deletion_protection = false zone = "us-central1-a" From 4d83b02cd4033953e83e3e064585bf661fe6f6a5 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 29 Jul 2025 18:43:42 +0200 Subject: [PATCH 629/884] feat: implementation for iam resources on `google_iam_workforce_pool` (#13861) --- .../products/iamworkforcepool/WorkforcePool.yaml | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/mmv1/products/iamworkforcepool/WorkforcePool.yaml b/mmv1/products/iamworkforcepool/WorkforcePool.yaml index 66675f58baff..af11bfd15a44 100644 --- a/mmv1/products/iamworkforcepool/WorkforcePool.yaml +++ b/mmv1/products/iamworkforcepool/WorkforcePool.yaml @@ -50,19 
+50,32 @@ custom_code: exclude_sweeper: true examples: - name: 'iam_workforce_pool_basic' + primary_resource_name: 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' primary_resource_id: 'example' + region_override: 'global' vars: workforce_pool_id: 'example-pool' test_env_vars: org_id: 'ORG_ID' - name: 'iam_workforce_pool_full' + primary_resource_name: 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' primary_resource_id: 'example' + region_override: 'global' vars: workforce_pool_id: 'example-pool' test_env_vars: org_id: 'ORG_ID' +iam_policy: + parent_resource_attribute: 'workforce_pool_id' + method_name_separator: ':' + example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' + allowed_iam_role: 'roles/iam.workforcePoolViewer' + admin_iam_role: 'roles/iam.workforcePoolAdmin' + fetch_iam_policy_verb: 'POST' + import_format: + - 'locations/{{location}}/workforcePools/{{workforce_pool_id}}' + - '{{workforce_pool_id}}' parameters: -properties: - name: 'location' type: String description: The location for the resource. 
@@ -80,6 +93,7 @@ properties: immutable: true validation: function: 'ValidateWorkforcePoolId' +properties: - name: 'name' type: String description: | From eebcc7ec770fbad4004b933defcafc49850b9a0c Mon Sep 17 00:00:00 2001 From: efe Date: Tue, 29 Jul 2025 12:12:01 -0500 Subject: [PATCH 630/884] Update Instance.yaml to add new trial editions to Looker (#14678) --- mmv1/products/looker/Instance.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mmv1/products/looker/Instance.yaml b/mmv1/products/looker/Instance.yaml index e0e7d93acb77..228836f05401 100644 --- a/mmv1/products/looker/Instance.yaml +++ b/mmv1/products/looker/Instance.yaml @@ -409,6 +409,9 @@ properties: - LOOKER_CORE_NONPROD_STANDARD_ANNUAL: nonprod subscription standard instance - LOOKER_CORE_NONPROD_ENTERPRISE_ANNUAL: nonprod subscription enterprise instance - LOOKER_CORE_NONPROD_EMBED_ANNUAL: nonprod subscription embed instance + - LOOKER_CORE_TRIAL_STANDARD: A standard trial edition of Looker (Google Cloud core) product. + - LOOKER_CORE_TRIAL_ENTERPRISE: An enterprise trial edition of Looker (Google Cloud core) product. + - LOOKER_CORE_TRIAL_EMBED: An embed trial edition of Looker (Google Cloud core) product. 
immutable: true default_value: "LOOKER_CORE_TRIAL" enum_values: @@ -420,6 +423,9 @@ properties: - 'LOOKER_CORE_NONPROD_STANDARD_ANNUAL' - 'LOOKER_CORE_NONPROD_ENTERPRISE_ANNUAL' - 'LOOKER_CORE_NONPROD_EMBED_ANNUAL' + - 'LOOKER_CORE_TRIAL_STANDARD' + - 'LOOKER_CORE_TRIAL_ENTERPRISE' + - 'LOOKER_CORE_TRIAL_EMBED' - name: 'privateIpEnabled' type: Boolean description: | From 79e25c3e0aa3ac7286c7ae8197c40c9b68a7fc43 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Tue, 29 Jul 2025 10:34:44 -0700 Subject: [PATCH 631/884] Fix EUC test (#14668) --- .../examples/workbench_instance_euc.tf.tmpl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl b/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl index dabb6da89522..d97b1b5e9936 100644 --- a/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl +++ b/mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl @@ -1,3 +1,11 @@ +resource "google_service_account_iam_binding" "act_as_permission" { + service_account_id = "projects/{{index $.TestEnvVars "project_id"}}/serviceAccounts/{{index $.TestEnvVars "project_number"}}-compute@developer.gserviceaccount.com" + role = "roles/iam.serviceAccountUser" + members = [ + "user:example@example.com", + ] +} + resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "instance_name"}}" location = "us-central1-a" @@ -13,4 +21,8 @@ resource "google_workbench_instance" "{{$.PrimaryResourceId}}" { instance_owners = ["example@example.com"] enable_managed_euc = "true" + + depends_on = [ + google_service_account_iam_binding.act_as_permission, + ] } From b93d9c3b28c98a3c2ffba8f4ad580132880296e1 Mon Sep 17 00:00:00 2001 From: Francis O'Hara Aidoo Date: Tue, 29 Jul 2025 10:56:47 -0700 Subject: [PATCH 632/884] Add delete support for Endpoint With Model Garden Deployment resource (#14650) --- 
.../EndpointWithModelGardenDeployment.yaml | 56 +++++- ...point_with_model_garden_deployment.go.tmpl | 102 +++++++++++ ...tf.tmpl => vertex_ai_deploy_basic.tf.tmpl} | 2 - ...vertex_ai_deploy_huggingface_model.tf.tmpl | 7 + ...deploy_multiple_models_in_parallel.tf.tmpl | 53 ++++++ ...deploy_multiple_models_in_sequence.tf.tmpl | 55 ++++++ .../vertex_ai_deploy_with_configs.tf.tmpl | 17 ++ ...point_with_model_garden_deployment.go.tmpl | 57 +++++- .../resource_vertex_ai_deploy_test.go | 167 +++++++++++++++++- 9 files changed, 497 insertions(+), 19 deletions(-) create mode 100644 mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl rename mmv1/templates/terraform/examples/{vertex_ai_deploy.tf.tmpl => vertex_ai_deploy_basic.tf.tmpl} (88%) create mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_huggingface_model.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_parallel.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_sequence.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_with_configs.tf.tmpl diff --git a/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml b/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml index 7ba7d6581224..09a05c90c9d9 100644 --- a/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml +++ b/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml @@ -16,10 +16,6 @@ name: EndpointWithModelGardenDeployment api_resource_type_kind: Endpoint description: | Create an Endpoint and deploy a Model Garden model to it. - - ~> **Note:** This resource does not currently support deletion via Terraform and must be manually deleted if not in use. - See https://cloud.google.com/vertex-ai/docs/predictions/undeploy-model for instructions on how to undeploy a model and delete an endpoint - via the Google Cloud console. 
references: guides: "Use models in Model Garden": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/use-models" @@ -33,10 +29,10 @@ self_link: "projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}" create_url: "projects/{{project}}/locations/{{location}}:deploy" immutable: true exclude_read: true -exclude_delete: true # the resource does not yet support deletion exclude_import: true # the resource does not support import timeouts: insert_minutes: 180 + delete_minutes: 20 autogen_status: RW5kcG9pbnRXaXRoTW9kZWxHYXJkZW5EZXBsb3ltZW50 async: actions: ["create"] @@ -49,15 +45,48 @@ async: resource_inside_response: true custom_code: post_create: "templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl" + custom_delete: "templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl" examples: - - name: "vertex_ai_deploy" + - name: "vertex_ai_deploy_basic" + primary_resource_id: "deploy" + vars: + project: "vertex-ai" + publisher_model_name: "publisher_model_name" + ignore_read_extra: + - "project" + exclude_test: true # handwritten test required since resource does not support import + - name: "vertex_ai_deploy_huggingface_model" primary_resource_id: "deploy" vars: project: "vertex-ai" publisher_model_name: "publisher_model_name" ignore_read_extra: - "project" - exclude_test: true + exclude_test: true # handwritten test required since resource does not support import + - name: "vertex_ai_deploy_with_configs" + primary_resource_id: "deploy" + vars: + project: "vertex-ai" + publisher_model_name: "publisher_model_name" + ignore_read_extra: + - "project" + exclude_test: true # handwritten test required since resource does not support import + - name: "vertex_ai_deploy_multiple_models_in_parallel" + primary_resource_id: "deploy" + vars: + project: "vertex-ai" + publisher_model_name: "publisher_model_name" + ignore_read_extra: + - "project" + exclude_test: true # 
handwritten test required since resource does not support import + - name: "vertex_ai_deploy_multiple_models_in_sequence" + primary_resource_id: "deploy" + vars: + project: "vertex-ai" + publisher_model_name: "publisher_model_name" + ignore_read_extra: + - "project" + exclude_test: true # handwritten test required since resource does not support import parameters: - name: location type: String @@ -75,6 +104,19 @@ properties: resource within its parent collection as described in https://google.aip.dev/122. url_param_only: true output: true + - name: deployedModelId + type: String + description: | + Output only. The unique numeric ID that Vertex AI assigns to the model at the time it is deployed to the endpoint. + It is required to undeploy the model from the endpoint during resource deletion as described in + https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/undeployModel. + output: true + - name: deployedModelDisplayName + type: String + description: | + Output only. The display name assigned to the model deployed to the endpoint. + This is not required to delete the resource but is used for debug logging. 
+ output: true - name: publisherModelName type: String description: |- diff --git a/mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl b/mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl new file mode 100644 index 000000000000..9c64d6982892 --- /dev/null +++ b/mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl @@ -0,0 +1,102 @@ +log.Printf("[DEBUG] Beginning custom_delete for Vertex AI Endpoint with Model Garden Deployment") + +// Log resource ID for debugging purposes +log.Printf("[DEBUG] Resource ID: %s", d.Id()) + +billingProject := "" + +project, err := tpgresource.GetProject(d, config) +if err != nil { + return fmt.Errorf("Error fetching project for EndpointWithModelGardenDeployment: %s", err) +} +billingProject = project + +// err == nil indicates that the billing_project value was found +if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp +} + +// Retrieve deployed model ID and display name from Terraform fields +deployedModelId, ok := d.Get("deployed_model_id").(string) +if !ok { + return fmt.Errorf("wrong type for deployedModelId field (%T), expected string", d.Get("deployedModelId")) +} + + +deployedModelDisplayName, ok := d.Get("deployed_model_display_name").(string) +if !ok { + return fmt.Errorf("wrong type for deployedModelDisplayName field (%T), expected string", d.Get("deployedModelDisplayName")) +} + +// Undeploy the model +undeployUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/endpoints/{{"{{"}}endpoint{{"}}"}}:undeployModel") +if err != nil { + return err +} +undeployHeaders := make(http.Header) + +undeployBody := map[string]interface{}{ + "deployedModelId": deployedModelId, +} + +log.Printf("[DEBUG] Undeploying model %s from 
EndpointWithModelGardenDeployment %q", deployedModelDisplayName, d.Id()) + +undeployRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: undeployUrl, + UserAgent: userAgent, + Body: undeployBody, + Timeout: d.Timeout(schema.TimeoutDelete), + Headers: undeployHeaders, +}) +if err != nil { + return fmt.Errorf("Error undeploying model from EndpointWithModelGardenDeployment: %s", err) +} + +err = VertexAIOperationWaitTime( + config, undeployRes, project, fmt.Sprintf("Undeploying model %s from EndpointWithModelGardenDeployment", deployedModelDisplayName), userAgent, + d.Timeout(schema.TimeoutDelete)) + +if err != nil { + // The model could not be undeployed + return fmt.Errorf("Error waiting to undeploy model %s from EndpointWithModelGardenDeployment: %s", deployedModelDisplayName, err) +} + +log.Printf("[DEBUG] Finished undeploying model %s from EndpointWithModelGardenDeployment %q: %#v", deployedModelDisplayName, d.Id(), undeployRes) + + +// Delete Endpoint +deleteUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/endpoints/{{"{{"}}endpoint{{"}}"}}") +if err != nil { + return err +} +deleteHeaders := make(http.Header) +var deleteBody map[string]interface{} + +log.Printf("[DEBUG] Deleting EndpointWithModelGardenDeployment %q", d.Id()) +deleteRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: deleteUrl, + UserAgent: userAgent, + Body: deleteBody, + Timeout: d.Timeout(schema.TimeoutDelete), + Headers: deleteHeaders, +}) +if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "EndpointWithModelGardenDeployment") +} + +err = VertexAIOperationWaitTime( + config, deleteRes, project, "Deleting EndpointWithModelGardenDeployment", userAgent, + d.Timeout(schema.TimeoutDelete)) + +if err 
!= nil { + return err +} + +log.Printf("[DEBUG] Finished deleting EndpointWithModelGardenDeployment %q: %#v", d.Id(), deleteRes) +return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_deploy_basic.tf.tmpl similarity index 88% rename from mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl rename to mmv1/templates/terraform/examples/vertex_ai_deploy_basic.tf.tmpl index 3f778965209f..f9713d42412a 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_deploy.tf.tmpl +++ b/mmv1/templates/terraform/examples/vertex_ai_deploy_basic.tf.tmpl @@ -5,5 +5,3 @@ resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryRe accept_eula = true } } - -data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/vertex_ai_deploy_huggingface_model.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_deploy_huggingface_model.tf.tmpl new file mode 100644 index 000000000000..5babe95a2901 --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_deploy_huggingface_model.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}" { + hugging_face_model_id = "Qwen/Qwen3-0.6B" + location = "us-central1" + model_config { + accept_eula = true + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_parallel.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_parallel.tf.tmpl new file mode 100644 index 000000000000..f39ff8f42029 --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_parallel.tf.tmpl @@ -0,0 +1,53 @@ +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}-gemma-1_1-2b-it" { + publisher_model_name = "publishers/google/models/gemma@gemma-1.1-2b-it" + location = "us-central1" + model_config { + accept_eula = 
true
+  }
+  deploy_config {
+    dedicated_resources {
+      machine_spec {
+        machine_type = "g2-standard-12"
+        accelerator_type = "NVIDIA_L4"
+        accelerator_count = 1
+      }
+      min_replica_count = 1
+    }
+  }
+}
+
+resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}-qwen3-0_6b" {
+  hugging_face_model_id = "Qwen/Qwen3-0.6B"
+  location = "us-central1"
+  model_config {
+    accept_eula = true
+  }
+  deploy_config {
+    dedicated_resources {
+      machine_spec {
+        machine_type = "g2-standard-12"
+        accelerator_type = "NVIDIA_L4"
+        accelerator_count = 1
+      }
+      min_replica_count = 1
+    }
+  }
+}
+
+resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}-llama-3_2-1b" {
+  publisher_model_name = "publishers/meta/models/llama3-2@llama-3.2-1b"
+  location = "us-central1"
+  model_config {
+    accept_eula = true
+  }
+  deploy_config {
+    dedicated_resources {
+      machine_spec {
+        machine_type = "g2-standard-12"
+        accelerator_type = "NVIDIA_L4"
+        accelerator_count = 1
+      }
+      min_replica_count = 1
+    }
+  }
+}
\ No newline at end of file
diff --git a/mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_sequence.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_sequence.tf.tmpl
new file mode 100644
index 000000000000..e4bbbf6ce581
--- /dev/null
+++ b/mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_sequence.tf.tmpl
@@ -0,0 +1,55 @@
+resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}-gemma-1_1-2b-it" {
+  publisher_model_name = "publishers/google/models/gemma@gemma-1.1-2b-it"
+  location = "us-central1"
+  model_config {
+    accept_eula = true
+  }
+  deploy_config {
+    dedicated_resources {
+      machine_spec {
+        machine_type = "g2-standard-12"
+        accelerator_type = "NVIDIA_L4"
+        accelerator_count = 1
+      }
+      min_replica_count = 1
+    }
+  }
+}
+
+resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}-qwen3-0_6b" {
+  
hugging_face_model_id = "Qwen/Qwen3-0.6B" + location = "us-central1" + model_config { + accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-12" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } + depends_on = [ google_vertex_ai_endpoint_with_model_garden_deployment.{{$.PrimaryResourceId}}-gemma-1_1-2b-it ] +} + +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}-llama-3_2-1b" { + publisher_model_name = "publishers/meta/models/llama3-2@llama-3.2-1b" + location = "us-central1" + model_config { + accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-12" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } + depends_on = [ google_vertex_ai_endpoint_with_model_garden_deployment.{{$.PrimaryResourceId}}-qwen3-0_6b ] +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/vertex_ai_deploy_with_configs.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_deploy_with_configs.tf.tmpl new file mode 100644 index 000000000000..4437abbeb5e4 --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_deploy_with_configs.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "{{$.PrimaryResourceId}}" { + publisher_model_name = "publishers/google/models/paligemma@paligemma-224-float32" + location = "us-central1" + model_config { + accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-16" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl b/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl 
index d7d88c74ed80..70f0451e2a9c 100644 --- a/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl +++ b/mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl @@ -56,4 +56,59 @@ if err := d.Set("endpoint", endpoint); err != nil { d.SetId(endpointFull) log.Printf("[DEBUG] Set Terraform resource ID to: %s", endpointFull) -return nil +// Extract deployedModelId and deployedModelDisplayName of model deployed to the endpoint +// Make API call to read the endpoint and retrieve deployed model ID and display name +readUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/endpoints/{{"{{"}}endpoint{{"}}"}}") +if err != nil { + return err +} +readHeaders := make(http.Header) + +readRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions { + Config: config, + Method: "GET", + Project: billingProject, + RawURL: readUrl, + UserAgent: userAgent, + Headers: readHeaders, +}) +if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("VertexAIEndpointWithModelGardenDeployment %q", d.Id())) +} + +// Access the 'deployedModels' attribute from the response. 
+deployedModelsRaw, ok := readRes["deployedModels"] +if !ok || deployedModelsRaw == nil { + log.Printf("[ERROR] No deployed models found in the endpoint response") + return fmt.Errorf("Error creating EndpointWithModelGardenDeployment: No deployed models found in the endpoint response.") +} + +deployedModels, ok := deployedModelsRaw.([]interface{}) +if !ok { + log.Printf("[ERROR] 'deployedModels' field of endpoint response is not an array as expected") + return fmt.Errorf("Error creating EndpointWithModelGardenDeployment: 'deployedModels' field is not an array as expected.") +} + +// Access first (and only) deployed model at endpoint +deployedModelRaw := deployedModels[0] + +deployedModel, ok := deployedModelRaw.(map[string]interface{}) +if !ok { + return fmt.Errorf("Error creating EndpointWithModelGardenDeployment: model in 'deployedModels' field of endpoint response is not a map as expected.") +} + +// Extract deployed model ID and display name and set Terraform fields +if deployedModelId, ok := deployedModel["id"].(string); ok { + log.Printf("[DEBUG] ID of deployed model: %s", deployedModelId) + + if err := d.Set("deployed_model_id", deployedModelId); err != nil { + return fmt.Errorf("Error setting deployedModelId: %s", err) + } +} +if deployedModelDisplayName, ok := deployedModel["displayName"].(string); ok { + log.Printf("[DEBUG] Display name of deployed model: %s", deployedModelDisplayName) + + if err := d.Set("deployed_model_display_name", deployedModelDisplayName); err != nil { + return fmt.Errorf("Error setting deployedModelDisplayName: %s", err) + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go index c3b3505329bd..df07ab5c7370 100644 --- a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go +++ b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go 
@@ -1,23 +1,23 @@ package vertexai_test import ( - "testing" - + "fmt" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "strings" + "testing" ) func TestAccVertexAIEndpointWithModelGardenDeployment_basic(t *testing.T) { t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - + context := map[string]interface{}{"random_suffix": acctest.RandString(t, 10)} acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: nil, // the resource does not yet support deletion + CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccVertexAIEndpointWithModelGardenDeployment_basic(context), @@ -48,7 +48,7 @@ func TestAccVertexAIEndpointWithModelGardenDeployment_withConfigs(t *testing.T) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: nil, + CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccVertexAIEndpointWithModelGardenDeployment_withConfigs(context), @@ -78,3 +78,152 @@ resource "google_vertex_ai_endpoint_with_model_garden_deployment" "test_with_con } `, context) } + +func TestAccVertexAIEndpointWithModelGardenDeployment_huggingfaceModel(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIEndpointWithModelGardenDeployment_huggingfaceModel(context), + }, + }, + }) +} + +func testAccVertexAIEndpointWithModelGardenDeployment_huggingfaceModel(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy" { + hugging_face_model_id = "Qwen/Qwen3-0.6B" + location = "us-central1" + model_config { + accept_eula = true + } +} +`, context) +} + +func TestAccVertexAIEndpointWithModelGardenDeployment_multipleModelsInSequence(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIEndpointWithModelGardenDeployment_multipleModelsInSequence(context), + }, + }, + }) +} + +func testAccVertexAIEndpointWithModelGardenDeployment_multipleModelsInSequence(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy-gemma-1_1-2b-it" { + publisher_model_name = "publishers/google/models/gemma@gemma-1.1-2b-it" + location = "us-central1" + model_config { + accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-12" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } +} + +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy-qwen3-0_6b" { + hugging_face_model_id = "Qwen/Qwen3-0.6B" + location = "us-central1" + model_config { + 
accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-12" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } + depends_on = [ google_vertex_ai_endpoint_with_model_garden_deployment.deploy-gemma-1_1-2b-it ] +} + +resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy-llama-3_2-1b" { + publisher_model_name = "publishers/meta/models/llama3-2@llama-3.2-1b" + location = "us-central1" + model_config { + accept_eula = true + } + deploy_config { + dedicated_resources { + machine_spec { + machine_type = "g2-standard-12" + accelerator_type = "NVIDIA_L4" + accelerator_count = 1 + } + min_replica_count = 1 + } + } + depends_on = [ google_vertex_ai_endpoint_with_model_garden_deployment.deploy-qwen3-0_6b ] +} +`, context) +} + +func testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_vertex_ai_endpoint_with_model_garden_deployment" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{VertexAIBasePath}}projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("VertexAIEndpointWithModelGardenDeployment still exists at %s", url) + } + } + + return nil + } +} From 5829303176bf80991d6934227b50822517b11d56 Mon Sep 17 00:00:00 2001 From: ishamiGIT <202351040+ishamiGIT@users.noreply.github.com> 
Date: Tue, 29 Jul 2025 14:21:59 -0400 Subject: [PATCH 633/884] Add terraform support for Developer Connect Insights (#14412) --- .../developerconnect/InsightsConfig.yaml | 248 +++++++++++++ ...oper_connect_insights_config_basic.tf.tmpl | 135 +++++++ ..._developer_connect_insights_config_test.go | 341 ++++++++++++++++++ 3 files changed, 724 insertions(+) create mode 100644 mmv1/products/developerconnect/InsightsConfig.yaml create mode 100644 mmv1/templates/terraform/examples/developer_connect_insights_config_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go diff --git a/mmv1/products/developerconnect/InsightsConfig.yaml b/mmv1/products/developerconnect/InsightsConfig.yaml new file mode 100644 index 000000000000..acbff0377c39 --- /dev/null +++ b/mmv1/products/developerconnect/InsightsConfig.yaml @@ -0,0 +1,248 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: InsightsConfig +description: Description +base_url: projects/{{project}}/locations/{{location}}/insightsConfigs +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/insightsConfigs/{{insights_config_id}} +create_url: projects/{{project}}/locations/{{location}}/insightsConfigs?insightsConfigId={{insights_config_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/insightsConfigs/{{insights_config_id}} +import_format: + - projects/{{project}}/locations/{{location}}/insightsConfigs/{{insights_config_id}} +examples: + - name: 'developer_connect_insights_config_basic' + external_providers: ["time"] + primary_resource_id: 'insights_config' + test_env_vars: + org_id: "ORG_ID" + billing_account: "BILLING_ACCT" + skip_vcr: true +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + base_url: '{{op_id}}' + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: SW5zaWdodHNDb25maWc= +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: insightsConfigId + type: String + description: ID of the requesting InsightsConfig. + immutable: true + url_param_only: true + required: true +properties: + - name: appHubApplication + type: String + description: |- + The name of the App Hub Application. + Format: + projects/{project}/locations/{location}/applications/{application} + required: true + - name: name + type: String + description: |- + Identifier. The name of the InsightsConfig. 
+ Format: + projects/{project}/locations/{location}/insightsConfigs/{insightsConfig} + output: true + - name: updateTime + type: String + description: '[Output only] Update timestamp' + output: true + - name: artifactConfigs + type: Array + description: The artifact configurations of the artifacts that are deployed. + item_type: + type: NestedObject + properties: + - name: googleArtifactRegistry + type: NestedObject + description: Google Artifact Registry configurations. + properties: + - name: projectId + type: String + description: The host project of Artifact Registry. + required: true + - name: artifactRegistryPackage + type: String + description: The name of the artifact registry package. + immutable: true + required: true + - name: googleArtifactAnalysis + type: NestedObject + description: Google Artifact Analysis configurations. + properties: + - name: projectId + type: String + description: The project id of the project where the provenance is stored. + required: true + - name: uri + type: String + description: |- + The URI of the artifact that is deployed. + e.g. `us-docker.pkg.dev/my-project/my-repo/image`. + The URI does not include the tag / digest because it captures a lineage of + artifacts. + immutable: true + - name: annotations + type: KeyValueAnnotations + description: |- + User specified annotations. See https://google.aip.dev/148#annotations + for more details such as format and size limitations. + - name: labels + type: KeyValueLabels + description: Set of labels associated with an InsightsConfig. + ignore_read: true + - name: reconciling + type: Boolean + description: |- + Reconciling (https://google.aip.dev/128#reconciliation). + Set to true if the current state of InsightsConfig does not match the + user's intended state, and the service is actively updating the resource to + reconcile them. This can happen due to user-triggered updates or + system actions like failover or maintenance. 
+ output: true + - name: errors + type: Array + description: |- + Any errors that occurred while setting up the InsightsConfig. + Each error will be in the format: `field_name: error_message`, e.g. + GetAppHubApplication: Permission denied while getting App Hub + application. Please grant permissions to the P4SA. + output: true + item_type: + type: NestedObject + properties: + - name: code + type: Integer + description: The status code, which should be an enum value of google.rpc.Code. + output: true + - name: message + type: String + description: |- + A developer-facing error message, which should be in English. Any + user-facing error message should be localized and sent in the + google.rpc.Status.details field, or localized by the client. + output: true + - name: details + type: Array + description: |- + A list of messages that carry the error details. There is a common set of + message types for APIs to use. + output: true + item_type: + type: NestedObject + properties: + - name: detail_message + type: String + description: |- + A message with details about the error. + output: true + - name: createTime + type: String + description: '[Output only] Create timestamp' + output: true + - name: runtimeConfigs + type: Array + description: The runtime configurations where the application is deployed. + output: true + item_type: + type: NestedObject + properties: + - name: uri + type: String + description: |- + The URI of the runtime configuration. + For GKE, this is the cluster name. + For Cloud Run, this is the service name. + immutable: true + required: true + - name: state + type: String + description: |- + The state of the Runtime. + Possible values: + STATE_UNSPECIFIED + LINKED + UNLINKED + output: true + - name: gkeWorkload + type: NestedObject + description: GKEWorkload represents the Google Kubernetes Engine runtime. + properties: + - name: cluster + type: String + description: |- + The name of the GKE cluster. 
+ Format: + `projects/{project}/locations/{location}/clusters/{cluster}`. + immutable: true + required: true + - name: deployment + type: String + description: |- + The name of the GKE deployment. + Format: + `projects/{project}/locations/{location}/clusters/{cluster}/namespaces/{namespace}/deployments/{deployment}`. + output: true + - name: appHubWorkload + type: NestedObject + description: AppHubWorkload represents the App Hub Workload. + properties: + - name: criticality + type: String + description: The criticality of the App Hub Workload. + output: true + - name: environment + type: String + description: The environment of the App Hub Workload. + output: true + - name: workload + type: String + description: |- + Output only. The name of the App Hub Workload. + Format: + `projects/{project}/locations/{location}/applications/{application}/workloads/{workload}`. + output: true + immutable: false + required: false + - name: state + type: String + description: |- + The state of the InsightsConfig. 
+ Possible values: + STATE_UNSPECIFIED + PENDING + COMPLETE + ERROR + output: true diff --git a/mmv1/templates/terraform/examples/developer_connect_insights_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/developer_connect_insights_config_basic.tf.tmpl new file mode 100644 index 000000000000..385790c16026 --- /dev/null +++ b/mmv1/templates/terraform/examples/developer_connect_insights_config_basic.tf.tmpl @@ -0,0 +1,135 @@ +resource "google_project" "project" { + project_id = "dci-tf-%{random_suffix}" + name = "Service Project" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +# Grant Permissions +resource "google_project_iam_member" "apphub_permissions" { + project = google_project.project.project_id + role = "roles/apphub.admin" + member = "serviceAccount:hashicorp-test-runner@ci-test-project-188019.iam.gserviceaccount.com" +} + +resource "google_project_iam_member" "insights_agent" { + project = google_project.project.project_id + role = "roles/developerconnect.insightsAgent" + member = "serviceAccount:66214305248-compute@developer.gserviceaccount.com" +} + +# Enable APIs +resource "google_project_service" "apphub_api_service" { + project = google_project.project.project_id + service = "apphub.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "containeranalysis_api" { + project = google_project.project.project_id + service = "containeranalysis.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "containerscanning_api" { + project = google_project.project.project_id + service = "containerscanning.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "container_api" { + project = google_project.project.project_id + service = 
"container.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "artifactregistry_api" { + project = google_project.project.project_id + service = "artifactregistry.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "cloudbuild_api" { + project = google_project.project.project_id + service = "cloudbuild.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "cloudasset_api" { + project = google_project.project.project_id + service = "cloudasset.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "compute_api" { + project = google_project.project.project_id + service = "compute.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] +} + +resource "google_project_service" "devconnect_api" { + project = google_project.project.project_id + service = "developerconnect.googleapis.com" + depends_on = [google_project.project] +} + +# Wait delay after enabling APIs and granting permissions +resource "time_sleep" "wait_for_propagation" { + depends_on = [ + google_project_iam_member.apphub_permissions, + google_project_iam_member.insights_agent, + google_project_service.apphub_api_service, + google_project_service.containeranalysis_api, + google_project_service.containerscanning_api, + google_project_service.container_api, + google_project_service.artifactregistry_api, + google_project_service.artifactregistry_api, + google_project_service.cloudbuild_api, + google_project_service.cloudasset_api, + google_project_service.compute_api, + google_project_service.devconnect_api, + ] + create_duration = "120s" +} + +resource "google_apphub_application" "my_apphub_application" { + location = "us-central1" + application_id = 
"tf-test-example-application%{random_suffix}" + scope { + type = "REGIONAL" + } + project = google_project.project.project_id + depends_on = [time_sleep.wait_for_propagation] +} + +resource "google_developer_connect_insights_config" "insights_config" { + location = "us-central1" + insights_config_id = "tf-test-ic%{random_suffix}" + project = google_project.project.project_id + annotations = {} + labels = {} + app_hub_application = format("//apphub.googleapis.com/projects/%s/locations/%s/applications/%s", + google_project.project.number, + google_apphub_application.my_apphub_application.location, + google_apphub_application.my_apphub_application.application_id) + artifact_configs { + google_artifact_analysis { + project_id = google_project.project.project_id + } + google_artifact_registry { + artifact_registry_package = "my-package" + project_id = google_project.project.project_id + } + uri = "us-docker.pkg.dev/my-project/my-repo/my-image" + } + depends_on = [time_sleep.wait_for_propagation] +} diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go new file mode 100644 index 000000000000..44237e1f2c17 --- /dev/null +++ b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go @@ -0,0 +1,341 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package developerconnect_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDeveloperConnectInsightsConfig_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.SkipIfVcr(t) // See: https://github.com/GoogleCloudPlatform/magic-modules/pull/14412 + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectInsightsConfig_basic(context), + }, + { + ResourceName: "google_developer_connect_insights_config.insights_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"insights_config_id", "labels", "location", "terraform_labels", "workload"}, + }, + { + Config: testAccDeveloperConnectInsightsConfig_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_insights_config.insights_config", plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + }, + { + ResourceName: "google_developer_connect_insights_config.insights_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"insights_config_id", "location", "labels", "terraform_labels", "workload"}, + }, + }, + }) +} + +func testAccDeveloperConnectInsightsConfig_basic(context 
map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_project" "project" { + project_id = "dci-tf-%{random_suffix}" + name = "Service Project" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" + } + + # Grant Permissions + resource "google_project_iam_member" "apphub_permissions" { + project = google_project.project.project_id + role = "roles/apphub.admin" + member = "serviceAccount:hashicorp-test-runner@ci-test-project-188019.iam.gserviceaccount.com" + } + + resource "google_project_iam_member" "insights_agent" { + project = google_project.project.project_id + role = "roles/developerconnect.insightsAgent" + member = "serviceAccount:66214305248-compute@developer.gserviceaccount.com" + } + + # Enable APIs + resource "google_project_service" "apphub_api_service" { + project = google_project.project.project_id + service = "apphub.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "containeranalysis_api" { + project = google_project.project.project_id + service = "containeranalysis.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "containerscanning_api" { + project = google_project.project.project_id + service = "containerscanning.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "container_api" { + project = google_project.project.project_id + service = "container.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "artifactregistry_api" { + project = google_project.project.project_id + service = "artifactregistry.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "cloudbuild_api" { + project = 
google_project.project.project_id + service = "cloudbuild.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "cloudasset_api" { + project = google_project.project.project_id + service = "cloudasset.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "compute_api" { + project = google_project.project.project_id + service = "compute.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "devconnect_api" { + project = google_project.project.project_id + service = "developerconnect.googleapis.com" + depends_on = [google_project.project] + } + + # Wait delay after enabling APIs and granting permissions + resource "time_sleep" "wait_for_propagation" { + depends_on = [ + google_project_iam_member.apphub_permissions, + google_project_iam_member.insights_agent, + google_project_service.apphub_api_service, + google_project_service.containeranalysis_api, + google_project_service.containerscanning_api, + google_project_service.container_api, + google_project_service.artifactregistry_api, + google_project_service.artifactregistry_api, + google_project_service.cloudbuild_api, + google_project_service.cloudasset_api, + google_project_service.compute_api, + google_project_service.devconnect_api, + ] + create_duration = "120s" + } + + resource "google_apphub_application" "my_apphub_application" { + location = "us-central1" + application_id = "tf-test-example-application%{random_suffix}" + scope { + type = "REGIONAL" + } + project = google_project.project.project_id + depends_on = [time_sleep.wait_for_propagation] + } + + resource "google_developer_connect_insights_config" "insights_config" { + location = "us-central1" + insights_config_id = "tf-test-ic%{random_suffix}" + project = google_project.project.project_id + annotations = {} + labels = {} + 
app_hub_application = format("//apphub.googleapis.com/projects/%s/locations/%s/applications/%s", + google_project.project.number, + google_apphub_application.my_apphub_application.location, + google_apphub_application.my_apphub_application.application_id) + + depends_on = [time_sleep.wait_for_propagation] + } + `, context) +} + +func testAccDeveloperConnectInsightsConfig_update(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_project" "project" { + project_id = "dci-tf-%{random_suffix}" + name = "Service Project" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" + } + + # Grant Permissions + resource "google_project_iam_member" "apphub_permissions" { + project = google_project.project.project_id + role = "roles/apphub.admin" + member = "serviceAccount:hashicorp-test-runner@ci-test-project-188019.iam.gserviceaccount.com" + } + + resource "google_project_iam_member" "insights_agent" { + project = google_project.project.project_id + role = "roles/developerconnect.insightsAgent" + member = "serviceAccount:66214305248-compute@developer.gserviceaccount.com" + } + + # Enable APIs + resource "google_project_service" "apphub_api_service" { + project = google_project.project.project_id + service = "apphub.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "containeranalysis_api" { + project = google_project.project.project_id + service = "containeranalysis.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "containerscanning_api" { + project = google_project.project.project_id + service = "containerscanning.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "container_api" { + project = google_project.project.project_id + service = 
"container.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "artifactregistry_api" { + project = google_project.project.project_id + service = "artifactregistry.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "cloudbuild_api" { + project = google_project.project.project_id + service = "cloudbuild.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "cloudasset_api" { + project = google_project.project.project_id + service = "cloudasset.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "compute_api" { + project = google_project.project.project_id + service = "compute.googleapis.com" + disable_dependent_services=true + depends_on = [google_project.project] + } + + resource "google_project_service" "devconnect_api" { + project = google_project.project.project_id + service = "developerconnect.googleapis.com" + depends_on = [google_project.project] + } + + # Wait delay after enabling APIs and granting permissions + resource "time_sleep" "wait_for_propagation" { + depends_on = [ + google_project_iam_member.apphub_permissions, + google_project_iam_member.insights_agent, + google_project_service.apphub_api_service, + google_project_service.containeranalysis_api, + google_project_service.containerscanning_api, + google_project_service.container_api, + google_project_service.artifactregistry_api, + google_project_service.artifactregistry_api, + google_project_service.cloudbuild_api, + google_project_service.cloudasset_api, + google_project_service.compute_api, + google_project_service.devconnect_api, + ] + create_duration = "120s" + } + + resource "google_apphub_application" "my_apphub_application" { + location = "us-central1" + application_id = 
"tf-test-example-application%{random_suffix}" + scope { + type = "REGIONAL" + } + project = google_project.project.project_id + depends_on = [time_sleep.wait_for_propagation] + } + resource "google_developer_connect_insights_config" "insights_config" { + location = "us-central1" + insights_config_id = "tf-test-ic%{random_suffix}" + project = google_project.project.project_id + annotations = {} + labels = {} + app_hub_application = format("//apphub.googleapis.com/projects/%s/locations/%s/applications/%s", + google_project.project.number, + google_apphub_application.my_apphub_application.location, + google_apphub_application.my_apphub_application.application_id) + artifact_configs { + google_artifact_analysis { + project_id = google_project.project.project_id + } + google_artifact_registry { + artifact_registry_package = "my-package" + project_id = google_project.project.project_id + } + uri = "us-docker.pkg.dev/my-project/my-repo/my-image" + } + depends_on = [time_sleep.wait_for_propagation] + } + `, context) +} From 2d21edf9799f64547e79421b35e4bbf124b5e0ac Mon Sep 17 00:00:00 2001 From: Paridhi Shah <166548459+paridhishah18@users.noreply.github.com> Date: Wed, 30 Jul 2025 05:17:34 -0700 Subject: [PATCH 634/884] Allow setting manual instance count to 0 when creating workerpool. (#14680) --- mmv1/products/cloudrunv2/WorkerPool.yaml | 1 + ...urce_cloud_run_v2_worker_pool_test.go.tmpl | 54 +++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml index 37ecf07f8773..13f7c75a8974 100644 --- a/mmv1/products/cloudrunv2/WorkerPool.yaml +++ b/mmv1/products/cloudrunv2/WorkerPool.yaml @@ -299,6 +299,7 @@ properties: type: Integer description: | The total number of instances in manual scaling mode. 
+ send_empty_value: true - name: 'template' type: NestedObject description: | diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl index 0a1b772455c5..cd1640ba5ccd 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl @@ -614,3 +614,57 @@ resource "google_cloud_run_v2_worker_pool" "default" { `, context) } + +func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualInstanceCountZero(t *testing.T) { + t.Parallel() + context := map[string]interface{} { + "random_suffix" : acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase { + PreCheck: func() { acctest.AccTestPreCheck(t)}, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualInstanceCountZero(context), + }, + { + ResourceName: "google_cloud_run_v2_worker_pool.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualInstanceCountZero(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_worker_pool" "default" { + name = "tf-test-cloudrun-worker-pool%{random_suffix}" + description = "description creating" + location = "us-central1" + deletion_protection = false + launch_stage = "BETA" + annotations = { + generated-by = "magic-modules" + } + scaling { + manual_instance_count = 0 + } + + labels = { + label-1 = "value-1" + } + client = "client-1" + 
client_version = "client-version-1" + template { + containers { + name = "container-1" + image = "us-docker.pkg.dev/cloudrun/container/worker-pool" + } + } +} + +`, context) +} \ No newline at end of file From 7690fd70556b564d2b97805be0fffed88042a382 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 30 Jul 2025 09:01:14 -0700 Subject: [PATCH 635/884] tgc: manage Makefile of tgc (#14675) --- mmv1/third_party/tgc_next/Makefile | 38 ++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 mmv1/third_party/tgc_next/Makefile diff --git a/mmv1/third_party/tgc_next/Makefile b/mmv1/third_party/tgc_next/Makefile new file mode 100644 index 000000000000..d202b15330d7 --- /dev/null +++ b/mmv1/third_party/tgc_next/Makefile @@ -0,0 +1,38 @@ +build_dir=bin +TF_CONFIG_FILE=tf-dev-override.tfrc +TEST?=$$(go list -e ./... | grep -v github.com/GoogleCloudPlatform/terraform-google-conversion/v6/test) + +build: + GO111MODULE=on go build -o ./${build_dir}/tfplan2cai ./cmd/tfplan2cai + GO111MODULE=on go build -o ./${build_dir}/tgc ./cmd/tgc + +test: + go version + terraform --version + ./config-tf-dev-override.sh + TF_CLI_CONFIG_FILE="$${PWD}/${TF_CONFIG_FILE}" GO111MODULE=on go test $(TEST) $(TESTARGS) -timeout 30m -short + +test-integration: + go version + terraform --version + ./config-tf-dev-override.sh + TF_CLI_CONFIG_FILE="$${PWD}/${TF_CONFIG_FILE}" GO111MODULE=on go test -run=TestAcc $(TESTPATH) $(TESTARGS) -timeout 30m -v ./... + +test-go-licenses: + cd .. && go version && go install github.com/google/go-licenses@latest + $$(go env GOPATH)/bin/go-licenses check ./... 
--ignore github.com/dnaeon/go-vcr + +run-docker: + docker run -it \ + -v `pwd`:/terraform-google-conversion \ + -v ${GOOGLE_APPLICATION_CREDENTIALS}:/terraform-google-conversion/credentials.json \ + -w /terraform-google-conversion \ + --entrypoint=/bin/bash \ + --env TEST_PROJECT=${PROJECT_ID} \ + --env GOOGLE_APPLICATION_CREDENTIALS=/terraform-google-conversion/credentials.json \ + gcr.io/graphite-docker-images/go-plus; + +release: + ./release.sh ${VERSION} + +.PHONY: build test test-integration test-go-licenses run-docker release From ba28984274ee65fd07d2b1f4e75a92956d26ac8c Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Wed, 30 Jul 2025 11:02:32 -0700 Subject: [PATCH 636/884] Add allow_fewer_zones_deployment to Redis Cluster (#14676) Co-authored-by: Scott Suarez --- mmv1/products/redis/Cluster.yaml | 8 ++++++++ .../terraform/examples/redis_cluster_aof.tf.tmpl | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 5b26b4639464..39ea126efb16 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -378,6 +378,14 @@ properties: type: String description: | Immutable. The zone for single zone Memorystore Redis cluster. + - name: 'allowFewerZonesDeployment' + type: Boolean + immutable: true + description: | + Allows customers to specify if they are okay with deploying a multi-zone + cluster in less than 3 zones. Once set, if there is a zonal outage during + the cluster creation, the cluster will only be deployed in 2 zones, and + stay within the 2 zones for its lifecycle. 
- name: 'pscConfigs' type: Array description: | diff --git a/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl b/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl index 8ac4ee710395..789bf86028d6 100644 --- a/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl @@ -13,7 +13,7 @@ resource "google_redis_cluster" "{{$.PrimaryResourceId}}" { maxmemory-policy = "volatile-ttl" } deletion_protection_enabled = {{index $.Vars "deletion_protection_enabled"}} - + allow_fewer_zones_deployment = true zone_distribution_config { mode = "MULTI_ZONE" } From 72a030b7dd4d714568235137f4024d5c57c53cf0 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Wed, 30 Jul 2025 14:03:20 -0400 Subject: [PATCH 637/884] Remove requirement of github in EAP VCR merge (#14662) --- .ci/magician/cmd/vcr_merge_eap.go | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.ci/magician/cmd/vcr_merge_eap.go b/.ci/magician/cmd/vcr_merge_eap.go index 5797f0aaf52d..2c6d68436454 100644 --- a/.ci/magician/cmd/vcr_merge_eap.go +++ b/.ci/magician/cmd/vcr_merge_eap.go @@ -3,7 +3,6 @@ package cmd import ( "fmt" "magician/exec" - "magician/github" "magician/source" "os" @@ -26,11 +25,6 @@ var vcrMergeEapCmd = &cobra.Command{ clNumber := args[0] fmt.Println("CL number:", clNumber) - githubToken, ok := os.LookupEnv("GITHUB_TOKEN_CLASSIC") - if !ok { - return fmt.Errorf("did not provide GITHUB_TOKEN_CLASSIC environment variable") - } - baseBranch := os.Getenv("BASE_BRANCH") if baseBranch == "" { return fmt.Errorf("environment variable BASE_BRANCH is empty") @@ -41,12 +35,11 @@ var vcrMergeEapCmd = &cobra.Command{ return fmt.Errorf("error creating Runner: %w", err) } - gh := github.NewClient(githubToken) - return execVCRMergeEAP(gh, clNumber, baseBranch, rnr) + return execVCRMergeEAP(clNumber, baseBranch, rnr) }, } -func execVCRMergeEAP(gh GithubClient, clNumber, baseBranch string, runner source.Runner) error { 
+func execVCRMergeEAP(clNumber, baseBranch string, runner source.Runner) error { head := "auto-cl-" + clNumber mergeCassettes("gs://ci-vcr-cassettes/private", baseBranch, fmt.Sprintf("refs/heads/%s", head), runner) return nil From 2ce8b9eefff48bdb2ff5ca8d18cc205085d62ba5 Mon Sep 17 00:00:00 2001 From: Arnav Dham Date: Wed, 30 Jul 2025 23:34:50 +0530 Subject: [PATCH 638/884] Adding support for missing AH fields (#14637) --- .../bigqueryanalyticshub/DataExchange.yaml | 14 ++++ .../bigqueryanalyticshub/Listing.yaml | 24 +++++++ ..._analyticshub_public_data_exchange.tf.tmpl | 7 ++ ...gquery_analyticshub_public_listing.tf.tmpl | 28 ++++++++ ...igquery_analytics_hub_dataexchange_test.go | 54 +++++++++++++++ ...rce_bigquery_analytics_hub_listing_test.go | 68 ++++++++++++++++++- 6 files changed, 194 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_public_data_exchange.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_public_listing.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchange_test.go diff --git a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml index 9617fc835bb1..001a662ed643 100644 --- a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml +++ b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml @@ -66,6 +66,13 @@ examples: vars: data_exchange_id: 'tf_test_log_email_data_exchange' description: 'Example for log email test for data exchange' + - name: 'bigquery_analyticshub_public_data_exchange' + primary_resource_id: 'data_exchange' + primary_resource_name: 'fmt.Sprintf("tf_test_log_email_data_exchange%s", context["random_suffix"])' + region_override: 'US' + vars: + data_exchange_id: 'public_data_exchange' + desc: 'Example for public data exchange' parameters: properties: - name: 'name' @@ -146,6 +153,13 @@ properties: - 
'sharing_environment_config.0.dcr_exchange_config' properties: [] + - name: 'discoveryType' + type: Enum + enum_values: + - 'DISCOVERY_TYPE_PRIVATE' + - 'DISCOVERY_TYPE_PUBLIC' + default_from_api: true + description: Type of discovery on the discovery page for all the listings under this exchange. Cannot be set for a Data Clean Room. Updating this field also updates (overwrites) the discoveryType field for all the listings under this exchange. - name: 'logLinkedDatasetQueryUserEmail' type: Boolean description: diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index 77edec0e437a..8eb1f631e2d3 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -98,6 +98,14 @@ examples: dataset_id: 'tf_test_dataset' routine_id: 'tf_test_routine' desc: 'Example for listing with routine' + - name: 'bigquery_analyticshub_public_listing' + primary_resource_id: 'listing' + primary_resource_name: 'fmt.Sprintf("tf_test_my_data_exchange%s", context["random_suffix"]), fmt.Sprintf("tf_test_my_listing%s", context["random_suffix"])' + region_override: 'US' + vars: + data_exchange_id: 'my_data_exchange' + listing_id: 'my_listing' + desc: 'example public listing' - name: 'bigquery_analyticshub_listing_marketplace' primary_resource_id: 'listing' primary_resource_name: 'fmt.Sprintf("tf_test_my_data_exchange%s", context["random_suffix"]), fmt.Sprintf("tf_test_my_listing%s", context["random_suffix"])' @@ -281,6 +289,22 @@ properties: type: Boolean description: If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. Once enabled, this setting cannot be turned off. + - name: 'state' + type: String + description: |- + Current state of the listing. 
+ output: true + - name: 'discoveryType' + type: Enum + enum_values: + - 'DISCOVERY_TYPE_PRIVATE' + - 'DISCOVERY_TYPE_PUBLIC' + default_from_api: true + description: Specifies the type of discovery on the discovery page. Cannot be set for a restricted listing. Note that this does not control the visibility of the exchange/listing which is defined by IAM permission. + - name: 'allowOnlyMetadataSharing' + type: Boolean + immutable: true + description: If true, the listing is only available to get the resource metadata. Listing is non subscribable. - name: 'commercialInfo' type: NestedObject description: | diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_public_data_exchange.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_public_data_exchange.tf.tmpl new file mode 100644 index 000000000000..ace1b471ce57 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_public_data_exchange.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "desc"}}" + discovery_type = "DISCOVERY_TYPE_PUBLIC" +} diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_public_listing.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_public_listing.tf.tmpl new file mode 100644 index 000000000000..2cdd7bdf97d7 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_analyticshub_public_listing.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { + location = "US" + data_exchange_id = "{{index $.Vars "data_exchange_id"}}" + display_name = "{{index $.Vars "data_exchange_id"}}" + description = "{{index $.Vars "desc"}}" + discovery_type = "DISCOVERY_TYPE_PUBLIC" +} + +resource "google_bigquery_analytics_hub_listing" "{{$.PrimaryResourceId}}" 
{ + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.{{$.PrimaryResourceId}}.data_exchange_id + listing_id = "{{index $.Vars "listing_id"}}" + display_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "desc"}}" + discovery_type = "DISCOVERY_TYPE_PUBLIC" + allow_only_metadata_sharing= false + + bigquery_dataset { + dataset = google_bigquery_dataset.{{$.PrimaryResourceId}}.id + } +} + +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { + dataset_id = "{{index $.Vars "listing_id"}}" + friendly_name = "{{index $.Vars "listing_id"}}" + description = "{{index $.Vars "desc"}}" + location = "US" +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchange_test.go b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchange_test.go new file mode 100644 index 000000000000..cbf5a65e9580 --- /dev/null +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchange_test.go @@ -0,0 +1,54 @@ +package bigqueryanalyticshub_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccBigqueryAnalyticsHubDataExchange_bigqueryAnalyticshubPublicDataExchangeUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigqueryAnalyticsHubDataExchangeDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigqueryAnalyticsHubDataExchange_bigqueryAnalyticshubPublicDataExchangeExample(context), + }, + { + ResourceName: "google_bigquery_analytics_hub_data_exchange.data_exchange", 
+ ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"data_exchange_id", "location"}, + }, + { + Config: testAccBigqueryAnalyticsHubDataExchange_bigqueryAnalyticshubPublicDataExchangeUpdate(context), + }, + { + ResourceName: "google_bigquery_analytics_hub_data_exchange.data_exchange", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"data_exchange_id", "location"}, + }, + }, + }) +} + +func testAccBigqueryAnalyticsHubDataExchange_bigqueryAnalyticshubPublicDataExchangeUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_analytics_hub_data_exchange" "data_exchange" { + location = "US" + data_exchange_id = "tf_test_public_data_exchange%{random_suffix}" + display_name = "tf_test_public_data_exchange%{random_suffix}" + description = "Example for public data exchange%{random_suffix}" + discovery_type = "DISCOVERY_TYPE_PRIVATE" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go index a7345dd04720..29812b24369b 100644 --- a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_test.go @@ -188,13 +188,79 @@ resource "google_bigquery_analytics_hub_listing" "listing" { bigquery_dataset { dataset = google_bigquery_dataset.listing.id } +} + +resource "google_bigquery_dataset" "listing" { + dataset_id = "tf_test_my_listing%{random_suffix}" + friendly_name = "tf_test_my_listing%{random_suffix}" + description = "example data exchange%{random_suffix}" + location = "US" +} +`, context) +} + +func TestAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubPublicListingUpdate(t *testing.T) { + t.Parallel() + + context := 
map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigqueryAnalyticsHubListingDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubPublicListingExample(context), + }, + { + ResourceName: "google_bigquery_analytics_hub_listing.listing", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"data_exchange_id", "listing_id", "location"}, + }, + { + Config: testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubPublicListingUpdate(context), + }, + { + ResourceName: "google_bigquery_analytics_hub_listing.listing", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"data_exchange_id", "listing_id", "location"}, + }, + }, + }) +} + +func testAccBigqueryAnalyticsHubListing_bigqueryAnalyticshubPublicListingUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_analytics_hub_data_exchange" "listing" { + location = "US" + data_exchange_id = "tf_test_my_data_exchange%{random_suffix}" + display_name = "tf_test_my_data_exchange%{random_suffix}" + description = "example public listing%{random_suffix}" + discovery_type = "DISCOVERY_TYPE_PRIVATE" +} +resource "google_bigquery_analytics_hub_listing" "listing" { + location = "US" + data_exchange_id = google_bigquery_analytics_hub_data_exchange.listing.data_exchange_id + listing_id = "tf_test_my_listing%{random_suffix}" + display_name = "tf_test_my_listing%{random_suffix}" + description = "example public listing%{random_suffix}" + discovery_type = "DISCOVERY_TYPE_PRIVATE" + allow_only_metadata_sharing= false + + bigquery_dataset { + dataset = google_bigquery_dataset.listing.id + } } resource "google_bigquery_dataset" "listing" { dataset_id = 
"tf_test_my_listing%{random_suffix}" friendly_name = "tf_test_my_listing%{random_suffix}" - description = "example data exchange%{random_suffix}" + description = "example public listing%{random_suffix}" location = "US" } `, context) From 1d1e523dff7e1d0dcf5aa8f9e9cb9c05a0524069 Mon Sep 17 00:00:00 2001 From: Stephane Charite Date: Wed, 30 Jul 2025 17:37:19 -0700 Subject: [PATCH 639/884] Fix Lustre resource names to avoid leaks (#14691) --- .../services/lustre/data_source_lustre_instance_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go index af0256f45088..da7ff4e55dbf 100644 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go @@ -37,7 +37,7 @@ func TestAccLustreInstanceDatasource_basic(t *testing.T) { func testAccLustreInstanceDatasource_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_lustre_instance" "instance" { - instance_id = "my-instance-%{random_suffix}" + instance_id = "tf-test-%{random_suffix}" location = "us-central1-a" filesystem = "testfs" capacity_gib = 18000 From 9b19bbaa949ab5b9e4786bf33d2a15082591685b Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Thu, 31 Jul 2025 10:01:05 -0400 Subject: [PATCH 640/884] feat: define smt examples for pubsub topics and subscriptions (#14627) --- mmv1/products/pubsub/Subscription.yaml | 10 ++++ mmv1/products/pubsub/Topic.yaml | 8 +++ .../pubsub_subscription_multiple_smts.tf.tmpl | 49 +++++++++++++++++++ .../pubsub_subscription_single_smt.tf.tmpl | 21 ++++++++ .../pubsub_topic_multiple_smts.tf.tmpl | 44 +++++++++++++++++ .../examples/pubsub_topic_single_smt.tf.tmpl | 15 ++++++ 6 files changed, 147 insertions(+) create mode 100644 
mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/pubsub_subscription_single_smt.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/pubsub_topic_multiple_smts.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/pubsub_topic_single_smt.tf.tmpl diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index eff35eff17bb..9f9099c98f98 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -116,6 +116,16 @@ examples: subscription_name: 'example-subscription' bucket_name: 'example-bucket' service_account_id: 'example-stw' + - name: 'pubsub_subscription_single_smt' + primary_resource_id: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + - name: 'pubsub_subscription_multiple_smts' + primary_resource_id: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' parameters: properties: - name: 'name' diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index ea2d2f7ce35b..ae9b00f8c2bf 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -100,6 +100,14 @@ examples: primary_resource_id: 'example' vars: topic_name: 'example-topic' + - name: 'pubsub_topic_single_smt' + primary_resource_id: 'example' + vars: + topic_name: 'example-topic' + - name: 'pubsub_topic_multiple_smts' + primary_resource_id: 'example' + vars: + topic_name: 'example-topic' parameters: properties: - name: 'name' diff --git a/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl b/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl new file mode 100644 index 000000000000..b89901657f12 --- /dev/null +++ b/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl @@ -0,0 +1,49 @@ +resource "google_pubsub_topic" "{{$.PrimaryResourceId}}" { 
+ name = "{{index $.Vars "topic_name"}}" +} + +locals { + smts = [ + { + function_name = "redactSSN" + code = < Date: Thu, 31 Jul 2025 07:55:02 -0700 Subject: [PATCH 641/884] =?UTF-8?q?fix(hashicorp/terraform-provider-google?= =?UTF-8?q?#23718):=20removing=20unnecessary=20=E2=80=A6=20(#14659)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mmv1/products/bigquery/Table.yaml | 8 + .../bigquery/resource_bigquery_table.go.tmpl | 90 ++++++++-- .../bigquery/resource_bigquery_table_test.go | 164 +++++++++--------- 3 files changed, 163 insertions(+), 99 deletions(-) diff --git a/mmv1/products/bigquery/Table.yaml b/mmv1/products/bigquery/Table.yaml index b7a0d5ba0271..8327fc0e524c 100644 --- a/mmv1/products/bigquery/Table.yaml +++ b/mmv1/products/bigquery/Table.yaml @@ -638,3 +638,11 @@ virtual_fields: View sets the optional parameter "view": Specifies the view that determines which table information is returned. By default, basic table information and storage statistics (STORAGE_STATS) are returned. Possible values: TABLE_METADATA_VIEW_UNSPECIFIED, BASIC, STORAGE_STATS, FULL + - name: 'ignore_auto_generated_schema' + type: Boolean + description: | + If set to true, Terraform will prevent implicitly added columns in schema from showing diff. + - name: 'generated_schema_columns' + type: String + description: | + (Output-only) A list of autogenerated schema fields. 
diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index 88721b0b5e40..dce185ebebf6 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -1352,6 +1352,12 @@ func ResourceBigQueryTable() *schema.Resource { Description: `Whether Terraform will prevent implicitly added columns in schema from showing diff.`, }, + "generated_schema_columns": { + Type: schema.TypeString, + Computed: true, + Description: `(Output-only) A list of autogenerated schema fields.`, + }, + // TableConstraints: [Optional] Defines the primary key and foreign keys. "table_constraints": { Type: schema.TypeList, @@ -1602,21 +1608,21 @@ func ResourceBigQueryTable() *schema.Resource { } // filterLiveSchemaByConfig compares a live schema from the BQ API with a schema from -// the Terraform config. It returns a new schema containing only the fields -// that are defined in the config, effectively removing any columns that were -// auto-generated by the service (e.g., hive partitioning keys). -// +// the Terraform config. It returns two values: +// 1. A new *bigquery.TableSchema containing a filtered list of fields that are defined in the config, +// effectively removing any columns that were auto-generated by the service (e.g., hive partitioning keys). +// 2. A slice of *bigquery.TableFieldSchema for the fields that were auto-generated by the service. // Parameters: // - liveSchema: The schema returned from a BigQuery API Read/Get call. This may contain extra columns. // - configSchema: The schema built from the user's Terraform configuration (`d.Get("schema")`). This is the source of truth. -// -// Returns: -// -// A new *bigquery.TableSchema containing a filtered list of fields. 
-func filterLiveSchemaByConfig(liveSchema *bigquery.TableSchema, configSchema *bigquery.TableSchema) *bigquery.TableSchema { - if liveSchema == nil || configSchema == nil { - // If either schema is nil, there's nothing to compare, so return an empty schema. - return &bigquery.TableSchema{Fields: []*bigquery.TableFieldSchema{}} +func filterLiveSchemaByConfig(liveSchema *bigquery.TableSchema, configSchema *bigquery.TableSchema) (*bigquery.TableSchema, []*bigquery.TableFieldSchema) { + if liveSchema == nil { + // If live schema is nil, there's nothing to filter or collect. + return &bigquery.TableSchema{Fields: []*bigquery.TableFieldSchema{}}, nil + } + if configSchema == nil || len(configSchema.Fields) == 0 { + // If config schema is nil or empty, all live fields are considered auto-generated. + return &bigquery.TableSchema{Fields: []*bigquery.TableFieldSchema{}}, liveSchema.Fields } // 1. Create a lookup map of all column names defined in the configuration. @@ -1629,19 +1635,21 @@ func filterLiveSchemaByConfig(liveSchema *bigquery.TableSchema, configSchema *bi // 2. Iterate through the fields in the live schema and keep only the ones // that exist in our configuration map. var filteredFields []*bigquery.TableFieldSchema + var autogeneratedFields []*bigquery.TableFieldSchema for _, liveField := range liveSchema.Fields { // If the live field's name is present in the map of configured fields... if _, ok := configFieldsMap[liveField.Name]; ok { // ...then it's a field we care about. Add it to our filtered list. 
filteredFields = append(filteredFields, liveField) } else { - log.Printf("[DEBUG] auto-generated column `%s` dropped during Table read.", liveField.Name) + log.Printf("[DEBUG] auto-generated column `%s` collected during Table read.", liveField.Name) + autogeneratedFields = append(autogeneratedFields, liveField) } } return &bigquery.TableSchema{ Fields: filteredFields, - } + }, autogeneratedFields } func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, error) { @@ -2033,15 +2041,36 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { } if res.Schema != nil { - table, err := resourceTable(d, meta) - if err != nil { - return err + var configSchema *bigquery.TableSchema + if v, ok := d.GetOk("schema"); ok { + _, viewPresent := d.GetOk("view") + _, materializedViewPresent := d.GetOk("materialized_view") + managePolicyTags := !viewPresent && !materializedViewPresent + configSchema, err = expandSchema(v, managePolicyTags) + if err != nil { + return err + } } schemaFiltered := res.Schema ignore, ok := d.Get("ignore_auto_generated_schema").(bool) if ok && ignore { - schemaFiltered = filterLiveSchemaByConfig(res.Schema, table.Schema) + var autogeneratedFields []*bigquery.TableFieldSchema + schemaFiltered, autogeneratedFields = filterLiveSchemaByConfig(res.Schema, configSchema) + if len(autogeneratedFields) > 0 { + autogeneratedFieldsJson, err := json.Marshal(autogeneratedFields) + if err != nil { + return fmt.Errorf("error marshalling autogenerated schema fields: %w", err) + } + if err := d.Set("generated_schema_columns", string(autogeneratedFieldsJson)); err != nil { + return fmt.Errorf("error setting generated_schema_columns: %w", err) + } + } else { + d.Set("generated_schema_columns", "") + } + } else { + // If not ignoring, ensure the field is cleared + d.Set("generated_schema_columns", "") } schema, err := flattenSchema(schemaFiltered) if err != nil { @@ -2128,9 +2157,28 @@ type TableReference struct { tableID 
string } +func addAutoGenSchemaFields(d *schema.ResourceData, table *bigquery.Table) error { + // When ignore_auto_generated_schema is true, we must include the autogenerated fields + // in the update payload to avoid the API thinking we're trying to delete them. + if ignore, enabled := d.Get("ignore_auto_generated_schema").(bool); enabled && ignore { + // Only proceed if the table has a schema to begin with. + if table.Schema != nil { + if autogenStr, ok := d.Get("generated_schema_columns").(string); ok && autogenStr != "" { + var autogenFields []*bigquery.TableFieldSchema + if err := json.Unmarshal([]byte(autogenStr), &autogenFields); err != nil { + return fmt.Errorf("failed to unmarshal autogenerated schema fields: %w", err) + } + table.Schema.Fields = append(table.Schema.Fields, autogenFields...) + log.Printf("[DEBUG] Appended %d autogenerated fields to schema for update", len(autogenFields)) + } + } + } + return nil +} + func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error { // If only client-side fields were modified, short-circuit the Update function to avoid sending an update API request. 
- clientSideFields := map[string]bool{"deletion_protection": true, "ignore_schema_changes": true, "ignore_auto_generated_schema": true, "table_metadata_view": true} + clientSideFields := map[string]bool{"deletion_protection": true, "ignore_schema_changes": true, "ignore_auto_generated_schema": true, "table_metadata_view": true} clientSideOnly := true for field := range ResourceBigQueryTable().Schema { if d.HasChange(field) && !clientSideFields[field] { @@ -2153,6 +2201,10 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error return err } + if err := addAutoGenSchemaFields(d, table); err != nil { + return err + } + if table.ExternalDataConfiguration != nil && table.ExternalDataConfiguration.Schema != nil { log.Printf("[INFO] Removing ExternalDataConfiguration.Schema when updating BigQuery table %s", d.Id()) table.ExternalDataConfiguration.Schema = nil diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index 80150016530a..08f8863cfff8 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -30,7 +30,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -39,7 +39,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: 
[]string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -70,7 +70,7 @@ func TestAccBigQueryTable_IgnoreSchemaDataPoliciesChanges(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "ignore_schema_changes"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "ignore_schema_changes"}, }, { Config: testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName2), @@ -84,7 +84,7 @@ func TestAccBigQueryTable_IgnoreSchemaDataPoliciesChanges(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "ignore_schema_changes"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "ignore_schema_changes"}, }, }, }) @@ -108,7 +108,7 @@ func TestAccBigQueryTable_TableMetadataView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "last_modified_time", "table_metadata_view"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "last_modified_time", "table_metadata_view"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -117,7 +117,7 @@ func TestAccBigQueryTable_TableMetadataView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "last_modified_time", "table_metadata_view"}, + ImportStateVerifyIgnore: 
[]string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "last_modified_time", "table_metadata_view"}, }, }, }) @@ -141,7 +141,7 @@ func TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableBasicSchema(datasetID, tableID), @@ -150,7 +150,7 @@ func TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -174,7 +174,7 @@ func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableTimePartitioningWithExpirationMs(datasetID, tableID, 2000), @@ -183,7 +183,7 @@ func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -207,7 +207,7 @@ func TestAccBigQueryTable_DropColumns(t *testing.T) { ResourceName: "google_bigquery_table.test", 
ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableTimePartitioningDropColumnsUpdate(datasetID, tableID), @@ -216,7 +216,7 @@ func TestAccBigQueryTable_DropColumns(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -242,7 +242,7 @@ func TestAccBigQueryTable_Kms(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -266,7 +266,7 @@ func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -275,7 +275,7 @@ func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -299,7 +299,7 @@ func TestAccBigQueryTable_MonthlyTimePartitioning(t 
*testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -308,7 +308,7 @@ func TestAccBigQueryTable_MonthlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -332,7 +332,7 @@ func TestAccBigQueryTable_YearlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -341,7 +341,7 @@ func TestAccBigQueryTable_YearlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -366,7 +366,7 @@ func TestAccBigQueryTable_HivePartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -391,7 
+391,7 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "new-label"), @@ -400,7 +400,7 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -426,7 +426,7 @@ func TestAccBigQueryTable_AvroPartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -452,7 +452,7 @@ func TestAccBigQueryBigLakeManagedTable(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -520,7 +520,7 @@ func TestAccBigQueryExternalDataTable_json(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableJson(datasetID, tableID, bucketName, "UTF-16BE"), @@ -547,7 +547,7 @@ func TestAccBigQueryTable_RangePartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -571,7 +571,7 @@ func TestAccBigQueryTable_PrimaryKey(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -598,7 +598,7 @@ func TestAccBigQueryTable_ForeignKey(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -625,7 +625,7 @@ func TestAccBigQueryTable_updateTableConstraints(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableTableConstraintsUpdate(projectID, datasetID, tableID_pk, tableID_fk), @@ -634,7 +634,7 @@ func 
TestAccBigQueryTable_updateTableConstraints(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -658,7 +658,7 @@ func TestAccBigQueryTable_View(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -682,7 +682,7 @@ func TestAccBigQueryTable_updateView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableWithNewSqlView(datasetID, tableID), @@ -691,7 +691,7 @@ func TestAccBigQueryTable_updateView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -715,7 +715,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableWithViewAndSchema(datasetID, 
tableID, "table description2"), @@ -724,7 +724,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -751,13 +751,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Basic(t *testing. ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableWithMatViewDailyTimePartitioning_basic(datasetID, tableID, materialized_viewID, queryNew), @@ -766,13 +766,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Basic(t *testing. 
ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -802,13 +802,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableWithMatViewDailyTimePartitioning(datasetID, tableID, materialized_viewID, enable_refresh, refresh_interval_ms, query), @@ -817,13 +817,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", 
"ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -850,13 +850,13 @@ func TestAccBigQueryTable_MaterializedView_NonIncremental_basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, }, }, }) @@ -1232,7 +1232,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionIDAndHivePartit ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", 
"ignore_auto_generated_schema", "schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "schema"}, }, { Config: testAccBigQueryTableFromGCSWithSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1241,7 +1241,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionIDAndHivePartit ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "schema"}, }, }, }) @@ -1271,7 +1271,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConn ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableFromGCSWithSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1280,7 +1280,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConn ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1310,7 +1310,7 @@ func 
TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionId(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1319,7 +1319,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionId2(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1328,7 +1328,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1355,7 +1355,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateAllowQuotedNewlines(t ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", 
"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableFromGCSWithSchema_UpdatAllowQuotedNewlines(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1364,7 +1364,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateAllowQuotedNewlines(t ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1392,7 +1392,7 @@ func TestAccBigQueryDataTable_bigtable(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1420,7 +1420,7 @@ func TestAccBigQueryDataTable_bigtable_options(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableFromBigtable(context), @@ -1448,7 +1448,7 @@ func TestAccBigQueryDataTable_sheet(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", 
"ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1472,7 +1472,7 @@ func TestAccBigQueryDataTable_jsonEquivalency(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTable_jsonEqModeRemoved(datasetID, tableID), @@ -1481,7 +1481,7 @@ func TestAccBigQueryDataTable_jsonEquivalency(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1531,7 +1531,7 @@ func TestAccBigQueryDataTable_expandArray(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTable_arrayExpanded(datasetID, tableID), @@ -1540,7 +1540,7 @@ func TestAccBigQueryDataTable_expandArray(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", 
"terraform_labels", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1564,7 +1564,7 @@ func TestAccBigQueryTable_allowDestroy(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTable_noAllowDestroy(datasetID, tableID), @@ -1596,7 +1596,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTable_emptySchema(datasetID, tableID), @@ -1605,7 +1605,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1630,7 +1630,7 @@ func TestAccBigQueryTable_Update_SchemaWithoutPolicyTagsToWithPolicyTags(t *test ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: 
testAccBigQueryTableBasicSchemaWithPolicyTags(datasetID, tableID, projectID), @@ -1639,7 +1639,7 @@ func TestAccBigQueryTable_Update_SchemaWithoutPolicyTagsToWithPolicyTags(t *test ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1664,7 +1664,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToNoPolicyTag(t *testing.T) ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableBasicSchema(datasetID, tableID), @@ -1673,7 +1673,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToNoPolicyTag(t *testing.T) ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1698,7 +1698,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTag(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableBasicSchemaWithEmptyPolicyTags(datasetID, tableID), @@ -1707,7 +1707,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTag(t *testing ResourceName: 
"google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1732,7 +1732,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTagNames(t *te ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableBasicSchemaWithEmptyPolicyTagNames(datasetID, tableID), @@ -1741,7 +1741,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTagNames(t *te ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1840,7 +1840,7 @@ func TestAccBigQueryTable_TableReplicationInfo_WithoutReplicationInterval(t *tes ResourceName: "google_bigquery_table.replica_mv", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1875,7 +1875,7 @@ func TestAccBigQueryTable_TableReplicationInfo_WithReplicationInterval(t *testin ResourceName: "google_bigquery_table.replica_mv", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", 
"generated_schema_columns"}, }, }, }) @@ -1906,7 +1906,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTableWithResourceTagsUpdate(context), @@ -1915,7 +1915,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, // testAccBigQueryTableWithResourceTagsDestroy must be called at the end of this test to clear the resource tag bindings of the table before deletion. 
{ @@ -1925,7 +1925,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1953,7 +1953,7 @@ func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, { Config: testAccBigQueryTable_externalCatalogTableOptions_update(context), @@ -1962,7 +1962,7 @@ func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -1989,7 +1989,7 @@ func TestAccBigQueryTable_foreignTypeInfo(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, }, }, }) @@ -3294,6 +3294,7 @@ resource "google_storage_bucket" "test" { name = "%s" location = "US" force_destroy = true + uniform_bucket_level_access = true } resource "google_storage_bucket_object" "test" { @@ -3842,6 +3843,9 @@ resource "google_bigquery_table" "test" { deletion_protection = false table_id = "%s" dataset_id = 
google_bigquery_dataset.test.dataset_id + + ignore_auto_generated_schema = true + schema = < Date: Thu, 31 Jul 2025 20:47:10 +0530 Subject: [PATCH 642/884] Added File Examples for Parameter Version Resources (#14698) --- .../parametermanager/ParameterVersion.yaml | 16 ++++++++++++++++ .../RegionalParameterVersion.yaml | 16 ++++++++++++++++ ...er_version_with_json_format_with_file.tf.tmpl | 10 ++++++++++ ...er_version_with_yaml_format_with_file.tf.tmpl | 10 ++++++++++ ...er_version_with_json_format_with_file.tf.tmpl | 11 +++++++++++ ...er_version_with_yaml_format_with_file.tf.tmpl | 11 +++++++++++ .../parameter_data_json_format.json | 6 ++++++ .../parameter_data_yaml_format.yaml | 4 ++++ .../regional_parameter_data_json_format.json | 6 ++++++ .../regional_parameter_data_yaml_format.yaml | 4 ++++ 10 files changed, 94 insertions(+) create mode 100644 mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/regional_parameter_version_with_json_format_with_file.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/regional_parameter_version_with_yaml_format_with_file.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json create mode 100644 mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml create mode 100644 mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json create mode 100644 mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml diff --git a/mmv1/products/parametermanager/ParameterVersion.yaml b/mmv1/products/parametermanager/ParameterVersion.yaml index 13a3ffa9708f..9fd89c41cb51 100644 --- 
a/mmv1/products/parametermanager/ParameterVersion.yaml +++ b/mmv1/products/parametermanager/ParameterVersion.yaml @@ -57,6 +57,22 @@ examples: vars: parameter_id: 'parameter' parameter_version_id: 'parameter_version' + - name: 'parameter_version_with_json_format_with_file' + primary_resource_id: 'parameter-version-with-json-format-with-file' + vars: + parameter_id: 'parameter' + parameter_version_id: 'parameter_version' + data: parameter-json-data.json + test_vars_overrides: + 'data': '"./test-fixtures/parameter_data_json_format.json"' + - name: 'parameter_version_with_yaml_format_with_file' + primary_resource_id: 'parameter-version-with-yaml-format-with-file' + vars: + parameter_id: 'parameter' + parameter_version_id: 'parameter_version' + data: parameter-yaml-data.yaml + test_vars_overrides: + 'data': '"./test-fixtures/parameter_data_yaml_format.yaml"' custom_code: custom_import: 'templates/terraform/custom_import/parameter_manager_parameter_version.go.tmpl' parameters: diff --git a/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml b/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml index ca9cae6ef3cf..13dee82c0960 100644 --- a/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml +++ b/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml @@ -58,6 +58,22 @@ examples: bootstrap_iam: - member: "serviceAccount:service-{project_number}@gcp-sa-pm.iam.gserviceaccount.com" role: "roles/cloudkms.cryptoKeyEncrypterDecrypter" + - name: 'regional_parameter_version_with_json_format_with_file' + primary_resource_id: 'regional-parameter-version-with-json-format-with-file' + vars: + parameter_id: 'regional_parameter' + parameter_version_id: 'regional_parameter_version' + data: regional-parameter-json-data.json + test_vars_overrides: + 'data': '"./test-fixtures/regional_parameter_data_json_format.json"' + - name: 'regional_parameter_version_with_yaml_format_with_file' + primary_resource_id: 
'regional-parameter-version-with-yaml-format-with-file' + vars: + parameter_id: 'regional_parameter' + parameter_version_id: 'regional_parameter_version' + data: regional-parameter-yaml-data.yaml + test_vars_overrides: + 'data': '"./test-fixtures/regional_parameter_data_yaml_format.yaml"' custom_code: pre_create: 'templates/terraform/pre_create/parameter_manager_regional_parameter_version.go.tmpl' custom_import: 'templates/terraform/custom_import/parameter_manager_regional_parameter_version.go.tmpl' diff --git a/mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl b/mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl new file mode 100644 index 000000000000..b10091516eca --- /dev/null +++ b/mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl @@ -0,0 +1,10 @@ +resource "google_parameter_manager_parameter" "parameter-basic" { + parameter_id = "{{index $.Vars "parameter_id"}}" + format = "JSON" +} + +resource "google_parameter_manager_parameter_version" "{{$.PrimaryResourceId}}" { + parameter = google_parameter_manager_parameter.parameter-basic.id + parameter_version_id = "{{index $.Vars "parameter_version_id"}}" + parameter_data = file("{{index $.Vars "data"}}") +} diff --git a/mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl b/mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl new file mode 100644 index 000000000000..a1d6af0bbd0c --- /dev/null +++ b/mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl @@ -0,0 +1,10 @@ +resource "google_parameter_manager_parameter" "parameter-basic" { + parameter_id = "{{index $.Vars "parameter_id"}}" + format = "YAML" +} + +resource "google_parameter_manager_parameter_version" "{{$.PrimaryResourceId}}" { + parameter = google_parameter_manager_parameter.parameter-basic.id + parameter_version_id = "{{index $.Vars 
"parameter_version_id"}}" + parameter_data = file("{{index $.Vars "data"}}") +} diff --git a/mmv1/templates/terraform/examples/regional_parameter_version_with_json_format_with_file.tf.tmpl b/mmv1/templates/terraform/examples/regional_parameter_version_with_json_format_with_file.tf.tmpl new file mode 100644 index 000000000000..eaaac2142d3f --- /dev/null +++ b/mmv1/templates/terraform/examples/regional_parameter_version_with_json_format_with_file.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_parameter_manager_regional_parameter" "regional-parameter-basic" { + parameter_id = "{{index $.Vars "parameter_id"}}" + format = "JSON" + location = "us-central1" +} + +resource "google_parameter_manager_regional_parameter_version" "{{$.PrimaryResourceId}}" { + parameter = google_parameter_manager_regional_parameter.regional-parameter-basic.id + parameter_version_id = "{{index $.Vars "parameter_version_id"}}" + parameter_data = file("{{index $.Vars "data"}}") +} diff --git a/mmv1/templates/terraform/examples/regional_parameter_version_with_yaml_format_with_file.tf.tmpl b/mmv1/templates/terraform/examples/regional_parameter_version_with_yaml_format_with_file.tf.tmpl new file mode 100644 index 000000000000..269c9b9841c1 --- /dev/null +++ b/mmv1/templates/terraform/examples/regional_parameter_version_with_yaml_format_with_file.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_parameter_manager_regional_parameter" "regional-parameter-basic" { + parameter_id = "{{index $.Vars "parameter_id"}}" + format = "YAML" + location = "us-central1" +} + +resource "google_parameter_manager_regional_parameter_version" "{{$.PrimaryResourceId}}" { + parameter = google_parameter_manager_regional_parameter.regional-parameter-basic.id + parameter_version_id = "{{index $.Vars "parameter_version_id"}}" + parameter_data = file("{{index $.Vars "data"}}") +} diff --git a/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json 
b/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json new file mode 100644 index 000000000000..aee129d0f4d9 --- /dev/null +++ b/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json @@ -0,0 +1,6 @@ +{ + "db_host": "localhost", + "db_name": "testdb", + "db_user": "testuser", + "db_port": 5432 +} diff --git a/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml b/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml new file mode 100644 index 000000000000..d633d82a6ea8 --- /dev/null +++ b/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml @@ -0,0 +1,4 @@ +db_host: localhost +db_port: 5432 +db_name: testdb +db_user: testuser diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json b/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json new file mode 100644 index 000000000000..aee129d0f4d9 --- /dev/null +++ b/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json @@ -0,0 +1,6 @@ +{ + "db_host": "localhost", + "db_name": "testdb", + "db_user": "testuser", + "db_port": 5432 +} diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml b/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml new file mode 100644 index 000000000000..d633d82a6ea8 --- /dev/null +++ b/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml @@ -0,0 +1,4 @@ +db_host: localhost +db_port: 5432 +db_name: testdb +db_user: testuser From 047b7ad2bcb7c5db89d3b219d41a0d5605639876 Mon Sep 17 
00:00:00 2001 From: Yanwei Guo Date: Thu, 31 Jul 2025 08:38:18 -0700 Subject: [PATCH 643/884] Add support for GPU redundancy to Cloud Run v2 job (#14673) --- mmv1/products/cloudrunv2/Job.yaml | 3 +++ mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl | 1 + .../services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl | 1 + 3 files changed, 5 insertions(+) diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index 57ae1e01c290..4e4b200d4cfe 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -781,6 +781,9 @@ properties: description: The GPU to attach to an instance. See https://cloud.google.com/run/docs/configuring/jobs/gpu for configuring GPU. required: true + - name: 'gpuZonalRedundancyDisabled' + type: Boolean + description: True if GPU zonal redundancy is disabled on this execution. - name: 'observedGeneration' type: String description: | diff --git a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl index 5581f4bece44..a5ac9f3cac9d 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl @@ -11,6 +11,7 @@ resource "google_cloud_run_v2_job" "{{$.PrimaryResourceId}}" { node_selector { accelerator = "nvidia-l4" } + gpu_zonal_redundancy_disabled = true } } } diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index 083f4f82c6bf..d58b6d7fede3 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -1059,6 +1059,7 @@ func testAccCloudRunV2Job_cloudrunv2JobWithGpu(context map[string]interface{}) s node_selector { accelerator = "nvidia-l4" } + 
gpu_zonal_redundancy_disabled = true } } lifecycle { From 7bd4d85d4a417cb3995e7ff9c3d2ccd7908f787e Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 31 Jul 2025 09:27:30 -0700 Subject: [PATCH 644/884] tgc-revival: add google_pubsub_subscription (#14688) Co-authored-by: Thomas Rodgers --- mmv1/products/pubsub/Subscription.yaml | 9 ++++++++ mmv1/products/pubsub/Topic.yaml | 2 +- .../pubsub_subscription_attributes.go.tmpl | 22 +++++++++++++++++++ ..._topic.go.tmpl => pubsub_add_name.go.tmpl} | 0 4 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl rename mmv1/templates/tgc_next/encoders/{pubsub_topic.go.tmpl => pubsub_add_name.go.tmpl} (100%) diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index 9f9099c98f98..3f380f8849fb 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -29,6 +29,7 @@ create_verb: 'PUT' update_url: 'projects/{{project}}/subscriptions/{{name}}' update_verb: 'PATCH' update_mask: true +include_in_tgc_next_DO_NOT_USE: true timeouts: insert_minutes: 20 update_minutes: 20 @@ -44,6 +45,7 @@ custom_code: constants: 'templates/terraform/constants/subscription.go.tmpl' encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_subscription.tmpl' + tgc_encoder: 'templates/tgc_next/encoders/pubsub_add_name.go.tmpl' examples: - name: 'pubsub_subscription_push' primary_resource_id: 'example' @@ -55,6 +57,8 @@ examples: vars: topic_name: 'example-topic' subscription_name: 'example-subscription' + tgc_test_ignore_extra: + - enable_message_ordering # ignore its false value in configuration - name: 'pubsub_subscription_pull_filter' primary_resource_id: 'example' vars: @@ -166,6 +170,7 @@ properties: description: | The name of the table to which to write data, of the form {projectId}.{datasetId}.{tableId} required: true + 
is_missing_in_cai: true - name: 'useTopicSchema' type: Boolean description: | @@ -180,6 +185,7 @@ properties: must be published in JSON format. Only one of use_topic_schema and use_table_schema can be set. conflicts: - use_topic_schema + is_missing_in_cai: true - name: 'writeMetadata' type: Boolean description: | @@ -197,6 +203,7 @@ properties: The service account to use to write to BigQuery. If not specified, the Pub/Sub [service agent](https://cloud.google.com/iam/docs/service-agents), service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, is used. + is_missing_in_cai: true - name: 'cloudStorageConfig' type: NestedObject description: | @@ -206,6 +213,7 @@ properties: conflicts: - push_config - bigquery_config + is_missing_in_cai: true properties: - name: 'bucket' type: String @@ -339,6 +347,7 @@ properties: - v1beta1: uses the push format defined in the v1beta1 Pub/Sub API. - v1 or v1beta2: uses the push format defined in the v1 Pub/Sub API. diff_suppress_func: 'IgnoreMissingKeyInMap("x-goog-version")' + custom_tgc_expand: templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl - name: 'noWrapper' type: NestedObject description: | diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index ae9b00f8c2bf..b35f57a428cf 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -52,7 +52,7 @@ iam_policy: custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_topic.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/pubsub_topic.go.tmpl' + tgc_encoder: 'templates/tgc_next/encoders/pubsub_add_name.go.tmpl' error_retry_predicates: - 'transport_tpg.PubsubTopicProjectNotReady' include_in_tgc_next_DO_NOT_USE: true diff --git a/mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl b/mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl new file mode 100644 index 
000000000000..e1d5f7ad792f --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl @@ -0,0 +1,22 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + m := make(map[string]string) + if v == nil { + return m, nil + } + + vMap, ok := v.(map[string]interface{}) + if !ok { + return m, fmt.Errorf("non-map v: %v (%T)", v, v) + } + + // The default value is present in CAI asset + if len(vMap) == 0 { + m["x-goog-version"] = "v1" + return m, nil + } + + for k, val := range vMap { + m[k] = val.(string) + } + return m, nil +} diff --git a/mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl b/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl similarity index 100% rename from mmv1/templates/tgc_next/encoders/pubsub_topic.go.tmpl rename to mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl From 24c6c1c3465a3196971711491f46c98cb8b2b450 Mon Sep 17 00:00:00 2001 From: zlq Date: Thu, 31 Jul 2025 09:57:44 -0700 Subject: [PATCH 645/884] container: add rbac config binding (#14692) --- .../resource_container_cluster.go.tmpl | 77 ++++++++++++++++++- .../resource_container_cluster_test.go.tmpl | 64 +++++++++++++++ .../docs/r/container_cluster.html.markdown | 7 ++ 3 files changed, 145 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 4e746a25c57a..63f2d5e01796 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2551,6 +2551,27 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, + "rbac_binding_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Computed: true, + Description: `RBACBindingConfig allows user to 
restrict ClusterRoleBindings an RoleBindings that can be created.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enable_insecure_binding_system_unauthenticated": { + Type: schema.TypeBool, + Optional: true, + Description: `Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:anonymous or system:unauthenticated.`, + }, + "enable_insecure_binding_system_authenticated": { + Type: schema.TypeBool, + Optional: true, + Description: `Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:authenticated.`, + }, + }, + }, + }, }, } } @@ -2880,9 +2901,13 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er cluster.EnterpriseConfig = expandEnterpriseConfig(v) } - if v, ok := d.GetOk("anonymous_authentication_config"); ok { - cluster.AnonymousAuthenticationConfig = expandAnonymousAuthenticationConfig(v) - } + if v, ok := d.GetOk("anonymous_authentication_config"); ok { + cluster.AnonymousAuthenticationConfig = expandAnonymousAuthenticationConfig(v) + } + + if v, ok := d.GetOk("rbac_binding_config"); ok { + cluster.RbacBindingConfig = expandRBACBindingConfig(v) + } needUpdateAfterCreate := false @@ -3468,6 +3493,10 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro return err } + if err := d.Set("rbac_binding_config", flattenRBACBindingConfig(cluster.RbacBindingConfig)); err != nil { + return err + } + return nil } @@ -5003,6 +5032,22 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er } } + if d.HasChange("rbac_binding_config") { + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredRbacBindingConfig: expandRBACBindingConfig(d.Get("rbac_binding_config")), + ForceSendFields: []string{"DesiredRbacBindingConfig"}, + }} + + updateF := updateFunc(req, "updating GKE cluster RBAC binding config") + // Call update serially. 
+ if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s's RBAC binding config has been updated", d.Id()) + } + d.Partial(false) {{ if ne $.TargetVersionName `ga` -}} @@ -6577,6 +6622,20 @@ func expandWorkloadAltsConfig(configured interface{}) *container.WorkloadALTSCon } {{- end }} +func expandRBACBindingConfig(configured interface{}) *container.RBACBindingConfig { + l := configured.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil + } + + config := l[0].(map[string]interface{}) + return &container.RBACBindingConfig{ + EnableInsecureBindingSystemUnauthenticated: config["enable_insecure_binding_system_unauthenticated"].(bool), + EnableInsecureBindingSystemAuthenticated: config["enable_insecure_binding_system_authenticated"].(bool), + ForceSendFields: []string{"EnableInsecureBindingSystemUnauthenticated", "EnableInsecureBindingSystemAuthenticated"}, + } +} + func flattenNotificationConfig(c *container.NotificationConfig) []map[string]interface{} { if c == nil { return nil @@ -7568,6 +7627,18 @@ func flattenWorkloadAltsConfig(c *container.WorkloadALTSConfig) []map[string]int } {{- end }} +func flattenRBACBindingConfig(c *container.RBACBindingConfig) []map[string]interface{} { + if c == nil { + return nil + } + return []map[string]interface{}{ + { + "enable_insecure_binding_system_authenticated": c.EnableInsecureBindingSystemAuthenticated, + "enable_insecure_binding_system_unauthenticated": c.EnableInsecureBindingSystemUnauthenticated, + }, + } +} + func resourceContainerClusterStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index e2af6d09209c..b1dde35b4df1 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -14108,3 +14108,67 @@ resource "google_container_cluster" "primary" { } `, name, networkName, subnetworkName, mode) } + +func TestAccContainerCluster_RbacBindingConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_RbacBindingConfig(clusterName, networkName, subnetworkName, true, true), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.#", "1"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_unauthenticated", "true"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_authenticated", "true"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_RbacBindingConfig(clusterName, networkName, subnetworkName, false, false), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.#", "1"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_unauthenticated", 
"false"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_authenticated", "false"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerCluster_RbacBindingConfig(clusterName, networkName, subnetworkName string, unauthenticated, authenticated bool) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + + network = "%s" + subnetwork = "%s" + + rbac_binding_config { + enable_insecure_binding_system_unauthenticated = %t + enable_insecure_binding_system_authenticated = %t + } + + deletion_protection = false +} +`, clusterName, networkName, subnetworkName, unauthenticated, authenticated) +} diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 631d8d6a20af..e11e2f173080 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -426,6 +426,8 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle * `anonymous_authentication_config` - (Optional) Configuration for [anonymous authentication restrictions](https://cloud.google.com/kubernetes-engine/docs/how-to/hardening-your-cluster#restrict-anon-access). Structure is [documented below](#anonymous_authentication_config). +* `rbac_binding_config` - (Optional) + RBACBindingConfig allows user to restrict ClusterRoleBindings an RoleBindings that can be created. Structure is [documented below](#nested_rbac_binding_config). 
The `default_snat_status` block supports @@ -1560,6 +1562,11 @@ linux_node_config { * `mode` - (Optional) Sets or removes authentication restrictions. Available options include `LIMITED` and `ENABLED`. +The `rbac_binding_config` block supports: + +* `enable_insecure_binding_system_unauthenticated` - (Optional) Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:anonymous or system:unauthenticated. +* `enable_insecure_binding_system_authenticated` - (Optional) Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:authenticated. + ## Attributes Reference From 1c18e538cc02706427d8275f27c809475a6bd9c0 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 31 Jul 2025 10:14:20 -0700 Subject: [PATCH 646/884] deprecate `google_service_account_key.project` (#14683) --- .../data_source_google_service_account_key.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go index 95b47a88cc79..6c5002ab40c0 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go @@ -30,8 +30,9 @@ func DataSourceGoogleServiceAccountKey() *schema.Resource { ValidateFunc: validation.StringInSlice([]string{"TYPE_NONE", "TYPE_X509_PEM_FILE", "TYPE_RAW_PUBLIC_KEY"}, false), }, "project": { - Type: schema.TypeString, - Optional: true, + Type: schema.TypeString, + Optional: true, + Deprecated: "`project` is deprecated and will be removed in a future major release. 
This field is non-functional and can be removed from your configuration safely.", }, "key_algorithm": { Type: schema.TypeString, From 5541ea4be8120c52f0429129e87d7f01d2f63f3b Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 31 Jul 2025 10:44:35 -0700 Subject: [PATCH 647/884] Update lustre instance datasource tests to use bootstrapped network (#14696) --- .../data_source_lustre_instance_test.go | 38 ++++++------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go index da7ff4e55dbf..fd4618b72873 100644 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go @@ -11,6 +11,7 @@ func TestAccLustreInstanceDatasource_basic(t *testing.T) { t.Parallel() context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedTestNetwork(t, "default-vpc"), "random_suffix": acctest.RandString(t, 10), } @@ -41,36 +42,21 @@ resource "google_lustre_instance" "instance" { location = "us-central1-a" filesystem = "testfs" capacity_gib = 18000 - network = google_compute_network.producer_net.id + network = data.google_compute_network.lustre-network.id gke_support_enabled = false per_unit_storage_throughput = 1000 - depends_on = [ google_service_networking_connection.service_con ] } -resource "google_compute_subnetwork" "producer_subnet" { - name = "tf-test-my-subnet-%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "us-central1" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "tf-test-my-network-%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_global_address" "private_ip_alloc" { - name = "private-ip-alloc-%{random_suffix}" - purpose 
= "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.producer_net.id -} - -resource "google_service_networking_connection" "service_con" { - network = google_compute_network.producer_net.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +// This example assumes this network already exists. +// The API creates a tenant network per network authorized for a +// Lustre instance and that network is not deleted when the user-created +// network (authorized_network) is deleted, so this prevents issues +// with tenant network quota. +// If this network hasn't been created and you are using this example in your +// config, add an additional network resource or change +// this from "data"to "resource" +data "google_compute_network" "lustre-network" { + name = "%{network_name}" } data "google_lustre_instance" "default" { From 61384c598e30e348fa6ddbb8f2b4c536d5d6f93e Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 31 Jul 2025 10:51:52 -0700 Subject: [PATCH 648/884] tgc-revival: add google_alloydb_backup (#14684) --- mmv1/products/alloydb/Backup.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/alloydb/Backup.yaml b/mmv1/products/alloydb/Backup.yaml index de788f9de62c..825525c71828 100644 --- a/mmv1/products/alloydb/Backup.yaml +++ b/mmv1/products/alloydb/Backup.yaml @@ -40,6 +40,7 @@ async: resource_inside_response: false custom_code: encoder: 'templates/terraform/encoders/alloydb_backup.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'alloydb_backup_basic' primary_resource_id: 'default' From d89bbe609da2df03a3d421c78e79df35ed0e726d Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Thu, 31 Jul 2025 18:05:45 +0000 Subject: [PATCH 649/884] feat: (storage) added deletion_policy field in storage bucket object (#14594) Co-authored-by: Nick Elliot --- .../storage/resource_storage_bucket_object.go | 14 +++++ 
.../resource_storage_bucket_object_test.go | 62 +++++++++++++++++++ .../r/storage_bucket_object.html.markdown | 2 + 3 files changed, 78 insertions(+) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go index 648485f3fab6..e65c8fc9b2bd 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go @@ -15,6 +15,7 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "crypto/sha256" "encoding/base64" @@ -307,6 +308,13 @@ func ResourceStorageBucketObject() *schema.Resource { Computed: true, Description: `A url reference to download this object.`, }, + + "deletion_policy": { + Type: schema.TypeString, + Optional: true, + Description: `The deletion policy for the object. 
Setting ABANDON allows the resource to be abandoned rather than deleted when removed from your Terraform configuration.`, + ValidateFunc: validation.StringInSlice([]string{"ABANDON"}, false), + }, }, UseJSONNumber: true, } @@ -571,6 +579,12 @@ func resourceStorageBucketObjectDelete(d *schema.ResourceData, meta interface{}) return err } + if deletionPolicy := d.Get("deletion_policy"); deletionPolicy == "ABANDON" { + log.Printf("[WARN] Object %q deletion_policy is set to 'ABANDON', object deletion has been abandoned", d.Id()) + d.SetId("") + return nil + } + bucket := d.Get("bucket").(string) name := d.Get("name").(string) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go index 8fb5da2a36ed..7f9fb67cbc0c 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go @@ -616,6 +616,29 @@ func TestAccStorageObject_knownAfterApply(t *testing.T) { }) } +func TestAccStorageObject_objectDeletionPolicy(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageObjectDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleStorageBucketsObjectDeletionPolicy(bucketName, "samplecontent"), + }, + { + Config: testGoogleStorageBucketsObjectAbandon(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageObjectExists(t, bucketName), + ), + }, + }, + }) +} + func testAccCheckGoogleStorageObject(t *testing.T, bucket, object, md5 string) resource.TestCheckFunc { return testAccCheckGoogleStorageObjectWithEncryption(t, bucket, object, md5, "") } @@ -1080,3 +1103,42 @@ output "valid" { } `, bucketName, 
content, filename) } + +func testGoogleStorageBucketsObjectDeletionPolicy(bucketName string, customContent string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = google_storage_bucket.bucket.name + content = "%s" + deletion_policy = "ABANDON" +} +`, bucketName, objectName, customContent) +} + +func testGoogleStorageBucketsObjectAbandon(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true +} +`, bucketName) +} + +func testAccCheckStorageObjectExists(t *testing.T, bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + + config := acctest.GoogleProviderConfig(t) + + _, err := config.NewStorageClient(config.UserAgent).Objects.Get(bucketName, objectName).Do() + if err != nil { + return err + } + return nil + } +} diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 2fe778b50363..86e7b39b008f 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -91,6 +91,8 @@ One of the following is required: * `force_empty_content_type` - (Optional) When set to true, it ensure the object's Content-Type is empty. +* `deletion_policy` - (Optional) When set to ABANDON, the object won't be deleted from storage bucket. Instead, it will only be removed from terraform's state file. 
+ --- The `customer_encryption` block supports: From 7ba274daf445d6edc0f934b4e7ec26f41cc34fc0 Mon Sep 17 00:00:00 2001 From: Liyun Huang Date: Thu, 31 Jul 2025 14:06:59 -0400 Subject: [PATCH 650/884] backupretentioninheritance (#14555) --- mmv1/products/backupdr/BackupVault.yaml | 9 +++++++++ .../examples/backup_dr_backup_vault_full.tf.tmpl | 1 + .../backupdr/resource_backup_dr_backup_vault_test.go | 6 ++++-- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/mmv1/products/backupdr/BackupVault.yaml b/mmv1/products/backupdr/BackupVault.yaml index 52f933b7f99a..e9c4cc5efa4b 100644 --- a/mmv1/products/backupdr/BackupVault.yaml +++ b/mmv1/products/backupdr/BackupVault.yaml @@ -179,3 +179,12 @@ properties: - 'WITHIN_ORGANIZATION' - 'UNRESTRICTED' - 'WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA' + - name: 'backupRetentionInheritance' + type: Enum + ignore_read: true + description: | + How a backup's enforced retention end time is inherited. Default value is `INHERIT_VAULT_RETENTION` if not provided during creation. 
+ enum_values: + - 'BACKUP_RETENTION_INHERITANCE_UNSPECIFIED' + - 'INHERIT_VAULT_RETENTION' + - 'MATCH_BACKUP_EXPIRE_TIME' diff --git a/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl index f527cf9fa2f9..4b51780d77d4 100644 --- a/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl @@ -13,6 +13,7 @@ resource "google_backup_dr_backup_vault" "{{$.PrimaryResourceId}}" { } force_update = "true" access_restriction = "WITHIN_ORGANIZATION" + backup_retention_inheritance = "INHERIT_VAULT_RETENTION" ignore_inactive_datasources = "true" ignore_backup_plan_references = "true" allow_missing = "true" diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go index 1f8e0d4a4a3e..33d24d8ba32a 100644 --- a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go +++ b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go @@ -34,7 +34,7 @@ func TestAccBackupDRBackupVault_fullUpdate(t *testing.T) { ResourceName: "google_backup_dr_backup_vault.backup-vault-test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "access_restriction", "labels", "location", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "backup_retention_inheritance", "access_restriction", "labels", "location", "terraform_labels"}, }, { Config: testAccBackupDRBackupVault_fullUpdate(context), @@ -43,7 +43,7 @@ func 
TestAccBackupDRBackupVault_fullUpdate(t *testing.T) { ResourceName: "google_backup_dr_backup_vault.backup-vault-test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "access_restriction", "labels", "location", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "backup_retention_inheritance", "access_restriction", "labels", "location", "terraform_labels"}, }, }, }) @@ -68,6 +68,7 @@ resource "google_backup_dr_backup_vault" "backup-vault-test" { force_update = "true" ignore_inactive_datasources = "true" access_restriction = "WITHIN_ORGANIZATION" + backup_retention_inheritance = "INHERIT_VAULT_RETENTION" ignore_backup_plan_references = "true" allow_missing = "true" } @@ -92,6 +93,7 @@ resource "google_backup_dr_backup_vault" "backup-vault-test" { } force_update = "true" access_restriction = "WITHIN_ORGANIZATION" + backup_retention_inheritance = "INHERIT_VAULT_RETENTION" ignore_inactive_datasources = "true" ignore_backup_plan_references = "true" allow_missing = "true" From 8ee30ea250a5b3892de70a27115c687a84f60169 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 31 Jul 2025 11:12:04 -0700 Subject: [PATCH 651/884] fix lustre instance sweeper (#14690) --- mmv1/products/lustre/Instance.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/products/lustre/Instance.yaml b/mmv1/products/lustre/Instance.yaml index 69e24a432e36..69e865e18e68 100644 --- a/mmv1/products/lustre/Instance.yaml +++ b/mmv1/products/lustre/Instance.yaml @@ -26,6 +26,9 @@ update_verb: PATCH id_format: projects/{{project}}/locations/{{location}}/instances/{{instance_id}} import_format: - 
projects/{{project}}/locations/{{location}}/instances/{{instance_id}} +sweeper: + url_substitutions: + - location: "us-central1-a" examples: - name: lustre_instance_basic From 2324d9364f822d0c4472126fe31bf66ca187121c Mon Sep 17 00:00:00 2001 From: dixuswe <152918466+dixuswe@users.noreply.github.com> Date: Thu, 31 Jul 2025 11:24:40 -0700 Subject: [PATCH 652/884] google_cloudfunctions_function base image policy fields (#14552) --- .../resource_cloudfunctions_function.go | 113 +++++++++++++ ...resource_cloudfunctions_function_meta.yaml | 2 + ...ource_cloudfunctions_function_test.go.tmpl | 148 ++++++++++++++++++ .../r/cloudfunctions_function.html.markdown | 8 + 4 files changed, 271 insertions(+) diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index 225678bdb29a..5fbab6b22280 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -492,11 +492,42 @@ func ResourceCloudFunctionsFunction() *schema.Resource { }, }, }, + + "automatic_update_policy": { + Type: schema.TypeList, + Optional: true, + Computed: true, + ConflictsWith: []string{"on_deploy_update_policy"}, + MaxItems: 1, + Description: `Security patches are applied automatically to the runtime without requiring the function to be redeployed.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{}, + }, + }, + + "on_deploy_update_policy": { + Type: schema.TypeList, + Optional: true, + ConflictsWith: []string{"automatic_update_policy"}, + MaxItems: 1, + Description: `Security patches are only applied when a function is redeployed.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "runtime_version": { + Type: schema.TypeString, + Computed: true, + Description: `The runtime version which was used during latest function 
deployment.`, + }, + }, + }, + }, + "status": { Type: schema.TypeString, Computed: true, Description: `Describes the current stage of a deployment.`, }, + "version_id": { Type: schema.TypeString, Computed: true, @@ -590,6 +621,14 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro "You must specify a trigger when deploying a new function.") } + if v, ok := d.GetOk("automatic_update_policy"); ok { + function.AutomaticUpdatePolicy = expandAutomaticUpdatePolicy(v.([]interface{})) + function.OnDeployUpdatePolicy = nil + } else if v, ok := d.GetOk("on_deploy_update_policy"); ok { + function.OnDeployUpdatePolicy = expandOnDeployUpdatePolicy(v.([]interface{})) + function.AutomaticUpdatePolicy = nil + } + if v, ok := d.GetOk("ingress_settings"); ok { function.IngressSettings = v.(string) } @@ -808,6 +847,25 @@ func resourceCloudFunctionsRead(d *schema.ResourceData, meta interface{}) error if err := d.Set("version_id", strconv.FormatInt(function.VersionId, 10)); err != nil { return fmt.Errorf("Error setting version_id: %s", err) } + // check the on_deploy_update_policy first as it's mutually exclusive to automatice_update_policy, and the latter is system default + if function.OnDeployUpdatePolicy != nil { + if err := d.Set("on_deploy_update_policy", flattenOnDeployUpdatePolicy(function.OnDeployUpdatePolicy)); err != nil { + return fmt.Errorf("Error setting on_deploy_update_policy: %s", err) + } + function.AutomaticUpdatePolicy = nil + d.Set("automatic_update_policy", nil) + } else { + d.Set("on_deploy_update_policy", nil) + } + + if function.AutomaticUpdatePolicy != nil { + if err := d.Set("automatic_update_policy", flattenAutomaticUpdatePolicy(function.AutomaticUpdatePolicy)); err != nil { + return fmt.Errorf("Error setting automatic_update_policy: %s", err) + } + d.Set("on_deploy_update_policy", nil) + } else { + d.Set("automatic_update_policy", nil) + } return nil } @@ -964,6 +1022,22 @@ func resourceCloudFunctionsUpdate(d 
*schema.ResourceData, meta interface{}) erro updateMaskArr = append(updateMaskArr, "buildServiceAccount") } + if d.HasChange("automatic_update_policy") { + function.AutomaticUpdatePolicy = expandAutomaticUpdatePolicy(d.Get("automatic_update_policy").([]interface{})) + if function.AutomaticUpdatePolicy != nil { + function.OnDeployUpdatePolicy = nil + } + updateMaskArr = append(updateMaskArr, "automatic_update_policy") + } + + if d.HasChange("on_deploy_update_policy") { + function.OnDeployUpdatePolicy = expandOnDeployUpdatePolicy(d.Get("on_deploy_update_policy").([]interface{})) + if function.OnDeployUpdatePolicy != nil { + function.AutomaticUpdatePolicy = nil + } + updateMaskArr = append(updateMaskArr, "on_deploy_update_policy") + } + if len(updateMaskArr) > 0 { log.Printf("[DEBUG] Send Patch CloudFunction Configuration request: %#v", function) updateMask := strings.Join(updateMaskArr, ",") @@ -1232,3 +1306,42 @@ func flattenSecretVersion(secretVersions []*cloudfunctions.SecretVersion) []map[ } return result } + +func expandAutomaticUpdatePolicy(configured []interface{}) *cloudfunctions.AutomaticUpdatePolicy { + if len(configured) == 0 { + return nil + } + return &cloudfunctions.AutomaticUpdatePolicy{} +} + +func flattenAutomaticUpdatePolicy(policy *cloudfunctions.AutomaticUpdatePolicy) []map[string]interface{} { + result := make([]map[string]interface{}, 0, 1) + if policy == nil { + return nil + } + // Have to append an empty element for empty message type + result = append(result, map[string]interface{}{}) + return result +} + +func expandOnDeployUpdatePolicy(configured []interface{}) *cloudfunctions.OnDeployUpdatePolicy { + if len(configured) == 0 { + return nil + } + return &cloudfunctions.OnDeployUpdatePolicy{} +} + +func flattenOnDeployUpdatePolicy(policy *cloudfunctions.OnDeployUpdatePolicy) []map[string]interface{} { + result := make([]map[string]interface{}, 0, 1) + if policy == nil { + return nil + } + + result = append(result, map[string]interface{}{ + 
"runtime_version": policy.RuntimeVersion, + }) + + log.Printf("flatten on_deploy_update_policy to: %s", result) + + return result +} diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml index c0a8e21b27b8..ffb9b5a0a060 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml @@ -51,3 +51,5 @@ fields: - field: 'version_id' - field: 'vpc_connector' - field: 'vpc_connector_egress_settings' + - field: 'automatic_update_policy' + - field: 'on_deploy_update_policy.runtime_version' diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl index 156116cb011b..0f3790c0e92f 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl @@ -628,6 +628,70 @@ func TestAccCloudFunctionsFunction_buildServiceAccount(t *testing.T) { }) } +func TestAccCloudFunctionsFunction_abiuCRUD(t *testing.T) { + t.Parallel() + + var function cloudfunctions.CloudFunction + + funcResourceName := "google_cloudfunctions_function.function" + functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt(t)) + zipFilePath := acctest.CreateZIPArchiveForCloudFunctionSource(t, testHTTPTriggerPath) + defer os.Remove(zipFilePath) // clean up + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: 
testAccCloudFunctionsFunction_abiuAutomatic(functionName, bucketName, zipFilePath), + Check: resource.ComposeTestCheckFunc( + testAccCloudFunctionsFunctionExists( + t, funcResourceName, &function), + resource.TestCheckResourceAttrSet(funcResourceName, + "automatic_update_policy.#"), + ), + }, + { + ResourceName: funcResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"build_environment_variables", "labels", "terraform_labels"}, + }, + { + Config: testAccCloudFunctionsFunction_abiuOndeploy(functionName, bucketName, zipFilePath), + Check: resource.ComposeTestCheckFunc( + testAccCloudFunctionsFunctionExists( + t, funcResourceName, &function), + resource.TestCheckResourceAttrSet(funcResourceName, + "on_deploy_update_policy.#"), + ), + }, + { + ResourceName: funcResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"build_environment_variables", "labels", "terraform_labels"}, + }, + { + Config: testAccCloudFunctionsFunction_basic(functionName, bucketName, zipFilePath), + Check: resource.ComposeTestCheckFunc( + testAccCloudFunctionsFunctionExists( + t, funcResourceName, &function), + resource.TestCheckResourceAttrSet(funcResourceName, + "automatic_update_policy.#"), + ), + }, + { + ResourceName: funcResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"build_environment_variables", "labels", "terraform_labels"}, + }, + }, + }) +} + func testAccCheckCloudFunctionsFunctionDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -1503,3 +1567,87 @@ resource "google_cloudfunctions_function" "function" { } `, bucketName, zipFilePath, saName, serviceAccount, functionName) } + +func testAccCloudFunctionsFunction_abiuAutomatic(functionName string, bucketName string, zipFilePath string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" 
"bucket" { + name = "%s" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "archive" { + name = "index.zip" + bucket = google_storage_bucket.bucket.name + source = "%s" +} + +resource "google_cloudfunctions_function" "function" { + name = "%s" + runtime = "nodejs20" + description = "test function" + docker_registry = "ARTIFACT_REGISTRY" + available_memory_mb = 128 + source_archive_bucket = google_storage_bucket.bucket.name + source_archive_object = google_storage_bucket_object.archive.name + trigger_http = true + timeout = 61 + entry_point = "helloGET" + ingress_settings = "ALLOW_INTERNAL_ONLY" + labels = { + my-label = "my-label-value" + } + environment_variables = { + TEST_ENV_VARIABLE = "test-env-variable-value" + } + build_environment_variables = { + TEST_ENV_VARIABLE = "test-build-env-variable-value" + } + automatic_update_policy {} + max_instances = 10 + min_instances = 3 +} +`, bucketName, zipFilePath, functionName) +} + +func testAccCloudFunctionsFunction_abiuOndeploy(functionName string, bucketName string, zipFilePath string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "archive" { + name = "index.zip" + bucket = google_storage_bucket.bucket.name + source = "%s" +} + +resource "google_cloudfunctions_function" "function" { + name = "%s" + runtime = "nodejs20" + description = "test function" + docker_registry = "ARTIFACT_REGISTRY" + available_memory_mb = 128 + source_archive_bucket = google_storage_bucket.bucket.name + source_archive_object = google_storage_bucket_object.archive.name + trigger_http = true + timeout = 61 + entry_point = "helloGET" + ingress_settings = "ALLOW_INTERNAL_ONLY" + labels = { + my-label = "my-label-value" + } + environment_variables = { + TEST_ENV_VARIABLE = "test-env-variable-value" + } + build_environment_variables = { + 
TEST_ENV_VARIABLE = "test-build-env-variable-value" + } + on_deploy_update_policy {} + max_instances = 10 + min_instances = 3 +} +`, bucketName, zipFilePath, functionName) +} diff --git a/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown index d6afdf250da0..a42abdbe11da 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown @@ -179,6 +179,10 @@ Please refer to the field 'effective_labels' for all of the labels present on th * `secret_volumes` - (Optional) Secret volumes configuration. Structure is [documented below](#nested_secret_volumes). +* `automatic_update_policy` - (Optional) Security patches are applied automatically to the runtime without requiring the function to be redeployed. This should be specified as an empty block and cannot be set alongside `on_deploy_update_policy`. + +* `on_deploy_update_policy` - (Optional) Security patches are only applied when a function is redeployed. This should be specified as an empty block and cannot be set alongside `automatic_update_policy`. Structure is [documented below](#nested_on_deploy_update_policy). + The `event_trigger` block supports: * `event_type` - (Required) The type of event to observe. For example: `"google.storage.object.finalize"`. @@ -212,6 +216,10 @@ which to observe events. For example, `"myBucket"` or `"projects/my-project/topi * `version` - (Required) Version of the secret (version number or the string "latest"). It is recommended to use a numeric version for secret environment variables as any updates to the secret value is not reflected until new clones start. +The `on_deploy_update_policy` block supports: + +* `runtime_version` - (Output) The runtime version which was used during latest function deployment. 
+ The `secret_volumes` block supports: * `mount_path` - (Required) The path within the container to mount the secret volume. For example, setting the mount_path as "/etc/secrets" would mount the secret value files under the "/etc/secrets" directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount paths: "/etc/secrets" Restricted mount paths: "/cloudsql", "/dev/log", "/pod", "/proc", "/var/log". From 1847a50f6ae0b6dfac4a6d8ad274ef6f57fc5348 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 31 Jul 2025 11:39:22 -0700 Subject: [PATCH 653/884] Make adding exactly-one-of at root level of resource a breaking change (#14596) --- .../breaking_changes/resource_diff.go | 124 ++++++++++++++---- .../breaking_changes/resource_diff_test.go | 24 ++++ tools/diff-processor/diff/diff.go | 2 - tools/diff-processor/diff/sets.go | 97 +++++++------- tools/diff-processor/diff/sets_test.go | 5 + 5 files changed, 180 insertions(+), 72 deletions(-) diff --git a/tools/diff-processor/breaking_changes/resource_diff.go b/tools/diff-processor/breaking_changes/resource_diff.go index 3b9a029493c4..4aa1c7264de2 100644 --- a/tools/diff-processor/breaking_changes/resource_diff.go +++ b/tools/diff-processor/breaking_changes/resource_diff.go @@ -2,6 +2,7 @@ package breaking_changes import ( "fmt" + "strings" "github.com/GoogleCloudPlatform/magic-modules/tools/diff-processor/diff" ) @@ -43,38 +44,111 @@ var AddingExactlyOneOf = ResourceDiffRule{ } func AddingExactlyOneOfMessages(resource string, resourceDiff diff.ResourceDiff) []string { - var messages []string - newFieldSets := make(map[string]diff.FieldSet) // Set of field sets in new and not in old. - oldFieldSets := make(map[string]diff.FieldSet) // Set of field sets in old and not in new. 
- for key, fieldSet := range resourceDiff.FieldSets.New.ExactlyOneOf { - if _, ok := resourceDiff.FieldSets.Old.ExactlyOneOf[key]; !ok { - newFieldSets[key] = fieldSet - } - } - for key, fieldSet := range resourceDiff.FieldSets.Old.ExactlyOneOf { - if _, ok := resourceDiff.FieldSets.New.ExactlyOneOf[key]; !ok { - oldFieldSets[key] = fieldSet + messages := []string{} + + for newKey, newSet := range resourceDiff.FieldSets.New.ExactlyOneOf { + if _, ok := resourceDiff.FieldSets.Old.ExactlyOneOf[newKey]; ok { + continue // Unchanged EOO. } - } - // Find old field sets which are subsets of new field sets. - for _, newFieldSet := range newFieldSets { - var addedFields diff.FieldSet - found := false - for _, oldFieldSet := range oldFieldSets { - if oldFieldSet.IsSubsetOf(newFieldSet) { - addedFields = newFieldSet.Difference(oldFieldSet) - found = true + + // Determine the type of change. + isSimpleModification := false + var simpleAddedFields diff.FieldSet + + for _, oldSet := range resourceDiff.FieldSets.Old.ExactlyOneOf { + if oldSet.IsSubsetOf(newSet) { + isSimpleModification = true + simpleAddedFields = newSet.Difference(oldSet) break } } - if !found { - addedFields = newFieldSet - } - for field := range addedFields { - if fieldDiff, ok := resourceDiff.Fields[field]; ok && fieldDiff.Old != nil && !fieldDiff.Old.Required { + + if isSimpleModification { + // Simple modification: only added fields to an existing EOO. + // Only added *existing* optional fields are breaking. + for field := range simpleAddedFields { + if !isNewField(field, resourceDiff) && !isExistingFieldRequired(field, resourceDiff) { + messages = append(messages, fmt.Sprintf("Field `%s` within resource `%s` was added to exactly one of", field, resource)) + } + } + } else if isComplexModification(newSet, resourceDiff) { + // Complex modification: e.g., add and remove. + // Any existing, optional field in the new set is breaking. New fields are not. 
+ for field := range newSet { + if !isNewField(field, resourceDiff) && !isExistingFieldRequired(field, resourceDiff) { + messages = append(messages, fmt.Sprintf("Field `%s` within resource `%s` was added to exactly one of", field, resource)) + } + } + } else { + // Brand new EOO. + // Not breaking if it relaxes a previously required field. + isRelaxingRequired := false + for field := range newSet { + if isExistingFieldRequired(field, resourceDiff) { + isRelaxingRequired = true + break + } + } + if isRelaxingRequired { + continue + } + + // Not breaking if all fields are in a new optional ancestor. + isContained := true + if len(newSet) == 0 { + isContained = false + } + for field := range newSet { + if !isContainedInNewOptionalAncestor(field, resourceDiff) { + isContained = false + break + } + } + if isContained { + continue + } + + // Otherwise, all fields are breaking. + for field := range newSet { messages = append(messages, fmt.Sprintf("Field `%s` within resource `%s` was added to exactly one of", field, resource)) } } } return messages } + +func isComplexModification(newSet diff.FieldSet, resourceDiff diff.ResourceDiff) bool { + for _, oldSet := range resourceDiff.FieldSets.Old.ExactlyOneOf { + if len(newSet.Intersection(oldSet)) > 0 { + return true + } + } + return false +} + +func isNewField(field string, diff diff.ResourceDiff) bool { + fieldDiff, ok := diff.Fields[field] + return !ok || fieldDiff.Old == nil +} + +func isExistingFieldRequired(field string, diff diff.ResourceDiff) bool { + fieldDiff, ok := diff.Fields[field] + return ok && fieldDiff.Old != nil && fieldDiff.Old.Required +} + +func isContainedInNewOptionalAncestor(field string, diff diff.ResourceDiff) bool { + parts := strings.Split(field, ".") + if len(parts) < 2 { + return false + } + ancestorName := strings.Join(parts[:len(parts)-1], ".") + ancestorDiff, ok := diff.Fields[ancestorName] + if !ok { + return false + } + + isAncestorNew := ancestorDiff.Old == nil && ancestorDiff.New != nil + 
isAncestorOptional := ancestorDiff.New != nil && ancestorDiff.New.Optional + + return isAncestorNew && isAncestorOptional +} diff --git a/tools/diff-processor/breaking_changes/resource_diff_test.go b/tools/diff-processor/breaking_changes/resource_diff_test.go index 8719423f79cf..a614f346f426 100644 --- a/tools/diff-processor/breaking_changes/resource_diff_test.go +++ b/tools/diff-processor/breaking_changes/resource_diff_test.go @@ -168,6 +168,30 @@ var resourceSchemaRule_AddingExactlyOneOf_TestCases = []resourceSchemaTestCase{ }, expectedFields: []string{"field-c"}, }, + { + name: "adding new fields to new exactly-one-of", + resourceDiff: diff.ResourceDiff{ + FieldSets: diff.ResourceFieldSetsDiff{ + Old: diff.ResourceFieldSets{}, + New: diff.ResourceFieldSets{ + ExactlyOneOf: map[string]diff.FieldSet{ + "field-a,field-b": {"field-a": {}, "field-b": {}}, + }, + }, + }, + Fields: map[string]diff.FieldDiff{ + "field-a": { + Old: nil, + New: &schema.Schema{Description: "beep", Optional: true}, + }, + "field-b": { + Old: nil, + New: &schema.Schema{Description: "boop", Optional: true}, + }, + }, + }, + expectedFields: []string{"field-a", "field-b"}, + }, { name: "adding new exactly-one-of with an existing field", resourceDiff: diff.ResourceDiff{ diff --git a/tools/diff-processor/diff/diff.go b/tools/diff-processor/diff/diff.go index 9b3970839b58..d1966b537ef5 100644 --- a/tools/diff-processor/diff/diff.go +++ b/tools/diff-processor/diff/diff.go @@ -35,8 +35,6 @@ type ResourceFieldSets struct { RequiredWith map[string]FieldSet } -type FieldSet map[string]struct{} - type ResourceConfigDiff struct { Old *schema.Resource New *schema.Resource diff --git a/tools/diff-processor/diff/sets.go b/tools/diff-processor/diff/sets.go index 39e26f36c702..4a8e73c96ad4 100644 --- a/tools/diff-processor/diff/sets.go +++ b/tools/diff-processor/diff/sets.go @@ -5,70 +5,77 @@ import ( "strings" ) -// Return the union of two maps, overwriting any shared keys with the second map's values -func 
union[K comparable, V any](map1, map2 map[K]V) map[K]V { - if len(map1) == 0 { - return map2 - } - if len(map2) == 0 { - return map1 +// FieldSet is a set of strings representing fields. +type FieldSet map[string]struct{} + +// Difference returns the fields in s that are not in other. +func (s FieldSet) Difference(other FieldSet) FieldSet { + diff := make(FieldSet) + for k := range s { + if _, ok := other[k]; !ok { + diff[k] = struct{}{} + } } - merged := make(map[K]V, len(map1)+len(map2)) - for k, v := range map1 { - merged[k] = v + return diff +} + +// IsSubsetOf returns true if s is a subset of other. +func (s FieldSet) IsSubsetOf(other FieldSet) bool { + for k := range s { + if _, ok := other[k]; !ok { + return false + } } - for k, v := range map2 { - merged[k] = v + return true +} + +// Intersection returns the fields that are in both s and other. +func (s FieldSet) Intersection(other FieldSet) FieldSet { + intersection := make(FieldSet) + for k := range s { + if _, ok := other[k]; ok { + intersection[k] = struct{}{} + } } - return merged + return intersection } -func sliceToSetRemoveZeroPadding(slice []string) map[string]struct{} { - set := make(map[string]struct{}) - for _, item := range slice { - set[removeZeroPadding(item)] = struct{}{} +func sliceToSet(slice []string) FieldSet { + set := make(FieldSet) + for _, s := range slice { + if s != "" { + set[s] = struct{}{} + } } return set } -// field1.0.field2 -> field1.field2 -func removeZeroPadding(zeroPadded string) string { - var trimmed string - for _, part := range strings.Split(zeroPadded, ".") { - if part != "0" { - trimmed += part + "." 
+func sliceToSetRemoveZeroPadding(slice []string) FieldSet { + set := make(FieldSet) + for _, s := range slice { + if s != "" { + set[strings.ReplaceAll(s, ".0", "")] = struct{}{} } } - if trimmed == "" { - return "" - } - return trimmed[:len(trimmed)-1] + return set } -func setToSortedSlice(set map[string]struct{}) []string { +func setToSortedSlice(set FieldSet) []string { slice := make([]string, 0, len(set)) - for item := range set { - slice = append(slice, item) + for k := range set { + slice = append(slice, k) } sort.Strings(slice) return slice } -func (fs FieldSet) IsSubsetOf(other FieldSet) bool { - for field := range fs { - if _, ok := other[field]; !ok { - return false - } +func union[T any](a, b map[string]T) map[string]struct{} { + c := make(map[string]struct{}) + for k := range a { + c[k] = struct{}{} } - return true -} - -func (fs FieldSet) Difference(subset FieldSet) map[string]struct{} { - diff := make(map[string]struct{}) - for k := range fs { - if _, ok := subset[k]; !ok { - diff[k] = struct{}{} - } + for k := range b { + c[k] = struct{}{} } - return diff + return c } diff --git a/tools/diff-processor/diff/sets_test.go b/tools/diff-processor/diff/sets_test.go index 4f58ec8319d4..84387798de03 100644 --- a/tools/diff-processor/diff/sets_test.go +++ b/tools/diff-processor/diff/sets_test.go @@ -1,11 +1,16 @@ package diff import ( + "strings" "testing" "github.com/google/go-cmp/cmp" ) +func removeZeroPadding(s string) string { + return strings.ReplaceAll(s, ".0", "") +} + func TestRemoveZeroPadding(t *testing.T) { for _, tc := range []struct { name string From e906de9394e439bf8bdd746935c39d4f9352e2cc Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Thu, 31 Jul 2025 17:06:15 -0500 Subject: [PATCH 654/884] Add 7.0 major version testing project (#14707) Co-authored-by: Sarah French --- .../FEATURE-BRANCH-major-release-7.0.0.kt | 105 ++++++++++++++++++ .../projects/google_beta_subproject.kt | 2 +- .../projects/google_ga_subproject.kt | 2 +- 
.../components/projects/root_project.kt | 4 + 4 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt new file mode 100644 index 000000000000..5c463e28febc --- /dev/null +++ b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt @@ -0,0 +1,105 @@ +/* + * Copyright (c) HashiCorp, Inc. + * SPDX-License-Identifier: MPL-2.0 + */ + +// This file is controlled by MMv1, any changes made here will be overwritten + +package projects.feature_branches + +import ProviderNameBeta +import ProviderNameGa +import builds.* +import jetbrains.buildServer.configs.kotlin.Project +import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot +import projects.reused.nightlyTests +import replaceCharsId + +const val branchName = "FEATURE-BRANCH-major-release-7.0.0" + +// VCS Roots specifically for pulling code from the feature branches in the downstream repos + +object HashicorpVCSRootGa_featureBranchMajorRelease700: GitVcsRoot({ + name = "VCS root for the hashicorp/terraform-provider-${ProviderNameGa} repo @ refs/heads/${branchName}" + url = "https://github.com/hashicorp/terraform-provider-${ProviderNameGa}" + branch = "refs/heads/${branchName}" + branchSpec = """ + +:(refs/heads/*) + -:refs/pulls/* + """.trimIndent() +}) + +object HashicorpVCSRootBeta_featureBranchMajorRelease700: GitVcsRoot({ + name = "VCS root for the hashicorp/terraform-provider-${ProviderNameBeta} repo @ refs/heads/${branchName}" + url = "https://github.com/hashicorp/terraform-provider-${ProviderNameBeta}" + branch = "refs/heads/${branchName}" + branchSpec = """ + +:(refs/heads/*) + -:refs/pulls/* + 
""".trimIndent() +}) + +fun featureBranchMajorRelease700_Project(allConfig: AllContextParameters): Project { + + val projectId = replaceCharsId(branchName) + val gaProjectId = replaceCharsId(projectId + "_GA") + val betaProjectId= replaceCharsId(projectId + "_BETA") + + // Get config for using the GA and Beta identities + val gaConfig = getGaAcceptanceTestConfig(allConfig) + val betaConfig = getBetaAcceptanceTestConfig(allConfig) + + return Project{ + id(projectId) + name = "7.0.0 Major Release Testing" + description = "Subproject for testing feature branch $branchName" + + // Register feature branch-specific VCS roots in the project + vcsRoot(HashicorpVCSRootGa_featureBranchMajorRelease700) + vcsRoot(HashicorpVCSRootBeta_featureBranchMajorRelease700) + + // Nested Nightly Test project that uses hashicorp/terraform-provider-google + subProject( + Project{ + id(gaProjectId) + name = "Google" + subProject( + nightlyTests( + gaProjectId, + ProviderNameGa, + HashicorpVCSRootGa_featureBranchMajorRelease700, + gaConfig, + NightlyTriggerConfiguration( + branch = "refs/heads/${branchName}", // Make triggered builds use the feature branch + daysOfWeek = "4" // Wednesday for GA, TeamCity numbers days Sun=1...Sat=7 + ), + ) + ) + } + ) + + // Nested Nightly Test project that uses hashicorp/terraform-provider-google-beta + subProject( + Project { + id(betaProjectId) + name = "Google Beta" + subProject( + nightlyTests( + betaProjectId, + ProviderNameBeta, + HashicorpVCSRootBeta_featureBranchMajorRelease700, + betaConfig, + NightlyTriggerConfiguration( + branch = "refs/heads/${branchName}", // Make triggered builds use the feature branch + daysOfWeek="4" // Wednesday for Beta, TeamCity numbers days Sun=1...Sat=7 + ), + ) + ) + } + ) + + params { + readOnlySettings() + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt 
b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index 75eabec8a70c..f3a04d9df13f 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -33,7 +33,7 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { description = "Subproject containing builds for testing the Beta version of the Google provider" // Nightly Test project that uses hashicorp/terraform-provider-google-beta - subProject(nightlyTests(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration())) + subProject(nightlyTests(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek="1-3,5-7"))) // All nights except Wednesday (4) for Beta; feature branch testing happens on Wednesdays and TeamCity numbers days Sun=1...Sat=7 // MM Upstream project that uses modular-magician/terraform-provider-google-beta subProject(mmUpstream(betaId, ProviderNameBeta, ModularMagicianVCSRootBeta, HashiCorpVCSRootBeta, vcrConfig, NightlyTriggerConfiguration())) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt index cd45d7d754c8..fbf3685fbd94 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt @@ -31,7 +31,7 @@ fun googleSubProjectGa(allConfig: AllContextParameters): Project { description = "Subproject containing builds for testing the GA version of the Google provider" // Nightly Test project that uses hashicorp/terraform-provider-google - subProject(nightlyTests(gaId, ProviderNameGa, HashiCorpVCSRootGa, gaConfig, NightlyTriggerConfiguration())) + subProject(nightlyTests(gaId, ProviderNameGa, HashiCorpVCSRootGa, gaConfig, 
NightlyTriggerConfiguration(daysOfWeek="1-3,5-7"))) // All nights except Wednesday (4) for GA; feature branch testing happens on Wednesday and TeamCity numbers days Sun=1...Sat=7 // MM Upstream project that uses modular-magician/terraform-provider-google subProject(mmUpstream(gaId, ProviderNameGa, ModularMagicianVCSRootGa, HashiCorpVCSRootGa, vcrConfig, NightlyTriggerConfiguration())) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt index 3c96dea4f099..c810b73b9605 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt @@ -19,6 +19,7 @@ import generated.ServicesListGa import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.sharedResource import projects.feature_branches.featureBranchResourceIdentitySubProject +import projects.feature_branches.featureBranchMajorRelease700_Project // googleCloudRootProject returns a root project that contains a subprojects for the GA and Beta version of the // Google provider. There are also resources to help manage the test projects used for acceptance tests. 
@@ -66,6 +67,9 @@ fun googleCloudRootProject(allConfig: AllContextParameters): Project { // Feature branch-testing projects - these will be added and removed as needed + // Feature branch testing + subProject(featureBranchMajorRelease700_Project(allConfig)) // FEATURE-BRANCH-major-release-7.0.0 + params { readOnlySettings() } From 3b205bfa011454876f8843e99244539579e8a2d4 Mon Sep 17 00:00:00 2001 From: Glen Yu Date: Thu, 31 Jul 2025 18:29:12 -0400 Subject: [PATCH 655/884] Documentation: google_sql_database_instance formatting & clarity (#14656) Signed-off-by: Glen Yu --- .../website/docs/r/sql_database_instance.html.markdown | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 22fac0853045..8ae88ec888d6 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -173,7 +173,7 @@ resource "google_sql_database_instance" "main" { } ``` -### Cloud SQL Instance with MCP +### Cloud SQL Instance with Managed Connection Pooling ```hcl resource "google_sql_database_instance" "instance" { name: = "mcp-enabled-main-instance" @@ -347,9 +347,9 @@ The `settings` block supports: * `disk_autoresize_limit` - (Optional) The maximum size to which storage capacity can be automatically increased. The default value is 0, which specifies that there is no limit. -* `disk_size` - (Optional) The size of data disk, in GB. Size of a running instance cannot be reduced but can be increased. The minimum value is 10GB for PD_SSD, PD_HDD and 20GB for HYPERDISK_BALANCED. Note that this value will override the resizing from `disk_autoresize` if that feature is enabled. To avoid this, set `lifecycle.ignore_changes` on this field. +* `disk_size` - (Optional) The size of data disk, in GB. 
Size of a running instance cannot be reduced but can be increased. The minimum value is 10GB for `PD_SSD`, `PD_HDD` and 20GB for `HYPERDISK_BALANCED`. Note that this value will override the resizing from `disk_autoresize` if that feature is enabled. To avoid this, set `lifecycle.ignore_changes` on this field. -* `disk_type` - (Optional) The type of data disk: PD_SSD, PD_HDD, or HYPERDISK_BALANCED. Defaults to `PD_SSD`. HYPERDISK_BALANCED is preview. +* `disk_type` - (Optional) The type of data disk: `PD_SSD`, `PD_HDD`, or `HYPERDISK_BALANCED`. Defaults to `PD_SSD`. `HYPERDISK_BALANCED` is preview. * `data_disk_provisioned_iops` - (Optional, Beta) Provisioned number of I/O operations per second for the data disk. This field is only used for `HYPERDISK_BALANCED` disk types. From b31b9f99bcc83e258f72b2087bbb20224256ae39 Mon Sep 17 00:00:00 2001 From: Richard Belleville Date: Thu, 31 Jul 2025 16:01:46 -0700 Subject: [PATCH 656/884] Support additional_ip_ranges_config (adding multiple subnets to a cluster) (#14521) --- .../resource_container_cluster.go.tmpl | 129 +++++++++++++- .../resource_container_cluster_meta.yaml.tmpl | 2 + .../resource_container_cluster_test.go.tmpl | 165 ++++++++++++++++++ .../resource_container_node_pool.go.tmpl | 6 + ...esource_container_node_pool_meta.yaml.tmpl | 1 + .../docs/r/container_cluster.html.markdown | 10 ++ .../docs/r/container_node_pool.html.markdown | 2 + 7 files changed, 310 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 63f2d5e01796..6a708088b5b6 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -1832,6 +1832,29 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, + "additional_ip_ranges_config": { + Type: schema.TypeList, + 
Optional: true, + Description: `AdditionalIPRangesConfig is the configuration for individual additional subnetworks attached to the cluster`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "subnetwork": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `Name of the subnetwork. This can be the full path of the subnetwork or just the name.`, + }, + "pod_ipv4_range_names": { + Type: schema.TypeList, + Optional: true, + Description: `List of secondary ranges names within this subnetwork that can be used for pod IPs.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + + }, + + }, }, }, }, @@ -2677,7 +2700,7 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er } } - ipAllocationBlock, err := expandIPAllocationPolicy(d.Get("ip_allocation_policy"), d.Get("networking_mode").(string), d.Get("enable_autopilot").(bool)) + ipAllocationBlock, aircs, err := expandIPAllocationPolicy(d.Get("ip_allocation_policy"), d, d.Get("networking_mode").(string), d.Get("enable_autopilot").(bool), config) if err != nil { return err } @@ -2911,6 +2934,10 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er needUpdateAfterCreate := false + if len(aircs) > 0 { + needUpdateAfterCreate = true + } + // For now PSC based cluster don't support `enable_private_endpoint` on `create`, but only on `update` API call. // If cluster is PSC based and enable_private_endpoint is set to true we will ignore it on `create` call and update cluster right after creation. 
enablePrivateEndpointPSCCluster := isEnablePrivateEndpointPSCCluster(cluster) @@ -3038,6 +3065,13 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er } update.ForceSendFields = append(update.ForceSendFields, "DesiredAddonsConfig.GcePersistentDiskCsiDriverConfig.Enabled"); } + + if len(aircs) > 0 { + update.DesiredAdditionalIpRangesConfig = &container.DesiredAdditionalIPRangesConfig{ + AdditionalIpRangesConfigs: aircs, + } + } + req := &container.UpdateClusterRequest{Update: update} err = transport_tpg.Retry(transport_tpg.RetryOptions{ @@ -4245,6 +4279,30 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s's AdditionalPodRangesConfig has been updated", d.Id()) } + if d.HasChange("ip_allocation_policy.0.additional_ip_ranges_config") { + c := d.Get("ip_allocation_policy.0.additional_ip_ranges_config") + aircs, err := expandAdditionalIpRangesConfigs(c, d, config) + if err != nil { + return err + } + + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredAdditionalIpRangesConfig: &container.DesiredAdditionalIPRangesConfig{ + AdditionalIpRangesConfigs: aircs, + }, + }, + } + + updateF := updateFunc(req, "updating AdditionalIpRangesConfig") + // Call update serially. 
+ if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s's AdditionalIpRangesConfig has been updated", d.Id()) + } + if n, ok := d.GetOk("node_pool.#"); ok { for i := 0; i < n.(int); i++ { nodePoolInfo, err := extractNodePoolInformationFromCluster(d, config, clusterName) @@ -5407,23 +5465,66 @@ func expandPodCidrOverprovisionConfig(configured interface{}) *container.PodCIDR } } -func expandIPAllocationPolicy(configured interface{}, networkingMode string, autopilot bool) (*container.IPAllocationPolicy, error) { +func expandPodIpv4RangeNames(configured interface{}) []string { + l := configured.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil + } + var ranges []string + for _, rawRange := range l { + ranges = append(ranges, rawRange.(string)) + } + return ranges +} + +func expandAdditionalIpRangesConfigs(configured interface{}, d *schema.ResourceData, c *transport_tpg.Config) ([]*container.AdditionalIPRangesConfig, error) { + l := configured.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + var additionalIpRangesConfig []*container.AdditionalIPRangesConfig + for _, rawConfig := range l { + config := rawConfig.(map[string]interface{}) + subnetwork, err := tpgresource.ParseSubnetworkFieldValue(config["subnetwork"].(string), d, c) + if err != nil { + return nil, err + } + additionalIpRangesConfig = append(additionalIpRangesConfig, &container.AdditionalIPRangesConfig { + Subnetwork: subnetwork.RelativeLink(), + PodIpv4RangeNames: expandPodIpv4RangeNames(config["pod_ipv4_range_names"]), + }) + } + + return additionalIpRangesConfig, nil +} + +func expandIPAllocationPolicy(configured interface{}, d *schema.ResourceData, networkingMode string, autopilot bool, c *transport_tpg.Config) (*container.IPAllocationPolicy, []*container.AdditionalIPRangesConfig, error) { l := configured.([]interface{}) if len(l) == 0 || l[0] == nil { if networkingMode == "VPC_NATIVE" { - return 
nil, nil + return nil, nil, nil } return &container.IPAllocationPolicy{ UseIpAliases: false, UseRoutes: true, StackType: "IPV4", ForceSendFields: []string{"UseIpAliases"}, - }, nil + }, nil, nil } config := l[0].(map[string]interface{}) stackType := config["stack_type"].(string) + // We expand and return additional_ip_ranges_config separately because + // this field is OUTPUT_ONLY for ClusterCreate RPCs. Instead, during the + // Terraform Create flow, we follow the CreateCluster (without + // additional_ip_ranges_config populated) with an UpdateCluster (_with_ + // additional_ip_ranges_config populated). + additionalIpRangesConfigs, err := expandAdditionalIpRangesConfigs(config["additional_ip_ranges_config"], d, c) + if err != nil { + return nil, nil, err + } + return &container.IPAllocationPolicy{ UseIpAliases: networkingMode == "VPC_NATIVE" || networkingMode == "", ClusterIpv4CidrBlock: config["cluster_ipv4_cidr_block"].(string), @@ -5434,7 +5535,7 @@ func expandIPAllocationPolicy(configured interface{}, networkingMode string, aut UseRoutes: networkingMode == "ROUTES", StackType: stackType, PodCidrOverprovisionConfig: expandPodCidrOverprovisionConfig(config["pod_cidr_overprovision_config"]), - }, nil + }, additionalIpRangesConfigs, nil } func expandMaintenancePolicy(d *schema.ResourceData, meta interface{}) *container.MaintenancePolicy { @@ -7040,6 +7141,23 @@ func flattenPodCidrOverprovisionConfig(c *container.PodCIDROverprovisionConfig) } } +func flattenAdditionalIpRangesConfigs(c []*container.AdditionalIPRangesConfig) []map[string]interface{} { + if len(c) == 0 { + return nil + } + + var outRanges []map[string]interface{} + for _, rangeConfig := range c { + outRangeConfig := map[string]interface{} { + "subnetwork": rangeConfig.Subnetwork, + "pod_ipv4_range_names": rangeConfig.PodIpv4RangeNames, + } + outRanges = append(outRanges, outRangeConfig) + } + + return outRanges +} + func flattenIPAllocationPolicy(c *container.Cluster, d *schema.ResourceData, config 
*transport_tpg.Config) ([]map[string]interface{}, error) { // If IP aliasing isn't enabled, none of the values in this block can be set. if c == nil || c.IpAllocationPolicy == nil || !c.IpAllocationPolicy.UseIpAliases { @@ -7070,6 +7188,7 @@ func flattenIPAllocationPolicy(c *container.Cluster, d *schema.ResourceData, con "stack_type": p.StackType, "pod_cidr_overprovision_config": flattenPodCidrOverprovisionConfig(p.PodCidrOverprovisionConfig), "additional_pod_ranges_config": flattenAdditionalPodRangesConfig(c.IpAllocationPolicy), + "additional_ip_ranges_config": flattenAdditionalIpRangesConfigs(p.AdditionalIpRangesConfigs), }, }, nil } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl index 17e3eba15d21..c476c2bd18f3 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl @@ -160,6 +160,8 @@ fields: - field: 'identity_service_config.enabled' - field: 'initial_node_count' - field: 'ip_allocation_policy.additional_pod_ranges_config.pod_range_names' + - field: 'ip_allocation_policy.additional_ip_ranges_config.subnetwork' + - field: 'ip_allocation_policy.additional_ip_ranges_config.pod_ipv4_range_names' - field: 'ip_allocation_policy.cluster_ipv4_cidr_block' - field: 'ip_allocation_policy.cluster_secondary_range_name' - field: 'ip_allocation_policy.pod_cidr_overprovision_config.disabled' diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index b1dde35b4df1..d5b1b9772fa6 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ 
-14053,6 +14053,88 @@ resource "google_container_cluster" "primary" { `, name, networkName, subnetworkName, config) } +func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 2, 2), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + Check: resource.TestCheckResourceAttrSet("google_container_cluster.primary", "node_pool.0.network_config.subnetwork"), + }, + }, + }) +} + +func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + Check: resource.TestCheckResourceAttrSet("google_container_cluster.primary", "node_pool.0.network_config.subnetwork"), + }, + { + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 1, 1), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 2, 2), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccContainerCluster_withAnonymousAuthenticationConfig(t *testing.T) { t.Parallel() @@ -14093,6 +14175,89 @@ func TestAccContainerCluster_withAnonymousAuthenticationConfig(t *testing.T) { }) } +func testAccContainerCluster_additional_ip_ranges_config(name string, additionalSubnetCount int, secondaryRangeCount int) string { + var subnetStr string + var additionalIpRangesStr string + cumulativeRangeIndex := 0 + for subnetIndex := 0; subnetIndex < additionalSubnetCount; subnetIndex++ { + var secondaryRangeStr string + var podIpv4RangeStr string + for rangeIndex := 0; rangeIndex < secondaryRangeCount; rangeIndex++ { + secondaryRangeStr += fmt.Sprintf(` + secondary_ip_range { + range_name = "range-%d" + ip_cidr_range = "10.0.%d.0/24" + } + `, cumulativeRangeIndex, cumulativeRangeIndex) + + podIpv4RangeStr += fmt.Sprintf("google_compute_subnetwork.extra_%d.secondary_ip_range[%d].range_name", subnetIndex, rangeIndex) + if rangeIndex != secondaryRangeCount - 1 { + podIpv4RangeStr += ", " + } + cumulativeRangeIndex++ + } + + subnetStr += fmt.Sprintf(` + resource "google_compute_subnetwork" "extra_%d" { + ip_cidr_range = "10.1.%d.0/24" + name = 
"tf-test-subnet-%d" + network = google_compute_network.main.self_link + region = "us-central1" + %s + } + `, subnetIndex, subnetIndex, subnetIndex, secondaryRangeStr) + + additionalIpRangesStr += fmt.Sprintf(` + additional_ip_ranges_config { + subnetwork = google_compute_subnetwork.extra_%d.id + pod_ipv4_range_names = [%s] + } + `, subnetIndex, podIpv4RangeStr) + } + + return fmt.Sprintf(` + resource "google_compute_network" "main" { + name = "%s" + auto_create_subnetworks = false + } + + resource "google_compute_subnetwork" "main" { + ip_cidr_range = "10.2.0.0/24" + name = "main" + network = google_compute_network.main.self_link + region = "us-central1" + + secondary_ip_range { + range_name = "services" + ip_cidr_range = "10.3.0.0/16" + } + + secondary_ip_range { + range_name = "pods" + ip_cidr_range = "10.4.0.0/16" + } + } + + %s + + resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + network = google_compute_network.main.name + subnetwork = google_compute_subnetwork.main.name + initial_node_count = 1 + + ip_allocation_policy { + cluster_secondary_range_name = "pods" + services_secondary_range_name = "services" + %s + } + + deletion_protection = false + } + `, name, subnetStr, name, additionalIpRangesStr) +} + func testAccContainerCluster_withAnonymousAuthenticationConfig(name, networkName, subnetworkName string, mode string) string { return fmt.Sprintf(` resource "google_container_cluster" "primary" { diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index f2229b4f3ccc..c8694d6d241a 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -549,6 +549,11 @@ var schemaNodePool = map[string]*schema.Schema{ }, }, }, + "subnetwork": { + Type: schema.TypeString, + 
Computed: true, + Description: `The subnetwork path for the node pool. Format: projects/{project}/regions/{region}/subnetworks/{subnetwork} . If the cluster is associated with multiple subnetworks, the subnetwork for the node pool is picked based on the IP utilization during node pool creation and is immutable.`, + }, }, }, }, @@ -1320,6 +1325,7 @@ func flattenNodeNetworkConfig(c *container.NodeNetworkConfig, d *schema.Resource "network_performance_config": flattenNodeNetworkPerformanceConfig(c.NetworkPerformanceConfig), "additional_node_network_configs": flattenAdditionalNodeNetworkConfig(c.AdditionalNodeNetworkConfigs), "additional_pod_network_configs": flattenAdditionalPodNetworkConfig(c.AdditionalPodNetworkConfigs), + "subnetwork": c.Subnetwork, }) } return result diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl index c5462f2aff53..7cf47290f1f7 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl @@ -39,6 +39,7 @@ fields: api_field: 'network_config.pod_cidr_overprovision_config.disable' - field: 'network_config.pod_ipv4_cidr_block' - field: 'network_config.pod_range' + - field: 'network_config.subnetwork' - field: 'node_config.advanced_machine_features.enable_nested_virtualization' api_field: 'config.advanced_machine_features.enable_nested_virtualization' - field: 'node_config.advanced_machine_features.threads_per_core' diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index e11e2f173080..0d15fbcf450d 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -824,12 +824,22 @@ Possible values are `IPV4` and `IPV4_IPV6`. the cluster level. Used for Autopilot clusters and Standard clusters with which control of the secondary Pod IP address assignment to node pools isn't needed. Structure is [documented below](#nested_additional_pod_ranges_config). +* `additional_ip_ranges_config` - (Optional) The configuration for individual additional subnetworks attached to the cluster. +Structure is [documented below](#nested_additional_ip_ranges_config). + The `additional_pod_ranges_config` block supports: * `pod_range_names` - (Required) The names of the Pod ranges to add to the cluster. +The `additional_ip_ranges_config` block supports: + +* `subnetwork` - (Required) Name of the subnetwork. This can be the full path of the subnetwork or just the name. + +* `pod_ipv4_range_names`- (Required) List of secondary ranges names within this subnetwork that can be used for pod IPs. + + The `master_auth` block supports: * `client_certificate_config` - (Required) Whether client certificate authorization is enabled for this cluster. For example: diff --git a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown index 01ad1ed5eee3..1e0cf928b373 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown @@ -221,6 +221,8 @@ cluster. * `network_performance_config` - (Optional) Network bandwidth tier configuration. Structure is [documented below](#network_performance_config). +* `subnetwork` - (Optional) The subnetwork path for the node pool. Format: `projects/{project}/regions/{region}/subnetworks/{subnetwork}`. 
If the cluster is associated with multiple subnetworks, the subnetwork for the node pool is picked based on the IP utilization during node pool creation and is immutable + The `additional_node_network_configs` block supports: * `network` - Name of the VPC where the additional interface belongs. From f6df1955224718ce2a34a7c42a4271c3f12200a0 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 31 Jul 2025 16:02:17 -0700 Subject: [PATCH 657/884] Update enrolled_teams.yml (#14712) --- tools/issue-labeler/labeler/enrolled_teams.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index c89eaa8cfd42..d7aaa4a2fca2 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -29,6 +29,10 @@ service/aiplatform-prediction: - google_vertex_ai_endpoint - google_vertex_ai_deployment_resource_pool - google_ml_engine_model + - google_vertex_ai_endpoint_with_model_garden_deployment +service/aiplatform-rag-engine: + resources: + - google_vertex_ai_rag_engine_config service/aiplatform-tensorboard: resources: - google_vertex_ai_tensorboard @@ -645,6 +649,9 @@ service/run: team: cloud-run-control-plane resources: - google_cloud_run_.* +service/saasservicemgmt: + resources: + - google_saas_runtime_.* service/secretmanager: resources: - google_secret_manager_.* From 3d02165a48f83da43a68939a2ea18b1ae5c79978 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 31 Jul 2025 17:04:35 -0700 Subject: [PATCH 658/884] fix TestAccDiscoveryEngineCmekConfig_* tests (#14694) --- mmv1/products/discoveryengine/CmekConfig.yaml | 3 ++ ...discoveryengine_cmekconfig_default.tf.tmpl | 9 +++++ ...ource_discovery_engine_cmek_config_test.go | 37 ++++++++++++++++++- 3 files changed, 48 insertions(+), 1 deletion(-) diff --git a/mmv1/products/discoveryengine/CmekConfig.yaml 
b/mmv1/products/discoveryengine/CmekConfig.yaml index bac218339859..2ac10fce4c33 100644 --- a/mmv1/products/discoveryengine/CmekConfig.yaml +++ b/mmv1/products/discoveryengine/CmekConfig.yaml @@ -47,6 +47,9 @@ async: resource_inside_response: true custom_code: update_encoder: 'templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl' +sweeper: + url_substitutions: + - location: "us" examples: - name: 'discoveryengine_cmekconfig_default' primary_resource_id: 'default' diff --git a/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl index f5868ffe5670..2575c9433e94 100644 --- a/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl +++ b/mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl @@ -2,4 +2,13 @@ resource "google_discovery_engine_cmek_config" "default" { location = "us" cmek_config_id = "{{index $.Vars "cmek_config_id"}}" kms_key = "{{index $.Vars "kms_key_name"}}" + depends_on = [google_kms_crypto_key_iam_member.crypto_key] } + +data "google_project" "project" {} + +resource "google_kms_crypto_key_iam_member" "crypto_key" { + crypto_key_id = "{{index $.Vars "kms_key_name"}}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-discoveryengine.iam.gserviceaccount.com" +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go index b11b8378e5a5..9e8f5417e72f 100644 --- a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go +++ b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go @@ -11,7 +11,7 @@ func 
TestAccDiscoveryEngineCmekConfig_discoveryengineCmekconfigDefaultExample_up t.Parallel() context := map[string]interface{}{ - "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us", "tftest-shared-key-4").CryptoKey.Name, + "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us", "tftest-shared-key-6").CryptoKey.Name, "single_region_kms_key_name1": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tftest-shared-key-us-east1").CryptoKey.Name, "single_region_kms_key_name2": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tftest-shared-key-us-central1").CryptoKey.Name, "single_region_kms_key_name3": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-west1", "tftest-shared-key-us-west1").CryptoKey.Name, @@ -51,6 +51,15 @@ resource "google_discovery_engine_cmek_config" "default" { cmek_config_id = "tf-test-cmek-config-id%{random_suffix}" kms_key = "%{kms_key_name}" set_default = false + depends_on = [google_kms_crypto_key_iam_member.crypto_key] +} + +data "google_project" "project" {} + +resource "google_kms_crypto_key_iam_member" "crypto_key" { + crypto_key_id = "%{kms_key_name}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-discoveryengine.iam.gserviceaccount.com" } `, context) } @@ -71,7 +80,33 @@ resource "google_discovery_engine_cmek_config" "default" { single_region_keys { kms_key = "%{single_region_kms_key_name3}" } + depends_on = [google_kms_crypto_key_iam_member.crypto_key] +} + +data "google_project" "project" {} + +resource "google_kms_crypto_key_iam_member" "crypto_key" { + crypto_key_id = "%{kms_key_name}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-alloydb.iam.gserviceaccount.com" +} + +resource 
"google_kms_crypto_key_iam_member" "single_region_crypto_key1" { + crypto_key_id = "%{single_region_kms_key_name1}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-discoveryengine.iam.gserviceaccount.com" +} + +resource "google_kms_crypto_key_iam_member" "single_region_crypto_key2" { + crypto_key_id = "%{single_region_kms_key_name2}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-discoveryengine.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "single_region_crypto_key3" { + crypto_key_id = "%{single_region_kms_key_name3}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-discoveryengine.iam.gserviceaccount.com" } `, context) } From c8fb492da2d9c946ad25cd239f2ff182432f9882 Mon Sep 17 00:00:00 2001 From: Mohit Swain Date: Fri, 1 Aug 2025 00:12:06 +0000 Subject: [PATCH 659/884] Add support for dataproc cluster_tier (#14595) --- .../dataproc/resource_dataproc_cluster.go | 19 ++++- .../resource_dataproc_cluster_meta.yaml | 1 + .../resource_dataproc_cluster_test.go | 70 +++++++++++++++++++ .../docs/r/dataproc_cluster.html.markdown | 4 ++ 4 files changed, 91 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go index eab121b29d9b..b207a545038b 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go @@ -102,6 +102,7 @@ var ( } clusterConfigKeys = []string{ + "cluster_config.0.cluster_tier", "cluster_config.0.staging_bucket", "cluster_config.0.temp_bucket", "cluster_config.0.gce_cluster_config", @@ -552,7 +553,15 @@ func ResourceDataprocCluster() 
*schema.Resource { Description: `Allows you to configure various aspects of the cluster.`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - + "cluster_tier": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Specifies the tier of the cluster created.`, + AtLeastOneOf: clusterConfigKeys, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"CLUSTER_TIER_UNSPECIFIED", "CLUSTER_TIER_STANDARD", "CLUSTER_TIER_PREMIUM"}, false), + }, "staging_bucket": { Type: schema.TypeString, Optional: true, @@ -2073,6 +2082,10 @@ func expandClusterConfig(d *schema.ResourceData, config *transport_tpg.Config) ( conf.TempBucket = v.(string) } + if v, ok := d.GetOk("cluster_config.0.cluster_tier"); ok { + conf.ClusterTier = v.(string) + } + c, err := expandGceClusterConfig(d, config) if err != nil { return nil, err @@ -2969,8 +2982,8 @@ func flattenClusterConfig(d *schema.ResourceData, cfg *dataproc.ClusterConfig) ( } data := map[string]interface{}{ - "staging_bucket": d.Get("cluster_config.0.staging_bucket").(string), - + "staging_bucket": d.Get("cluster_config.0.staging_bucket").(string), + "cluster_tier": d.Get("cluster_config.0.cluster_tier").(string), "bucket": cfg.ConfigBucket, "temp_bucket": cfg.TempBucket, "gce_cluster_config": flattenGceClusterConfig(d, cfg.GceClusterConfig), diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml index fc4ee60e90b4..41cab5b3a61a 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml @@ -92,6 +92,7 @@ fields: - field: 'cluster_config.software_config.properties' - field: 'cluster_config.staging_bucket' - field: 'cluster_config.temp_bucket' + - field: 'cluster_config.cluster_tier' - field: 
'cluster_config.worker_config.accelerators.accelerator_count' - field: 'cluster_config.worker_config.accelerators.accelerator_type' - field: 'cluster_config.worker_config.disk_config.boot_disk_size_gb' diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go index bcc040593848..d47fb21248af 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go @@ -1224,6 +1224,76 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) { }) } +func TestAccDataprocCluster_withClusterTier(t *testing.T) { + t.Parallel() + + var cluster dataproc.Cluster + rnd := acctest.RandString(t, 10) + networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) + acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocClusterDestroy(t), + Steps: []resource.TestStep{ + { + // Set tier to CLUSTER_TIER_STANDARD + Config: testAccDataprocCluster_withClusterTier(rnd, subnetworkName, "CLUSTER_TIER_STANDARD"), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.tier_cluster", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.tier_cluster", "cluster_config.0.cluster_tier", "CLUSTER_TIER_STANDARD"), + ), + }, + { + // Set tier to CLUSTER_TIER_PREMIUM + Config: testAccDataprocCluster_withClusterTier(rnd, subnetworkName, "CLUSTER_TIER_PREMIUM"), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.tier_cluster", &cluster), + 
resource.TestCheckResourceAttr("google_dataproc_cluster.tier_cluster", "cluster_config.0.cluster_tier", "CLUSTER_TIER_PREMIUM"), + ), + }, + }, + }) +} + +func testAccDataprocCluster_withClusterTier(rnd, subnetworkName, tier string) string { + tierConfig := "" + if tier != "" { + tierConfig = fmt.Sprintf(`cluster_tier = "%s"`, tier) + } + clusterName := fmt.Sprintf("tf-test-dproc-tier-%s", rnd) + bucketName := clusterName + "-temp-bucket" + + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = "true" +} + +resource "google_dataproc_cluster" "tier_cluster" { + name = "%s" + region = "us-central1" + + cluster_config { + %s + staging_bucket = google_storage_bucket.bucket.name + temp_bucket = google_storage_bucket.bucket.name + + software_config { + image_version = "2.3.4-debian12" + } + + gce_cluster_config { + subnetwork = "%s" + } + } +} +`, bucketName, clusterName, tierConfig, subnetworkName) +} + func testAccCheckDataprocClusterDestroy(t *testing.T) resource.TestCheckFunc { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) diff --git a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown index 423b437406b0..e2f75aea6c68 100644 --- a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown @@ -45,6 +45,8 @@ resource "google_dataproc_cluster" "mycluster" { cluster_config { staging_bucket = "dataproc-staging-bucket" + cluster_tier = "CLUSTER_TIER_STANDARD" + master_config { num_instances = 1 machine_type = "e2-medium" @@ -341,6 +343,8 @@ resource "google_dataproc_cluster" "accelerated_cluster" { and jobs data, such as Spark and MapReduce history files. Note: If you don't explicitly specify a `temp_bucket` then GCP will auto create / assign one for you. 
+* `cluster_tier` - (Optional) The tier of the cluster. + * `gce_cluster_config` (Optional) Common config settings for resources of Google Compute Engine cluster instances, applicable to all instances in the cluster. Structure [defined below](#nested_gce_cluster_config). From 793720bff5dac8ae41f2a6ff3714b63d0973570e Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Fri, 1 Aug 2025 11:28:50 -0400 Subject: [PATCH 660/884] adding all the release diff testing logic (#14476) --- .../terraform/acctest/vcr_utils.go | 133 +++++++++++++++++- .../terraform/acctest/vcr_utils_test.go | 123 ++++++++++++++++ 2 files changed, 250 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index a41625ed8c4f..a7ef40efbf10 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -6,6 +6,7 @@ import ( "encoding/json" "errors" "fmt" + "io" "io/ioutil" "log" "math/rand" @@ -52,6 +53,8 @@ var configs map[string]*transport_tpg.Config var sources map[string]VcrSource +var diffFlag = "[Diff]" + // VcrSource is a source for a given VCR test with the value that seeded it type VcrSource struct { seed int64 @@ -145,7 +148,15 @@ func VcrTest(t *testing.T, c resource.TestCase) { if IsVcrEnabled() { defer closeRecorder(t) } else if isReleaseDiffEnabled() { - c = initializeReleaseDiffTest(c, t.Name()) + // creates temporary file for the individual test, will be a temporary to store the output + tempOutputFile, err := createTemporaryFile() + if err != nil { + t.Errorf("creating temporary file %v", err) + } + defer func() { + writeOutputFileDeferFunction(tempOutputFile, t.Failed()) + }() + c = initializeReleaseDiffTest(c, t.Name(), tempOutputFile) } c = extendWithTGCData(t, c) @@ -166,6 +177,16 @@ func VcrTest(t *testing.T, c resource.TestCase) { resource.Test(t, c) } +func createTemporaryFile() (*os.File, error) { + // creates temporary file for the 
individual test, will be a temporary to store the output + tempOutputFile, err := os.CreateTemp("", "release_diff_test_output_*.log") + if err != nil { + return nil, err + } + + return tempOutputFile, nil +} + // We need to explicitly close the VCR recorder to save the cassette func closeRecorder(t *testing.T) { configsLock.RLock() @@ -207,7 +228,7 @@ func isReleaseDiffEnabled() bool { return releaseDiff != "" } -func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.TestCase { +func initializeReleaseDiffTest(c resource.TestCase, testName string, tempOutputFile *os.File) resource.TestCase { var releaseProvider string packagePath := fmt.Sprint(reflect.TypeOf(transport_tpg.Config{}).PkgPath()) if strings.Contains(packagePath, "google-beta") { @@ -220,7 +241,9 @@ func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.Te c.ExternalProviders[releaseProvider] = resource.ExternalProvider{} } else { c.ExternalProviders = map[string]resource.ExternalProvider{ - releaseProvider: {}, + releaseProvider: { + // if left empty fetches most recent release provider + }, } } @@ -242,14 +265,33 @@ func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.Te }, } } + // InsertDiffSteps adds modified steps to the test that run with an external provider + // these steps do the actual infrastructure provisioning, and c.Steps is updated in the method to have the modified steps + c = InsertDiffSteps(c, tempOutputFile, releaseProvider, localProviderName) + return c +} + +// InsertDiffSteps inserts a new step into the test case that reformats the config to use the release provider - this allows us to see the diff +// between the local provider and the release provider +func InsertDiffSteps(c resource.TestCase, tempOutputFile *os.File, releaseProvider string, localProviderName string) resource.TestCase { + var countSteps = 0 var replacementSteps []resource.TestStep for _, testStep := range c.Steps { + countSteps++ if 
testStep.Config != "" { ogConfig := testStep.Config + fmt.Fprintf(tempOutputFile, "[DEBUG] Original config: %s\n", ogConfig) testStep.Config = ReformConfigWithProvider(ogConfig, localProviderName) - if testStep.ExpectError == nil && testStep.PlanOnly == false { + fmt.Fprintf(tempOutputFile, "[DEBUG] Reformatted config: %s\n", testStep.Config) + testStep.PreConfig = func() { + fmt.Fprintf(tempOutputFile, "%s Step %d\n", diffFlag, countSteps) + } + if testStep.ExpectError == nil && !testStep.PlanOnly { newStep := resource.TestStep{ + PreConfig: func() { + fmt.Fprintf(tempOutputFile, "Regular Step %d\n", countSteps) + }, Config: ReformConfigWithProvider(ogConfig, releaseProvider), } testStep.PlanOnly = true @@ -261,12 +303,14 @@ func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.Te replacementSteps = append(replacementSteps, testStep) } } - c.Steps = replacementSteps - return c } +// reformConfigWithProvider reformats the config to use the given provider +// The method matches a regex for the provider block and replaces it with the given provider. +// For example: ' data "google_compute_network" "default" { provider = "google-local" } ' +// will be reformatted to ' data "google_compute_network" "default" { provider = "google-beta" } ' func ReformConfigWithProvider(config, provider string) string { configBytes := []byte(config) providerReplacement := fmt.Sprintf("provider = %s", provider) @@ -287,6 +331,83 @@ func ReformConfigWithProvider(config, provider string) string { return string(resourceHeader.ReplaceAll(configBytes, providerReplacementBytes)) } +// ReadDiffOutput reads the outputted temporary file and returns its contents +func ReadDiffOutput(f *os.File) (string, error) { + if f == nil { + return "", fmt.Errorf("file handle is nil") + } + + // Seek to the beginning of the file in case it was just written to. 
+ if _, err := f.Seek(0, io.SeekStart); err != nil { + return "", fmt.Errorf("failed to seek to beginning of file: %w", err) + } + + // Read the entire file content. + content, err := os.ReadFile(f.Name()) + if err != nil { + return "", fmt.Errorf("failed to read file: %w", err) + } + + return string(content), nil +} + +// parseReleaseDiffOutput reads the temporary file created during the release diff test and returns whether the last line has a [Diff] flag, the test output, and any errors +func ParseReleaseDiffOutput(output string) (isDiff bool) { + trimmedOutput := strings.TrimSpace(output) + if trimmedOutput == "" { + return false + } + + lines := strings.Split(trimmedOutput, "\n") + lastLine := lines[len(lines)-1] + + isDiff = strings.HasPrefix(lastLine, diffFlag) + + return isDiff +} + +func writeOutputFileDeferFunction(tempOutputFile *os.File, failed bool) { + if tempOutputFile == nil { + return + } + // parses the temporary file created during the release diff test and returns the last line of output + // This is useful for extracting the diff output from the file after the test has run + + testOutput, err := ReadDiffOutput(tempOutputFile) + if err != nil { + fmt.Printf("Error reading temporary file: %v\n", err) + return + } + isDiff := ParseReleaseDiffOutput(testOutput) + tempOutputFile.Close() + err = os.Remove(tempOutputFile.Name()) + if err != nil { + fmt.Printf("Temporary File Deletion Error: %v\n", err) + } + regularFailureFile, err := os.Create(filepath.Join("", "regular_failure_file.log")) + if err != nil { + fmt.Printf("Error creating file: %v\n", err) + return + } + defer regularFailureFile.Close() + diffFailureFile, err := os.Create(filepath.Join("", "diff_failure_file.log")) + if err != nil { + fmt.Printf("Error creating file: %v\n", err) + return + } + defer diffFailureFile.Close() + if failed { + // Check if the output line starts with "[Diff]" + if isDiff { + fmt.Fprintf(os.Stdout, "%s Breaking Change Detected] \n", diffFlag) + 
fmt.Fprintf(diffFailureFile, "%s %s\n", diffFlag, testOutput) + } else { + fmt.Fprintf(regularFailureFile, testOutput) + fmt.Fprintf(regularFailureFile, "FAILED --- %s\n", testOutput) + } + } +} + // HandleVCRConfiguration configures the recorder (github.com/dnaeon/go-vcr/recorder) used in the VCR test // This includes: // - Setting the recording/replaying mode diff --git a/mmv1/third_party/terraform/acctest/vcr_utils_test.go b/mmv1/third_party/terraform/acctest/vcr_utils_test.go index 8481ca7c200b..f3b6c701b0bf 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils_test.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils_test.go @@ -7,9 +7,13 @@ import ( "io" "net/http" "net/url" + "os" + "regexp" "testing" "github.com/dnaeon/go-vcr/cassette" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -449,3 +453,122 @@ func TestReformConfigWithProvider(t *testing.T) { }) } } + +func TestInsertDiffSteps(t *testing.T) { + + var dummyCase = resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + ResourceName: "google_pubsub_subscription.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"topic"}, + }, + { + Config: `resource "google_example_widget" "foo" { + name = "dummy" + provider = google-beta + }`, + Check: resource.ComposeTestCheckFunc( + func(*terraform.State) error { return nil }, + ), + }, + { + Config: `provider = "google-local" + // ... 
configuration that is expected to cause an error + `, + ExpectError: regexp.MustCompile(`"restore_continuous_backup_source": conflicts with restore_backup_source`), + }, + }, + } + temp_file, err := os.CreateTemp("", "release_diff_test_output_*.log") + if err != nil { + t.Fatalf("Failed to create temp file: %v", err) + } + dummyCase = acctest.InsertDiffSteps(dummyCase, temp_file, "google-beta", "google-local") + + // Expected steps after InsertDiffSteps runs. + // A "diff" step (using 'google-local') is added for each original step containing a Config field, + // unless the step has ExpectError set. + var expectedSteps = []resource.TestStep{ + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-local + }`, + ExpectNonEmptyPlan: false, + PlanOnly: true, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-local + }`, + ExpectNonEmptyPlan: false, + PlanOnly: true, + }, + { + ResourceName: "google_pubsub_subscription.example", // No config, so no diff step added + }, + { + Config: `resource "google_example_widget" "foo" { + name = "dummy" + provider = google-beta + }`, + Check: resource.ComposeTestCheckFunc( + func(*terraform.State) error { return nil }, + ), + }, + { + Config: `resource "google_example_widget" "foo" { + name = "dummy" + provider = google-local + }`, + Check: resource.ComposeTestCheckFunc( + func(*terraform.State) error { return nil }, + ), + ExpectNonEmptyPlan: false, + PlanOnly: true, + }, + { + Config: `provider = "google-local" + // ... 
configuration that is expected to cause an error + `, // expect error means we don't do a second step + }, + } + + if len(dummyCase.Steps) != len(expectedSteps) { + t.Fatalf("Expected %d steps, but got %d", len(expectedSteps), len(dummyCase.Steps)) + } + + for i, step := range dummyCase.Steps { + if step.Config != expectedSteps[i].Config { + t.Fatalf("Expected step %d config to be:\n%q\nbut got:\n%q", i, expectedSteps[i].Config, step.Config) + } + if step.PlanOnly != expectedSteps[i].PlanOnly { + t.Fatalf("Expected step %d to have PlanOnly set to %v, but got %v", i, expectedSteps[i].PlanOnly, step.PlanOnly) + } + } + + defer os.Remove(temp_file.Name()) +} From d322d9567dc1d2e8a910fee2fa2e2fbd2fd71740 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 1 Aug 2025 08:46:12 -0700 Subject: [PATCH 661/884] Noted need for ExternalProviders when creating test projects (#14149) --- docs/content/test/test.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/content/test/test.md b/docs/content/test/test.md index 6723ae39432b..9fa7856a3c5f 100644 --- a/docs/content/test/test.md +++ b/docs/content/test/test.md @@ -451,8 +451,11 @@ If [bootstrapping]({{< ref "#bootstrapping" >}}) doesn't work or isn't an option ```go import ( "testing" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/envvar" + + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/envvar" ) func TestAccProductResourceName_update(t *testing.T) { t.Parallel() @@ -464,6 +467,10 @@ func TestAccProductResourceName_update(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ // ... 
+ // Add ExternalProviders so you can use `time_sleep` + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, Steps: []resource.TestStep{ { testAccProductResourceName_update1(context), From a95b1884a83422a5b4f312e7217dd13c8c2d827b Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Fri, 1 Aug 2025 13:05:25 -0400 Subject: [PATCH 662/884] Add roaks3 vacation (#14714) --- .ci/magician/github/membership_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 2e502f1eec40..efddefdb5b98 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -102,8 +102,8 @@ var ( "roaks3": { vacations: []Vacation{ { - startDate: newDate(2025, 7, 1), - endDate: newDate(2025, 7, 14), + startDate: newDate(2025, 8, 1), + endDate: newDate(2025, 8, 11), }, }, }, From a434c98db3c1190ac4f9fac65d7780de52570ddc Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 1 Aug 2025 11:06:42 -0700 Subject: [PATCH 663/884] Mark JSON fields as such in meta.yaml files (#14703) --- mmv1/api/type.go | 10 ++++++++++ mmv1/templates/terraform/metadata.yaml.tmpl | 3 +++ .../bigquery/resource_bigquery_table_meta.yaml.tmpl | 2 ++ .../bigtable/resource_bigtable_table_meta.yaml | 1 + 4 files changed, 16 insertions(+) diff --git a/mmv1/api/type.go b/mmv1/api/type.go index bd0f4c7fa0d6..3156d021b149 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -1274,3 +1274,13 @@ func (t Type) GetPropertySchemaPathList(propertyList []string) []string { } return list } + +func (t Type) IsJsonField() bool { + if t.CustomFlatten == "templates/terraform/custom_flatten/json_schema.tmpl" { + return true + } + if t.CustomExpand == "templates/terraform/custom_expand/json_schema.tmpl" || t.CustomExpand == "templates/terraform/custom_expand/json_value.tmpl" { + return true + } + return false +} diff --git a/mmv1/templates/terraform/metadata.yaml.tmpl 
b/mmv1/templates/terraform/metadata.yaml.tmpl index 77476e08e626..426424016aaa 100644 --- a/mmv1/templates/terraform/metadata.yaml.tmpl +++ b/mmv1/templates/terraform/metadata.yaml.tmpl @@ -19,6 +19,9 @@ fields: {{- if and (ne $p.MetadataLineage $p.MetadataApiLineage) (not $p.ProviderOnly) }} api_field: '{{ $p.MetadataApiLineage }}' {{- end }} + {{- if $p.IsJsonField }} + json: true + {{- end }} {{- if $p.ProviderOnly }} provider_only: true {{- end }} diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl index 720a051337e3..97c634f5784c 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl @@ -82,6 +82,7 @@ fields: - field: 'external_data_configuration.parquet_options.enum_as_string' - field: 'external_data_configuration.reference_file_schema_uri' - field: 'external_data_configuration.schema' + json: true - field: 'external_data_configuration.source_format' - field: 'external_data_configuration.source_uris' - field: 'friendly_name' @@ -104,6 +105,7 @@ fields: - field: 'require_partition_filter' - field: 'resource_tags' - field: 'schema' + json: true {{- if ne $.TargetVersionName "ga" }} - field: 'schema_foreign_type_info.type_system' {{- end }} diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml index 0cd79f2f2123..fb0dfb987c44 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml @@ -14,4 +14,5 @@ fields: - field: 'name' - field: 'project' - field: 'row_key_schema' + json: true - field: 'split_keys' From 9d840706da9b60cad5c7267a450808e56bfed9f3 Mon Sep 17 00:00:00 
2001 From: Nathaniel Ford Date: Fri, 1 Aug 2025 12:02:32 -0700 Subject: [PATCH 664/884] Implement boot disk config for Hyperdisk provisioned iops and throughput for container node_config resources. (#14600) Co-authored-by: Nathaniel Ford --- .../services/container/node_config.go.tmpl | 132 +++++- .../resource_container_node_pool_test.go.tmpl | 402 ++++++++++++++++++ .../docs/r/container_cluster.html.markdown | 18 +- 3 files changed, 546 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 3a2f2a03b429..9c4763e7f5a4 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -146,6 +146,8 @@ func schemaNodeConfig() *schema.Schema { Description: `Type of the disk attached to each node. Such as pd-standard, pd-balanced or pd-ssd`, }, + "boot_disk": schemaBootDiskConfig(), + "guest_accelerator": { Type: schema.TypeList, Optional: true, @@ -899,6 +901,46 @@ func schemaNodeConfig() *schema.Schema { } } + +func schemaBootDiskConfig() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Description: `Boot disk configuration for node pools nodes.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "disk_type": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Type of the disk attached to each node. Such as pd-standard, pd-balanced or pd-ssd`, + }, + "size_gb": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + ValidateFunc: validation.IntAtLeast(10), + Description: `Size of the disk attached to each node, specified in GB. The smallest allowed disk size is 10GB.`, + }, + "provisioned_iops": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: `Configured IOPs provisioning. 
Only valid with disk type hyperdisk-balanced.`, + }, + "provisioned_throughput": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: `Configured throughput provisioning. Only valid with disk type hyperdisk-balanced.`, + }, + }, + }, + } +} + // Separate since this currently only supports a single value -- a subset of // the overall NodeKubeletConfig func schemaNodePoolAutoConfigNodeKubeletConfig() *schema.Schema { @@ -1034,6 +1076,10 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.DiskType = v.(string) } + if v, ok := nodeConfig["boot_disk"]; ok { + nc.BootDisk = expandBootDiskConfig(v) + } + if v, ok := nodeConfig["local_ssd_count"]; ok { nc.LocalSsdCount = int64(v.(int)) } @@ -1307,6 +1353,36 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { return nc } +func expandBootDiskConfig(v interface{}) *container.BootDisk { + bd := &container.BootDisk{} + if v == nil { + return nil + } + ls := v.([]interface{}) + if len(ls) == 0 { + return nil + } + cfg := ls[0].(map[string]interface{}) + + if v, ok := cfg["disk_type"]; ok { + bd.DiskType = v.(string) + } + + if v, ok := cfg["size_gb"]; ok { + bd.SizeGb = int64(v.(int)) + } + + if v, ok := cfg["provisioned_iops"]; ok { + bd.ProvisionedIops = int64(v.(int)) + } + + if v, ok := cfg["provisioned_throughput"]; ok { + bd.ProvisionedThroughput = int64(v.(int)) + } + + return bd +} + func expandResourceManagerTags(v interface{}) *container.ResourceManagerTags { if v == nil { return nil @@ -1688,11 +1764,12 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "containerd_config": flattenContainerdConfig(c.ContainerdConfig), "disk_size_gb": c.DiskSizeGb, "disk_type": c.DiskType, + "boot_disk": flattenBootDiskConfig(c.BootDisk), "guest_accelerator": flattenContainerGuestAccelerators(c.Accelerators), "local_ssd_count": c.LocalSsdCount, "logging_variant": flattenLoggingVariant(c.LoggingConfig), {{- if ne $.TargetVersionName "ga" }} - 
"ephemeral_storage_config": flattenEphemeralStorageConfig(c.EphemeralStorageConfig), + "ephemeral_storage_config": flattenEphemeralStorageConfig(c.EphemeralStorageConfig), {{- end }} "local_nvme_ssd_block_config": flattenLocalNvmeSsdBlockConfig(c.LocalNvmeSsdBlockConfig), "ephemeral_storage_local_ssd_config": flattenEphemeralStorageLocalSsdConfig(c.EphemeralStorageLocalSsdConfig), @@ -1741,6 +1818,23 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte return config } +func flattenBootDiskConfig(c *container.BootDisk) []map[string]interface{} { + config := []map[string]interface{}{} + + if c == nil { + return config + } + + config = append(config, map[string]interface{}{ + "disk_type": c.DiskType, + "size_gb": c.SizeGb, + "provisioned_iops": c.ProvisionedIops, + "provisioned_throughput": c.ProvisionedThroughput, + }) + + return config +} + func flattenResourceManagerTags(c *container.ResourceManagerTags) map[string]interface{} { if c == nil { return nil @@ -2332,7 +2426,9 @@ func nodePoolNodeConfigUpdate(d *schema.ResourceData, config *transport_tpg.Conf if d.HasChange("node_config.0.disk_size_gb") || d.HasChange("node_config.0.disk_type") || d.HasChange("node_config.0.machine_type") || - d.HasChange("node_config.0.storage_pools") { + d.HasChange("node_config.0.storage_pools") || + d.HasChange("node_config.0.boot_disk") { + req := &container.UpdateNodePoolRequest{ Name: name, DiskSizeGb: int64(d.Get("node_config.0.disk_size_gb").(int)), @@ -2350,6 +2446,34 @@ func nodePoolNodeConfigUpdate(d *schema.ResourceData, config *transport_tpg.Conf req.StoragePools = storagePools } + if v, ok := d.GetOk("node_config.0.boot_disk"); ok { + bd := expandBootDiskConfig(v) + req.BootDisk = bd + + // The following checks are to ensure that the migrating fields are handled properly. 
+ // Migrating fields are disk_type -> boot_disk.disk_type and disk_size_gb -> boot_disk.size_gb + // If the legacy (top level) disk_type field is not changing, nil it out to allow the API to fill it in. + legacyDiskTypeOld, legacyDiskTypeNew := d.GetChange("node_config.0.disk_type") + if legacyDiskTypeOld == legacyDiskTypeNew { + req.DiskType = "" + } + // If the new boot disk configuration disk_filed is not changing, nil it out to allow the API to fill it in. + bootDiskTypeOld, bootDiskTypeNew := d.GetChange("node_config.0.boot_disk.0.disk_type") + if bootDiskTypeOld == bootDiskTypeNew { + req.BootDisk.DiskType = "" + } + // If the legacy (top level) disk_size_gb field is not changing, nil it out to allow the API to fill it in. + legacyDiskSizeGbOld, legacyDiskSizeGbNew := d.GetChange("node_config.0.disk_size_gb") + if legacyDiskSizeGbOld == legacyDiskSizeGbNew { + req.DiskSizeGb = 0 + } + // if the new boot disk configuration size_gb field is not changing, nil it out to allow the API to fill it in. 
+ bootDiskSizeGbOld, bootDiskSizeGbNew := d.GetChange("node_config.0.boot_disk.0.size_gb") + if bootDiskSizeGbOld == bootDiskSizeGbNew { + req.BootDisk.SizeGb = 0 + } + } + updateF := func() error { clusterNodePoolsUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Update(nodePoolInfo.fullyQualifiedName(name), req) if config.UserProjectOverride { @@ -2364,14 +2488,14 @@ func nodePoolNodeConfigUpdate(d *schema.ResourceData, config *transport_tpg.Conf return ContainerOperationWait(config, op, nodePoolInfo.project, nodePoolInfo.location, - "updating GKE node pool disk_size_gb/disk_type/machine_type/storage_pools", userAgent, + "updating GKE node pool disk_size_gb/disk_type/machine_type/storage_pools/boot_disk", userAgent, timeout) } if err := retryWhileIncompatibleOperation(timeout, npLockKey, updateF); err != nil { return err } - log.Printf("[INFO] Updated disk disk_size_gb/disk_type/machine_type/storage_pools for Node Pool %s", d.Id()) + log.Printf("[INFO] Updated disk disk_size_gb/disk_type/machine_type/storage_pools/boot_disk for Node Pool %s", d.Id()) } if d.HasChange(prefix + "node_config.0.taint") { diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 10b731caf5d9..3cc91e2d917b 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -317,6 +317,408 @@ func TestAccContainerNodePool_withNodeConfig(t *testing.T) { }) } + +func TestAccContainerNodePool_withClusterBootDisk(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_withClusterBootDisk(cluster, networkName, subnetworkName), + }, + { + ResourceName: "google_container_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerNodePool_withClusterBootDiskUpdate(cluster, networkName, subnetworkName), + }, + { + ResourceName: "google_container_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerNodePool_withClusterBootDisk(cluster, networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" + + node_config { + machine_type = "c3-standard-4" + boot_disk { + size_gb = 100 + disk_type = "hyperdisk-balanced" + provisioned_iops = 3456 + provisioned_throughput = 234 + } + } +} +`, cluster, networkName, subnetworkName) +} + +func testAccContainerNodePool_withClusterBootDiskUpdate(cluster, networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" + + node_config { + machine_type = "c3-standard-4" + boot_disk { + size_gb = 170 
+ disk_type = "hyperdisk-balanced" + provisioned_iops = 4567 + provisioned_throughput = 345 + } + } +} +`, cluster, networkName, subnetworkName) +} + + + +func TestAccContainerNodePool_withNodeConfigWithBootDiskConfig(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_withBootDisk(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_withBootDiskUpdate(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_withNodeConfigWithBootDiskConfigChangeType(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_withBootDisk(cluster, nodePool, networkName, subnetworkName), 
+ }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_withBootDiskTypeChangeUpdate(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_withNodeConfigWithBootDiskConfigChangeTypeLegacy(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_withBootDisk(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_withLegacyBootDiskTypeSize(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_withLegacyNodeConfigAndBootDiskUpdate(t *testing.T) { + // Ensure that the legacy configuration (top level of node_config) can be updated to the new configuration (in boot_disk). 
+ t.Parallel() + + cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_withLegacyBootDiskTypeSize(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_withLegacyBootDiskTypeSizeUpdateNew(cluster, nodePool, networkName, subnetworkName), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccContainerNodePool_withBootDisk(cluster, np, networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" +} + +resource "google_container_node_pool" "np" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + cluster = google_container_cluster.cluster.name + initial_node_count = 3 + + node_config { + machine_type = "c3-standard-4" + boot_disk { + size_gb = 100 + disk_type = "hyperdisk-balanced" + provisioned_iops = 3456 + provisioned_throughput = 234 + } + } +} +`, cluster, networkName, subnetworkName, np) +} + +func testAccContainerNodePool_withBootDiskUpdate(cluster, np, 
networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" +} + +resource "google_container_node_pool" "np" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + cluster = google_container_cluster.cluster.name + initial_node_count = 3 + + node_config { + machine_type = "c3-standard-4" + boot_disk { + size_gb = 200 + disk_type = "hyperdisk-balanced" + provisioned_iops = 4567 + provisioned_throughput = 345 + } + } +} +`, cluster, networkName, subnetworkName, np) +} + +// Checks to ensure that boot disk type can be moved off of hyperdisk-balanced. +func testAccContainerNodePool_withBootDiskTypeChangeUpdate(cluster, np, networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" +} + +resource "google_container_node_pool" "np" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + cluster = google_container_cluster.cluster.name + initial_node_count = 3 + + node_config { + machine_type = "c3-standard-4" + boot_disk { + size_gb = 201 + disk_type = "pd-balanced" + } + } +} +`, cluster, networkName, subnetworkName, np) +} + +// Checks to ensure legacy boot disk type and size can be used. 
+func testAccContainerNodePool_withLegacyBootDiskTypeSize(cluster, np, networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" +} + +resource "google_container_node_pool" "np" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + cluster = google_container_cluster.cluster.name + initial_node_count = 3 + + node_config { + machine_type = "c3-standard-4" + disk_type = "pd-balanced" + disk_size_gb = 202 + } +} +`, cluster, networkName, subnetworkName, np) +} + +// Used to check that legacy -> new updates work. +func testAccContainerNodePool_withLegacyBootDiskTypeSizeUpdateNew(cluster, np, networkName, subnetworkName string) string { + return fmt.Sprintf(` +provider "google" { + alias = "user-project-override" + user_project_override = true +} +resource "google_container_cluster" "cluster" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + deletion_protection = false + network = "%s" + subnetwork = "%s" +} + +resource "google_container_node_pool" "np" { + provider = google.user-project-override + name = "%s" + location = "us-central1-a" + cluster = google_container_cluster.cluster.name + initial_node_count = 3 + + node_config { + machine_type = "c3-standard-4" + boot_disk { + size_gb = 203 + disk_type = "hyperdisk-balanced" + } + } +} +`, cluster, networkName, subnetworkName, np) +} + + func TestAccContainerNodePool_withTaintsUpdate(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 0d15fbcf450d..5d08d8233656 
100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -879,13 +879,16 @@ The `master_authorized_networks_config.cidr_blocks` block supports: The `node_config` block supports: +* `boot_disk` - (Optional) Configuration of the node pool boot disk. Structure is [documented below](#nested_boot_disk) + * `confidential_nodes` - (Optional) Configuration for Confidential Nodes feature. Structure is [documented below](#nested_confidential_nodes). * `disk_size_gb` - (Optional) Size of the disk attached to each node, specified - in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. + in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. This is being migrated to `boot_disk.size_gb`, and must match if specified in both places. + Prefer configuring `boot_disk`. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-balanced' + (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-balanced' This is being migrated to `boot_disk.disk_type`, and must match if specified in both places. Prefer configuring `boot_disk`. * `enable_confidential_storage` - (Optional) Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. @@ -1074,6 +1077,17 @@ sole_tenant_config { * `advanced_machine_features` - (Optional) Specifies options for controlling advanced machine features. Structure is [documented below](#nested_advanced_machine_features). +The `boot_disk` block supports: + +* `size_gb` - (Optional) Size of the disk attached to each node, specified + in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. This is being migrated from `node_config.disk_size_gb`, and must match if specified in both places. Prefer using this field. 
+ +* `disk_type` - (Optional) Type of the disk attached to each node + (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', 'hyperdisk-balanced'). If unspecified, the default disk type is 'pd-balanced' This is being migrated from `node_config.disk_type`, and must match if specified in both places. Prefer using this field. + +* `provisioned_iops` - (Optional) Configure disk IOPs. This is only valid if the `disk_type` is 'hyperdisk-balanced'. See [performance limit documention](https://cloud.google.com/compute/docs/disks/hyperdisk-perf-limits) for more information about valid values. + +* `provisioned_throughput` - (Optional) Configure disk throughput. This is only valid if the `disk_type` is 'hyperdisk-balanced'. See [performance limit documention](https://cloud.google.com/compute/docs/disks/hyperdisk-perf-limits) for more information about valid values. The `confidential_nodes` block supports: From 39554acbae380c69fa64c03fb9c30282525409c1 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Fri, 1 Aug 2025 15:46:18 -0400 Subject: [PATCH 665/884] Add error checking to calls of nodePoolNodeConfigUpdate (#14706) --- .../services/container/resource_container_cluster.go.tmpl | 4 +++- .../services/container/resource_container_node_pool.go.tmpl | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 6a708088b5b6..0fc730f721d0 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -4418,7 +4418,9 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er return err } - nodePoolNodeConfigUpdate(d, config, nodePoolInfo, "", defaultPool, d.Timeout(schema.TimeoutUpdate)) + if err = nodePoolNodeConfigUpdate(d, config, nodePoolInfo, "", defaultPool, 
d.Timeout(schema.TimeoutUpdate)); err != nil { + return err + } } if d.HasChange("notification_config") { diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index c8694d6d241a..2531ce078380 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -1505,13 +1505,15 @@ func nodePoolUpdate(d *schema.ResourceData, meta interface{}, nodePoolInfo *Node } if err := retryWhileIncompatibleOperation(timeout, npLockKey, updateF); err != nil { - return err + return err } log.Printf("[INFO] Updated autoscaling in Node Pool %s", d.Id()) } if d.HasChange(prefix + "node_config") { - nodePoolNodeConfigUpdate(d, config, nodePoolInfo, prefix, name, timeout) + if err := nodePoolNodeConfigUpdate(d, config, nodePoolInfo, prefix, name, timeout); err != nil { + return err + } } if d.HasChange(prefix + "node_count") { From f72b525ec7a890efd412e156069950f1b38fcc72 Mon Sep 17 00:00:00 2001 From: Chun Wang Date: Fri, 1 Aug 2025 13:11:23 -0700 Subject: [PATCH 666/884] Support singProcessOOMKill in node_kubelet_config (#14704) --- .../services/container/node_config.go.tmpl | 9 +++++++++ .../resource_container_node_pool_test.go.tmpl | 13 ++++++++----- .../website/docs/r/container_cluster.html.markdown | 2 ++ 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 9c4763e7f5a4..29ffe7e5dd81 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -666,6 +666,11 @@ func schemaNodeConfig() *schema.Schema { Optional: true, Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl 
patterns which can be set on the Pods.`, Elem: &schema.Schema{Type: schema.TypeString}, + }, + "single_process_oom_kill": { + Type: schema.TypeBool, + Optional: true, + Description: `Defines whether to enable single process OOM killer.`, }, }, }, @@ -1477,6 +1482,9 @@ func expandKubeletConfig(v interface{}) *container.NodeKubeletConfig { for i, s := range sysctls { kConfig.AllowedUnsafeSysctls[i] = s.(string) } + } + if singleProcessOomKill, ok := cfg["single_process_oom_kill"]; ok { + kConfig.SingleProcessOomKill = singleProcessOomKill.(bool) } return kConfig } @@ -2131,6 +2139,7 @@ func flattenKubeletConfig(c *container.NodeKubeletConfig) []map[string]interface "image_minimum_gc_age": c.ImageMinimumGcAge, "image_maximum_gc_age": c.ImageMaximumGcAge, "allowed_unsafe_sysctls": c.AllowedUnsafeSysctls, + "single_process_oom_kill": c.SingleProcessOomKill, }) } return result diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 3cc91e2d917b..dcdd1d4c7085 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -947,7 +947,7 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "static", "100ms", networkName, subnetworkName, "TRUE", "100Mi", "1m", "10m", true, 2048, 10, 10, 85), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "static", "100ms", networkName, subnetworkName, "TRUE", "100Mi", "1m", "10m", true, true, 2048, 10, 10, 85), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -956,6 +956,8 @@ func TestAccContainerNodePool_withKubeletConfig(t 
*testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", "node_config.0.kubelet_config.0.cpu_cfs_quota", "true"), + resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.single_process_oom_kill", "true"), resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", "node_config.0.kubelet_config.0.insecure_kubelet_readonly_port_enabled", "TRUE"), resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", @@ -982,7 +984,7 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "", "", networkName, subnetworkName, "FALSE", "200Mi", "30s", "", false, 1024, 5, 50, 80), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "", "", networkName, subnetworkName, "FALSE", "200Mi", "30s", "", false, true, 1024, 5, 50, 80), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -1020,7 +1022,7 @@ func TestAccContainerNodePool_withInvalidKubeletCpuManagerPolicy(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "dontexist", "100us", networkName, subnetworkName,"TRUE", "", "", "", false, 1024, 2, 70, 75), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "dontexist", "100us", networkName, subnetworkName,"TRUE", "", "", "", false, true, 1024, 2, 70, 75), ExpectError: regexp.MustCompile(`.*to be one of \["?static"? "?none"? 
"?"?\].*`), }, }, @@ -3853,7 +3855,7 @@ resource "google_container_node_pool" "with_sandbox_config" { } {{- end }} -func testAccContainerNodePool_withKubeletConfig(cluster, np, policy, period, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled, containerLogMaxSize, imageMinimumGcAge, imageMaximumGcAge string, quota bool, podPidsLimit, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent int) string { +func testAccContainerNodePool_withKubeletConfig(cluster, np, policy, period, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled, containerLogMaxSize, imageMinimumGcAge, imageMaximumGcAge string, quota, singleProcessOomKill bool, podPidsLimit, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent int) string { return fmt.Sprintf(` data "google_container_engine_versions" "central1a" { location = "us-central1-a" @@ -3891,6 +3893,7 @@ resource "google_container_node_pool" "with_kubelet_config" { image_minimum_gc_age = %q image_maximum_gc_age = %q allowed_unsafe_sysctls = ["kernel.shm*", "kernel.msg*", "kernel.sem", "fs.mqueue.*", "net.*"] + single_process_oom_kill = %v } oauth_scopes = [ "https://www.googleapis.com/auth/logging.write", @@ -3899,7 +3902,7 @@ resource "google_container_node_pool" "with_kubelet_config" { logging_variant = "DEFAULT" } } -`, cluster, networkName, subnetworkName, np, policy, quota, period, insecureKubeletReadonlyPortEnabled, podPidsLimit, containerLogMaxSize, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent, imageMinimumGcAge, imageMaximumGcAge) +`, cluster, networkName, subnetworkName, np, policy, quota, period, insecureKubeletReadonlyPortEnabled, podPidsLimit, containerLogMaxSize, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent, imageMinimumGcAge, imageMaximumGcAge, singleProcessOomKill) } func testAccContainerNodePool_withLinuxNodeConfig(cluster, np, tcpMem, networkName, subnetworkName string) string { diff 
--git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 5d08d8233656..20490ff2b14f 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1483,6 +1483,8 @@ such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", * `allowed_unsafe_sysctls` - (Optional) Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods. The allowed sysctl groups are `kernel.shm*`, `kernel.msg*`, `kernel.sem`, `fs.mqueue.*`, and `net.*`. +* `single_process_oom_kill` - (Optional) Defines whether to enable single process OOM killer. If true, the processes in the container will be OOM killed individually instead of as a group. + The `linux_node_config` block supports: * `sysctls` - (Optional) The Linux kernel parameters to be applied to the nodes From 9bc5f1474f0aeeeaecefddea86d57e229946c437 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Fri, 1 Aug 2025 13:35:31 -0700 Subject: [PATCH 667/884] Add allow_fewer_zones_deployment to Memorystore (#14677) --- mmv1/products/memorystore/Instance.yaml | 10 ++++- .../memorystore_instance_full.tf.tmpl | 45 ++++++++++--------- 2 files changed, 32 insertions(+), 23 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 627a5c9dfaf3..4689f7d3e36f 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -65,7 +65,7 @@ examples: subnet_name: 'my-subnet' network_name: 'my-network' prevent_destroy: 'true' - kms_key_name: "my-key" + kms_key_name: 'my-key' test_vars_overrides: 'prevent_destroy': 'false' 'kms_key_name': 'acctest.BootstrapKMSKeyInLocation(t, "us-central1").CryptoKey.Name' @@ -517,6 +517,14 @@ properties: enum_values: - 
'MULTI_ZONE' - 'SINGLE_ZONE' + - name: 'allowFewerZonesDeployment' + type: Boolean + description: | + Allows customers to specify if they are okay with deploying a multi-zone + instance in less than 3 zones. Once set, if there is a zonal outage during + the instance creation, the instance will only be deployed in 2 zones, and + stay within the 2 zones for its lifecycle. + immutable: true - name: 'deletionProtectionEnabled' type: Boolean description: "Optional. If set to true deletion of the instance will fail. " diff --git a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl index d004377f3d58..30d2cafe722b 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl @@ -1,39 +1,40 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { - instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 1 + instance_id = "{{index $.Vars "instance_name"}}" + shard_count = 1 desired_auto_created_endpoints { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - location = "us-central1" - replica_count = 1 - node_type = "SHARED_CORE_NANO" - transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" - authorization_mode = "AUTH_DISABLED" - kms_key = "{{index $.Vars "kms_key_name"}}" - engine_configs = { - maxmemory-policy = "volatile-ttl" + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "us-central1" + replica_count = 1 + node_type = "SHARED_CORE_NANO" + transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" + authorization_mode = "AUTH_DISABLED" + kms_key = "{{index $.Vars "kms_key_name"}}" + engine_configs = { + maxmemory-policy = "volatile-ttl" } + allow_fewer_zones_deployment = true zone_distribution_config { - mode = "SINGLE_ZONE" - zone = 
"us-central1-b" + mode = "SINGLE_ZONE" + zone = "us-central1-b" } maintenance_policy { weekly_maintenance_window { - day = "MONDAY" + day = "MONDAY" start_time { - hours = 1 - minutes = 0 - seconds = 0 - nanos = 0 + hours = 1 + minutes = 0 + seconds = 0 + nanos = 0 } } } engine_version = "VALKEY_7_2" deletion_protection_enabled = false - mode = "CLUSTER" + mode = "CLUSTER" persistence_config { - mode = "RDB" + mode = "RDB" rdb_config { rdb_snapshot_period = "ONE_HOUR" rdb_snapshot_start_time = "2024-10-02T15:01:23Z" From cbdfbe2c85e18b7d309b76ffdce384ceeb86cea7 Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Sat, 2 Aug 2025 02:31:17 +0530 Subject: [PATCH 668/884] Adding log_retention_days field to existing bp resource for backupdr (#14622) --- mmv1/products/backupdr/BackupPlan.yaml | 14 +++- ...p_dr_backup_plan_for_csql_resource.tf.tmpl | 29 +++++++ .../data_source_backup_dr_backup_plan_test.go | 77 ++++++++++++++++++- 3 files changed, 118 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index ffc566c48d3a..c3bc7c401f27 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -45,6 +45,13 @@ examples: backup_plan_id: 'backup-plan-disk-test' test_env_vars: project: :PROJECT_NAME + - name: 'backup_dr_backup_plan_for_csql_resource' + primary_resource_id: 'my-csql-backup-plan-1' + vars: + backup_vault_id: 'backup-vault-csql-test' + backup_plan_id: 'backup-plan-csql-test' + test_env_vars: + project: :PROJECT_NAME parameters: - name: 'location' type: String @@ -96,7 +103,7 @@ properties: type: String description: | The resource type to which the `BackupPlan` will be applied. - Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk", and "storage.googleapis.com/Bucket". 
+ Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk", "sqladmin.googleapis.com/Instance" and "storage.googleapis.com/Bucket". required: true - name: 'createTime' type: String @@ -236,3 +243,8 @@ properties: description: | The hour of the day (1-24) when the window ends, for example, if the value of end hour of the day is 10, that means the backup window end time is 10:00. The end hour of the day should be greater than the start + - name: 'logRetentionDays' + type: Integer + immutable: true + description: | + This is only applicable for CloudSql resource. Days for which logs will be stored. This value should be greater than or equal to minimum enforced log retention duration of the backup vault. diff --git a/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl new file mode 100644 index 000000000000..dee347472956 --- /dev/null +++ b/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl @@ -0,0 +1,29 @@ +resource "google_backup_dr_backup_vault" "my_backup_vault" { + location = "us-central1" + backup_vault_id = "{{index $.Vars "backup_vault_id"}}" + backup_minimum_enforced_retention_duration = "100000s" +} + +resource "google_backup_dr_backup_plan" "{{$.PrimaryResourceId}}" { + location = "us-central1" + backup_plan_id = "{{index $.Vars "backup_plan_id"}}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my_backup_vault.id + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 5 + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 6 + } + } + } + log_retention_days = 4 +} diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go 
b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go index 8677d3815b54..71489048da69 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go @@ -1,9 +1,10 @@ package backupdr_test import ( + "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "testing" ) func TestAccDataSourceGoogleBackupDRBackupPlan_basic(t *testing.T) { @@ -26,6 +27,26 @@ func TestAccDataSourceGoogleBackupDRBackupPlan_basic(t *testing.T) { }) } +func TestAccDataSourceGoogleBackupDRBackupPlan_csql(t *testing.T) { + t.Parallel() + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBackupDRBackupPlanDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBackupDRBackupPlan_csql(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_backup_dr_backup_plan.fetch-bp", "google_backup_dr_backup_plan.csql-test"), + ), + }, + }, + }) +} + func testAccDataSourceGoogleBackupDRBackupPlan_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_backup_dr_backup_vault" "my-backup-vault-1" { @@ -78,3 +99,57 @@ data "google_backup_dr_backup_plan" "fetch-bp" { } `, context) } + +func testAccDataSourceGoogleBackupDRBackupPlan_csql(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_backup_dr_backup_vault" "my-backup-vault-csql" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a backup vault built by Terraform for cloudsql." 
+ backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + + +resource "google_backup_dr_backup_plan" "csql-test" { + location = "us-central1" + backup_plan_id = "tf-test-bp-%{random_suffix}" + resource_type= "sqladmin.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault-csql.name + depends_on=[ google_backup_dr_backup_vault.my-backup-vault-csql ] + lifecycle { + ignore_changes = [backup_vault] + } + log_retention_days = 4 + backup_rules { + rule_id = "rule-1" + backup_retention_days = 5 + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 6 + time_zone = "UTC" + backup_window{ + start_hour_of_day = 0 + end_hour_of_day = 24 + } + } + } +} + +data "google_backup_dr_backup_plan" "fetch-bp" { + location = "us-central1" + backup_plan_id=google_backup_dr_backup_plan.csql-test.backup_plan_id + depends_on= [ google_backup_dr_backup_plan.csql-test ] + } +`, context) +} From d818748e90f15298a468bd13535c332ea6576120 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 1 Aug 2025 14:58:41 -0700 Subject: [PATCH 669/884] Keep the gateway id not changed in update test (#14718) --- .../resource_api_gateway_gateway_test.go.tmpl | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl b/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl index f10a3d3c88fa..cf52682b47e3 100644 --- a/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl +++ b/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-provider-google/google/acctest" 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" ) func TestAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(t *testing.T) { @@ -25,6 +26,11 @@ func TestAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(t *testing.T) }, { Config: testAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_api_gateway_gateway.api_gw", plancheck.ResourceActionUpdate), + }, + }, }, }, }) @@ -34,16 +40,13 @@ func testAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(context map[s return acctest.Nprintf(` resource "google_api_gateway_api" "api_gw" { provider = google-beta - api_id = "tf-test-api-gw%{random_suffix}" + api_id = "tf-test-my-api%{random_suffix}" } resource "google_api_gateway_api_config" "api_gw" { provider = google-beta api = google_api_gateway_api.api_gw.api_id - api_config_id = "tf-test-api-gw%{random_suffix}" - lifecycle { - create_before_destroy = true - } + api_config_id = "tf-test-my-config%{random_suffix}" openapi_documents { document { @@ -51,12 +54,15 @@ resource "google_api_gateway_api_config" "api_gw" { contents = filebase64("test-fixtures/openapi.yaml") } } + lifecycle { + create_before_destroy = true + } } resource "google_api_gateway_gateway" "api_gw" { provider = google-beta api_config = google_api_gateway_api_config.api_gw.id - gateway_id = "tf-test-api-gw%{random_suffix}" + gateway_id = "tf-test-my-gateway%{random_suffix}" display_name = "MM Dev API Gateway" labels = { environment = "dev" From 6ef2dd0a28972b15871a3b75519fa6f924d1d4c2 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 1 Aug 2025 15:15:49 -0700 Subject: [PATCH 670/884] Upgrade dcl 1.80.0 (#14705) --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 4 ++-- tpgtools/go.mod | 2 +- tpgtools/go.sum | 4 ++-- 4 files changed, 6 
insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 780f605cd7e4..d670d3a09c9e 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/auth v0.16.2 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 90f85eadf8b5..1204e197fb26 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -22,8 +22,8 @@ cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+ dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= diff --git a/tpgtools/go.mod b/tpgtools/go.mod 
index 4d9c2785f262..ce165743cf36 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index 7f702b3eb310..faba4095e176 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -6,8 +6,8 @@ cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdi cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= From 4054b19da404eddae03ac173225daf293debefbd Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 1 Aug 2025 16:26:59 -0700 Subject: [PATCH 671/884] tgc-revival: use timestamp in check function of each test step to read CAI asset (#14681) --- 
.../terraform/acctest/tgc_utils.go | 27 +++++++++---------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index f1d0d4e1c92a..7d8a30b018f1 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -8,6 +8,7 @@ import ( "regexp" "strings" "testing" + "time" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -27,11 +28,14 @@ type ImportMetadata struct { IgnoredFields []string `json:"ignored_fields,omitempty"` } +// The metadata for each step in one test type TgcMetadataPayload struct { TestName string `json:"test_name"` + StepNumber int `json:"step_number"` RawConfig string `json:"raw_config"` ResourceMetadata map[string]ResourceMetadata `json:"resource_metadata"` PrimaryResource string `json:"primary_resource"` + CaiReadTime time.Time `json:"cai_read_time"` } // PROJECT_NUMBER instead of PROJECT_ID is in the CAI asset names for the resources in those services @@ -72,6 +76,8 @@ func encodeToBase64JSON(data interface{}) (string, error) { // CollectAllTgcMetadata collects metadata for all resources in a test step func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc { return func(s *terraform.State) error { + tgcPayload.CaiReadTime = time.Now() + projectId := envvar.GetTestProjectFromEnv() projectNumber := envvar.GetTestProjectNumberFromEnv() @@ -118,12 +124,14 @@ func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc tgcPayload.ResourceMetadata[address] = metadata } + log.Printf("[DEBUG] tgcPayload caireadtime %s", tgcPayload.CaiReadTime) + // Encode the entire payload to base64 JSON encodedData, err := encodeToBase64JSON(tgcPayload) if err != nil { - log.Printf("[DEBUG]TGC Terraform error: %v", err) + log.Printf("[DEBUG]test_step_number=%d TGC Terraform error: %v", 
tgcPayload.StepNumber, err) } else { - log.Printf("[DEBUG]TGC Terraform metadata: %s", encodedData) + log.Printf("[DEBUG]test_step_number=%d TGC Terraform metadata: %s", tgcPayload.StepNumber, encodedData) } return nil @@ -193,20 +201,10 @@ func determineImportMetadata(steps []resource.TestStep, currentStepIndex int, re func extendWithTGCData(t *testing.T, c resource.TestCase) resource.TestCase { var updatedSteps []resource.TestStep - // Find the last non-plan config step - lastNonPlanConfigStep := -1 - for i := len(c.Steps) - 1; i >= 0; i-- { - step := c.Steps[i] - if step.Config != "" && !step.PlanOnly { - lastNonPlanConfigStep = i - break - } - } - // Process all steps for i, step := range c.Steps { - // If this is the last non-plan config step, add our TGC check - if i == lastNonPlanConfigStep { + // If this is a non-plan config step, add our TGC check + if step.Config != "" && !step.PlanOnly { // Parse resources from the config resources := parseResources(step.Config) @@ -230,6 +228,7 @@ func extendWithTGCData(t *testing.T, c resource.TestCase) resource.TestCase { // Create the consolidated TGC payload tgcPayload := TgcMetadataPayload{ TestName: t.Name(), + StepNumber: i + 1, // Step number starts from 1 RawConfig: step.Config, ResourceMetadata: resourceMetadata, } From 72af06137cd1cfb8688e97d3bcea29e8de9af414 Mon Sep 17 00:00:00 2001 From: sanmahapatra <168236987+sanmahapatra@users.noreply.github.com> Date: Mon, 4 Aug 2025 06:57:06 -0700 Subject: [PATCH 672/884] Add the PSA write endpoint (#14510) --- .../resource_sql_database_instance.go.tmpl | 9 ++ ...esource_sql_database_instance_test.go.tmpl | 106 ++++++++++++++++++ .../r/sql_database_instance.html.markdown | 2 + 3 files changed, 117 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 541219048dc8..858c7ce3afb3 100644 --- 
a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -990,6 +990,11 @@ is set to true. Defaults to ZONAL.`, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ + "psa_write_endpoint": { + Type: schema.TypeString, + Optional: true, + Description: fmt.Sprintf(`If set, this field indicates this instance has a private service access (PSA) DNS endpoint that is pointing to the primary instance of the cluster. If this instance is the primary, then the DNS endpoint points to this instance. After a switchover or replica failover operation, this DNS endpoint points to the promoted instance. This is a read-only field, returned to the user as information. This field can exist even if a standalone instance doesn't have a DR replica yet or the DR replica is deleted.`), + }, "failover_dr_replica_name": { Type: schema.TypeString, Optional: true, @@ -2556,6 +2561,10 @@ func flattenDatabaseFlags(databaseFlags []*sqladmin.DatabaseFlags) []map[string] // is nil since replication_cluster is computed+optional. 
func flattenReplicationCluster(replicationCluster *sqladmin.ReplicationCluster, d *schema.ResourceData) []map[string]interface{} { data := make(map[string]interface{}) + data["psa_write_endpoint"] = "" + if replicationCluster != nil && replicationCluster.PsaWriteEndpoint != "" { + data["psa_write_endpoint"] = replicationCluster.PsaWriteEndpoint + } data["failover_dr_replica_name"] = "" if replicationCluster != nil && replicationCluster.FailoverDrReplicaName != "" { data["failover_dr_replica_name"] = replicationCluster.FailoverDrReplicaName diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index e7656525a336..43da7a87a8ad 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -2739,6 +2739,60 @@ func TestAccSqlDatabaseInstance_SwitchoverSuccess(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_MysqlEplusWithPrivateNetwork(t *testing.T) { + t.Parallel() + + instanceName := "tf-test-" + acctest.RandString(t, 10) + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "endpoint") + projectId := envvar.GetTestProjectFromEnv() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusOnPrivateNetwork(projectId, networkName, instanceName, "MYSQL_8_0"), + Check: resource.ComposeTestCheckFunc(verifyCreateOperationOnEplusWithPrivateNetwork("google_sql_database_instance.instance")), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), + 
ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_PostgresEplusWithPrivateNetwork(t *testing.T) { + t.Parallel() + + instanceName := "tf-test-" + acctest.RandString(t, 10) + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "endpoint") + projectId := envvar.GetTestProjectFromEnv() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusOnPrivateNetwork(projectId, networkName, instanceName, "POSTGRES_12"), + Check: resource.ComposeTestCheckFunc(verifyCreateOperationOnEplusWithPrivateNetwork("google_sql_database_instance.instance")), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + // Switchover for MySQL. 
func TestAccSqlDatabaseInstance_MysqlSwitchoverSuccess(t *testing.T) { t.Parallel() @@ -4037,6 +4091,35 @@ resource "google_sql_database_instance" "original-replica" { `, replicaName) } +func testGoogleSqlDatabaseInstanceConfig_eplusOnPrivateNetwork(project, networkName, instanceName, databaseVersion string) string { + return fmt.Sprintf(` +data "google_compute_network" "servicenet" { + name = "%s" +} + +resource "google_sql_database_instance" "instance" { + project = "%s" + name = "%s" + region = "us-east1" + database_version = "%s" + instance_type = "CLOUD_SQL_INSTANCE" + deletion_protection = false + + settings { + tier = "db-perf-optimized-N-2" + edition = "ENTERPRISE_PLUS" + ip_configuration { + ipv4_enabled = "false" + private_network = data.google_compute_network.servicenet.self_link + } + backup_configuration { + enabled = true + } + } +} +`, networkName, project, instanceName, databaseVersion) +} + func testGoogleSqlDatabaseInstanceConfig_mysqlEplusWithReplica(project, primaryName, replicaName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "original-primary" { @@ -4796,6 +4879,29 @@ func verifyPscOperation(resourceName string, isPscConfigExpected bool, expectedP } } +func verifyCreateOperationOnEplusWithPrivateNetwork(resourceName string) func(*terraform.State) error { + return func(s *terraform.State) error { + resource, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Can't find %s in state", resourceName) + } + + resourceAttributes := resource.Primary.Attributes + _, ok = resourceAttributes["replication_cluster.#"] + if !ok { + return fmt.Errorf("replication_cluster.# block is not present in state for %s", resourceName) + } + + _, ok = resourceAttributes["replication_cluster.0.psa_write_endpoint"] + if !ok { + return fmt.Errorf("replication_cluster.psa_write_endpoint is not present in state for %s", resourceName) + } + + return nil + } +} + + func verifyPscAutoConnectionsOperation(resourceName 
string, isPscConfigExpected bool, expectedPscEnabled bool, isPscAutoConnectionConfigExpected bool, expectedConsumerNetwork string, expectedConsumerProject string) func(*terraform.State) error { return func(s *terraform.State) error { resource, ok := s.RootModule().Resources[resourceName] diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 8ae88ec888d6..af89a6dad2e9 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -586,6 +586,8 @@ block during resource creation/update will trigger the restore action after the The optional, computed `replication_cluster` block represents a primary instance and disaster recovery replica pair. Applicable to MySQL and PostgreSQL. This field can be set only after both the primary and replica are created. This block supports: +* `psa_write_endpoint`: Read-only field which if set, indicates this instance has a private service access (PSA) DNS endpoint that is pointing to the primary instance of the cluster. If this instance is the primary, then the DNS endpoint points to this instance. After a switchover or replica failover operation, this DNS endpoint points to the promoted instance. This is a read-only field, returned to the user as information. This field can exist even if a standalone instance doesn't have a DR replica yet or the DR replica is deleted. + * `failover_dr_replica_name`: (Optional) If the instance is a primary instance, then this field identifies the disaster recovery (DR) replica. The standard format of this field is "your-project:your-instance". You can also set this field to "your-instance", but cloud SQL backend will convert it to the aforementioned standard format. * `dr_replica`: Read-only field that indicates whether the replica is a DR replica. 
From dc9299db43788b29f84a639dc03ef0d8ee68ca85 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 4 Aug 2025 09:53:30 -0700 Subject: [PATCH 673/884] tgc-revival: add google_network_security_server_tls_policy (#14716) --- mmv1/products/networksecurity/ServerTlsPolicy.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/mmv1/products/networksecurity/ServerTlsPolicy.yaml b/mmv1/products/networksecurity/ServerTlsPolicy.yaml index 94b868ebbcf0..e7192d07212f 100644 --- a/mmv1/products/networksecurity/ServerTlsPolicy.yaml +++ b/mmv1/products/networksecurity/ServerTlsPolicy.yaml @@ -41,6 +41,7 @@ async: delete_minutes: 30 result: resource_inside_response: false +include_in_tgc_next_DO_NOT_USE: true custom_code: sweeper: url_substitutions: @@ -52,19 +53,27 @@ examples: primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' + tgc_test_ignore_extra: + - 'allow_open' # Ignore its false value in Terraform configuration - name: 'network_security_server_tls_policy_advanced' primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' + tgc_test_ignore_extra: + - 'allow_open' # Ignore its false value in Terraform configuration - name: 'network_security_server_tls_policy_server_cert' primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' + tgc_test_ignore_extra: + - 'allow_open' # Ignore its false value in Terraform configuration - name: 'network_security_server_tls_policy_mtls' primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' trust_config_name: 'my-trust-config' + tgc_test_ignore_extra: + - 'allow_open' # Ignore its false value in Terraform configuration parameters: - name: 'name' type: String @@ -94,6 +103,7 @@ properties: - name: 'labels' type: KeyValueLabels description: Set of label tags associated with the ServerTlsPolicy resource. 
+ is_missing_in_cai: true - name: 'description' type: String description: | From a2e298ea3e6bcfba4c2860093aa85d834e0c994e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Mon, 4 Aug 2025 19:05:55 +0200 Subject: [PATCH 674/884] Add singular data source for retrieving an Artifact Registry version (#14721) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_artifact_registry_version.go | 206 ++++++++++++++++++ ...a_source_artifact_registry_version_test.go | 38 ++++ .../d/artifact_registry_version.html.markdown | 51 +++++ 4 files changed, 296 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 7a9e57dad24b..b22f4fac9bd3 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -30,6 +30,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), + "google_artifact_registry_version": artifactregistry.DataSourceArtifactRegistryVersion(), "google_apphub_discovered_workload": apphub.DataSourceApphubDiscoveredWorkload(), "google_app_engine_default_service_account": appengine.DataSourceGoogleAppEngineDefaultServiceAccount(), "google_apphub_application": 
apphub.DataSourceGoogleApphubApplication(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go new file mode 100644 index 000000000000..9965bfb7b758 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go @@ -0,0 +1,206 @@ +package artifactregistry + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceArtifactRegistryVersion() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryVersionRead, + + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + }, + "package_name": { + Type: schema.TypeString, + Required: true, + }, + "version_name": { + Type: schema.TypeString, + Required: true, + }, + "view": { + Type: schema.TypeString, + Optional: true, + Default: "BASIC", + ValidateFunc: validateViewArtifactRegistryVersion, + }, + "project": { + Type: schema.TypeString, + Optional: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "description": { + Type: schema.TypeString, + Computed: true, + }, + "related_tags": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "version": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + }, + "annotations": { + Type: schema.TypeMap, + Computed: true, + Elem: 
&schema.Schema{Type: schema.TypeString}, + }, + }, + } +} + +func DataSourceArtifactRegistryVersionRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return fmt.Errorf("Error setting Artifact Registry user agent: %s", err) + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error setting Artifact Registry project: %s", err) + } + + basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") + if err != nil { + return fmt.Errorf("Error setting Artifact Registry base path: %s", err) + } + + resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{package_name}}/versions/{{version_name}}")) + if err != nil { + return fmt.Errorf("Error setting resource path: %s", err) + } + + view := d.Get("view").(string) + + urlRequest := basePath + resourcePath + + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + q := u.Query() + q.Set("view", view) + u.RawQuery = q.Encode() + urlRequest = u.String() + + headers := make(http.Header) + + u, err = url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: u.String(), + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error getting Artifact Registry version: %s", err) + } + + var relatedTags []map[string]interface{} + if rawTags, ok := res["relatedTags"].([]interface{}); ok { + for _, rawTag := range rawTags { + if tagMap, ok := rawTag.(map[string]interface{}); ok { + entry := map[string]interface{}{ + "name": tagMap["name"], + "version": tagMap["version"], + } + relatedTags = 
append(relatedTags, entry) + } + } + } + + annotations := make(map[string]string) + if anno, ok := res["annotations"].(map[string]interface{}); ok { + for k, v := range anno { + if val, ok := v.(string); ok { + annotations[k] = val + } + } + } + + getString := func(m map[string]interface{}, key string) string { + if v, ok := m[key].(string); ok { + return v + } + return "" + } + + name := getString(res, "name") + + if err := d.Set("project", project); err != nil { + return err + } + if err := d.Set("name", name); err != nil { + return err + } + if err := d.Set("description", getString(res, "description")); err != nil { + return err + } + if err := d.Set("related_tags", relatedTags); err != nil { + return err + } + if err := d.Set("create_time", getString(res, "createTime")); err != nil { + return err + } + if err := d.Set("update_time", getString(res, "updateTime")); err != nil { + return err + } + if err := d.Set("annotations", annotations); err != nil { + return err + } + + d.SetId(name) + + return nil +} + +func validateViewArtifactRegistryVersion(val interface{}, key string) ([]string, []error) { + v := val.(string) + var errs []error + + if v != "BASIC" && v != "FULL" { + errs = append(errs, fmt.Errorf("%q must be either 'BASIC' or 'FULL', got %q", key, v)) + } + + return nil, errs +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go new file mode 100644 index 000000000000..fab8ed95a440 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go @@ -0,0 +1,38 @@ +package artifactregistry_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryVersion_basic(t *testing.T) { + t.Parallel() + + 
acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryVersionConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_artifact_registry_version.this", "name", "projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane/versions/sha256:c0cf52c2bd8c636bbf701c6c74c5ff819447d384dc957d52a52a668de63e8f5d"), + ), + }, + }, + }) +} + +// Test the data source against the public AR repos +// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container +// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io +const testAccDataSourceArtifactRegistryVersionConfig = ` +data "google_artifact_registry_version" "this" { + project = "go-containerregistry" + location = "us" + repository_id = "gcr.io" + package_name = "gcrane" + version_name = "sha256:c0cf52c2bd8c636bbf701c6c74c5ff819447d384dc957d52a52a668de63e8f5d" +} +` diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown new file mode 100644 index 000000000000..ddcd65975c4b --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about a version within a Google Artifact Registry repository. +--- + +# google_artifact_registry_version +This data source fetches information of a version from a provided Artifact Registry repository. 
+ +## Example Usage + +```hcl +data "google_artifact_registry_version" "my_version" { + location = "us-central1" + repository_id = "example-repo" + package_name = "example-package" + version_name = "latest" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` - (Required) The location of the artifact registry. + +* `repository_id` - (Required) The last part of the repository name to fetch from. + +* `package_name` - (Required) The name of the package. + +* `version_name` - (Required) The name of the version. + +* `view` - (Optional) The view, which determines what version information is returned in a response. Possible values are `"BASIC"` and `"FULL"`. Defaults to `"BASIC"`. + +* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +The following computed attributes are exported: + +* `name` - The name of the version, for example: `projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/version1`. If the package part contains slashes, the slashes are escaped. + +* `description` - Description of the version, as specified in its metadata. + +* `related_tags` - A list of related tags. Will contain up to 100 tags that reference this version. + +* `create_time` - The time, as a RFC 3339 string, this version was created. + +* `update_time` - The time, as a RFC 3339 string, this version was last updated. + +* `annotations` - Client specified annotations. 
From 2776e2c00c5f8c25182dbe895f23dcc856d86088 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 4 Aug 2025 10:06:19 -0700 Subject: [PATCH 675/884] tgc: skip failed tests for now (#14731) --- mmv1/products/pubsub/Subscription.yaml | 1 + mmv1/products/pubsub/Topic.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index 3f380f8849fb..a770ea6048b5 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -130,6 +130,7 @@ examples: vars: topic_name: 'example-topic' subscription_name: 'example-subscription' + tgc_skip_test: 'The dynamic block is in test configuration. The test takes time to fix.' parameters: properties: - name: 'name' diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index b35f57a428cf..9043692921cb 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -108,6 +108,7 @@ examples: primary_resource_id: 'example' vars: topic_name: 'example-topic' + tgc_skip_test: 'The dynamic block is in test configuration. The test takes time to fix.' parameters: properties: - name: 'name' From 9c92ee9bdd1cd6bb4adf7a1e1c0a0f59f918232b Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 4 Aug 2025 10:17:43 -0700 Subject: [PATCH 676/884] clarified that external issues are preferred because they can auto-close (#14708) --- docs/content/code-review/review-pr.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/content/code-review/review-pr.md b/docs/content/code-review/review-pr.md index 6eb13e961dd5..5e52bf22212e 100644 --- a/docs/content/code-review/review-pr.md +++ b/docs/content/code-review/review-pr.md @@ -21,6 +21,7 @@ The following types of PRs may require additional scrutiny and/or multiple revie 1. 
Read the PR description to understand the context and ensure the PR either * is linked to a GitHub issue or an internal bug * if not, check the [issue tracker](https://github.com/hashicorp/terraform-provider-google/issues) to see whether the feature has already been requested and add the issues in the description, if any. + * "Fixes {github_issue_link}" is preferred if an external issue is available because it will [auto-close the issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/using-issues/linking-a-pull-request-to-an-issue) when the PR is merged. However, there's no need to create an external issue solely for this purpose. * establishes clear context itself via title or description. 2. If the PR adds any new resource, ensure that the resource does not already exist in the [GA provider](https://github.com/hashicorp/terraform-provider-google) or [beta provider](https://github.com/hashicorp/terraform-provider-google-beta) 1. Read through all the changes in the PR, generated code in the downstreams and the API documentation to ensure that: From 8743d13a441b5fd82b4885110b8fcef824b0a90e Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 4 Aug 2025 10:37:21 -0700 Subject: [PATCH 677/884] Fix VCR replaying report format (#14720) --- .ci/magician/cmd/templates/vcr/post_replay.tmpl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.ci/magician/cmd/templates/vcr/post_replay.tmpl b/.ci/magician/cmd/templates/vcr/post_replay.tmpl index e89973cca41e..b914d0286884 100644 --- a/.ci/magician/cmd/templates/vcr/post_replay.tmpl +++ b/.ci/magician/cmd/templates/vcr/post_replay.tmpl @@ -31,6 +31,7 @@ None {{end}} + {{ if gt (len .ReplayingResult.FailedTests) 0 -}} #### Action taken
@@ -52,4 +53,4 @@ None {{- end}} View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) -{{- end}} \ No newline at end of file +{{- end}} From b2b908ab8b7b5d0913eb76bcf1522b1a454a6a2f Mon Sep 17 00:00:00 2001 From: "Laurenz K." <45950275+laurenz-k@users.noreply.github.com> Date: Mon, 4 Aug 2025 21:39:38 +0200 Subject: [PATCH 678/884] Add support for `networksecurity.googleapis.com/ServerTlsPolicy` to TGC cai2hcl (#14699) --- mmv1/third_party/cai2hcl/convert_test.go | 12 +- mmv1/third_party/cai2hcl/converter_map.go | 5 + .../networksecurity/server_tls_policy.go | 173 +++++++++ .../networksecurity/server_tls_policy_test.go | 13 + .../testdata/server_tls_policy.json | 361 ++++++++++++++++++ .../testdata/server_tls_policy.tf | 194 ++++++++++ 6 files changed, 756 insertions(+), 2 deletions(-) create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf diff --git a/mmv1/third_party/cai2hcl/convert_test.go b/mmv1/third_party/cai2hcl/convert_test.go index fa2e160c6696..0d8fc7bac6bb 100644 --- a/mmv1/third_party/cai2hcl/convert_test.go +++ b/mmv1/third_party/cai2hcl/convert_test.go @@ -1,9 +1,8 @@ package cai2hcl_test import ( - "testing" - cai2hclTesting "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/testing" + "testing" ) func TestConvertCompute(t *testing.T) { @@ -23,3 +22,12 @@ func TestConvertResourcemanager(t *testing.T) { "project_create", }) } + +func TestConvertNetworksecurity(t *testing.T) { + cai2hclTesting.AssertTestFiles( + t, + "./services/networksecurity/testdata", + []string{ + "server_tls_policy", + }) +} diff --git 
a/mmv1/third_party/cai2hcl/converter_map.go b/mmv1/third_party/cai2hcl/converter_map.go index 65ad92505446..81f909ff4e24 100644 --- a/mmv1/third_party/cai2hcl/converter_map.go +++ b/mmv1/third_party/cai2hcl/converter_map.go @@ -3,6 +3,7 @@ package cai2hcl import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/common" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/services/compute" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/services/networksecurity" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/services/resourcemanager" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" tpg_provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" @@ -22,6 +23,8 @@ var AssetTypeToConverter = map[string]string{ resourcemanager.ProjectAssetType: "google_project", resourcemanager.ProjectBillingAssetType: "google_project", + + networksecurity.ServerTLSPolicyAssetType: "google_network_security_server_tls_policy", } // ConverterMap is a collection of converters instances, indexed by name. 
@@ -35,4 +38,6 @@ var ConverterMap = map[string]common.Converter{ "google_compute_region_health_check": compute.NewComputeRegionHealthCheckConverter(provider), "google_project": resourcemanager.NewProjectConverter(provider), + + "google_network_security_server_tls_policy": networksecurity.NewServerTLSPolicyConverter(provider), } diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go new file mode 100644 index 000000000000..23b476b006f4 --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go @@ -0,0 +1,173 @@ +package networksecurity + +import ( + "errors" + "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/common" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/caiasset" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + netsecapi "google.golang.org/api/networksecurity/v1" + "strings" +) + +// ServerTLSPolicyAssetType is the CAI asset type name. +const ServerTLSPolicyAssetType string = "networksecurity.googleapis.com/ServerTlsPolicy" + +// ServerTLSPolicySchemaName is the TF resource schema name. +const ServerTLSPolicySchemaName string = "google_network_security_server_tls_policy" + +// ServerTLSPolicyConverter for networksecurity server tls policy resource. +type ServerTLSPolicyConverter struct { + name string + schema map[string]*schema.Schema +} + +// NewServerTLSPolicyConverter returns an HCL converter. 
+func NewServerTLSPolicyConverter(provider *schema.Provider) common.Converter { + schema := provider.ResourcesMap[ServerTLSPolicySchemaName].Schema + + return &ServerTLSPolicyConverter{ + name: ServerTLSPolicySchemaName, + schema: schema, + } +} + +// Convert converts CAI assets to HCL resource blocks (Provider version: 6.45.0) +func (c *ServerTLSPolicyConverter) Convert(assets []*caiasset.Asset) ([]*common.HCLResourceBlock, error) { + var blocks []*common.HCLResourceBlock + var err error + + for _, asset := range assets { + if asset == nil { + continue + } else if asset.Resource == nil || asset.Resource.Data == nil { + return nil, fmt.Errorf("INVALID_ARGUMENT: Asset resource data is nil") + } else if asset.Type != ServerTLSPolicyAssetType { + return nil, fmt.Errorf("INVALID_ARGUMENT: Expected asset of type %s, but received %s", ServerTLSPolicyAssetType, asset.Type) + } + block, errConvert := c.convertResourceData(asset) + blocks = append(blocks, block) + if errConvert != nil { + err = errors.Join(err, errConvert) + } + } + return blocks, err +} + +func (c *ServerTLSPolicyConverter) convertResourceData(asset *caiasset.Asset) (*common.HCLResourceBlock, error) { + if asset == nil || asset.Resource == nil || asset.Resource.Data == nil { + return nil, fmt.Errorf("INVALID_ARGUMENT: Asset resource data is nil") + } + + hcl, _ := flattenServerTLSPolicy(asset.Resource) + + ctyVal, err := common.MapToCtyValWithSchema(hcl, c.schema) + if err != nil { + return nil, err + } + + resourceName := hcl["name"].(string) + return &common.HCLResourceBlock{ + Labels: []string{c.name, resourceName}, + Value: ctyVal, + }, nil +} + +func flattenServerTLSPolicy(resource *caiasset.AssetResource) (map[string]any, error) { + result := make(map[string]any) + + var serverTLSPolicy *netsecapi.ServerTlsPolicy + if err := common.DecodeJSON(resource.Data, &serverTLSPolicy); err != nil { + return nil, err + } + + result["name"] = flattenName(serverTLSPolicy.Name) + result["labels"] = 
serverTLSPolicy.Labels + result["description"] = serverTLSPolicy.Description + result["allow_open"] = serverTLSPolicy.AllowOpen + result["server_certificate"] = flattenServerCertificate(serverTLSPolicy.ServerCertificate) + result["mtls_policy"] = flattenMTLSPolicy(serverTLSPolicy.MtlsPolicy) + result["project"] = flattenProjectName(serverTLSPolicy.Name) + + result["location"] = resource.Location + + return result, nil +} + +func flattenName(name string) string { + tokens := strings.Split(name, "/") + return tokens[len(tokens)-1] +} + +func flattenServerCertificate(certificate *netsecapi.GoogleCloudNetworksecurityV1CertificateProvider) []map[string]any { + if certificate == nil { + return nil + } + + result := make(map[string]any) + result["certificate_provider_instance"] = flattenCertificateProviderInstance(certificate.CertificateProviderInstance) + result["grpc_endpoint"] = flattenGrpcEndpoint(certificate.GrpcEndpoint) + + return []map[string]any{result} +} + +func flattenMTLSPolicy(policy *netsecapi.MTLSPolicy) []map[string]any { + if policy == nil { + return nil + } + + result := make(map[string]any) + result["client_validation_mode"] = policy.ClientValidationMode + result["client_validation_trust_config"] = policy.ClientValidationTrustConfig + result["client_validation_ca"] = flattenClientValidationCA(policy.ClientValidationCa) + + return []map[string]any{result} +} + +func flattenCertificateProviderInstance(instance *netsecapi.CertificateProviderInstance) []map[string]any { + if instance == nil { + return nil + } + + result := make(map[string]any) + result["plugin_instance"] = instance.PluginInstance + + return []map[string]any{result} +} + +func flattenGrpcEndpoint(endpoint *netsecapi.GoogleCloudNetworksecurityV1GrpcEndpoint) []map[string]any { + if endpoint == nil { + return nil + } + + result := make(map[string]any) + result["target_uri"] = endpoint.TargetUri + + return []map[string]any{result} +} + +func flattenClientValidationCA(cas 
[]*netsecapi.ValidationCA) []map[string]any { + if cas == nil { + return nil + } + + result := make([]map[string]any, 0, len(cas)) + + for _, ca := range cas { + converted := map[string]any{ + "certificate_provider_instance": flattenCertificateProviderInstance(ca.CertificateProviderInstance), + "grpc_endpoint": flattenGrpcEndpoint(ca.GrpcEndpoint), + } + result = append(result, converted) + } + + return result +} + +func flattenProjectName(name string) string { + tokens := strings.Split(name, "/") + if len(tokens) < 2 || tokens[0] != "projects" { + return "" + } + return tokens[1] +} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go new file mode 100644 index 000000000000..9fc4b867c58c --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go @@ -0,0 +1,13 @@ +package networksecurity_test + +import ( + cai2hcl_testing "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/testing" + "testing" +) + +func TestServerTlsPolicy(t *testing.T) { + cai2hcl_testing.AssertTestFiles( + t, + "./testdata", + []string{"server_tls_policy"}) +} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json new file mode 100644 index 000000000000..085ae9d032a8 --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json @@ -0,0 +1,361 @@ +[ + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_policy", + "resource": { + "data": { + 
"createTime": "2025-07-29T16:00:11.184079186Z", + "description": "my description", + "labels": { + "foo": "bar" + }, + "mtlsPolicy": { + "clientValidationMode": "REJECT_INVALID", + "clientValidationTrustConfig": "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_policy", + "updateTime": "2025-07-29T16:00:15.415731403Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.082558809Z", + "description": "my description", + "labels": { + "foo": "bar" + }, + "mtlsPolicy": { + "clientValidationCa": [ + { + "certificateProviderInstance": { + "pluginInstance": "google_cloud_private_spiffe" + } + } + ] + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_policy", + "serverCertificate": { + "certificateProviderInstance": { + "pluginInstance": "google_cloud_private_spiffe" + } + }, + "updateTime": "2025-07-29T16:00:15.692522561Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + 
"ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_server_cert_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.040588118Z", + "description": "my description", + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_server_cert_policy", + "serverCertificate": { + "grpcEndpoint": { + "targetUri": "unix:mypath" + } + }, + "updateTime": "2025-07-29T16:00:15.680321984Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/empty_description_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:11.660089355Z", + "labels": { + "foo": "bar" + }, + "mtlsPolicy": { + "clientValidationMode": "REJECT_INVALID", + "clientValidationTrustConfig": "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/empty_description_policy", + "updateTime": "2025-07-29T16:00:16.847799545Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + 
"discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/empty_labels_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.040240475Z", + "description": "my description", + "mtlsPolicy": { + "clientValidationMode": "REJECT_INVALID", + "clientValidationTrustConfig": "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/empty_labels_policy", + "updateTime": "2025-07-29T16:00:16.309813819Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/us-central1/serverTlsPolicies/regional_location_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.162242768Z", + "description": "my description", + "labels": { + "foo": "bar" + }, + "mtlsPolicy": { + "clientValidationMode": "REJECT_INVALID", + "clientValidationTrustConfig": 
"projects/307841421122/locations/us-central1/trustConfigs/tsmx-20250609-tc1" + }, + "name": "projects/ccm-breakit/locations/us-central1/serverTlsPolicies/regional_location_policy", + "updateTime": "2025-07-29T16:00:15.08724113Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "us-central1", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_allow_invalid_cert_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.078450339Z", + "description": "my description", + "labels": { + "foo": "bar" + }, + "mtlsPolicy": { + "clientValidationMode": "ALLOW_INVALID_OR_MISSING_CLIENT_CERT" + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_allow_invalid_cert_policy", + "updateTime": "2025-07-29T16:00:16.300643457Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": 
"//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_allow_open_policy", + "resource": { + "data": { + "allowOpen": true, + "createTime": "2025-07-29T16:00:11.930403186Z", + "description": "my description", + "mtlsPolicy": { + "clientValidationCa": [ + { + "certificateProviderInstance": { + "pluginInstance": "google_cloud_private_spiffe" + } + } + ] + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_allow_open_policy", + "serverCertificate": { + "grpcEndpoint": { + "targetUri": "unix:mypath" + } + }, + "updateTime": "2025-07-29T16:00:15.644106332Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + }, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_cert_provider_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.122393281Z", + "description": "my description", + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_cert_provider_policy", + "serverCertificate": { + "certificateProviderInstance": { + "pluginInstance": "google_cloud_private_spiffe" + } + }, + "updateTime": "2025-07-29T16:00:15.720820072Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + 
}, + { + "ancestors": [ + "projects/307841421122", + "folders/1004165107538", + "folders/422052295010", + "folders/23774682723", + "folders/134336129404", + "folders/376645683816", + "organizations/433637338589" + ], + "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_client_validation_grpc_policy", + "resource": { + "data": { + "createTime": "2025-07-29T16:00:12.000713965Z", + "description": "my description", + "labels": { + "foo": "bar" + }, + "mtlsPolicy": { + "clientValidationCa": [ + { + "grpcEndpoint": { + "targetUri": "unix:mypath" + } + } + ] + }, + "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_client_validation_grpc_policy", + "serverCertificate": { + "certificateProviderInstance": { + "pluginInstance": "google_cloud_private_spiffe" + } + }, + "updateTime": "2025-07-29T16:00:15.701713898Z" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "ServerTlsPolicy", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-07-29T18:00:00Z" + } +] \ No newline at end of file diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf new file mode 100644 index 000000000000..f9cadcf99e2e --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf @@ -0,0 +1,194 @@ +resource "google_network_security_server_tls_policy" "lb_mtls_policy" { + allow_open = false + description = "my description" + + labels = { + foo = "bar" + } + + location = "global" + + mtls_policy { + client_validation_mode = "REJECT_INVALID" + client_validation_trust_config = 
"projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" + } + + name = "lb_mtls_policy" + project = "ccm-breakit" +} + +resource "google_network_security_server_tls_policy" "td_mtls_policy" { + allow_open = false + description = "my description" + + labels = { + foo = "bar" + } + + location = "global" + + mtls_policy { + client_validation_ca { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } + } + + name = "td_mtls_policy" + project = "ccm-breakit" + + server_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } +} + +resource "google_network_security_server_tls_policy" "td_with_server_cert_policy" { + allow_open = false + description = "my description" + location = "global" + name = "td_with_server_cert_policy" + project = "ccm-breakit" + + server_certificate { + grpc_endpoint { + target_uri = "unix:mypath" + } + } +} + +resource "google_network_security_server_tls_policy" "empty_description_policy" { + allow_open = false + + labels = { + foo = "bar" + } + + location = "global" + + mtls_policy { + client_validation_mode = "REJECT_INVALID" + client_validation_trust_config = "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" + } + + name = "empty_description_policy" + project = "ccm-breakit" +} + +resource "google_network_security_server_tls_policy" "empty_labels_policy" { + allow_open = false + description = "my description" + location = "global" + + mtls_policy { + client_validation_mode = "REJECT_INVALID" + client_validation_trust_config = "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" + } + + name = "empty_labels_policy" + project = "ccm-breakit" +} + +resource "google_network_security_server_tls_policy" "regional_location_policy" { + allow_open = false + description = "my description" + + labels = { + foo = "bar" + } + + location = 
"us-central1" + + mtls_policy { + client_validation_mode = "REJECT_INVALID" + client_validation_trust_config = "projects/307841421122/locations/us-central1/trustConfigs/tsmx-20250609-tc1" + } + + name = "regional_location_policy" + project = "ccm-breakit" +} + +resource "google_network_security_server_tls_policy" "lb_mtls_allow_invalid_cert_policy" { + allow_open = false + description = "my description" + + labels = { + foo = "bar" + } + + location = "global" + + mtls_policy { + client_validation_mode = "ALLOW_INVALID_OR_MISSING_CLIENT_CERT" + } + + name = "lb_mtls_allow_invalid_cert_policy" + project = "ccm-breakit" +} + +resource "google_network_security_server_tls_policy" "td_allow_open_policy" { + allow_open = true + description = "my description" + location = "global" + + mtls_policy { + client_validation_ca { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } + } + + name = "td_allow_open_policy" + project = "ccm-breakit" + + server_certificate { + grpc_endpoint { + target_uri = "unix:mypath" + } + } +} + +resource "google_network_security_server_tls_policy" "td_with_cert_provider_policy" { + allow_open = false + description = "my description" + location = "global" + name = "td_with_cert_provider_policy" + project = "ccm-breakit" + + server_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } +} + +resource "google_network_security_server_tls_policy" "td_mtls_client_validation_grpc_policy" { + allow_open = false + description = "my description" + + labels = { + foo = "bar" + } + + location = "global" + + mtls_policy { + client_validation_ca { + grpc_endpoint { + target_uri = "unix:mypath" + } + } + } + + name = "td_mtls_client_validation_grpc_policy" + project = "ccm-breakit" + + server_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } +} From 73f7e164d94c4da554f4d41cdcb31114d920d19d Mon Sep 17 00:00:00 2001 
From: Nick Elliot Date: Mon, 4 Aug 2025 14:31:27 -0700 Subject: [PATCH 679/884] Fix partial match regexes (#14610) --- .../terraform/custom_import/extract_taxonomy.go.tmpl | 2 +- .../custom_import/vertex_ai_tensorboard_import.go.tmpl | 8 ++++---- .../terraform/services/apigee/resource_apigee_api.go | 4 ++-- .../terraform/services/apigee/resource_apigee_flowhook.go | 4 ++-- .../resource_apigee_keystores_aliases_key_cert_file.go | 4 ++-- .../apigee/resource_apigee_keystores_aliases_pkcs12.go | 4 ++-- .../services/apigee/resource_apigee_sharedflow.go | 4 ++-- .../apigee/resource_apigee_sharedflow_deployment.go | 4 ++-- .../services/bigquery/resource_bigquery_table.go.tmpl | 6 +++--- .../bigtable/resource_bigtable_authorized_view.go | 6 +++--- .../services/bigtable/resource_bigtable_instance.go | 6 +++--- .../services/bigtable/resource_bigtable_table.go | 6 +++--- .../cloudfunctions/resource_cloudfunctions_function.go | 6 +++--- .../services/compute/resource_compute_instance.go.tmpl | 6 +++--- .../compute/resource_compute_instance_group.go.tmpl | 6 +++--- .../resource_compute_project_metadata_item.go.tmpl | 4 ++-- .../services/compute/resource_compute_target_pool.go.tmpl | 8 ++++---- .../terraform/services/dns/resource_dns_record_set.go | 6 +++--- .../osconfig/resource_os_config_os_policy_assignment.go | 6 +++--- .../resource_google_folder_organization_policy.go | 6 +++--- .../resource_google_project_iam_custom_role.go | 6 +++--- .../resource_google_project_organization_policy.go | 6 +++--- .../resourcemanager/resource_google_service_account.go | 6 +++--- .../services/sql/resource_sql_database_instance.go.tmpl | 6 +++--- tpgtools/ignored_handwritten/custom_import.go | 8 ++++---- 25 files changed, 69 insertions(+), 69 deletions(-) diff --git a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl index c99cf546f939..17d7a76b1bfb 100644 --- 
a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl +++ b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl @@ -1,7 +1,7 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)"}, d, config); err != nil { + "^(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl index be6cd588a588..7d834ceeba51 100644 --- a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl +++ b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl @@ -1,9 +1,9 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go index 57375e59f2ae..2921e19e0caa 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go @@ -324,8 +324,8 @@ func resourceApigeeApiDelete(d *schema.ResourceData, meta interface{}) error { func resourceApigeeApiImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/apis/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + 
"^organizations/(?P[^/]+)/apis/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go index 8a5d6ac30957..635838873cd0 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go @@ -223,8 +223,8 @@ func resourceApigeeFlowhookDelete(d *schema.ResourceData, meta interface{}) erro func resourceApigeeFlowhookImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go index 48747a85d5b9..03d842bfc305 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go @@ -362,8 +362,8 @@ func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, met func resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + 
"^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go index 12b1f85fc0ef..ef6a2655cf39 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go @@ -299,8 +299,8 @@ func ResourceApigeeKeystoresAliasesPkcs12Delete(d *schema.ResourceData, meta int func ResourceApigeeKeystoresAliasesPkcs12Import(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go index 4820b95768d7..3b0eba665a4c 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go @@ -324,8 +324,8 @@ func resourceApigeeSharedFlowDelete(d *schema.ResourceData, meta interface{}) er func resourceApigeeSharedFlowImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/sharedflows/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + 
"^organizations/(?P[^/]+)/sharedflows/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go index 7cd90e8676b0..d6c715152bbd 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go @@ -228,8 +228,8 @@ func resourceApigeeSharedflowDeploymentDelete(d *schema.ResourceData, meta inter func resourceApigeeSharedflowDeploymentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index dce185ebebf6..bebe08d56791 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -3501,9 +3501,9 @@ func flattenSerDeInfo(si *bigquery.SerDeInfo) []map[string]interface{} { func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + 
"^projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go index 4a26b6e82a8b..b34ff48e2388 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go @@ -355,9 +355,9 @@ func resourceBigtableAuthorizedViewDestroy(d *schema.ResourceData, meta interfac func resourceBigtableAuthorizedViewImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index 1487ce288b7e..c51d6b8c68ff 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -829,9 +829,9 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo func resourceBigtableInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - 
"(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index 66df55fb8b2c..91cddb12b30d 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -686,9 +686,9 @@ func FlattenColumnFamily(families []bigtable.FamilyInfo) ([]map[string]interface func resourceBigtableTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index 5fbab6b22280..739b37d86a47 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -65,9 +65,9 @@ func (s *CloudFunctionId) locationId() string { func parseCloudFunctionId(d *schema.ResourceData, config *transport_tpg.Config) (*CloudFunctionId, error) { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return 
nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 46ea496e1388..718a857dab8d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -3436,9 +3436,9 @@ func resourceComputeInstanceDelete(d *schema.ResourceData, meta interface{}) err func resourceComputeInstanceImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl index b5782de796cb..af13d60d0d6b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl @@ -455,9 +455,9 @@ func resourceComputeInstanceGroupDelete(d *schema.ResourceData, meta interface{} func resourceComputeInstanceGroupImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl index fdf16116aa12..4f10ee9db007 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl @@ -184,8 +184,8 @@ func resourceComputeProjectMetadataItemDelete(d *schema.ResourceData, meta inter func resourceComputeProjectMetadataItemImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/meta-data/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/meta-data/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl index d5617b8e3e96..612ec7399dd1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl @@ -571,10 +571,10 @@ func resourceComputeTargetPoolDelete(d *schema.ResourceData, meta interface{}) e func resourceTargetPoolStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git 
a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go index a9f713075b73..a8e74f92a90d 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go +++ b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go @@ -648,9 +648,9 @@ func resourceDnsRecordSetUpdate(d *schema.ResourceData, meta interface{}) error func resourceDnsRecordSetImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go index 5ae751ff3792..ad832cd01496 100644 --- a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go +++ b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go @@ -1445,9 +1445,9 @@ func resourceOSConfigOSPolicyAssignmentDelete(d *schema.ResourceData, meta inter func resourceOSConfigOSPolicyAssignmentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); 
err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go index 18ec055b54d1..6e17b8d7603b 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go @@ -47,9 +47,9 @@ func resourceFolderOrgPolicyImporter(d *schema.ResourceData, meta interface{}) ( config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "folders/(?P[^/]+)/constraints/(?P[^/]+)", - "folders/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)"}, + "^folders/(?P[^/]+)/constraints/(?P[^/]+)$", + "^folders/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go index ef27f2aacb9a..9b4828db0ab8 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go @@ -238,9 +238,9 @@ func resourceGoogleProjectIamCustomRoleDelete(d *schema.ResourceData, meta inter func resourceGoogleProjectIamCustomRoleImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/roles/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/roles/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go index 503117edf5b8..88ae902d0783 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go @@ -47,9 +47,9 @@ func resourceProjectOrgPolicyImporter(d *schema.ResourceData, meta interface{}) config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+):constraints/(?P[^/]+)", - "(?P[^/]+):constraints/(?P[^/]+)", - "(?P[^/]+):(?P[^/]+)"}, + "^projects/(?P[^/]+):constraints/(?P[^/]+)$", + "^(?P[^/]+):constraints/(?P[^/]+)$", + "^(?P[^/]+):(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index fd50b35de26e..21e829a6374a 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -321,9 +321,9 @@ func resourceGoogleServiceAccountUpdate(d *schema.ResourceData, meta interface{} func resourceGoogleServiceAccountImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)"}, d, config); err != nil { + "^projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 858c7ce3afb3..4dbd333cb162 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -2335,9 +2335,9 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)"}, d, config); err != nil { + "^projects/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/tpgtools/ignored_handwritten/custom_import.go b/tpgtools/ignored_handwritten/custom_import.go index be5cfbc9c300..b50236160e00 100644 --- a/tpgtools/ignored_handwritten/custom_import.go +++ b/tpgtools/ignored_handwritten/custom_import.go @@ -10,8 +10,8 @@ import ( func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/repos/(?P.+)", - "(?P.+)", + "^projects/(?P[^/]+)/repos/(?P.+)$", + "^(?P.+)$", }, d, config); err != nil { return err } @@ -28,8 +28,8 @@ func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) erro func runtimeconfigVariableImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)", - "(?P[^/]+)/(?P.+)", + "^projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)$", + "^(?P[^/]+)/(?P.+)$", }, d, config); err != nil { return err } From a3d65ddb3ecdeb05646d894e2a34776f81c4e2b1 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 4 Aug 2025 14:46:23 -0700 Subject: [PATCH 
680/884] Redirected CI test PRs to Scott (#14735) --- .github/workflows/request-reviewer.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/request-reviewer.yml b/.github/workflows/request-reviewer.yml index b8e8d80043ff..d0e0c83540f4 100644 --- a/.github/workflows/request-reviewer.yml +++ b/.github/workflows/request-reviewer.yml @@ -37,5 +37,9 @@ jobs: cd .ci/magician go build . - name: Request reviewer + if: ${{ github.event.issue.user.login }} != 'copybara-service' run: .ci/magician/magician request-reviewer ${{ github.event.pull_request.number || github.event.issue.number }} + - name: Request reviewer (copybara) + if: ${{ github.event.issue.user.login }} == 'copybara-service' + run: gh pr edit ${{ github.event.pull_request.number || github.event.issue.number }} --add-reviewer "@ScottSuarez" From 300fd02f0804df1144861c8d6cf0206fd3a01e08 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 4 Aug 2025 15:50:55 -0700 Subject: [PATCH 681/884] Fixed GH token env var name (#14739) --- .github/workflows/request-reviewer.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/request-reviewer.yml b/.github/workflows/request-reviewer.yml index d0e0c83540f4..d1e852f4a8c8 100644 --- a/.github/workflows/request-reviewer.yml +++ b/.github/workflows/request-reviewer.yml @@ -41,5 +41,7 @@ jobs: run: .ci/magician/magician request-reviewer ${{ github.event.pull_request.number || github.event.issue.number }} - name: Request reviewer (copybara) if: ${{ github.event.issue.user.login }} == 'copybara-service' + env: + GH_TOKEN: ${{secrets.GITHUB_TOKEN}} run: gh pr edit ${{ github.event.pull_request.number || github.event.issue.number }} --add-reviewer "@ScottSuarez" From fb641335b851e1e861507b0c42a6220fe7cdd21c Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 4 Aug 2025 15:58:46 -0700 Subject: [PATCH 682/884] Set up action to override breaking changes failures (#14740) --- 
.github/workflows/override-labels.yml | 29 +++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 .github/workflows/override-labels.yml diff --git a/.github/workflows/override-labels.yml b/.github/workflows/override-labels.yml new file mode 100644 index 000000000000..a658bf452d53 --- /dev/null +++ b/.github/workflows/override-labels.yml @@ -0,0 +1,29 @@ +name: override-labels + +permissions: read-all + +on: + pull_request_target: + types: [labeled, unlabeled] + + +jobs: + override-breaking-change: + runs-on: ubuntu-22.04 + if: github.event.label.name == 'override-breaking-change' + permissions: + statuses: write + env: + STATE: "${{ github.event.action == 'labeled' && 'success' || 'failure' }}" + DESCRIPTION: "${{ github.event.action == 'labeled' && 'override-breaking-change applied' || 'override-breaking-change removed' }}" + steps: + - name: Override breaking changes label applied + shell: bash + run: | + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.head.sha }} \ + -d '{"state":"${{ env.STATE }}","description":"${{ env.DESCRIPTION }}","context":"terraform-provider-breaking-change-test"}' From 3e1875aeb84685f4c8c52c5b1338d5b1b2bac3ec Mon Sep 17 00:00:00 2001 From: Salome Papiashvili Date: Tue, 5 Aug 2025 01:22:13 +0200 Subject: [PATCH 683/884] Add service account to datasource tests (#14730) --- ...source_google_composer_environment_test.go | 15 ++++++++++++++- ...composer_user_workloads_config_map_test.go | 15 +++++++++++++++ ...gle_composer_user_workloads_secret_test.go | 19 +++++++++++++++++-- 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go 
b/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go index c73feb1b1e63..e8bf78d776c6 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go @@ -15,7 +15,8 @@ func TestAccDataSourceComposerEnvironment_basic(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), + "random_suffix": acctest.RandString(t, 10), + "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -79,6 +80,7 @@ func testAccCheckGoogleComposerEnvironmentMeta(n string) resource.TestCheckFunc func testAccDataSourceComposerEnvironment_basic(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" {} resource "google_composer_environment" "test" { name = "tf-test-composer-env-%{random_suffix}" region = "us-central1" @@ -88,6 +90,7 @@ resource "google_composer_environment" "test" { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link zone = "us-central1-a" + service_account = google_service_account.test.name } software_config { image_version = "composer-1-airflow-2" @@ -96,6 +99,7 @@ resource "google_composer_environment" "test" { labels = { my-label = "my-label-value" } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -116,5 +120,14 @@ data "google_composer_environment" "test" { name = google_composer_environment.test.name region = google_composer_environment.test.region } +resource "google_service_account" "test" { + account_id = "%{service_account}" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id 
+ role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} `, context) } diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go index ea8e664c2773..d2c6b856a2aa 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go @@ -14,6 +14,7 @@ func TestAccDataSourceComposerUserWorkloadsConfigMap_basic(t *testing.T) { context := map[string]interface{}{ "env_name": fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)), "config_map_name": fmt.Sprintf("tf-test-composer-config-map-%d", acctest.RandInt(t)), + "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -33,13 +34,18 @@ func TestAccDataSourceComposerUserWorkloadsConfigMap_basic(t *testing.T) { func testAccDataSourceComposerUserWorkloadsConfigMap_basic(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" {} resource "google_composer_environment" "test" { name = "%{env_name}" config { software_config { image_version = "composer-3-airflow-2" } + node_config { + service_account = google_service_account.test.name + } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "test" { environment = google_composer_environment.test.name @@ -53,5 +59,14 @@ data "google_composer_user_workloads_config_map" "test" { name = google_composer_user_workloads_config_map.test.name environment = google_composer_environment.test.name } +resource "google_service_account" "test" { + account_id = "%{service_account}" + display_name = "Test Service Account for Composer Environment" +} 
+resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} `, context) } diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go index 713f3b5ac1c5..3e7b477cada6 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go @@ -15,8 +15,9 @@ func TestAccDataSourceComposerUserWorkloadsSecret_basic(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "env_name": fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)), - "secret_name": fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)), + "env_name": fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)), + "secret_name": fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)), + "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -76,13 +77,18 @@ func checkSecretDataSourceMatchesResource() resource.TestCheckFunc { func testAccDataSourceComposerUserWorkloadsSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" {} resource "google_composer_environment" "test" { name = "%{env_name}" config { software_config { image_version = "composer-3-airflow-2" } + node_config { + service_account = google_service_account.test.name + } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_secret" "test" { environment = google_composer_environment.test.name @@ -96,5 +102,14 @@ data 
"google_composer_user_workloads_secret" "test" { name = google_composer_user_workloads_secret.test.name environment = google_composer_environment.test.name } +resource "google_service_account" "test" { + account_id = "%{service_account}" + display_name = "Test Service Account for Composer Environment" +} +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} `, context) } From 378a1e421331cb3bae7985b51ba6063105621427 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 4 Aug 2025 16:43:46 -0700 Subject: [PATCH 684/884] Fixed GHA step conditions (#14741) --- .github/workflows/request-reviewer.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/request-reviewer.yml b/.github/workflows/request-reviewer.yml index d1e852f4a8c8..c5c3ccbb7b26 100644 --- a/.github/workflows/request-reviewer.yml +++ b/.github/workflows/request-reviewer.yml @@ -37,11 +37,10 @@ jobs: cd .ci/magician go build . 
- name: Request reviewer - if: ${{ github.event.issue.user.login }} != 'copybara-service' + if: ${{ github.event.issue.user.login != 'copybara-service' }} run: .ci/magician/magician request-reviewer ${{ github.event.pull_request.number || github.event.issue.number }} - name: Request reviewer (copybara) - if: ${{ github.event.issue.user.login }} == 'copybara-service' + if: ${{ github.event.issue.user.login == 'copybara-service' }} env: GH_TOKEN: ${{secrets.GITHUB_TOKEN}} run: gh pr edit ${{ github.event.pull_request.number || github.event.issue.number }} --add-reviewer "@ScottSuarez" - From 8f0886b9c4f191312ea4601fa682f6f098e1b15b Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 5 Aug 2025 12:55:43 -0400 Subject: [PATCH 685/884] moving teamcity test scripts for difftesting (#14738) --- {scripts => mmv1/third_party/teamcitytestscripts}/main.go | 0 {scripts => mmv1/third_party/teamcitytestscripts}/teamcity.go | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {scripts => mmv1/third_party/teamcitytestscripts}/main.go (100%) rename {scripts => mmv1/third_party/teamcitytestscripts}/teamcity.go (100%) diff --git a/scripts/main.go b/mmv1/third_party/teamcitytestscripts/main.go similarity index 100% rename from scripts/main.go rename to mmv1/third_party/teamcitytestscripts/main.go diff --git a/scripts/teamcity.go b/mmv1/third_party/teamcitytestscripts/teamcity.go similarity index 100% rename from scripts/teamcity.go rename to mmv1/third_party/teamcitytestscripts/teamcity.go From 684d09e93ca0841fc71f151a61bf8deed3b565f7 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 5 Aug 2025 13:02:06 -0400 Subject: [PATCH 686/884] difftest logic moving to a different file (#14737) --- .../terraform/acctest/diff_utils.go | 202 +++++++++++++++++ .../terraform/acctest/diff_utils_test.go | 206 ++++++++++++++++++ .../terraform/acctest/vcr_utils.go | 188 ---------------- .../terraform/acctest/vcr_utils_test.go | 199 ----------------- 4 files changed, 408 
insertions(+), 387 deletions(-) create mode 100644 mmv1/third_party/terraform/acctest/diff_utils.go create mode 100644 mmv1/third_party/terraform/acctest/diff_utils_test.go diff --git a/mmv1/third_party/terraform/acctest/diff_utils.go b/mmv1/third_party/terraform/acctest/diff_utils.go new file mode 100644 index 000000000000..d101364e0c4e --- /dev/null +++ b/mmv1/third_party/terraform/acctest/diff_utils.go @@ -0,0 +1,202 @@ +package acctest + +import ( + "fmt" + "io" + "os" + "path/filepath" + "reflect" + "regexp" + "strings" + + "github.com/hashicorp/terraform-plugin-go/tfprotov5" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func isReleaseDiffEnabled() bool { + releaseDiff := os.Getenv("RELEASE_DIFF") + return releaseDiff != "" +} + +func initializeReleaseDiffTest(c resource.TestCase, testName string, tempOutputFile *os.File) resource.TestCase { + var releaseProvider string + packagePath := fmt.Sprint(reflect.TypeOf(transport_tpg.Config{}).PkgPath()) + if strings.Contains(packagePath, "google-beta") { + releaseProvider = "google-beta" + } else { + releaseProvider = "google" + } + + if c.ExternalProviders != nil { + c.ExternalProviders[releaseProvider] = resource.ExternalProvider{} + } else { + c.ExternalProviders = map[string]resource.ExternalProvider{ + releaseProvider: { + // if left empty fetches most recent release provider + }, + } + } + + localProviderName := "google-local" + if c.Providers != nil { + c.Providers = map[string]*schema.Provider{ + localProviderName: GetSDKProvider(testName), + } + c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ + localProviderName: func() (tfprotov5.ProviderServer, error) { + return nil, nil + }, + } + } else { + c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ + localProviderName: func() 
(tfprotov5.ProviderServer, error) { + provider, err := MuxedProviders(testName) + return provider(), err + }, + } + } + // InsertDiffSteps adds modified steps to the test that run with an external provider + // these steps do the actual infrastructure provisioning, and c.Steps is updated in the method to have the modified steps + c = InsertDiffSteps(c, tempOutputFile, releaseProvider, localProviderName) + return c +} + +// InsertDiffSteps inserts a new step into the test case that reformats the config to use the release provider - this allows us to see the diff +// between the local provider and the release provider +func InsertDiffSteps(c resource.TestCase, tempOutputFile *os.File, releaseProvider string, localProviderName string) resource.TestCase { + var countSteps = 0 + + var replacementSteps []resource.TestStep + for _, testStep := range c.Steps { + countSteps++ + if testStep.Config != "" { + ogConfig := testStep.Config + fmt.Fprintf(tempOutputFile, "[DEBUG] Original config: %s\n", ogConfig) + testStep.Config = ReformConfigWithProvider(ogConfig, localProviderName) + fmt.Fprintf(tempOutputFile, "[DEBUG] Reformatted config: %s\n", testStep.Config) + testStep.PreConfig = func() { + fmt.Fprintf(tempOutputFile, "%s Step %d\n", diffFlag, countSteps) + } + if testStep.ExpectError == nil && !testStep.PlanOnly { + newStep := resource.TestStep{ + PreConfig: func() { + fmt.Fprintf(tempOutputFile, "Regular Step %d\n", countSteps) + }, + Config: ReformConfigWithProvider(ogConfig, releaseProvider), + } + testStep.PlanOnly = true + testStep.ExpectNonEmptyPlan = false + replacementSteps = append(replacementSteps, newStep) + } + replacementSteps = append(replacementSteps, testStep) + } else { + replacementSteps = append(replacementSteps, testStep) + } + } + c.Steps = replacementSteps + return c +} + +// reformConfigWithProvider reformats the config to use the given provider +// The method matches a regex for the provider block and replaces it with the given provider. 
+// For example: ' data "google_compute_network" "default" { provider = "google-local" } ' +// will be reformatted to ' data "google_compute_network" "default" { provider = "google-beta" } ' +func ReformConfigWithProvider(config, provider string) string { + configBytes := []byte(config) + providerReplacement := fmt.Sprintf("provider = %s", provider) + providerReplacementBytes := []byte(providerReplacement) + providerBlock := regexp.MustCompile(`provider *=.*google-beta.*`) + + if providerBlock.Match(configBytes) { + out := string(providerBlock.ReplaceAll(configBytes, providerReplacementBytes)) + return out + } + + providerReplacement = fmt.Sprintf("${1}\n %s\n", providerReplacement) + providerReplacementBytes = []byte(providerReplacement) + // Match resource and data blocks that use google_ provider + // regex matches for labels resource and data blocks that use google_ provider + + resourceHeader := regexp.MustCompile(`((resource|data) .*google_.* .*\w+.*\{ *)`) + return string(resourceHeader.ReplaceAll(configBytes, providerReplacementBytes)) +} + +// ReadDiffOutput reads the outputted temporary file and returns its contents +func ReadDiffOutput(f *os.File) (string, error) { + if f == nil { + return "", fmt.Errorf("file handle is nil") + } + + // Seek to the beginning of the file in case it was just written to. + if _, err := f.Seek(0, io.SeekStart); err != nil { + return "", fmt.Errorf("failed to seek to beginning of file: %w", err) + } + + // Read the entire file content. 
+ content, err := os.ReadFile(f.Name()) + if err != nil { + return "", fmt.Errorf("failed to read file: %w", err) + } + + return string(content), nil +} + +// parseReleaseDiffOutput reads the temporary file created during the release diff test and returns whether the last line has a [Diff] flag, the test output, and any errors +func ParseReleaseDiffOutput(output string) (isDiff bool) { + trimmedOutput := strings.TrimSpace(output) + if trimmedOutput == "" { + return false + } + + lines := strings.Split(trimmedOutput, "\n") + lastLine := lines[len(lines)-1] + + isDiff = strings.HasPrefix(lastLine, diffFlag) + + return isDiff +} + +func writeOutputFileDeferFunction(tempOutputFile *os.File, failed bool) { + if tempOutputFile == nil { + return + } + // parses the temporary file created during the release diff test and returns the last line of output + // This is useful for extracting the diff output from the file after the test has run + + testOutput, err := ReadDiffOutput(tempOutputFile) + if err != nil { + fmt.Printf("Error reading temporary file: %v\n", err) + return + } + isDiff := ParseReleaseDiffOutput(testOutput) + tempOutputFile.Close() + err = os.Remove(tempOutputFile.Name()) + if err != nil { + fmt.Printf("Temporary File Deletion Error: %v\n", err) + } + regularFailureFile, err := os.Create(filepath.Join("", "regular_failure_file.log")) + if err != nil { + fmt.Printf("Error creating file: %v\n", err) + return + } + defer regularFailureFile.Close() + diffFailureFile, err := os.Create(filepath.Join("", "diff_failure_file.log")) + if err != nil { + fmt.Printf("Error creating file: %v\n", err) + return + } + defer diffFailureFile.Close() + if failed { + // Check if the output line starts with "[Diff]" + if isDiff { + fmt.Fprintf(os.Stdout, "%s Breaking Change Detected] \n", diffFlag) + fmt.Fprintf(diffFailureFile, "%s %s\n", diffFlag, testOutput) + } else { + fmt.Fprintf(regularFailureFile, testOutput) + fmt.Fprintf(regularFailureFile, "FAILED --- %s\n", 
testOutput) + } + } +} diff --git a/mmv1/third_party/terraform/acctest/diff_utils_test.go b/mmv1/third_party/terraform/acctest/diff_utils_test.go new file mode 100644 index 000000000000..c5ed0a0dc4ca --- /dev/null +++ b/mmv1/third_party/terraform/acctest/diff_utils_test.go @@ -0,0 +1,206 @@ +package acctest_test + +import ( + "os" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestReformConfigWithProvider(t *testing.T) { + + type testCase struct { + name string + initialConfig string + providerToInsert string + expectedConfig string + } + + cases := map[string]testCase{ + "replaces_google_beta_with_local": { + name: "Replaces 'google-beta' provider with 'google-local'", + initialConfig: `resource "google_new_resource" { + provider = google-beta +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_new_resource" { + provider = google-local +}`, + }, + "inserts_local_provider_into_empty_config": { + name: "Inserts 'google-local' provider when no provider block exists", + initialConfig: `resource "google_alloydb_cluster" "default" { + location = "us-central1" + network_config { + network = google_compute_network.default.id + } +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_alloydb_cluster" "default" { + provider = google-local + + location = "us-central1" + network_config { + network = google_compute_network.default.id + } +}`, + }, + "no_change_if_target_provider_already_present": { + name: "Does not change config if target provider is already present", + initialConfig: `resource "google_new_resource" { + provider = google-local +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_new_resource" { + provider = google-local +}`, + }, + "inserts_provider_with_other_attributes": { + name: "Inserts provider into a 
resource block with other attributes but no existing provider", + initialConfig: `resource "google_compute_instance" "test" { + name = "test-instance" + machine_type = "e2-medium" +}`, + providerToInsert: "google-local", + expectedConfig: `resource "google_compute_instance" "test" { + provider = google-local + + name = "test-instance" + machine_type = "e2-medium" +}`, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + newConfig := acctest.ReformConfigWithProvider(tc.initialConfig, tc.providerToInsert) + + if newConfig != tc.expectedConfig { + t.Fatalf("Test Case: %s\nExpected config to be reformatted to:\n%q\nbut got:\n%q", tc.name, tc.expectedConfig, newConfig) + } + t.Logf("Test Case: %s\nReformed config:\n%s", tc.name, newConfig) + }) + } +} + +func TestInsertDiffSteps(t *testing.T) { + + var dummyCase = resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + ResourceName: "google_pubsub_subscription.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"topic"}, + }, + { + Config: `resource "google_example_widget" "foo" { + name = "dummy" + provider = google-beta + }`, + Check: resource.ComposeTestCheckFunc( + func(*terraform.State) error { return nil }, + ), + }, + { + Config: `provider = "google-local" + // ... 
configuration that is expected to cause an error + `, + ExpectError: regexp.MustCompile(`"restore_continuous_backup_source": conflicts with restore_backup_source`), + }, + }, + } + temp_file, err := os.CreateTemp("", "release_diff_test_output_*.log") + if err != nil { + t.Fatalf("Failed to create temp file: %v", err) + } + dummyCase = acctest.InsertDiffSteps(dummyCase, temp_file, "google-beta", "google-local") + + // Expected steps after InsertDiffSteps runs. + // A "diff" step (using 'google-local') is added for each original step containing a Config field, + // unless the step has ExpectError set. + var expectedSteps = []resource.TestStep{ + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-local + }`, + ExpectNonEmptyPlan: false, + PlanOnly: true, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-beta + }`, + }, + { + Config: `resource "google_new_resource" "original" { + provider = google-local + }`, + ExpectNonEmptyPlan: false, + PlanOnly: true, + }, + { + ResourceName: "google_pubsub_subscription.example", // No config, so no diff step added + }, + { + Config: `resource "google_example_widget" "foo" { + name = "dummy" + provider = google-beta + }`, + Check: resource.ComposeTestCheckFunc( + func(*terraform.State) error { return nil }, + ), + }, + { + Config: `resource "google_example_widget" "foo" { + name = "dummy" + provider = google-local + }`, + Check: resource.ComposeTestCheckFunc( + func(*terraform.State) error { return nil }, + ), + ExpectNonEmptyPlan: false, + PlanOnly: true, + }, + { + Config: `provider = "google-local" + // ... 
configuration that is expected to cause an error + `, // expect error means we don't do a second step + }, + } + + if len(dummyCase.Steps) != len(expectedSteps) { + t.Fatalf("Expected %d steps, but got %d", len(expectedSteps), len(dummyCase.Steps)) + } + + for i, step := range dummyCase.Steps { + if step.Config != expectedSteps[i].Config { + t.Fatalf("Expected step %d config to be:\n%q\nbut got:\n%q", i, expectedSteps[i].Config, step.Config) + } + if step.PlanOnly != expectedSteps[i].PlanOnly { + t.Fatalf("Expected step %d to have PlanOnly set to %v, but got %v", i, expectedSteps[i].PlanOnly, step.PlanOnly) + } + } + + defer os.Remove(temp_file.Name()) +} diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index a7ef40efbf10..628f2837f64e 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -6,7 +6,6 @@ import ( "encoding/json" "errors" "fmt" - "io" "io/ioutil" "log" "math/rand" @@ -14,7 +13,6 @@ import ( "os" "path/filepath" "reflect" - "regexp" "slices" "strconv" "strings" @@ -33,7 +31,6 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" fwDiags "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/provider" - "github.com/hashicorp/terraform-plugin-go/tfprotov5" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -223,191 +220,6 @@ func closeRecorder(t *testing.T) { } } -func isReleaseDiffEnabled() bool { - releaseDiff := os.Getenv("RELEASE_DIFF") - return releaseDiff != "" -} - -func initializeReleaseDiffTest(c resource.TestCase, testName string, tempOutputFile *os.File) resource.TestCase { - var releaseProvider string - packagePath := fmt.Sprint(reflect.TypeOf(transport_tpg.Config{}).PkgPath()) - if strings.Contains(packagePath, "google-beta") { - 
releaseProvider = "google-beta" - } else { - releaseProvider = "google" - } - - if c.ExternalProviders != nil { - c.ExternalProviders[releaseProvider] = resource.ExternalProvider{} - } else { - c.ExternalProviders = map[string]resource.ExternalProvider{ - releaseProvider: { - // if left empty fetches most recent release provider - }, - } - } - - localProviderName := "google-local" - if c.Providers != nil { - c.Providers = map[string]*schema.Provider{ - localProviderName: GetSDKProvider(testName), - } - c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ - localProviderName: func() (tfprotov5.ProviderServer, error) { - return nil, nil - }, - } - } else { - c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ - localProviderName: func() (tfprotov5.ProviderServer, error) { - provider, err := MuxedProviders(testName) - return provider(), err - }, - } - } - // InsertDiffSteps adds modified steps to the test that run with an external provider - // these steps do the actual infrastructure provisioning, and c.Steps is updated in the method to have the modified steps - c = InsertDiffSteps(c, tempOutputFile, releaseProvider, localProviderName) - return c -} - -// InsertDiffSteps inserts a new step into the test case that reformats the config to use the release provider - this allows us to see the diff -// between the local provider and the release provider -func InsertDiffSteps(c resource.TestCase, tempOutputFile *os.File, releaseProvider string, localProviderName string) resource.TestCase { - var countSteps = 0 - - var replacementSteps []resource.TestStep - for _, testStep := range c.Steps { - countSteps++ - if testStep.Config != "" { - ogConfig := testStep.Config - fmt.Fprintf(tempOutputFile, "[DEBUG] Original config: %s\n", ogConfig) - testStep.Config = ReformConfigWithProvider(ogConfig, localProviderName) - fmt.Fprintf(tempOutputFile, "[DEBUG] Reformatted config: %s\n", testStep.Config) - testStep.PreConfig = 
func() { - fmt.Fprintf(tempOutputFile, "%s Step %d\n", diffFlag, countSteps) - } - if testStep.ExpectError == nil && !testStep.PlanOnly { - newStep := resource.TestStep{ - PreConfig: func() { - fmt.Fprintf(tempOutputFile, "Regular Step %d\n", countSteps) - }, - Config: ReformConfigWithProvider(ogConfig, releaseProvider), - } - testStep.PlanOnly = true - testStep.ExpectNonEmptyPlan = false - replacementSteps = append(replacementSteps, newStep) - } - replacementSteps = append(replacementSteps, testStep) - } else { - replacementSteps = append(replacementSteps, testStep) - } - } - c.Steps = replacementSteps - return c -} - -// reformConfigWithProvider reformats the config to use the given provider -// The method matches a regex for the provider block and replaces it with the given provider. -// For example: ' data "google_compute_network" "default" { provider = "google-local" } ' -// will be reformatted to ' data "google_compute_network" "default" { provider = "google-beta" } ' -func ReformConfigWithProvider(config, provider string) string { - configBytes := []byte(config) - providerReplacement := fmt.Sprintf("provider = %s", provider) - providerReplacementBytes := []byte(providerReplacement) - providerBlock := regexp.MustCompile(`provider *=.*google-beta.*`) - - if providerBlock.Match(configBytes) { - out := string(providerBlock.ReplaceAll(configBytes, providerReplacementBytes)) - return out - } - - providerReplacement = fmt.Sprintf("${1}\n %s\n", providerReplacement) - providerReplacementBytes = []byte(providerReplacement) - // Match resource and data blocks that use google_ provider - // regex matches for labels resource and data blocks that use google_ provider - - resourceHeader := regexp.MustCompile(`((resource|data) .*google_.* .*\w+.*\{ *)`) - return string(resourceHeader.ReplaceAll(configBytes, providerReplacementBytes)) -} - -// ReadDiffOutput reads the outputted temporary file and returns its contents -func ReadDiffOutput(f *os.File) (string, error) { - if f 
== nil { - return "", fmt.Errorf("file handle is nil") - } - - // Seek to the beginning of the file in case it was just written to. - if _, err := f.Seek(0, io.SeekStart); err != nil { - return "", fmt.Errorf("failed to seek to beginning of file: %w", err) - } - - // Read the entire file content. - content, err := os.ReadFile(f.Name()) - if err != nil { - return "", fmt.Errorf("failed to read file: %w", err) - } - - return string(content), nil -} - -// parseReleaseDiffOutput reads the temporary file created during the release diff test and returns whether the last line has a [Diff] flag, the test output, and any errors -func ParseReleaseDiffOutput(output string) (isDiff bool) { - trimmedOutput := strings.TrimSpace(output) - if trimmedOutput == "" { - return false - } - - lines := strings.Split(trimmedOutput, "\n") - lastLine := lines[len(lines)-1] - - isDiff = strings.HasPrefix(lastLine, diffFlag) - - return isDiff -} - -func writeOutputFileDeferFunction(tempOutputFile *os.File, failed bool) { - if tempOutputFile == nil { - return - } - // parses the temporary file created during the release diff test and returns the last line of output - // This is useful for extracting the diff output from the file after the test has run - - testOutput, err := ReadDiffOutput(tempOutputFile) - if err != nil { - fmt.Printf("Error reading temporary file: %v\n", err) - return - } - isDiff := ParseReleaseDiffOutput(testOutput) - tempOutputFile.Close() - err = os.Remove(tempOutputFile.Name()) - if err != nil { - fmt.Printf("Temporary File Deletion Error: %v\n", err) - } - regularFailureFile, err := os.Create(filepath.Join("", "regular_failure_file.log")) - if err != nil { - fmt.Printf("Error creating file: %v\n", err) - return - } - defer regularFailureFile.Close() - diffFailureFile, err := os.Create(filepath.Join("", "diff_failure_file.log")) - if err != nil { - fmt.Printf("Error creating file: %v\n", err) - return - } - defer diffFailureFile.Close() - if failed { - // Check if the 
output line starts with "[Diff]" - if isDiff { - fmt.Fprintf(os.Stdout, "%s Breaking Change Detected] \n", diffFlag) - fmt.Fprintf(diffFailureFile, "%s %s\n", diffFlag, testOutput) - } else { - fmt.Fprintf(regularFailureFile, testOutput) - fmt.Fprintf(regularFailureFile, "FAILED --- %s\n", testOutput) - } - } -} - // HandleVCRConfiguration configures the recorder (github.com/dnaeon/go-vcr/recorder) used in the VCR test // This includes: // - Setting the recording/replaying mode diff --git a/mmv1/third_party/terraform/acctest/vcr_utils_test.go b/mmv1/third_party/terraform/acctest/vcr_utils_test.go index f3b6c701b0bf..7ed5b87ecce2 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils_test.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils_test.go @@ -7,13 +7,9 @@ import ( "io" "net/http" "net/url" - "os" - "regexp" "testing" "github.com/dnaeon/go-vcr/cassette" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -377,198 +373,3 @@ func prepareCassetteRequest(d requestDescription) cassette.Request { return req } - -func TestReformConfigWithProvider(t *testing.T) { - - type testCase struct { - name string - initialConfig string - providerToInsert string - expectedConfig string - } - - cases := map[string]testCase{ - "replaces_google_beta_with_local": { - name: "Replaces 'google-beta' provider with 'google-local'", - initialConfig: `resource "google_new_resource" { - provider = google-beta -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_new_resource" { - provider = google-local -}`, - }, - "inserts_local_provider_into_empty_config": { - name: "Inserts 'google-local' provider when no provider block exists", - initialConfig: `resource "google_alloydb_cluster" "default" { - location = "us-central1" - network_config { - network = google_compute_network.default.id - } -}`, - providerToInsert: 
"google-local", - expectedConfig: `resource "google_alloydb_cluster" "default" { - provider = google-local - - location = "us-central1" - network_config { - network = google_compute_network.default.id - } -}`, - }, - "no_change_if_target_provider_already_present": { - name: "Does not change config if target provider is already present", - initialConfig: `resource "google_new_resource" { - provider = google-local -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_new_resource" { - provider = google-local -}`, - }, - "inserts_provider_with_other_attributes": { - name: "Inserts provider into a resource block with other attributes but no existing provider", - initialConfig: `resource "google_compute_instance" "test" { - name = "test-instance" - machine_type = "e2-medium" -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_compute_instance" "test" { - provider = google-local - - name = "test-instance" - machine_type = "e2-medium" -}`, - }, - } - - for tn, tc := range cases { - t.Run(tn, func(t *testing.T) { - newConfig := acctest.ReformConfigWithProvider(tc.initialConfig, tc.providerToInsert) - - if newConfig != tc.expectedConfig { - t.Fatalf("Test Case: %s\nExpected config to be reformatted to:\n%q\nbut got:\n%q", tc.name, tc.expectedConfig, newConfig) - } - t.Logf("Test Case: %s\nReformed config:\n%s", tc.name, newConfig) - }) - } -} - -func TestInsertDiffSteps(t *testing.T) { - - var dummyCase = resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - ResourceName: "google_pubsub_subscription.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"topic"}, - }, - { - Config: 
`resource "google_example_widget" "foo" { - name = "dummy" - provider = google-beta - }`, - Check: resource.ComposeTestCheckFunc( - func(*terraform.State) error { return nil }, - ), - }, - { - Config: `provider = "google-local" - // ... configuration that is expected to cause an error - `, - ExpectError: regexp.MustCompile(`"restore_continuous_backup_source": conflicts with restore_backup_source`), - }, - }, - } - temp_file, err := os.CreateTemp("", "release_diff_test_output_*.log") - if err != nil { - t.Fatalf("Failed to create temp file: %v", err) - } - dummyCase = acctest.InsertDiffSteps(dummyCase, temp_file, "google-beta", "google-local") - - // Expected steps after InsertDiffSteps runs. - // A "diff" step (using 'google-local') is added for each original step containing a Config field, - // unless the step has ExpectError set. - var expectedSteps = []resource.TestStep{ - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-local - }`, - ExpectNonEmptyPlan: false, - PlanOnly: true, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-local - }`, - ExpectNonEmptyPlan: false, - PlanOnly: true, - }, - { - ResourceName: "google_pubsub_subscription.example", // No config, so no diff step added - }, - { - Config: `resource "google_example_widget" "foo" { - name = "dummy" - provider = google-beta - }`, - Check: resource.ComposeTestCheckFunc( - func(*terraform.State) error { return nil }, - ), - }, - { - Config: `resource "google_example_widget" "foo" { - name = "dummy" - provider = google-local - }`, - Check: resource.ComposeTestCheckFunc( - func(*terraform.State) error { return nil }, - ), - ExpectNonEmptyPlan: false, - PlanOnly: true, - }, - { - Config: `provider = "google-local" - // ... 
configuration that is expected to cause an error - `, // expect error means we don't do a second step - }, - } - - if len(dummyCase.Steps) != len(expectedSteps) { - t.Fatalf("Expected %d steps, but got %d", len(expectedSteps), len(dummyCase.Steps)) - } - - for i, step := range dummyCase.Steps { - if step.Config != expectedSteps[i].Config { - t.Fatalf("Expected step %d config to be:\n%q\nbut got:\n%q", i, expectedSteps[i].Config, step.Config) - } - if step.PlanOnly != expectedSteps[i].PlanOnly { - t.Fatalf("Expected step %d to have PlanOnly set to %v, but got %v", i, expectedSteps[i].PlanOnly, step.PlanOnly) - } - } - - defer os.Remove(temp_file.Name()) -} From 028523ecd78936c24b14c6f08bf13c4421ab320c Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 5 Aug 2025 10:38:16 -0700 Subject: [PATCH 687/884] Fix GCE shared reservation tests (#14745) --- mmv1/products/compute/Reservation.yaml | 4 --- .../shared_future_reservation.tf.tmpl | 15 +++++----- .../examples/shared_reservation_basic.tf.tmpl | 15 +++++----- .../examples/shared_reservation_beta.tf.tmpl | 17 ++++++----- ..._compute_shared_reservation_update_test.go | 30 ++++++++++--------- 5 files changed, 41 insertions(+), 40 deletions(-) diff --git a/mmv1/products/compute/Reservation.yaml b/mmv1/products/compute/Reservation.yaml index acdf22082a24..f459bee899aa 100644 --- a/mmv1/products/compute/Reservation.yaml +++ b/mmv1/products/compute/Reservation.yaml @@ -82,8 +82,6 @@ examples: org_id: 'ORG_ID' billing_account: 'BILLING_ACCT' exclude_docs: true - # Resource creation race - skip_vcr: true - name: 'shared_reservation_beta' primary_resource_id: 'gce_reservation' vars: @@ -93,8 +91,6 @@ examples: org_id: 'ORG_ID' billing_account: 'BILLING_ACCT' exclude_docs: true - # Resource creation race - skip_vcr: true min_version: 'beta' parameters: - name: 'zone' diff --git a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl 
b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl index c88b3d4bcf02..44ffa16ff14a 100644 --- a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl @@ -19,12 +19,13 @@ resource "google_project" "guest_project" { deletion_policy = "DELETE" } -resource "google_organization_policy" "shared_future_reservation_org_policy" { - org_id = "{{index $.TestEnvVars "org_id"}}" - constraint = "constraints/compute.sharedReservationsOwnerProjects" - list_policy { - allow { - values = ["projects/${google_project.owner_project.number}"] +resource "google_org_policy_policy" "shared_reservation_org_policy" { + name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" + parent = "projects/${google_project.owner_project.project_id}" + + spec { + rules { + allow_all = "TRUE" } } } @@ -47,7 +48,7 @@ resource "google_compute_future_reservation" "{{$.PrimaryResourceId}}" { depends_on = [ - google_organization_policy.shared_future_reservation_org_policy, + google_org_policy_policy.shared_future_reservation_org_policy, google_project_service.compute ] } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl index 202f64c469d6..645287066b3e 100644 --- a/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl @@ -20,12 +20,13 @@ resource "google_project" "guest_project" { deletion_policy = "DELETE" } -resource "google_organization_policy" "shared_reservation_org_policy" { - org_id = "{{index $.TestEnvVars "org_id"}}" - constraint = "constraints/compute.sharedReservationsOwnerProjects" - list_policy { - allow { - values = ["projects/${google_project.owner_project.number}"] +resource "google_org_policy_policy" 
"shared_reservation_org_policy" { + name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" + parent = "projects/${google_project.owner_project.project_id}" + + spec { + rules { + allow_all = "TRUE" } } } @@ -49,5 +50,5 @@ resource "google_compute_reservation" "{{$.PrimaryResourceId}}" { project_id = google_project.guest_project.project_id } } - depends_on = [google_organization_policy.shared_reservation_org_policy,google_project_service.compute] + depends_on = [google_org_policy_policy.shared_reservation_org_policy,google_project_service.compute] } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl index de72467ac91c..1d20092bb008 100644 --- a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl @@ -23,13 +23,14 @@ resource "google_project" "guest_project" { deletion_policy = "DELETE" } -resource "google_organization_policy" "shared_reservation_org_policy" { - provider = google-beta - org_id = "{{index $.TestEnvVars "org_id"}}" - constraint = "constraints/compute.sharedReservationsOwnerProjects" - list_policy { - allow { - values = ["projects/${google_project.owner_project.number}"] +resource "google_org_policy_policy" "shared_reservation_org_policy" { + provider = google-beta + name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" + parent = "projects/${google_project.owner_project.project_id}" + + spec { + rules { + allow_all = "TRUE" } } } @@ -51,5 +52,5 @@ resource "google_compute_reservation" "{{$.PrimaryResourceId}}" { share_type = "SPECIFIC_PROJECTS" projects = [google_project.guest_project.name] } - depends_on = [google_organization_policy.shared_reservation_org_policy,google_project_service.compute] + depends_on = 
[google_org_policy_policy.shared_reservation_org_policy,google_project_service.compute] } \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go index 52b836733dd1..704f1fa067dd 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go @@ -95,12 +95,13 @@ resource "google_project" "guest_project_third" { deletion_policy = "DELETE" } -resource "google_organization_policy" "shared_reservation_org_policy" { - org_id = "%{org_id}" - constraint = "constraints/compute.sharedReservationsOwnerProjects" - list_policy { - allow { - values = ["projects/${google_project.owner_project.number}"] +resource "google_org_policy_policy" "shared_reservation_org_policy" { + name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" + parent = "projects/${google_project.owner_project.project_id}" + + spec { + rules { + allow_all = "TRUE" } } } @@ -142,7 +143,7 @@ resource "google_compute_reservation" "gce_reservation" { project_id = google_project.guest_project.project_id } } - depends_on = [google_organization_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] + depends_on = [google_org_policy_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] } `, context) } @@ -187,12 +188,13 @@ resource "google_project" "guest_project_third" { deletion_policy = "DELETE" } -resource "google_organization_policy" "shared_reservation_org_policy" { - org_id = "%{org_id}" - constraint = 
"constraints/compute.sharedReservationsOwnerProjects" - list_policy { - allow { - values = ["projects/${google_project.owner_project.number}"] +resource "google_org_policy_policy" "shared_reservation_org_policy" { + name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" + parent = "projects/${google_project.owner_project.project_id}" + + spec { + rules { + allow_all = "TRUE" } } } @@ -242,7 +244,7 @@ resource "google_compute_reservation" "gce_reservation" { project_id = google_project.guest_project_third.project_id } } - depends_on = [google_organization_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] + depends_on = [google_org_policy_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] } `, context) } From 6e3e5e72779e156dea77b4ab04519a03827604cb Mon Sep 17 00:00:00 2001 From: Harshal Neelkamal <17376513+HarshalNeelkamal@users.noreply.github.com> Date: Tue, 5 Aug 2025 10:52:25 -0700 Subject: [PATCH 688/884] support updating user_managed_keys_config on GKE Clusters (#14695) --- .../resource_container_cluster.go.tmpl | 21 ++- .../resource_container_cluster_test.go.tmpl | 135 +++++++++++++++++- 2 files changed, 154 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 0fc730f721d0..7abd84c1a4d8 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2437,7 +2437,6 @@ func ResourceContainerCluster() *schema.Resource { "user_managed_keys_config": { Type: schema.TypeList, Optional: true, - ForceNew: true, MaxItems: 1, 
Description: `The custom keys configuration of the cluster.`, Elem: &schema.Resource{ @@ -2445,21 +2444,25 @@ func ResourceContainerCluster() *schema.Resource { "cluster_ca": { Type: schema.TypeString, Optional: true, + ForceNew: true, Description: `The Certificate Authority Service caPool to use for the cluster CA in this cluster.`, }, "etcd_api_ca": { Type: schema.TypeString, Optional: true, + ForceNew: true, Description: `The Certificate Authority Service caPool to use for the etcd API CA in this cluster.`, }, "etcd_peer_ca": { Type: schema.TypeString, Optional: true, + ForceNew: true, Description: `The Certificate Authority Service caPool to use for the etcd peer CA in this cluster.`, }, "aggregation_ca": { Type: schema.TypeString, Optional: true, + ForceNew: true, Description: `The Certificate Authority Service caPool to use for the aggreation CA in this cluster.`, }, "service_account_signing_keys": { @@ -5179,6 +5182,22 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s's WorkloadALTSConfig has been updated", d.Id()) } {{- end }} + + if d.HasChange("user_managed_keys_config") { + req := &container.UpdateClusterRequest{ + Update: &container.ClusterUpdate{ + DesiredUserManagedKeysConfig: expandUserManagedKeysConfig(d.Get("user_managed_keys_config")), + }, + } + + updateF := updateFunc(req, "updating user managed keys config") + if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { + return err + } + + log.Printf("[INFO] GKE cluster %s user managed keys config has been updated to %#v", d.Id(), req.Update.DesiredUserManagedKeysConfig) + } + return resourceContainerClusterRead(d, meta) } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index d5b1b9772fa6..d10d4db1672a 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -5990,7 +5990,6 @@ func TestAccContainerCluster_WithCPAFeatures(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - // We are only supporting CPA features on create for now. Config: testAccContainerCluster_EnableCPAFeatures(context), }, { @@ -14274,6 +14273,140 @@ resource "google_container_cluster" "primary" { `, name, networkName, subnetworkName, mode) } +func TestAccContainerCluster_WithCPAFeaturesUpdate(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + clusterName := fmt.Sprintf("tf-test-cluster-%s", suffix) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + // Bootstrap KMS keys and needed IAM role. + diskKey := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "control-plane-disk-encryption") + signingKey1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ASYMMETRIC_SIGN", "us-central1", "rs256-service-account-signing-1") + signingKey2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ASYMMETRIC_SIGN", "us-central1", "rs256-service-account-signing-2") + backupKey := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "etcd-backups") + + // Here, we are granting the container engine service agent permissions on + // *ALL* Cloud KMS keys in the project. A more realistic usage would be to + // grant the service agent the necessary roles only on the individual keys + // we have created. 
+ acctest.BootstrapIamMembers(t, []acctest.IamMember{ + { + Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", + Role: "roles/container.cloudKmsKeyUser", + }, + { + Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", + Role: "roles/privateca.certificateManager", + }, + { + Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + }, + { + Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypterViaDelegation", + }, + }) + + // Find an active cryptoKeyVersion on the signing key. + var signingCryptoKeyVersion1 *cloudkms.CryptoKeyVersion + for _, ckv := range signingKey1.CryptoKeyVersions { + if ckv.State == "ENABLED" && ckv.Algorithm == "RSA_SIGN_PKCS1_4096_SHA256" { + signingCryptoKeyVersion1 = ckv + } + } + if signingCryptoKeyVersion1 == nil { + t.Fatal("Didn't find an appropriate cryptoKeyVersion for signingCryptoKeyVersion1 to use as the service account signing key") + } + + var signingCryptoKeyVersion2 *cloudkms.CryptoKeyVersion + for _, ckv := range signingKey2.CryptoKeyVersions { + if ckv.State == "ENABLED" && ckv.Algorithm == "RSA_SIGN_PKCS1_4096_SHA256" { + signingCryptoKeyVersion2 = ckv + } + } + if signingCryptoKeyVersion2 == nil { + t.Fatal("Didn't find an appropriate cryptoKeyVersion for signingCryptoKeyVersion2 to use as the service account signing key") + } + + context := map[string]interface{}{ + "resource_name": clusterName, + "networkName": networkName, + "subnetworkName": subnetworkName, + "disk_key": diskKey.CryptoKey.Name, + "backup_key": backupKey.CryptoKey.Name, + "signing_cryptokeyversion": signingCryptoKeyVersion1.Name, + "random_suffix": suffix, + } + + updateContext:= map[string]interface{}{ + "resource_name": clusterName, + "networkName": networkName, + 
"subnetworkName": subnetworkName, + "disk_key": diskKey.CryptoKey.Name, + "backup_key": backupKey.CryptoKey.Name, + "signing_cryptokeyversion": signingCryptoKeyVersion2.Name, + "random_suffix": suffix, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_EnableCPAFeaturesWithSAkeys(context), + }, + { + ResourceName: "google_container_cluster.with_cpa_features", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_EnableCPAFeaturesWithSAkeys(updateContext), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_container_cluster.with_cpa_features", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_container_cluster.with_cpa_features", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerCluster_EnableCPAFeaturesWithSAkeys(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_container_cluster" "with_cpa_features" { + name = "%{resource_name}" + location = "us-central1-a" + initial_node_count = 1 + release_channel { + channel = "RAPID" + } + user_managed_keys_config { + service_account_signing_keys = [ + "%{signing_cryptokeyversion}", + ] + service_account_verification_keys = [ + "%{signing_cryptokeyversion}", + ] + } + deletion_protection = false + network = "%{networkName}" + subnetwork = "%{subnetworkName}" + } + `, context) +} + func TestAccContainerCluster_RbacBindingConfig(t *testing.T) { t.Parallel() From a2a4c66df74f57c2451b644b391a127352fc367f Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 5 Aug 2025 
10:53:16 -0700 Subject: [PATCH 689/884] Remove dynamic block from pubsub tests (#14743) --- .../pubsub_subscription_multiple_smts.tf.tmpl | 44 ++++++++----------- .../pubsub_topic_multiple_smts.tf.tmpl | 42 +++++++----------- 2 files changed, 35 insertions(+), 51 deletions(-) diff --git a/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl b/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl index b89901657f12..3528bd015fc7 100644 --- a/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl +++ b/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl @@ -2,9 +2,12 @@ resource "google_pubsub_topic" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "topic_name"}}" } -locals { - smts = [ - { +resource "google_pubsub_subscription" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "subscription_name"}}" + topic = google_pubsub_topic.{{$.PrimaryResourceId}}.id + + message_transforms { + javascript_udf { function_name = "redactSSN" code = < Date: Tue, 5 Aug 2025 13:54:22 -0700 Subject: [PATCH 690/884] fix: access_contetx_manager_access_policy scopes field should be immutable (#14742) Signed-off-by: James Alseth --- mmv1/products/accesscontextmanager/AccessPolicy.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/accesscontextmanager/AccessPolicy.yaml b/mmv1/products/accesscontextmanager/AccessPolicy.yaml index 96e5f22c6a4e..5544e8ccabd9 100644 --- a/mmv1/products/accesscontextmanager/AccessPolicy.yaml +++ b/mmv1/products/accesscontextmanager/AccessPolicy.yaml @@ -90,6 +90,7 @@ parameters: description: | Folder or project on which this policy is applicable. 
Format: 'folders/{{folder_id}}' or 'projects/{{project_number}}' + immutable: true item_type: type: String max_size: 1 From 04202619f315b842411dfba9c9a2626c5b187050 Mon Sep 17 00:00:00 2001 From: Yuuki Takahashi <20282867+yktakaha4@users.noreply.github.com> Date: Wed, 6 Aug 2025 06:13:30 +0900 Subject: [PATCH 691/884] add cloudfront domain to storage transfer job (#14409) --- .../resource_storage_transfer_job.go.tmpl | 15 ++++++++++++++- .../resource_storage_transfer_job_meta.yaml.tmpl | 1 + .../docs/r/storage_transfer_job.html.markdown | 2 ++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl index f283d995173d..0d5c7ae687f1 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl @@ -750,6 +750,11 @@ func awsS3DataSchema() *schema.Resource { Optional: true, Description: `Egress bytes over a Google-managed private network. This network is shared between other users of Storage Transfer Service.`, }, + "cloudfront_domain": { + Type: schema.TypeString, + Optional: true, + Description: `The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: https://{id}.cloudfront.net or any valid custom domain. 
Must begin with https://.`, + }, }, } } @@ -1331,6 +1336,10 @@ func expandAwsS3Data(awsS3Datas []interface{}) *storagetransfer.AwsS3Data { result.ManagedPrivateNetwork = v.(bool) } + if v, ok := awsS3Data["cloudfront_domain"]; ok { + result.CloudfrontDomain = v.(string) + } + return result } @@ -1347,7 +1356,11 @@ func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceDa if awsS3Data.ManagedPrivateNetwork { data["managed_private_network"] = awsS3Data.ManagedPrivateNetwork } - + + if awsS3Data.CloudfrontDomain != "" { + data["cloudfront_domain"] = awsS3Data.CloudfrontDomain + } + return []map[string]interface{}{data} } diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl index 900666301705..8b8233b517a9 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl @@ -45,6 +45,7 @@ fields: - field: 'transfer_spec.aws_s3_data_source.aws_access_key.access_key_id' - field: 'transfer_spec.aws_s3_data_source.aws_access_key.secret_access_key' - field: 'transfer_spec.aws_s3_data_source.bucket_name' + - field: 'transfer_spec.aws_s3_data_source.cloudfront_domain' - field: 'transfer_spec.aws_s3_data_source.managed_private_network' - field: 'transfer_spec.aws_s3_data_source.path' - field: 'transfer_spec.aws_s3_data_source.role_arn' diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index 3b51006a47ee..ea65dc1eeac5 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -262,6 +262,8 @@ A duration in seconds with up to 
nine fractional digits, terminated by 's'. Exam * `managed_private_network` - (Optional) Egress bytes over a Google-managed private network. This network is shared between other users of Storage Transfer Service. +* `cloudfront_domain` - (Optional) The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: `https://{id}.cloudfront.net` or any valid custom domain. Must begin with `https://`. + The `aws_access_key` block supports: * `access_key_id` - (Required) AWS Key ID. From b640ff5422203db3e98596611f0fde3c2d976cce Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Tue, 5 Aug 2025 16:32:23 -0700 Subject: [PATCH 692/884] Fix composer tests (#14749) --- .../composer/UserWorkloadsConfigMap.yaml | 1 + ...er_user_workloads_config_map_basic.tf.tmpl | 17 ++++++++++++ .../resource_composer_environment_test.go | 27 ++++++++++++++----- 3 files changed, 39 insertions(+), 6 deletions(-) diff --git a/mmv1/products/composer/UserWorkloadsConfigMap.yaml b/mmv1/products/composer/UserWorkloadsConfigMap.yaml index 4efc7c9ec7fe..19d0a5564be0 100644 --- a/mmv1/products/composer/UserWorkloadsConfigMap.yaml +++ b/mmv1/products/composer/UserWorkloadsConfigMap.yaml @@ -32,6 +32,7 @@ examples: - name: 'composer_user_workloads_config_map_basic' primary_resource_id: 'config_map' vars: + service_account_name: 'test-sa' environment_name: 'test-environment' config_map_name: 'test-config-map' parameters: diff --git a/mmv1/templates/terraform/examples/composer_user_workloads_config_map_basic.tf.tmpl b/mmv1/templates/terraform/examples/composer_user_workloads_config_map_basic.tf.tmpl index 5b53968aac54..4084cc474fd6 100644 --- a/mmv1/templates/terraform/examples/composer_user_workloads_config_map_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/composer_user_workloads_config_map_basic.tf.tmpl @@ 
-1,3 +1,16 @@ +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "{{index $.Vars "service_account_name"}}" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_composer_environment" "environment" { name = "{{index $.Vars "environment_name"}}" region = "us-central1" @@ -5,7 +18,11 @@ resource "google_composer_environment" "environment" { software_config { image_version = "composer-3-airflow-2" } + node_config { + service_account = google_service_account.test.name + } } + depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go index d392131b742d..932b63a01db4 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go @@ -662,13 +662,14 @@ func TestAccComposerEnvironment_customBucket(t *testing.T) { envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" + serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), + Config: 
testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork, serviceAccount), }, { ResourceName: "google_composer_environment.test", @@ -681,7 +682,7 @@ func TestAccComposerEnvironment_customBucket(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), + Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork, serviceAccount), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -1123,8 +1124,21 @@ func TestAccComposerEnvironmentComposer3_usesUnsupportedField_expectError(t *tes }) } -func testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork string) string { +func testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork, serviceAccount string) string { return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_service_account" "test" { + account_id = "%s" + display_name = "Test Service Account for Composer Environment" +} + +resource "google_project_iam_member" "composer-worker" { + project = data.google_project.project.project_id + role = "roles/composer.worker" + member = "serviceAccount:${google_service_account.test.email}" +} + resource "google_storage_bucket" "test" { name = "%s" location = "us-central1" @@ -1138,6 +1152,7 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link + service_account = google_service_account.test.name ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" } @@ -1149,6 +1164,7 @@ resource "google_composer_environment" "test" { storage_config { bucket = google_storage_bucket.test.name } + depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -1164,7 +1180,7 @@ resource 
"google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, bucketName, envName, network, subnetwork) +`, serviceAccount, bucketName, envName, network, subnetwork) } func testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount string) string { @@ -1300,7 +1316,6 @@ resource "google_kms_crypto_key_iam_member" "iam" { member = "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com" } resource "google_composer_environment" "test" { - depends_on = [google_kms_crypto_key_iam_member.iam] name = "%s" region = "us-central1" config { @@ -1318,7 +1333,7 @@ resource "google_composer_environment" "test" { kms_key_name = "%s" } } - depends_on = [google_project_iam_member.composer-worker] + depends_on = [google_project_iam_member.composer-worker, google_kms_crypto_key_iam_member.iam] } // use a separate network to avoid conflicts with other tests running in parallel // that use the default network/subnet From 3fca0031989c22ba5ed9fff14fe86feb15845cb4 Mon Sep 17 00:00:00 2001 From: Darshan Mehta <8850770+darshanmehta17@users.noreply.github.com> Date: Tue, 5 Aug 2025 16:43:00 -0700 Subject: [PATCH 693/884] feat: (vertexai) add rag_engine_config resource support (#14709) --- mmv1/products/vertexai/RagEngineConfig.yaml | 110 ++++++++++++++++++ .../vertex_ai_rag_engine_config.go.tmpl | 44 +++++++ .../vertex_ai_rag_engine_config_basic.tf.tmpl | 7 ++ ...vertex_ai_rag_engine_config_scaled.tf.tmpl | 7 ++ ...ai_rag_engine_config_unprovisioned.tf.tmpl | 7 ++ ...source_vertex_ai_rag_engine_config_test.go | 51 ++++++++ 6 files changed, 226 insertions(+) create mode 100644 mmv1/products/vertexai/RagEngineConfig.yaml create mode 100644 mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl create mode 100644 mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_basic.tf.tmpl create mode 100644 
mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_scaled.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_unprovisioned.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go diff --git a/mmv1/products/vertexai/RagEngineConfig.yaml b/mmv1/products/vertexai/RagEngineConfig.yaml new file mode 100644 index 000000000000..a6be4dbee80d --- /dev/null +++ b/mmv1/products/vertexai/RagEngineConfig.yaml @@ -0,0 +1,110 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'RagEngineConfig' +description: | + Vertex AI RAG Engine lets you scale your RagManagedDb instance based on your usage and performance requirements using a choice of two tiers, and optionally, lets you delete your Vertex AI RAG Engine data using a third tier. The tier is a project-level setting that's available in the RagEngineConfig resource that impacts all RAG corpora using RagManagedDb. The following tiers are available in RagEngineConfig: Basic, Scaled and Unprovisioned. 
+references: + guides: + 'Official Documentation': 'https://cloud.google.com/vertex-ai/generative-ai/docs/rag-engine/understanding-ragmanageddb' + api: 'https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/RagEngineConfig' +docs: +id_format: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +base_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +self_link: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +create_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +# This is a singleton resource that is already created, so create +# is really an update, and therefore should be PATCHed. +create_verb: 'PATCH' +update_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +update_verb: 'PATCH' +delete_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +delete_verb: 'PATCH' +import_format: + - 'projects/{{project}}/locations/{{region}}/ragEngineConfig' +update_mask: false +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: true +async: + actions: ['create', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: true +custom_code: + custom_delete: "templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl" +examples: + - name: 'vertex_ai_rag_engine_config_basic' + exclude_test: true + - name: 'vertex_ai_rag_engine_config_scaled' + exclude_test: true + - name: 'vertex_ai_rag_engine_config_unprovisioned' + exclude_test: true +parameters: + - name: 'region' + type: String + description: The region of the RagEngineConfig. eg us-central1 + url_param_only: true + immutable: true + default_from_api: true +properties: + - name: 'ragManagedDbConfig' + type: NestedObject + description: | + Required. The config of the RagManagedDb used by RagEngine. 
+ required: true + properties: + - name: 'scaled' + type: NestedObject + exactly_one_of: + - 'rag_managed_db_config.0.scaled' + - 'rag_managed_db_config.0.basic' + - 'rag_managed_db_config.0.unprovisioned' + description: | + Scaled tier offers production grade performance along with autoscaling functionality. It is suitable for customers with large amounts of data or performance sensitive workloads. + allow_empty_object: true + send_empty_value: true + properties: [] + - name: 'basic' + type: NestedObject + exactly_one_of: + - 'rag_managed_db_config.0.scaled' + - 'rag_managed_db_config.0.basic' + - 'rag_managed_db_config.0.unprovisioned' + description: | + Basic tier is a cost-effective and low compute tier suitable for the following cases: Experimenting with RagManagedDb, Small data size, Latency insensitive workload, Only using RAG Engine with external vector DBs. + NOTE: This is the default tier if not explicitly chosen. + allow_empty_object: true + send_empty_value: true + properties: [] + - name: 'unprovisioned' + type: NestedObject + exactly_one_of: + - 'rag_managed_db_config.0.scaled' + - 'rag_managed_db_config.0.basic' + - 'rag_managed_db_config.0.unprovisioned' + description: | + Disables the RAG Engine service and deletes all your data held within this service. This will halt the billing of the service. + NOTE: Once deleted the data cannot be recovered. To start using RAG Engine again, you will need to update the tier by calling the UpdateRagEngineConfig API. + allow_empty_object: true + send_empty_value: true + properties: [] + - name: 'name' + type: String + description: The resource name of the Dataset. This value is set by Google. 
+ output: true diff --git a/mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl b/mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl new file mode 100644 index 000000000000..a26444c3cd78 --- /dev/null +++ b/mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl @@ -0,0 +1,44 @@ +log.Printf("[DEBUG] Beginning custom_delete for Vertex AI RagEngineConfig") + +project, err := tpgresource.GetProject(d, config) +if err != nil { + return fmt.Errorf("Error fetching project for RagEngineConfig: %s", err) +} + + +// Update RagEngineConfig tier to Unprovisioned +deleteUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}region{{"}}"}}/ragEngineConfig") +if err != nil { + return err +} +deleteHeaders := make(http.Header) +deleteBody := map[string]interface{}{ + "ragManagedDbConfig": map[string]interface{}{ + "unprovisioned": map[string]interface{}{}, + }, +} +log.Printf("[DEBUG] Updating RagEngineConfig tier to Unprovisioned") +deleteRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "PATCH", + Project: project, + RawURL: deleteUrl, + UserAgent: userAgent, + Body: deleteBody, + Timeout: d.Timeout(schema.TimeoutDelete), + Headers: deleteHeaders, +}) +if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "RagEngineConfig") +} + +err = VertexAIOperationWaitTime( + config, deleteRes, project, "Updating RagEngineConfig tier to Unprovisioned", userAgent, + d.Timeout(schema.TimeoutDelete)) + +if err != nil { + return err +} + +log.Printf("[DEBUG] Finished Updating RagEngineConfig tier to Unprovisioned: %#v", deleteRes) +return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_basic.tf.tmpl new file mode 100644 index 000000000000..f83fbaa91fb6 --- 
/dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_basic.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_vertex_ai_rag_engine_config" "{{$.PrimaryResourceId}}" { + region = "us-central1" + rag_managed_db_config { + basic { + } + } +} diff --git a/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_scaled.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_scaled.tf.tmpl new file mode 100644 index 000000000000..3d0abc1db0e6 --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_scaled.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_vertex_ai_rag_engine_config" "{{$.PrimaryResourceId}}" { + region = "us-central1" + rag_managed_db_config { + scaled { + } + } +} diff --git a/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_unprovisioned.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_unprovisioned.tf.tmpl new file mode 100644 index 000000000000..90fa5109e42e --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_unprovisioned.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_vertex_ai_rag_engine_config" "{{$.PrimaryResourceId}}" { + region = "us-central1" + rag_managed_db_config { + unprovisioned { + } + } +} diff --git a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go new file mode 100644 index 000000000000..20f5f798ddc9 --- /dev/null +++ b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go @@ -0,0 +1,51 @@ +package vertexai_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccVertexAIRagEngineConfig_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + 
"project": envvar.GetTestProjectFromEnv(), + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIRagEngineConfig_basic(context), + }, + { + Config: testAccVertexAIRagEngineConfig_unprovisioned(context), + }, + }, + }) +} + +func testAccVertexAIRagEngineConfig_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_rag_engine_config" "test" { + region = "us-central1" + rag_managed_db_config { + basic {} + } +} +`, context) +} + +func testAccVertexAIRagEngineConfig_unprovisioned(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_rag_engine_config" "test" { + region = "us-central1" + rag_managed_db_config { + unprovisioned {} + } +} +`, context) +} From 80305a922bb880ed74453edd86666053f79f0c7e Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 5 Aug 2025 20:09:28 -0400 Subject: [PATCH 694/884] moving teamcitytestscripts again so they are provider generated (#14748) --- .../{ => terraform/scripts}/teamcitytestscripts/main.go | 0 .../{ => terraform/scripts}/teamcitytestscripts/teamcity.go | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename mmv1/third_party/{ => terraform/scripts}/teamcitytestscripts/main.go (100%) rename mmv1/third_party/{ => terraform/scripts}/teamcitytestscripts/teamcity.go (100%) diff --git a/mmv1/third_party/teamcitytestscripts/main.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go similarity index 100% rename from mmv1/third_party/teamcitytestscripts/main.go rename to mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go diff --git a/mmv1/third_party/teamcitytestscripts/teamcity.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go similarity index 100% rename from mmv1/third_party/teamcitytestscripts/teamcity.go rename to 
mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go From b952e715f5b712e7946de72c75d54b8e903554d8 Mon Sep 17 00:00:00 2001 From: Luca Prete Date: Wed, 6 Aug 2025 22:10:14 +0200 Subject: [PATCH 695/884] google_compute_service_attachment: allow use of global target forwarding rules (#14686) Co-authored-by: Luca Prete --- mmv1/products/compute/ServiceAttachment.yaml | 4 + .../service_attachment_target_service.go.tmpl | 8 -- ...ervice_attachment_cross_region_ilb.tf.tmpl | 77 +++++++++++++++++++ 3 files changed, 81 insertions(+), 8 deletions(-) create mode 100644 mmv1/templates/terraform/examples/service_attachment_cross_region_ilb.tf.tmpl diff --git a/mmv1/products/compute/ServiceAttachment.yaml b/mmv1/products/compute/ServiceAttachment.yaml index 9ebe513041ed..f89c9250f4ee 100644 --- a/mmv1/products/compute/ServiceAttachment.yaml +++ b/mmv1/products/compute/ServiceAttachment.yaml @@ -95,6 +95,10 @@ examples: producer_forwarding_rule_name: 'producer-forwarding-rule' consumer_address_name: 'psc-ilb-consumer-address' consumer_forwarding_rule_name: 'psc-ilb-consumer-forwarding-rule' + - name: 'service_attachment_cross_region_ilb' + primary_resource_id: 'psc_ilb_service_attachment' + vars: + name: 'sa' parameters: - name: 'region' type: ResourceRef diff --git a/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl b/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl index 2d22dfdd107a..5679a4e6e2fb 100644 --- a/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl @@ -4,13 +4,5 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T return nil, fmt.Errorf("invalid value for target_service") } - resourceKind := resource[len(resource)-2] - resourceBound := resource[len(resource)-4] - - _, err := tpgresource.ParseRegionalFieldValue(resourceKind, v.(string), 
"project", resourceBound, "zone", d, config, true) - if err != nil { - return nil, fmt.Errorf("invalid value for target_service: %w", err) - } - return v, nil } diff --git a/mmv1/templates/terraform/examples/service_attachment_cross_region_ilb.tf.tmpl b/mmv1/templates/terraform/examples/service_attachment_cross_region_ilb.tf.tmpl new file mode 100644 index 000000000000..4df295d5738e --- /dev/null +++ b/mmv1/templates/terraform/examples/service_attachment_cross_region_ilb.tf.tmpl @@ -0,0 +1,77 @@ +resource "google_compute_service_attachment" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "name"}}" + region = "us-central1" + description = "A service attachment configured with Terraform" + connection_preference = "ACCEPT_AUTOMATIC" + enable_proxy_protocol = false + nat_subnets = [google_compute_subnetwork.subnetwork_psc.id] + target_service = google_compute_global_forwarding_rule.forwarding_rule.id +} + +resource "google_compute_global_forwarding_rule" "forwarding_rule" { + name = "{{index $.Vars "name"}}" + target = google_compute_target_http_proxy.http_proxy.id + network = google_compute_network.network.id + subnetwork = google_compute_subnetwork.subnetwork.id + port_range = "80" + load_balancing_scheme = "INTERNAL_MANAGED" + + depends_on = [google_compute_subnetwork.subnetwork_proxy] +} + +resource "google_compute_target_http_proxy" "http_proxy" { + name = "{{index $.Vars "name"}}" + description = "a description" + url_map = google_compute_url_map.url_map.id +} + +resource "google_compute_url_map" "url_map" { + name = "{{index $.Vars "name"}}" + description = "Url map." 
+ default_service = google_compute_backend_service.backend_service.id +} + +resource "google_compute_backend_service" "backend_service" { + name = "{{index $.Vars "name"}}" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_health_check.health_check.id] +} + +resource "google_compute_health_check" "health_check" { + name = "{{index $.Vars "name"}}" + check_interval_sec = 1 + timeout_sec = 1 + + tcp_health_check { + port = "80" + } +} + +resource "google_compute_subnetwork" "subnetwork_psc" { + name = "{{index $.Vars "name"}}-psc" + region = "us-central1" + network = google_compute_network.network.id + purpose = "PRIVATE_SERVICE_CONNECT" + ip_cidr_range = "10.1.0.0/16" +} + +resource "google_compute_subnetwork" "subnetwork_proxy" { + name = "{{index $.Vars "name"}}-proxy" + region = "us-central1" + network = google_compute_network.network.id + purpose = "GLOBAL_MANAGED_PROXY" + role = "ACTIVE" + ip_cidr_range = "10.2.0.0/16" +} + +resource "google_compute_subnetwork" "subnetwork" { + name = "{{index $.Vars "name"}}" + region = "us-central1" + network = google_compute_network.network.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_network" "network" { + name = "{{index $.Vars "name"}}" + auto_create_subnetworks = false +} From 24be22a2a5c44b111e1ebb4dcaa66e65cbb964ff Mon Sep 17 00:00:00 2001 From: kgala2 Date: Wed, 6 Aug 2025 21:37:58 +0000 Subject: [PATCH 696/884] feat: support PSC outbound's network attachment field (#14766) --- .../resource_sql_database_instance.go.tmpl | 7 + ...esource_sql_database_instance_test.go.tmpl | 175 ++++++++++++++++++ .../r/sql_database_instance.html.markdown | 27 +++ 3 files changed, 209 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 4dbd333cb162..7dbeff3f8c78 100644 --- 
a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -532,6 +532,11 @@ is set to true. Defaults to ZONAL.`, Set: schema.HashString, Description: `List of consumer projects that are allow-listed for PSC connections to this instance. This instance can be connected to with PSC from any network in these projects. Each consumer project in this list may be represented by a project number (numeric) or by a project id (alphanumeric).`, }, + "network_attachment_uri": { + Type: schema.TypeString, + Optional: true, + Description: `Name of network attachment resource used to authorize a producer service to connect a PSC interface to the consumer's VPC. For example: "projects/myProject/regions/myRegion/networkAttachments/myNetworkAttachment". This is required to enable outbound connection on a PSC instance.`, + }, "psc_auto_connections": { Type: schema.TypeList, Optional: true, @@ -1601,6 +1606,7 @@ func expandPscConfig(configured []interface{}) *sqladmin.PscConfig { return &sqladmin.PscConfig{ PscEnabled: _entry["psc_enabled"].(bool), AllowedConsumerProjects: tpgresource.ConvertStringArr(_entry["allowed_consumer_projects"].(*schema.Set).List()), + NetworkAttachmentUri: _entry["network_attachment_uri"].(string), PscAutoConnections: expandPscAutoConnectionConfig(_entry["psc_auto_connections"].([]interface{})), } } @@ -2650,6 +2656,7 @@ func flattenPscConfigs(pscConfig *sqladmin.PscConfig) interface{} { data := map[string]interface{}{ "psc_enabled": pscConfig.PscEnabled, "allowed_consumer_projects": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(pscConfig.AllowedConsumerProjects)), + "network_attachment_uri": pscConfig.NetworkAttachmentUri, "psc_auto_connections": flattenPscAutoConnections(pscConfig.PscAutoConnections), } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 43da7a87a8ad..0e6bec16cf76 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -1183,6 +1183,86 @@ func TestAccSqlDatabaseInstance_withPSCEnabled_withIpV4Enabled(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUri_thenRemoveNetworkAttachment(t *testing.T) { + t.Parallel() + + random_suffix := acctest.RandString(t, 10) + instanceName := "tf-test-" + random_suffix + projectId := envvar.GetTestProjectFromEnv() + region := "us-central1" + networkNameStr := "tf-test-cloud-sql-network-" + random_suffix + subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix + networkAttachmentNameStr := "tf-test-cloud-sql-update-na-" + random_suffix + networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) + subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) + networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) + networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) + + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), + Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), + ImportStateVerifyIgnore: 
[]string{"deletion_protection"}, + }, + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), + Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, networkAttachmentUri)), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), + Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUriOnCreate(t *testing.T) { + t.Parallel() + + random_suffix := acctest.RandString(t, 10) + instanceName := "tf-test-" + random_suffix + projectId := envvar.GetTestProjectFromEnv() + region := "us-central1" + networkNameStr := "tf-test-cloud-sql-network-" + random_suffix + subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix + networkAttachmentNameStr := "tf-test-cloud-sql-update-na-" + random_suffix + networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) + subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) + networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) + networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) + + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), + ExpectError: regexp.MustCompile(`.*Network attachment used for Private Service Connect interfaces can not be assigned with instance creation.*`), + }, + }, + }) +} + func TestAccSqlDatabaseInstance_withPrivateNetwork_withAllocatedIpRange(t *testing.T) { t.Parallel() @@ -4949,6 +5029,49 @@ func verifyPscAutoConnectionsOperation(resourceName string, isPscConfigExpected } } +func verifyPscNetorkAttachmentOperation(resourceName string, isPscConfigExpected bool, expectedPscEnabled bool, expectedNetworkAttachmentUri string ) func(*terraform.State) error { + return func(s *terraform.State) error { + resource, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Can't find %s in state", resourceName) + } + + resourceAttributes := resource.Primary.Attributes + _, ok = resourceAttributes["settings.0.ip_configuration.#"] + if !ok { + return fmt.Errorf("settings.0.ip_configuration.# block is not present in state for %s", resourceName) + } + + if isPscConfigExpected { + _, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.#"] + if !ok { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config property is not present or set in state of %s", resourceName) + } + + pscEnabledStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.psc_enabled"] + pscEnabled, err := strconv.ParseBool(pscEnabledStr) + if err != nil || pscEnabled != expectedPscEnabled { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.psc_enabled property value is not set as expected in state of %s, expected %v, actual %v", resourceName, expectedPscEnabled, pscEnabled) + } + + networkAttachmentUriStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.network_attachment_uri"] + if !ok { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not present in state for %s", 
resourceName) + } + + if networkAttachmentUriStr != expectedNetworkAttachmentUri && len(networkAttachmentUriStr) == 0 { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not set in state for %s", resourceName) + } + + if networkAttachmentUriStr != expectedNetworkAttachmentUri { + return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block does not match the expected value for %s", resourceName) + } + } + + return nil + } +} + func testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "instance" { @@ -5009,6 +5132,32 @@ resource "google_sql_database_instance" "instance" { `, instanceName) } +func testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_8_0" + deletion_protection = false + settings { + tier = "db-g1-small" + ip_configuration { + psc_config { + psc_enabled = true + network_attachment_uri = "" + } + ipv4_enabled = false + } + backup_configuration { + enabled = true + binary_log_enabled = true + } + availability_type = "REGIONAL" + } +} +`, instanceName) +} + func testAccSqlDatabaseInstance_withPSCEnabled_withPscAutoConnections(instanceName string, projectId string, networkName string) string { return fmt.Sprintf(` data "google_compute_network" "testnetwork" { @@ -5042,6 +5191,32 @@ resource "google_sql_database_instance" "instance" { `, networkName, instanceName, projectId, networkName, projectId) } +func testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName string, networkAttachmentUri string) string { + return fmt.Sprintf(` + +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_8_0" + deletion_protection = false + settings { + 
tier = "db-g1-small" + ip_configuration { + psc_config { + psc_enabled = true + network_attachment_uri = "%s" + } + ipv4_enabled = false + } + backup_configuration { + enabled = true + binary_log_enabled = true + } + availability_type = "REGIONAL" + } +}`, instanceName, networkAttachmentUri) +} + func testAccSqlDatabaseInstance_withPrivateNetwork_withoutAllocatedIpRange(databaseName, networkName string, specifyPrivatePathOption bool, enablePrivatePath bool) string { privatePathOption := "" if specifyPrivatePathOption { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index af89a6dad2e9..291786bfe74c 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -244,6 +244,31 @@ resource "google_sql_database_instance" "main" { } ``` +### Cloud SQL Instance with PSC outbound + +```hcl +resource "google_sql_database_instance" "main" { + name = "psc-enabled-main-instance" + database_version = "MYSQL_8_0" + settings { + tier = "db-f1-micro" + ip_configuration { + psc_config { + psc_enabled = true + allowed_consumer_projects = ["allowed-consumer-project-name"] + network_attachment_uri = "network-attachment-uri" + } + ipv4_enabled = false + } + backup_configuration { + enabled = true + binary_log_enabled = true + } + availability_type = "REGIONAL" + } +} +``` + ## Argument Reference The following arguments are supported: @@ -465,6 +490,8 @@ The optional `settings.ip_configuration.psc_config` sublist supports: * `consumer_network` - "The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. For example, `projects/project1/global/networks/network1`. The consumer host project of this network might be different from the consumer service project." 
+* `network_attachment_uri` - (Optional) Network Attachment URI in the format `projects/project1/regions/region1/networkAttachments/networkAttachment1` to enable outbound connectivity on PSC instance. + * `consumer_service_project_id` - (Optional) The project ID of consumer service project of this consumer endpoint. The optional `settings.location_preference` subblock supports: From 96d24cd82ac7937404534872fd7ec28b039b3e06 Mon Sep 17 00:00:00 2001 From: zhihaos Date: Wed, 6 Aug 2025 17:42:53 -0400 Subject: [PATCH 697/884] Adding initial Playbook support for Dialogflow CX (#14446) --- mmv1/products/dialogflowcx/Playbook.yaml | 166 ++++++++++++++++++ .../dialogflowcx_playbook.go.tmpl | 18 ++ .../dialogflowcx_playbook_basic.tf.tmpl | 44 +++++ .../dialogflowcx_playbook_fulfillment.tf.tmpl | 86 +++++++++ 4 files changed, 314 insertions(+) create mode 100644 mmv1/products/dialogflowcx/Playbook.yaml create mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_playbook_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflowcx_playbook_fulfillment.tf.tmpl diff --git a/mmv1/products/dialogflowcx/Playbook.yaml b/mmv1/products/dialogflowcx/Playbook.yaml new file mode 100644 index 000000000000..6912742b2203 --- /dev/null +++ b/mmv1/products/dialogflowcx/Playbook.yaml @@ -0,0 +1,166 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: 'Playbook' +description: | + Playbook is the basic building block to instruct the LLM how to execute a certain task. +references: + guides: + 'Official CX Documentation': 'https://cloud.google.com/dialogflow/cx/docs' + api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.playbooks' + +base_url: '{{parent}}/playbooks' +self_link: '{{parent}}/playbooks/{{name}}' + +create_url: '{{parent}}/playbooks' + +update_verb: 'PATCH' +update_mask: true + +custom_code: + pre_create: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + pre_delete: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' + custom_import: 'templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl' + +examples: + - name: "dialogflowcx_playbook_basic" + primary_resource_id: "my-playbook" + vars: + agent_name: 'dialogflowcx-agent-basic' + - name: "dialogflowcx_playbook_fulfillment" + primary_resource_id: "my-playbook" + vars: + agent_name: 'dialogflowcx-agent' + bucket_name: 'dialogflowcx-bucket' + +parameters: + - name: 'parent' + type: String + description: | + The agent to create a Playbook for. + Format: projects//locations//agents/. + url_param_only: true + immutable: true + +properties: + - name: 'name' + type: String + description: | + The unique identifier of the Playbook. + Format: projects//locations//agents//playbooks/. + output: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + - name: 'displayName' + type: String + description: | + The human-readable name of the playbook, unique within an agent. + required: true + - name: 'goal' + type: String + description: | + High level description of the goal the playbook intend to accomplish. 
A goal should be concise since it's visible to other playbooks that may reference this playbook. + required: true + - name: 'instruction' + type: NestedObject + description: | + Instruction to accomplish target goal. + properties: + - name: 'guidelines' + type: String + description: | + General guidelines for the playbook. These are unstructured instructions that are not directly part of the goal, e.g. "Always be polite". It's valid for this text to be long and used instead of steps altogether. + - name: 'steps' + type: Array + description: | + Ordered list of step by step execution instructions to accomplish target goal. + item_type: + type: NestedObject + properties: + - name: 'steps' + type: String + description: | + Sub-processing needed to execute the current step. + + This field uses JSON data as a string. The value provided must be a valid JSON representation documented in [Step](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.playbooks#step). + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'text' + type: String + description: | + Step instruction in text format. + - name: 'tokenCount' + type: String + description: | + Estimated number of tokes current playbook takes when sent to the LLM. + output: true + - name: 'createTime' + type: Time + description: | + The timestamp of initial playbook creation. + + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". + output: true + - name: 'updateTime' + type: Time + description: | + Last time the playbook version was updated. 
+ + Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". + output: true + - name: 'referencedPlaybooks' + type: Array + description: | + The resource name of other playbooks referenced by the current playbook in the instructions. + output: true + item_type: + type: String + - name: 'referencedFlows' + type: Array + description: | + The resource name of flows referenced by the current playbook in the instructions. + output: true + item_type: + type: String + - name: 'referencedTools' + type: Array + description: | + The resource name of tools referenced by the current playbook in the instructions. If not provided explicitly, they are will be implied using the tool being referenced in goal and steps. + item_type: + type: String + - name: 'llmModelSettings' + type: NestedObject + description: | + Llm model settings for the playbook. + properties: + - name: 'model' + type: String + description: | + The selected LLM model. + - name: 'promptText' + type: String + description: | + The custom prompt to use. + - name: 'playbookType' + type: Enum + description: Type of the playbook. 
+ ignore_read: true + enum_values: + - 'PLAYBOOK_TYPE_UNSPECIFIED' + - 'TASK' + - 'ROUTINE' diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl new file mode 100644 index 000000000000..522f50f48ebf --- /dev/null +++ b/mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl @@ -0,0 +1,18 @@ +config := meta.(*transport_tpg.Config) + +// current import_formats can't import fields with forward slashes in their value and parent contains slashes +if err := tpgresource.ParseImportId([]string{ + "(?P.+)/playbooks/(?P[^/]+)", + "(?P.+)/(?P[^/]+)", +}, d, config); err != nil { + return nil, err +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/playbooks/{{"{{"}}name{{"}}"}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/examples/dialogflowcx_playbook_basic.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_playbook_basic.tf.tmpl new file mode 100644 index 000000000000..59192db097e3 --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_playbook_basic.tf.tmpl @@ -0,0 +1,44 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." 
+} + +resource "google_dialogflow_cx_playbook" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "Example Display Name" + goal = "Example Goal" + playbook_type = "ROUTINE" + instruction { + steps { + text = "step 1" + steps = jsonencode([ + { + "text": "step 1 1" + }, + { + "text": "step 1 2", + "steps": [ + { + "text": "step 1 2 1" + }, + { + "text": "step 1 2 2" + } + ] + }, + { + "text": "step 1 3" + } + ]) + } + steps { + text = "step 2" + } + steps { + text = "step 3" + } + } +} diff --git a/mmv1/templates/terraform/examples/dialogflowcx_playbook_fulfillment.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_playbook_fulfillment.tf.tmpl new file mode 100644 index 000000000000..3227046d5be8 --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflowcx_playbook_fulfillment.tf.tmpl @@ -0,0 +1,86 @@ +resource "google_dialogflow_cx_agent" "agent" { + display_name = "{{index $.Vars "agent_name"}}" + location = "global" + default_language_code = "en" + time_zone = "America/New_York" + description = "Example description." 
+} + +resource "google_storage_bucket" "bucket" { + name = "{{index $.Vars "bucket_name"}}" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_dialogflow_cx_webhook" "my_webhook" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "MyWebhook" + generic_web_service { + uri = "https://example.com" + } +} + +resource "google_dialogflow_cx_tool" "my_tool" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "Example Tool" + description = "Example Description" +} + +resource "google_dialogflow_cx_generator" "my_generator" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "TF Prompt generator" + llm_model_settings { + model = "gemini-2.0-flash-001" + prompt_text = "Return me some great results" + } + prompt_text { + text = "Send me great results in french" + } + model_parameter { + temperature = 0.55 + } +} + +resource "google_dialogflow_cx_playbook" "{{$.PrimaryResourceId}}" { + parent = google_dialogflow_cx_agent.agent.id + display_name = "Playbook Example with Fulfillment" + goal = "Example Goal" + instruction { + guidelines = "Example Guidelines" + steps { + text = "step 1" + steps = jsonencode([ + { + "text": "step 1 1" + }, + { + "text": "step 1 2", + "steps": [ + { + "text": "step 1 2 1" + }, + { + "text": "step 1 2 2" + } + ] + }, + { + "text": "step 1 3" + } + ]) + } + steps { + text = "step 2" + } + steps { + text = "step 3" + } + } + + llm_model_settings { + model = "gemini-2.0-flash-001" + prompt_text = "Return me some great results" + } + + referenced_tools = [google_dialogflow_cx_tool.my_tool.id] +} From 180a4f3ce5e5e5d415f35b0ca3cbb8e25af5b4b1 Mon Sep 17 00:00:00 2001 From: Yu Liao Date: Wed, 6 Aug 2025 15:06:51 -0700 Subject: [PATCH 698/884] replaced selfsubjectreviews with resourceclaims for beta api test, selfsubjectreviews was GAed, and its beta API was removed (#14755) --- .../resource_container_cluster_test.go.tmpl | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 
deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index d10d4db1672a..b0675ba761b7 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -11243,28 +11243,24 @@ resource "google_container_cluster" "primary" { min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] initial_node_count = 1 - # This feature has been available since GKE 1.27, and currently the only - # supported Beta API is authentication.k8s.io/v1beta1/selfsubjectreviews. - # However, in the future, more Beta APIs will be supported, such as the - # resource.k8s.io group. At the same time, some existing Beta APIs will be - # deprecated as the feature will be GAed, and the Beta API will be eventually - # removed. In the case of the SelfSubjectReview API, it is planned to be GAed - # in Kubernetes as of 1.28. And, the Beta API of SelfSubjectReview will be removed - # after at least 3 minor version bumps, so it will be removed as of Kubernetes 1.31 - # or later. - # https://pr.k8s.io/117713 + # Some existing Beta APIs will be deprecated as the feature will be GAed, + # and the Beta API will be eventually removed. In the case of the ResourceClaims + # and its depended APIs, they are GAed in Kubernetes as of 1.34. And, the Beta APIs + # will be removed after at least 3 minor version bumps, so it will be removed as + # of Kubernetes 1.37 or later. 
+ # https://pr.k8s.io/132706 # https://kubernetes.io/docs/reference/using-api/deprecation-guide/ # - # The new Beta APIs will be available since GKE 1.28 - # - admissionregistration.k8s.io/v1beta1/validatingadmissionpolicies - # - admissionregistration.k8s.io/v1beta1/validatingadmissionpolicybindings - # https://pr.k8s.io/118644 - # # Removing the Beta API from Kubernetes will break the test. # TODO: Replace the Beta API with one available on the version of GKE # if the test is broken. enable_k8s_beta_apis { - enabled_apis = ["authentication.k8s.io/v1beta1/selfsubjectreviews"] + enabled_apis = [ + "resource.k8s.io/v1beta1/deviceclasses", + "resource.k8s.io/v1beta1/resourceclaims", + "resource.k8s.io/v1beta1/resourceclaimtemplates", + "resource.k8s.io/v1beta1/resourceslices" + ] } network = "%s" subnetwork = "%s" From cc48a538db0c22ec1b9748637dbe4fe01d028e8f Mon Sep 17 00:00:00 2001 From: Phil Sung Date: Wed, 6 Aug 2025 15:25:32 -0700 Subject: [PATCH 699/884] Support Cloud SQL read pool (#14088) --- .../sql/data_source_sql_database_instances.go | 2 +- .../resource_sql_database_instance.go.tmpl | 51 +++- ...esource_sql_database_instance_test.go.tmpl | 259 ++++++++++++++++++ .../r/sql_database_instance.html.markdown | 14 +- 4 files changed, 317 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go b/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go index 8f48b4a5df94..55b03d879137 100644 --- a/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go +++ b/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go @@ -146,7 +146,7 @@ func flattenDatasourceGoogleDatabaseInstancesList(fetchedInstances []*sqladmin.D instance["available_maintenance_versions"] = rawInstance.AvailableMaintenanceVersions instance["instance_type"] = rawInstance.InstanceType instance["service_account_email_address"] = rawInstance.ServiceAccountEmailAddress - 
instance["settings"] = flattenSettings(rawInstance.Settings, d) + instance["settings"] = flattenSettings(rawInstance.Settings, rawInstance.InstanceType, d) if rawInstance.DiskEncryptionConfiguration != nil { instance["encryption_key_name"] = rawInstance.DiskEncryptionConfiguration.KmsKeyName diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 7dbeff3f8c78..6ed39b1db93c 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -314,7 +314,16 @@ func ResourceSqlDatabaseInstance() *schema.Resource { settings.backup_configuration.enabled is set to true. For MySQL instances, ensure that settings.backup_configuration.binary_log_enabled is set to true. For Postgres instances, ensure that settings.backup_configuration.point_in_time_recovery_enabled -is set to true. Defaults to ZONAL.`, +is set to true. Defaults to ZONAL. +For read pool instances, this field is read-only. The availability type is changed by specifying +the number of nodes (node_count).`, + }, + "effective_availability_type": { + Type: schema.TypeString, + Computed: true, + Description: `The availability type of the Cloud SQL instance, high availability +(REGIONAL) or single zone (ZONAL). This field always contains the value that is reported by the +API (for read pools, effective_availability_type may differ from availability_type).`, }, "backup_configuration": { Type: schema.TypeList, @@ -879,7 +888,14 @@ is set to true. Defaults to ZONAL.`, Type: schema.TypeString, Computed: true, Optional: true, - Description: `The type of the instance. The valid values are:- 'SQL_INSTANCE_TYPE_UNSPECIFIED', 'CLOUD_SQL_INSTANCE', 'ON_PREMISES_INSTANCE' and 'READ_REPLICA_INSTANCE'.`, + Description: `The type of the instance. 
See https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1/instances#SqlInstanceType for supported values.`, + }, + + "node_count": { + Type: schema.TypeInt, + Computed: true, + Optional: true, + Description: `For a read pool instance, the number of nodes in the read pool.`, }, "replica_configuration": { @@ -1263,6 +1279,10 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) instance.InstanceType = d.Get("instance_type").(string) } + if _, ok := d.GetOk("node_count"); ok { + instance.NodeCount = int64(d.Get("node_count").(int)) + } + instance.RootPassword = d.Get("root_password").(string) // Modifying a replica during Create can cause problems if the master is @@ -1851,10 +1871,12 @@ func resourceSqlDatabaseInstanceRead(d *schema.ResourceData, meta interface{}) e if err := d.Set("instance_type", instance.InstanceType); err != nil { return fmt.Errorf("Error setting instance_type: %s", err) } - if err := d.Set("settings", flattenSettings(instance.Settings, d)); err != nil { + if err := d.Set("node_count", instance.NodeCount); err != nil { + return fmt.Errorf("Error setting node_count: %s", err) + } + if err := d.Set("settings", flattenSettings(instance.Settings, instance.InstanceType, d)); err != nil { log.Printf("[WARN] Failed to set SQL Database Instance Settings") } - if instance.DiskEncryptionConfiguration != nil { if err := d.Set("encryption_key_name", instance.DiskEncryptionConfiguration.KmsKeyName); err != nil { return fmt.Errorf("Error setting encryption_key_name: %s", err) @@ -2202,6 +2224,10 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) instance.InstanceType = d.Get("instance_type").(string) } + if _, ok := d.GetOk("node_count"); ok { + instance.NodeCount = int64(d.Get("node_count").(int)) + } + // Database Version is required for all calls with Google ML integration enabled or it will be rejected by the API. 
if d.Get("settings.0.enable_google_ml_integration").(bool) || len(_settings["connection_pool_config"].(*schema.Set).List()) > 0 { instance.DatabaseVersion = databaseVersion @@ -2361,13 +2387,14 @@ func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) return []*schema.ResourceData{d}, nil } -func flattenSettings(settings *sqladmin.Settings, d *schema.ResourceData) []map[string]interface{} { +func flattenSettings(settings *sqladmin.Settings, iType string, d *schema.ResourceData) []map[string]interface{} { data := map[string]interface{}{ "version": settings.SettingsVersion, "tier": settings.Tier, "edition": flattenEdition(settings.Edition), "activation_policy": settings.ActivationPolicy, - "availability_type": settings.AvailabilityType, + "availability_type": d.Get("settings.0.availability_type"), + "effective_availability_type": settings.AvailabilityType, "collation": settings.Collation, "connector_enforcement": settings.ConnectorEnforcement, "disk_type": settings.DataDiskType, @@ -2384,6 +2411,18 @@ func flattenSettings(settings *sqladmin.Settings, d *schema.ResourceData) []map[ "retain_backups_on_delete": settings.RetainBackupsOnDelete, } + if data["availability_type"] == "" { + data["availability_type"] = "ZONAL" + } + // For read pools, availability type is server managed. Above, we + // pull it from the old TF resource so that it never shows a + // diff. Now, here, for non-pool instances, we overwrite it with the + // value obtained from the API (which would be the typical way to + // populate the field). 
+ if iType != "READ_POOL_INSTANCE" { + data["availability_type"] = settings.AvailabilityType + } + if settings.ActiveDirectoryConfig != nil { data["active_directory_config"] = flattenActiveDirectoryConfig(settings.ActiveDirectoryConfig) } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 0e6bec16cf76..2a86ad8c74b8 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -4,6 +4,7 @@ import ( "fmt" "regexp" "strconv" + "strings" "testing" "time" @@ -3029,6 +3030,189 @@ func TestAccSqlDatabaseInstance_PostgresSwitchoverSuccess(t *testing.T) { }) } +// Read pool for Postgres. Scale out (change node count) +func TestAccSqlDatabaseInstance_PostgresReadPoolScaleOutSuccess(t *testing.T) { + t.Parallel() + primaryName := "tf-test-pg-readpool-primary-" + acctest.RandString(t, 10) + readPoolName := "tf-test-pg-readpool-" + acctest.RandString(t, 10) + project := envvar.GetTestProjectFromEnv() + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "POSTGRES_15", + ReplicaName: readPoolName, + NodeCount: 1, + }), + }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: 
testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "POSTGRES_15", + ReplicaName: readPoolName, + NodeCount: 2, + }), + }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +// Read pool for Postgres. Scale up (change machine type) +func TestAccSqlDatabaseInstance_PostgresReadPoolScaleUpSuccess(t *testing.T) { + t.Parallel() + primaryName := "tf-test-pg-readpool-mtc-primary-" + acctest.RandString(t, 10) + readPoolName := "tf-test-pg-readpool-mtc-" + acctest.RandString(t, 10) + project := envvar.GetTestProjectFromEnv() + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "POSTGRES_15", + ReplicaName: readPoolName, + NodeCount: 1, + ReplicaMachineType: "db-perf-optimized-N-2", + }), + }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "POSTGRES_15", + ReplicaName: readPoolName, + NodeCount: 1, + ReplicaMachineType: "db-perf-optimized-N-4", + }), 
+ }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +// Read pool for MySQL. Enable and disable read pool +func TestAccSqlDatabaseInstance_MysqlReadPoolEnableDisableSuccess(t *testing.T) { + t.Parallel() + primaryName := "tf-test-mysql-readpool-primary-" + acctest.RandString(t, 10) + readPoolName := "tf-test-mysql-readpool-" + acctest.RandString(t, 10) + project := envvar.GetTestProjectFromEnv() + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "MYSQL_8_0", + ReplicaName: readPoolName, + InstanceType: "READ_REPLICA_INSTANCE", + }), + }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + // Enable read pool + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "MYSQL_8_0", + ReplicaName: readPoolName, + InstanceType: "READ_POOL_INSTANCE", + NodeCount: 1, + }), + }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { 
+ ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + // Disable read pool + { + Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ + DatabaseType: "MYSQL_8_0", + ReplicaName: readPoolName, + InstanceType: "READ_REPLICA_INSTANCE", + }), + }, + { + ResourceName: "google_sql_database_instance.original-primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + ResourceName: "google_sql_database_instance.original-read-pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_updateSslOptionsForPostgreSQL(t *testing.T) { t.Parallel() @@ -4757,6 +4941,81 @@ resource "google_sql_database_instance" "original-replica" { `, project, replicaName) } +type ReadPoolConfig struct { + DatabaseType string + ReplicaName string + // InstanceType specifies the instance type of the replica, + // defaulting to READ_POOL_INSTANCE. + // + // Despite the naming of this struct, you can also set it to + // READ_REPLICA_INSTANCE to create an ordinary read replica in order + // to test enable/disable pool scenarios. + InstanceType string + NodeCount int64 + // ReplicaMachineType gives the machine type of the read pool nodes + // or read replica. It defaults to db-perf-optimized-N-2. 
+ ReplicaMachineType string +} + +func testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName string, rpconfig ReadPoolConfig) string { + nodeCountStr := "" + if rpconfig.NodeCount > 0 { + nodeCountStr = fmt.Sprintf(` node_count = %d +`, rpconfig.NodeCount) + } + + if rpconfig.InstanceType == "" { + rpconfig.InstanceType = "READ_POOL_INSTANCE" + } + + if rpconfig.ReplicaMachineType == "" { + rpconfig.ReplicaMachineType = "db-perf-optimized-N-2" + } + + primaryTxnLogs := "" + if strings.HasPrefix(rpconfig.DatabaseType, "MYSQL") { + primaryTxnLogs = "binary_log_enabled = true\n" + } else if strings.HasPrefix(rpconfig.DatabaseType, "POSTGRES") { + primaryTxnLogs = "point_in_time_recovery_enabled = true\n" + } + + return fmt.Sprintf(` +resource "google_sql_database_instance" "original-primary" { + project = "%s" + name = "%s" + region = "us-east1" + database_version = "%s" + instance_type = "CLOUD_SQL_INSTANCE" + deletion_protection = false + + settings { + tier = "db-perf-optimized-N-2" + edition = "ENTERPRISE_PLUS" + backup_configuration { + enabled = true +%s + } + } +} + +resource "google_sql_database_instance" "original-read-pool" { + project = "%s" + name = "%s" + region = "us-east1" + database_version = "%s" + instance_type = "%s" +%s + master_instance_name = google_sql_database_instance.original-primary.name + deletion_protection = false + + settings { + tier = "%s" + edition = "ENTERPRISE_PLUS" + } +} +`, project, primaryName, rpconfig.DatabaseType, primaryTxnLogs, project, rpconfig.ReplicaName, rpconfig.DatabaseType, rpconfig.InstanceType, nodeCountStr, rpconfig.ReplicaMachineType) +} + func testAccSqlDatabaseInstance_basicInstanceForPsc(instanceName string, projectId string, orgId string, billingAccount string) string { return fmt.Sprintf(` resource "google_project" "testproject" { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown 
b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 291786bfe74c..5d3abe5dc5ed 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -352,11 +352,13 @@ The `settings` block supports: active. Can be either `ALWAYS`, `NEVER` or `ON_DEMAND`. * `availability_type` - (Optional) The availability type of the Cloud SQL - instance, high availability (`REGIONAL`) or single zone (`ZONAL`).' For all instances, ensure that + instance, high availability (`REGIONAL`) or single zone (`ZONAL`). For all instances, ensure that `settings.backup_configuration.enabled` is set to `true`. For MySQL instances, ensure that `settings.backup_configuration.binary_log_enabled` is set to `true`. For Postgres and SQL Server instances, ensure that `settings.backup_configuration.point_in_time_recovery_enabled` is set to `true`. Defaults to `ZONAL`. + For read pool instances, this field is read-only. The availability type is changed by specifying + the number of nodes (`node_count`). * `collation` - (Optional) The name of server instance collation. @@ -380,6 +382,8 @@ The `settings` block supports: * `data_disk_provisioned_throughput` - (Optional, Beta) Provisioned throughput measured in MiB per second for the data disk. This field is only used for `HYPERDISK_BALANCED` disk types. +* `node_count` - For a read pool instance, the number of nodes in the read pool. + * `pricing_plan` - (Optional) Pricing plan for this instance, can only be `PER_USE`. * `time_zone` - (Optional) The time_zone to be used by the database engine (supported only for SQL Server), in SQL Server timezone format. @@ -685,13 +689,19 @@ performing filtering in a Terraform config. * `psc_service_attachment_link` - the URI that points to the service attachment of the instance. -* `instance_type` - The type of the instance. 
The supported values are `SQL_INSTANCE_TYPE_UNSPECIFIED`, `CLOUD_SQL_INSTANCE`, `ON_PREMISES_INSTANCE` and `READ_REPLICA_INSTANCE`. +* `instance_type` - The type of the instance. See [API reference for SqlInstanceType](https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1/instances#SqlInstanceType) for supported values. ~> **NOTE:** Users can upgrade a read replica instance to a stand-alone Cloud SQL instance with the help of `instance_type`. To promote, users have to set the `instance_type` property as `CLOUD_SQL_INSTANCE` and remove/unset `master_instance_name` and `replica_configuration` from instance configuration. This operation might cause your instance to restart. * `settings.version` - Used to make sure changes to the `settings` block are atomic. +* `settings.0.effective_availability_type` - (Computed) The availability type of + the Cloud SQL instance, high availability (REGIONAL) or single zone + (ZONAL). This field always contains the value that is reported by the API (for + read pools, `settings.0.effective_availability_type` may differ from + `settings.0.availability_type`). + * `server_ca_cert.0.cert` - The CA Certificate used to connect to the SQL Instance via SSL. * `server_ca_cert.0.common_name` - The CN valid for the CA Cert. 
From 0a8d2dc34a72de743d269c156768ea2f7d390bae Mon Sep 17 00:00:00 2001 From: sachin purohit Date: Wed, 6 Aug 2025 15:53:29 -0700 Subject: [PATCH 700/884] =?UTF-8?q?fix(bigquery=5Fdataset):=20fixed=20hand?= =?UTF-8?q?ling=20of=20non-legacy=20roles=20for=20access=20=E2=80=A6=20(#1?= =?UTF-8?q?4569)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mmv1/products/bigquery/Dataset.yaml | 1 + .../constants/bigquery_dataset.go.tmpl | 33 +++++++++++++++++++ .../examples/bigquery_dataset_basic.tf.tmpl | 2 +- 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/mmv1/products/bigquery/Dataset.yaml b/mmv1/products/bigquery/Dataset.yaml index 0c8279302e6b..e541c8f0b63d 100644 --- a/mmv1/products/bigquery/Dataset.yaml +++ b/mmv1/products/bigquery/Dataset.yaml @@ -129,6 +129,7 @@ properties: is_set: true default_from_api: true is_missing_in_cai: true + set_hash_func: 'resourceBigqueryDatasetAccessHash' item_type: type: NestedObject properties: diff --git a/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl b/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl index 3a5f4b930334..8a1bc0702c0c 100644 --- a/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl +++ b/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl @@ -1,5 +1,11 @@ const datasetIdRegexp = `^[0-9A-Za-z_]+$` +var bigqueryDatasetAccessPrimitiveToRoleMap = map[string]string{ + "OWNER": "roles/bigquery.dataOwner", + "WRITER": "roles/bigquery.dataEditor", + "READER": "roles/bigquery.dataViewer", +} + func validateDatasetId(v interface{}, k string) (ws []string, errors []error) { value := v.(string) if !regexp.MustCompile(datasetIdRegexp).MatchString(value) { @@ -23,3 +29,30 @@ func validateDefaultTableExpirationMs(v interface{}, k string) (ws []string, err return } + +{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} +// bigqueryDatasetAccessHash is a custom hash function for the access block. 
+// It normalizes the 'role' field before hashing, treating legacy roles +// and their modern IAM equivalents as the same. +func resourceBigqueryDatasetAccessHash(v interface{}) int { + m, ok := v.(map[string]interface{}) + if !ok { + return 0 + } + // Make a copy of the map to avoid modifying the underlying data. + copy := make(map[string]interface{}, len(m)) + for k, val := range m { + copy[k] = val + } + + // Normalize the role if it exists and matches a legacy role. + if role, ok := copy["role"].(string); ok { + if newRole, ok := bigqueryDatasetAccessPrimitiveToRoleMap[role]; ok { + copy["role"] = newRole + } + } + + // Use the default HashResource function on the (potentially modified) copy. + return schema.HashResource(bigqueryDatasetAccessSchema())(copy) +} +{{- end }} diff --git a/mmv1/templates/terraform/examples/bigquery_dataset_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_dataset_basic.tf.tmpl index dd40bc580adb..563893aeb3fc 100644 --- a/mmv1/templates/terraform/examples/bigquery_dataset_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/bigquery_dataset_basic.tf.tmpl @@ -10,7 +10,7 @@ resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { } access { - role = "OWNER" + role = "roles/bigquery.dataOwner" user_by_email = google_service_account.bqowner.email } From 41894c0a36625a4cc4533ffe5fcc8fd3fb54f337 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Wed, 6 Aug 2025 23:06:11 +0000 Subject: [PATCH 701/884] feat:(storagetransfer) added federated identity config for azure storage transfer (#14427) --- ....tmpl => resource_storage_transfer_job.go} | 123 ++++++++++++------ ...> resource_storage_transfer_job_meta.yaml} | 4 +- .../docs/r/storage_transfer_job.html.markdown | 12 +- 3 files changed, 92 insertions(+), 47 deletions(-) rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job.go.tmpl => resource_storage_transfer_job.go} (94%) rename 
mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job_meta.yaml.tmpl => resource_storage_transfer_job_meta.yaml} (95%) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go similarity index 94% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 0d5c7ae687f1..e437d31ea2c6 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -99,12 +99,11 @@ var ( "transfer_spec.0.aws_s3_data_source.0.aws_access_key", "transfer_spec.0.aws_s3_data_source.0.role_arn", } - {{- if ne $.TargetVersionName "ga" }} azureOptionCredentials = []string{ "transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials", "transfer_spec.0.azure_blob_storage_data_source.0.credentials_secret", + "transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config", } - {{- end }} ) func ResourceStorageTransferJob() *schema.Resource { @@ -142,10 +141,10 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `The project in which the resource belongs. 
If it is not provided, the provider project is used.`, }, "event_stream": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - ConflictsWith: []string{"schedule"}, + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + ConflictsWith: []string{"schedule"}, DiffSuppressFunc: diffSuppressEventStream, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -174,7 +173,7 @@ func ResourceStorageTransferJob() *schema.Resource { MaxItems: 1, Optional: true, ConflictsWith: []string{"transfer_spec", "schedule"}, - ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, + ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "object_conditions": objectConditionsSchema(replicationSpecObjectConditionsKeys), @@ -200,11 +199,11 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `Replication specification.`, }, "transfer_spec": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, + Type: schema.TypeList, + Optional: true, + MaxItems: 1, ConflictsWith: []string{"replication_spec"}, - ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, + ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "object_conditions": objectConditionsSchema(transferSpecObjectConditionsKeys), @@ -328,8 +327,8 @@ func ResourceStorageTransferJob() *schema.Resource { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "log_actions": { - Type: schema.TypeList, - Optional: true, + Type: schema.TypeList, + Optional: true, AtLeastOneOf: []string{"logging_config.0.enable_on_prem_gcs_transfer_logs", "logging_config.0.log_actions", "logging_config.0.log_action_states"}, Elem: &schema.Schema{ Type: schema.TypeString, @@ -338,8 +337,8 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `Specifies the actions to be logged. 
Not supported for transfers with PosifxFilesystem data sources; use enable_on_prem_gcs_transfer_logs instead.`, }, "log_action_states": { - Type: schema.TypeList, - Optional: true, + Type: schema.TypeList, + Optional: true, AtLeastOneOf: []string{"logging_config.0.enable_on_prem_gcs_transfer_logs", "logging_config.0.log_actions", "logging_config.0.log_action_states"}, Elem: &schema.Schema{ Type: schema.TypeString, @@ -348,10 +347,10 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `States in which logActions are logged. Not supported for transfers with PosifxFilesystem data sources; use enable_on_prem_gcs_transfer_logs instead.`, }, "enable_on_prem_gcs_transfer_logs": { - Type: schema.TypeBool, - Optional: true, + Type: schema.TypeBool, + Optional: true, AtLeastOneOf: []string{"logging_config.0.enable_on_prem_gcs_transfer_logs", "logging_config.0.log_actions", "logging_config.0.log_action_states"}, - Description: `For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer.`, + Description: `For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer.`, }, }, }, @@ -816,12 +815,8 @@ func azureBlobStorageDataSchema() *schema.Resource { }, "azure_credentials": { Type: schema.TypeList, - {{- if ne $.TargetVersionName "ga" }} Optional: true, ExactlyOneOf: azureOptionCredentials, - {{- else }} - Required: true, - {{- end }} MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -835,14 +830,35 @@ func azureBlobStorageDataSchema() *schema.Resource { }, Description: ` Credentials used to authenticate API requests to Azure.`, }, - {{- if ne $.TargetVersionName "ga" }} "credentials_secret": { - Optional: true, Type: schema.TypeString, + Optional: true, + ExactlyOneOf: azureOptionCredentials, Description: `The Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. 
Service Agent must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.`, + }, + "federated_identity_config": { + Type: schema.TypeList, + Optional: true, ExactlyOneOf: azureOptionCredentials, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "client_id": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: `The client (application) ID of the application with federated credentials.`, + }, + "tenant_id": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: `The tenant (directory) ID of the application with federated credentials.`, + }, + }, + }, + Description: ` Workload Identity Details used to authenticate API requests to Azure.`, }, - {{- end }} }, } } @@ -1134,6 +1150,30 @@ func resourceStorageTransferJobStateImporter(d *schema.ResourceData, meta interf return []*schema.ResourceData{d}, nil } +func expandAzureFederatedIdentifyConfig(federatedIdentifyConfig []interface{}) *storagetransfer.FederatedIdentityConfig { + if len(federatedIdentifyConfig) == 0 || federatedIdentifyConfig[0] == nil { + return nil + } + + federatedIdentifyCfg := federatedIdentifyConfig[0].(map[string]interface{}) + return &storagetransfer.FederatedIdentityConfig{ + ClientId: federatedIdentifyCfg["client_id"].(string), + TenantId: federatedIdentifyCfg["tenant_id"].(string), + } +} + +func flattenAzureFederatedIdentifyConfig(d *schema.ResourceData) []map[string]interface{} { + if (d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.client_id") == "") || (d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.tenant_id") == "") { + return []map[string]interface{}{} + } + + data := map[string]interface{}{ + "client_id": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.client_id"), + "tenant_id": 
d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.tenant_id"), + } + return []map[string]interface{}{data} +} + func expandDates(dates []interface{}) *storagetransfer.Date { if len(dates) == 0 || dates[0] == nil { return nil @@ -1349,10 +1389,10 @@ func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceDa "path": awsS3Data.Path, "role_arn": awsS3Data.RoleArn, } - if _, exist := d.GetOk("transfer_spec.0.aws_s3_data_source.0.aws_access_key"); exist{ + if _, exist := d.GetOk("transfer_spec.0.aws_s3_data_source.0.aws_access_key"); exist { data["aws_access_key"] = flattenAwsAccessKeys(d) } - + if awsS3Data.ManagedPrivateNetwork { data["managed_private_network"] = awsS3Data.ManagedPrivateNetwork } @@ -1433,11 +1473,10 @@ func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.Azu } func flattenAzureCredentials(d *schema.ResourceData) []map[string]interface{} { - {{- if ne $.TargetVersionName "ga" }} if d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token") == "" { return []map[string]interface{}{} } - {{- end }} + data := map[string]interface{}{ "sas_token": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token"), } @@ -1453,25 +1492,23 @@ func expandAzureBlobStorageData(azureBlobStorageDatas []interface{}) *storagetra azureBlobStorageData := azureBlobStorageDatas[0].(map[string]interface{}) return &storagetransfer.AzureBlobStorageData{ - Container: azureBlobStorageData["container"].(string), - Path: azureBlobStorageData["path"].(string), - StorageAccount: azureBlobStorageData["storage_account"].(string), - AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})), - {{- if ne $.TargetVersionName "ga" }} - CredentialsSecret: azureBlobStorageData["credentials_secret"].(string), - {{- end }} + Container: azureBlobStorageData["container"].(string), + Path: 
azureBlobStorageData["path"].(string), + StorageAccount: azureBlobStorageData["storage_account"].(string), + AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})), + CredentialsSecret: azureBlobStorageData["credentials_secret"].(string), + FederatedIdentityConfig: expandAzureFederatedIdentifyConfig(azureBlobStorageData["federated_identity_config"].([]interface{})), } } func flattenAzureBlobStorageData(azureBlobStorageData *storagetransfer.AzureBlobStorageData, d *schema.ResourceData) []map[string]interface{} { data := map[string]interface{}{ - "container": azureBlobStorageData.Container, - "path": azureBlobStorageData.Path, - "storage_account": azureBlobStorageData.StorageAccount, - "azure_credentials": flattenAzureCredentials(d), - {{- if ne $.TargetVersionName "ga" }} - "credentials_secret": azureBlobStorageData.CredentialsSecret, - {{- end }} + "container": azureBlobStorageData.Container, + "path": azureBlobStorageData.Path, + "storage_account": azureBlobStorageData.StorageAccount, + "azure_credentials": flattenAzureCredentials(d), + "federated_identity_config": flattenAzureFederatedIdentifyConfig(d), + "credentials_secret": azureBlobStorageData.CredentialsSecret, } return []map[string]interface{}{data} diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml similarity index 95% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml index 8b8233b517a9..1992d537368f 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml @@ -51,9 +51,9 @@ fields: - field: 
'transfer_spec.aws_s3_data_source.role_arn' - field: 'transfer_spec.azure_blob_storage_data_source.azure_credentials.sas_token' - field: 'transfer_spec.azure_blob_storage_data_source.container' -{{- if ne $.TargetVersionName "ga" }} - field: 'transfer_spec.azure_blob_storage_data_source.credentials_secret' -{{- end }} + - field: 'transfer_spec.azure_blob_storage_data_source.federated_identity_config.client_id' + - field: 'transfer_spec.azure_blob_storage_data_source.federated_identity_config.tenant_id' - field: 'transfer_spec.azure_blob_storage_data_source.path' - field: 'transfer_spec.azure_blob_storage_data_source.storage_account' - field: 'transfer_spec.gcs_data_sink.bucket_name' diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index ea65dc1eeac5..b83ea6ebe37a 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -282,14 +282,22 @@ The `aws_access_key` block supports: * `path` - (Required) Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'. -* `credentials_secret` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Full Resource name of a secret in Secret Manager containing [SAS Credentials in JSON form](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/TransferSpec#azureblobstoragedata:~:text=begin%20with%20a%20%27/%27.-,credentialsSecret,-string). Service Agent for Storage Transfer must have permissions to access secret. 
If credentials_secret is specified, do not specify azure_credentials.`,
+* `credentials_secret` - (Optional) Full Resource name of a secret in Secret Manager containing [SAS Credentials in JSON form](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/TransferSpec#azureblobstoragedata:~:text=begin%20with%20a%20%27/%27.-,credentialsSecret,-string). Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
 
-* `azure_credentials` - (Required in GA, Optional in [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Credentials used to authenticate API requests to Azure block.
+* `azure_credentials` - (Optional) Credentials used to authenticate API requests to Azure block.
+
+* `federated_identity_config` - (Optional) Federated identity config of a user registered Azure application. Structure [documented below](#nested_federated_identity_config).
 
 The `azure_credentials` block supports:
 
 * `sas_token` - (Required) Azure shared access signature. See [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview).
 
+The `federated_identity_config` block supports:
+
+* `client_id` - (Required) The client (application) ID of the application with federated credentials.
+
+* `tenant_id` - (Required) The tenant (directory) ID of the application with federated credentials.
+
 The `schedule_start_date` and `schedule_end_date` blocks support:
 
 * `year` - (Required) Year of date. Must be from 1 to 9999.
From fdbed9e5633537a89113198ff9f0612a5e10bcfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Thu, 7 Aug 2025 01:08:32 +0200 Subject: [PATCH 702/884] Add singular data source for retrieving a package from an Artifact Registry repository (#14693) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_artifact_registry_package.go | 136 ++++++++++++++++++ ...a_source_artifact_registry_package_test.go | 37 +++++ .../d/artifact_registry_package.html.markdown | 41 ++++++ 4 files changed, 215 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index b22f4fac9bd3..eefde7b48be9 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -30,6 +30,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), + "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), "google_artifact_registry_version": artifactregistry.DataSourceArtifactRegistryVersion(), "google_apphub_discovered_workload": apphub.DataSourceApphubDiscoveredWorkload(), "google_app_engine_default_service_account": appengine.DataSourceGoogleAppEngineDefaultServiceAccount(), diff 
--git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go new file mode 100644 index 000000000000..71edbf2cf241 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go @@ -0,0 +1,136 @@ +package artifactregistry + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceArtifactRegistryPackage() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryPackageRead, + + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + }, + "name": { + Type: schema.TypeString, + Required: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + }, + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + }, + "annotations": { + Type: schema.TypeMap, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + } +} + +func DataSourceArtifactRegistryPackageRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return fmt.Errorf("Error setting Artifact Registry user agent: %s", err) + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error setting Artifact Registry project: %s", err) + } + + basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") + 
if err != nil { + return fmt.Errorf("Error setting Artifact Registry base path: %s", err) + } + + resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{name}}")) + if err != nil { + return fmt.Errorf("Error setting resource path: %s", err) + } + + urlRequest := basePath + resourcePath + headers := make(http.Header) + + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: u.String(), + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error getting Artifact Registry package: %s", err) + } + + annotations := make(map[string]string) + if anno, ok := res["annotations"].(map[string]interface{}); ok { + for k, v := range anno { + if val, ok := v.(string); ok { + annotations[k] = val + } + } + } + + getString := func(m map[string]interface{}, key string) string { + if v, ok := m[key].(string); ok { + return v + } + return "" + } + + name := getString(res, "name") + + if err := d.Set("project", project); err != nil { + return err + } + if err := d.Set("name", name); err != nil { + return err + } + if err := d.Set("display_name", getString(res, "displayName")); err != nil { + return err + } + if err := d.Set("create_time", getString(res, "createTime")); err != nil { + return err + } + if err := d.Set("update_time", getString(res, "updateTime")); err != nil { + return err + } + if err := d.Set("annotations", annotations); err != nil { + return err + } + + d.SetId(name) + + return nil +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go new file mode 100644 index 000000000000..78d713183500 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go @@ -0,0 +1,37 @@ +package artifactregistry_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryPackage_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryPackageConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_artifact_registry_package.this", "name", "projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane"), + ), + }, + }, + }) +} + +// Test the data source against the public AR repos +// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container +// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io +const testAccDataSourceArtifactRegistryPackageConfig = ` +data "google_artifact_registry_package" "this" { + project = "go-containerregistry" + location = "us" + repository_id = "gcr.io" + name = "gcrane" +} +` diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown new file mode 100644 index 000000000000..fa984c363fb9 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown @@ -0,0 +1,41 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about a package within a Google Artifact Registry repository. +--- + +# google_artifact_registry_package +This data source fetches information of a package from a provided Artifact Registry repository. 
+ +## Example Usage + +```hcl +resource "google_artifact_registry_package" "my_package" { + location = "us-west1" + repository_id = "my-repository" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` - (Required) The location of the artifact registry. + +* `repository_id` - (Required) The last part of the repository name to fetch from. + +* `name` - (Required) The name of the package. + +* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +The following computed attributes are exported: + +* `display_name` - The display name of the package. + +* `create_time` - The time, as a RFC 3339 string, this package was created. + +* `update_time` - The time, as a RFC 3339 string, this package was last updated. This includes publishing a new version of the package. + +* `annotations` - Client specified annotations. From b765b5a1fa5a0ffa87eccf3c38ecf6eda8104fe0 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Wed, 6 Aug 2025 20:08:01 -0400 Subject: [PATCH 703/884] diff flag nil fix (#14763) --- mmv1/third_party/terraform/acctest/diff_utils.go | 10 ++++++---- mmv1/third_party/terraform/acctest/vcr_utils.go | 2 -- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/diff_utils.go b/mmv1/third_party/terraform/acctest/diff_utils.go index d101364e0c4e..d23f226180f4 100644 --- a/mmv1/third_party/terraform/acctest/diff_utils.go +++ b/mmv1/third_party/terraform/acctest/diff_utils.go @@ -16,6 +16,8 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) +const diffTag = "[Diff]" + func isReleaseDiffEnabled() bool { releaseDiff := os.Getenv("RELEASE_DIFF") return releaseDiff != "" @@ -78,7 +80,7 @@ func InsertDiffSteps(c resource.TestCase, tempOutputFile *os.File, releaseProvid testStep.Config = ReformConfigWithProvider(ogConfig, localProviderName) fmt.Fprintf(tempOutputFile, 
"[DEBUG] Reformatted config: %s\n", testStep.Config) testStep.PreConfig = func() { - fmt.Fprintf(tempOutputFile, "%s Step %d\n", diffFlag, countSteps) + fmt.Fprintf(tempOutputFile, "%s Step %d\n", diffTag, countSteps) } if testStep.ExpectError == nil && !testStep.PlanOnly { newStep := resource.TestStep{ @@ -154,7 +156,7 @@ func ParseReleaseDiffOutput(output string) (isDiff bool) { lines := strings.Split(trimmedOutput, "\n") lastLine := lines[len(lines)-1] - isDiff = strings.HasPrefix(lastLine, diffFlag) + isDiff = strings.HasPrefix(lastLine, diffTag) return isDiff } @@ -192,8 +194,8 @@ func writeOutputFileDeferFunction(tempOutputFile *os.File, failed bool) { if failed { // Check if the output line starts with "[Diff]" if isDiff { - fmt.Fprintf(os.Stdout, "%s Breaking Change Detected] \n", diffFlag) - fmt.Fprintf(diffFailureFile, "%s %s\n", diffFlag, testOutput) + fmt.Fprintf(os.Stdout, "%s Breaking Change Detected] \n", diffTag) + fmt.Fprintf(diffFailureFile, "%s %s\n", diffTag, testOutput) } else { fmt.Fprintf(regularFailureFile, testOutput) fmt.Fprintf(regularFailureFile, "FAILED --- %s\n", testOutput) diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index 628f2837f64e..a726982a3a5d 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -50,8 +50,6 @@ var configs map[string]*transport_tpg.Config var sources map[string]VcrSource -var diffFlag = "[Diff]" - // VcrSource is a source for a given VCR test with the value that seeded it type VcrSource struct { seed int64 From e648f737c0ff7e5bf13546f9c438dc8db91e6dd8 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Wed, 6 Aug 2025 17:10:50 -0700 Subject: [PATCH 704/884] fix sweepers for resources with overridden names (#14756) --- mmv1/api/resource.go | 6 +++++- mmv1/products/storagetransfer/AgentPool.yaml | 1 + 2 files changed, 6 insertions(+), 1 
deletion(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 8370cc211bef..f4d8f34bd3b9 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -408,7 +408,11 @@ func (r *Resource) SetDefault(product *Product) { r.ApiName = r.Name } if r.CollectionUrlKey == "" { - r.CollectionUrlKey = google.Camelize(google.Plural(r.Name), "lower") + key := r.Name + if r.ApiResourceTypeKind != "" { + key = r.ApiResourceTypeKind + } + r.CollectionUrlKey = google.Camelize(google.Plural(key), "lower") } if r.IdFormat == "" { r.IdFormat = r.SelfLinkUri() diff --git a/mmv1/products/storagetransfer/AgentPool.yaml b/mmv1/products/storagetransfer/AgentPool.yaml index 68d501a2ce90..acb915902484 100644 --- a/mmv1/products/storagetransfer/AgentPool.yaml +++ b/mmv1/products/storagetransfer/AgentPool.yaml @@ -14,6 +14,7 @@ --- name: 'AgentPool' api_resource_type_kind: agentPools +collection_url_key: agentPools description: 'Represents an On-Premises Agent pool.' references: guides: From d2f671c4bbbebb52a68e10cce540368f9e125ce1 Mon Sep 17 00:00:00 2001 From: Andrew Ferg Date: Thu, 7 Aug 2025 13:41:00 -0400 Subject: [PATCH 705/884] Add HA Policy to Regional Backend Services (#14522) --- .../compute/RegionBackendService.yaml | 72 +++++ .../region_backend_service_ha_policy.tf.tmpl | 17 ++ ...nd_service_ha_policy_manual_leader.tf.tmpl | 72 +++++ ...ice_ha_policy_manual_leader_update_test.go | 248 ++++++++++++++++++ 4 files changed, 409 insertions(+) create mode 100644 mmv1/templates/terraform/examples/region_backend_service_ha_policy.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/region_backend_service_ha_policy_manual_leader.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index a392909e9a68..ed56d1af4162 100644 --- 
a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -144,6 +144,19 @@ examples: min_version: 'beta' vars: region_backend_service_name: 'region-service' + - name: 'region_backend_service_ha_policy' + primary_resource_id: 'default' + vars: + region_backend_service_name: 'region-service' + network_name: 'rbs-net' + - name: 'region_backend_service_ha_policy_manual_leader' + primary_resource_id: 'default' + vars: + region_backend_service_name: 'region-service' + network_name: 'rbs-net' + subnetwork_name: 'rbs-subnet' + instance_name: 'rbs-instance' + neg_name: 'rbs-neg' parameters: - name: 'region' type: ResourceRef @@ -1472,3 +1485,62 @@ properties: description: | A boolean flag enabling IP:PORT based dynamic forwarding. immutable: true + - name: 'haPolicy' + type: NestedObject + description: | + Configures self-managed High Availability (HA) for External and Internal Protocol Forwarding. + The backends of this regional backend service must only specify zonal network endpoint groups + (NEGs) of type GCE_VM_IP. Note that haPolicy is not for load balancing, and therefore cannot + be specified with sessionAffinity, connectionTrackingPolicy, and failoverPolicy. haPolicy + requires customers to be responsible for tracking backend endpoint health and electing a + leader among the healthy endpoints. Therefore, haPolicy cannot be specified with healthChecks. + haPolicy can only be specified for External Passthrough Network Load Balancers and Internal + Passthrough Network Load Balancers. + conflicts: + - sessionAffinity + - connectionTrackingPolicy + - failoverPolicy + - healthChecks + properties: + - name: 'fastIPMove' + type: Enum + description: | + Specifies whether fast IP move is enabled, and if so, the mechanism to achieve it. + Supported values are: + + * `DISABLED`: Fast IP Move is disabled. You can only use the haPolicy.leader API to + update the leader. 
+ + * `GARP_RA`: Provides a method to very quickly define a new network endpoint as the + leader. This method is faster than updating the leader using the + haPolicy.leader API. Fast IP move works as follows: The VM hosting the + network endpoint that should become the new leader sends either a + Gratuitous ARP (GARP) packet (IPv4) or an ICMPv6 Router Advertisement(RA) + packet (IPv6). Google Cloud immediately but temporarily associates the + forwarding rule IP address with that VM, and both new and in-flight packets + are quickly delivered to that VM. + immutable: true + enum_values: + - 'DISABLED' + - 'GARP_RA' + - name: 'leader' + type: NestedObject + description: | + Selects one of the network endpoints attached to the backend NEGs of this service as the + active endpoint (the leader) that receives all traffic. + properties: + - name: 'backendGroup' + type: ResourceRef + description: | + A fully-qualified URL of the zonal Network Endpoint Group (NEG) that the leader is + attached to. + - name: 'networkEndpoint' + type: NestedObject + description: | + The network endpoint within the leader.backendGroup that is designated as the leader. + properties: + - name: 'instance' + type: String + description: | + The name of the VM instance of the leader network endpoint. The instance must + already be attached to the NEG specified in the haPolicy.leader.backendGroup. 
diff --git a/mmv1/templates/terraform/examples/region_backend_service_ha_policy.tf.tmpl b/mmv1/templates/terraform/examples/region_backend_service_ha_policy.tf.tmpl new file mode 100644 index 000000000000..a0904db5dc44 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_backend_service_ha_policy.tf.tmpl @@ -0,0 +1,17 @@ +resource "google_compute_network" "default" { + name = "{{index $.Vars "network_name"}}" +} + +resource "google_compute_region_backend_service" "{{$.PrimaryResourceId}}" { + region = "us-central1" + name = "{{index $.Vars "region_backend_service_name"}}" + protocol = "UDP" + load_balancing_scheme = "EXTERNAL" + network = google_compute_network.default.id + ha_policy { + fast_ip_move = "GARP_RA" + } + // Must explicitly disable connection draining to override default value. + connection_draining_timeout_sec = 0 +} + diff --git a/mmv1/templates/terraform/examples/region_backend_service_ha_policy_manual_leader.tf.tmpl b/mmv1/templates/terraform/examples/region_backend_service_ha_policy_manual_leader.tf.tmpl new file mode 100644 index 000000000000..efcce80b4442 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_backend_service_ha_policy_manual_leader.tf.tmpl @@ -0,0 +1,72 @@ +resource "google_compute_network" "default" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "{{index $.Vars "subnetwork_name"}}" + ip_cidr_range = "10.1.2.0/24" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_network_endpoint" "endpoint" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + + instance = google_compute_instance.endpoint-instance.name + ip_address = google_compute_instance.endpoint-instance.network_interface[0].network_ip +} + +data "google_compute_image" "my_image" { + family = "debian-12" + project = "debian-cloud" +} + +resource "google_compute_instance" "endpoint-instance" 
{ + name = "{{index $.Vars "instance_name"}}" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + subnetwork = google_compute_subnetwork.default.id + access_config { + } + } +} + +resource "google_compute_network_endpoint_group" "neg" { + name = "{{index $.Vars "neg_name"}}" + network_endpoint_type = "GCE_VM_IP" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" +} + +resource "google_compute_region_backend_service" "{{$.PrimaryResourceId}}" { + region = "us-central1" + name = "{{index $.Vars "region_backend_service_name"}}" + protocol = "UDP" + load_balancing_scheme = "EXTERNAL" + network = google_compute_network.default.id + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance.name + } + } + } + // Must explicitly disable connection draining to override default value. 
+ connection_draining_timeout_sec = 0 +} + diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go new file mode 100644 index 000000000000..085f5cc19f8b --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go @@ -0,0 +1,248 @@ +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_full(context), + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"iap.0.oauth2_client_secret", "network", "region"}, + }, + { + Config: testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_compute_region_backend_service.default", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"iap.0.oauth2_client_secret", "network", "region"}, + }, + }, + }) +} + +func testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + name = "tf-test-rbs-net%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "tf-test-rbs-subnet%{random_suffix}" + ip_cidr_range = "10.1.2.0/24" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_network_endpoint" "endpoint1" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + + instance = google_compute_instance.endpoint-instance1.name + ip_address = google_compute_instance.endpoint-instance1.network_interface[0].network_ip +} + +resource "google_compute_network_endpoint" "endpoint2" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + + instance = google_compute_instance.endpoint-instance2.name + ip_address = google_compute_instance.endpoint-instance2.network_interface[0].network_ip +} + +data "google_compute_image" "my_image" { + family = "debian-12" + project = "debian-cloud" +} + +resource "google_compute_instance" "endpoint-instance1" { + name = "tf-test-rbs-instance1-%{random_suffix}" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + subnetwork = google_compute_subnetwork.default.id + access_config { + } + } +} + +resource "google_compute_instance" "endpoint-instance2" { + name = "tf-test-rbs-instance2-%{random_suffix}" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + subnetwork = google_compute_subnetwork.default.id + access_config { + } + } +} + +resource 
"google_compute_network_endpoint_group" "neg" { + name = "tf-test-rbs-neg%{random_suffix}" + network_endpoint_type = "GCE_VM_IP" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" +} + +resource "google_compute_region_backend_service" "default" { + region = "us-central1" + name = "tf-test-region-service%{random_suffix}" + protocol = "UDP" + load_balancing_scheme = "EXTERNAL" + network = google_compute_network.default.id + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance1.name + } + } + } + // Must explicitly disable connection draining to override default value. + connection_draining_timeout_sec = 0 +} +`, context) +} + +func testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + name = "tf-test-rbs-net%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "tf-test-rbs-subnet%{random_suffix}" + ip_cidr_range = "10.1.2.0/24" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_network_endpoint" "endpoint1" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + + instance = google_compute_instance.endpoint-instance1.name + ip_address = google_compute_instance.endpoint-instance1.network_interface[0].network_ip +} + +resource "google_compute_network_endpoint" "endpoint2" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + + instance = google_compute_instance.endpoint-instance2.name + ip_address = 
google_compute_instance.endpoint-instance2.network_interface[0].network_ip +} + +data "google_compute_image" "my_image" { + family = "debian-12" + project = "debian-cloud" +} + +resource "google_compute_instance" "endpoint-instance1" { + name = "tf-test-rbs-instance1-%{random_suffix}" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + subnetwork = google_compute_subnetwork.default.id + access_config { + } + } +} + +resource "google_compute_instance" "endpoint-instance2" { + name = "tf-test-rbs-instance2-%{random_suffix}" + machine_type = "e2-medium" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + subnetwork = google_compute_subnetwork.default.id + access_config { + } + } +} + +resource "google_compute_network_endpoint_group" "neg" { + name = "tf-test-rbs-neg%{random_suffix}" + network_endpoint_type = "GCE_VM_IP" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" +} + +resource "google_compute_region_backend_service" "default" { + region = "us-central1" + name = "tf-test-region-service%{random_suffix}" + protocol = "UDP" + load_balancing_scheme = "EXTERNAL" + network = google_compute_network.default.id + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance2.name + } + } + } + // Must explicitly disable connection draining to override default value. 
+ connection_draining_timeout_sec = 0 +} +`, context) +} From 3cf02749769e9cc6dd7283c900c63da7574d65d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Thu, 7 Aug 2025 20:09:17 +0200 Subject: [PATCH 706/884] Add plural data source for retrieving Artifact Registry repositories (#14633) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...a_source_artifact_registry_repositories.go | 173 ++++++++++++++++++ ...rce_artifact_registry_repositories_test.go | 127 +++++++++++++ .../d/artifact_registry_repositories.markdown | 48 +++++ 4 files changed, 349 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index eefde7b48be9..08de3a35013f 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -29,6 +29,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), + "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), 
"google_artifact_registry_version": artifactregistry.DataSourceArtifactRegistryVersion(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go new file mode 100644 index 000000000000..3c94509a9482 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go @@ -0,0 +1,173 @@ +package artifactregistry + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceArtifactRegistryRepositories() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArtifactRegistryRepositoriesRead, + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + }, + "name_filter": { + Type: schema.TypeString, + Optional: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + }, + "repositories": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "id": { + Type: schema.TypeString, + Computed: true, + }, + "repository_id": { + Type: schema.TypeString, + Computed: true, + }, + "format": { + Type: schema.TypeString, + Computed: true, + }, + "description": { + Type: schema.TypeString, + Computed: true, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceArtifactRegistryRepositoriesRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := 
tpgresource.GetProject(d, config) + if err != nil { + return err + } + + basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") + if err != nil { + return fmt.Errorf("Error setting Artifact Registry base path: %s", err) + } + + resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories")) + if err != nil { + return fmt.Errorf("Error setting resource path: %s", err) + } + + urlRequest := basePath + resourcePath + + nameFilter := "" + if v, ok := d.GetOk("name_filter"); ok { + nameFilter = fmt.Sprintf("name=\"%s/%s\"", resourcePath, v.(string)) + + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + q := u.Query() + q.Set("filter", nameFilter) + u.RawQuery = q.Encode() + urlRequest = u.String() + } + + headers := make(http.Header) + repos := make([]map[string]interface{}, 0) + pageToken := "" + + for { + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + q := u.Query() + if nameFilter != "" { + q.Set("filter", nameFilter) + } + if pageToken != "" { + q.Set("pageToken", pageToken) + } + u.RawQuery = q.Encode() + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: u.String(), + UserAgent: userAgent, + Headers: headers, + }) + + if err != nil { + return fmt.Errorf("Error listing Artifact Registry repositories: %s", err) + } + + if items, ok := res["repositories"].([]interface{}); ok { + for _, item := range items { + repo := item.(map[string]interface{}) + repos = append(repos, map[string]interface{}{ + "id": repo["name"], + "repository_id": tpgresource.GetResourceNameFromSelfLink(repo["name"].(string)), + "format": repo["format"], + "description": repo["description"], + "create_time": repo["createTime"], + "update_time": repo["updateTime"], + }) + } + } + + if nextToken, ok := 
res["nextPageToken"].(string); ok && nextToken != "" { + pageToken = nextToken + } else { + break + } + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + + if err := d.Set("repositories", repos); err != nil { + return fmt.Errorf("Error setting Artifact Registry repositories: %s", err) + } + + setId := resourcePath + + if nameFilter != "" { + setId += "/" + nameFilter + } + + d.SetId(setId) + + return nil +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go new file mode 100644 index 000000000000..cb569bbbe419 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go @@ -0,0 +1,127 @@ +package artifactregistry_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataSourceArtifactRegistryRepositories_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "location": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 8), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryRepositoriesConfig(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "project"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.1.id"), + 
resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.2.id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.repository_id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.format"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.create_time"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.update_time"), + ), + }, + { + Config: testAccDataSourceArtifactRegistryRepositoriesConfigWithFilter(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.repository_id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.format"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.create_time"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.update_time"), + ), + }, + }, + }) +} + +func testAccDataSourceArtifactRegistryRepositoriesConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_artifact_registry_repository" "repo1" { + location = "%{location}" + repository_id = "tf-test-repo1%{random_suffix}" + format = "DOCKER" + description = "repo1 desc" +} + +resource "google_artifact_registry_repository" "repo2" { + location = "%{location}" + repository_id = "tf-test-repo2%{random_suffix}" + format = "DOCKER" + description = "repo2 desc" +} + +resource "google_artifact_registry_repository" "repo3" { + location = "%{location}" + repository_id = "tf-test-repo3%{random_suffix}" + format = "DOCKER" + 
description = "repo3 desc" +} + +data "google_artifact_registry_repositories" "all" { + location = "%{location}" + + depends_on = [ + google_artifact_registry_repository.repo1, + google_artifact_registry_repository.repo2, + google_artifact_registry_repository.repo3, + ] +} +`, context) +} + +func testAccDataSourceArtifactRegistryRepositoriesConfigWithFilter(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_artifact_registry_repository" "repo1" { + location = "%{location}" + repository_id = "tf-test-repo1%{random_suffix}" + format = "DOCKER" + description = "repo1 desc" +} + +resource "google_artifact_registry_repository" "repo2" { + location = "%{location}" + repository_id = "tf-test-repo2%{random_suffix}" + format = "DOCKER" + description = "repo2 desc" +} + +resource "google_artifact_registry_repository" "repo3" { + location = "%{location}" + repository_id = "tf-test-repo3%{random_suffix}" + format = "DOCKER" + description = "repo3 desc" +} + +resource "google_artifact_registry_repository" "repo4" { + location = "%{location}" + repository_id = "tf-acc-repo4%{random_suffix}" + format = "DOCKER" + description = "acc desc" +} + +data "google_artifact_registry_repositories" "filtered" { + location = "%{location}" + name_filter = "*acc*" + + depends_on = [ + google_artifact_registry_repository.repo1, + google_artifact_registry_repository.repo2, + google_artifact_registry_repository.repo3, + google_artifact_registry_repository.repo4, + ] +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown new file mode 100644 index 000000000000..ead542ec12ff --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about Artifact Registry repositories. 
+---
+
+# google_artifact_registry_repositories
+
+Get information about Artifact Registry repositories.
+See [the official documentation](https://cloud.google.com/artifact-registry/docs)
+and [API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories/list).
+
+```hcl
+data "google_artifact_registry_repositories" "example" {
+  location = "us-central1"
+  project  = "my-project"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `location` - (Optional) The location of the artifact registry repositories, e.g. `us-central1`.
+
+* `name_filter` - (Optional) An expression for filtering the results by name. You can also use wildcards `*`, e.g. `my-repo`, `*-repo`, `my-*`, `*-re*`. For further information, refer to the [API docs](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories/list).
+
+* `project` - (Optional) The ID of the project. If it is not provided, the provider project is used.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `repositories` - A list of all retrieved Artifact Registry repositories. Structure is [defined below](#nested_repositories).
+
+The `repositories` block supports:
+
+* `id` - An identifier for the resource with format `projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}`
+
+* `repository_id` - The last part of the repository name, for example: `"repo1"`
+
+* `format` - The format of packages that are stored in the repository. Supported formats can be found [here](https://cloud.google.com/artifact-registry/docs/supported-formats).
+
+* `description` - The user-provided description of the repository.
+
+* `create_time` - The time when the repository was created.
+
+* `update_time` - The time when the repository was last updated. 
From f1dd51a0473204f16817614ff04b1a8f08b65d22 Mon Sep 17 00:00:00 2001 From: chethangowda89 <110084536+chethangowda89@users.noreply.github.com> Date: Thu, 7 Aug 2025 14:59:24 -0500 Subject: [PATCH 707/884] Add enable_advanced_cluster example to vmware admin cluster (#14750) Co-authored-by: Chethan Gowda --- mmv1/products/gkeonprem/VmwareAdminCluster.yaml | 2 +- .../examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml index 9bc1ed558dcc..9cc09f9d9a29 100644 --- a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml @@ -688,7 +688,7 @@ properties: - type: Boolean name: enableAdvancedCluster description: If set, the advanced cluster feature is enabled. - output: true + default_from_api: true - type: NestedObject name: privateRegistryConfig description: Configuration for private registry. 
diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl index aedef5e78f71..609e2149036d 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl @@ -3,8 +3,9 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { location = "us-west1" description = "test admin cluster" bootstrap_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" - on_prem_version = "1.31.0-gke.35" + on_prem_version = "1.33.0-gke.35" image_type = "ubuntu_containerd" + enable_advanced_cluster = true vcenter { resource_pool = "test resource pool" datastore = "test data store" From ec2f2462eb88f8d04b14855c4e2651fc04f2c0b5 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Thu, 7 Aug 2025 21:23:33 +0000 Subject: [PATCH 708/884] tgc-revival: support google_certificate_manager_certificate (#14722) --- mmv1/products/certificatemanager/Certificate.yaml | 6 ++++++ mmv1/templates/terraform/flatten_property_method.go.tmpl | 4 ++-- .../decoders/certificatemanager_certificate.go.tmpl | 9 +++++++++ .../encoders/certificatemanager_certificate.go.tmpl | 7 +++++++ 4 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl create mode 100644 mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl diff --git a/mmv1/products/certificatemanager/Certificate.yaml b/mmv1/products/certificatemanager/Certificate.yaml index b10f1dabef22..8e298db14953 100644 --- a/mmv1/products/certificatemanager/Certificate.yaml +++ b/mmv1/products/certificatemanager/Certificate.yaml @@ -28,6 +28,7 @@ timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 +include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: 
actions: ['create', 'delete', 'update'] @@ -38,6 +39,8 @@ async: resource_inside_response: false custom_code: constants: 'templates/terraform/constants/cert_manager.tmpl' + tgc_encoder: 'templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl' schema_version: 1 state_upgraders: true sweeper: @@ -174,6 +177,7 @@ properties: Leaf certificate comes first, followed by intermediate ones if any. immutable: true sensitive: true + is_missing_in_cai: true exactly_one_of: - 'self_managed.0.certificate_pem' - 'self_managed.0.pem_certificate' @@ -184,6 +188,7 @@ properties: The private key of the leaf certificate in PEM-encoded form. immutable: true sensitive: true + is_missing_in_cai: true exactly_one_of: - 'self_managed.0.private_key_pem' - 'self_managed.0.pem_private_key' @@ -204,6 +209,7 @@ properties: The private key of the leaf certificate in PEM-encoded form. immutable: true sensitive: true + is_missing_in_cai: true exactly_one_of: - 'self_managed.0.private_key_pem' - 'self_managed.0.pem_private_key' diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index f9dae217371c..cc31bb5b190b 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -18,7 +18,7 @@ {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - {{- if $.IgnoreRead }} + {{- if and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler) }} return d.Get("{{ $.TerraformLineage }}") {{- else if $.IsA "NestedObject" }} if v == nil { @@ -168,4 +168,4 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso {{- end }} {{- end }} {{- end }} -{{ end }} \ No newline at end of file +{{ end }} diff --git 
a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl new file mode 100644 index 000000000000..f7e84128a130 --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl @@ -0,0 +1,9 @@ +if sm, ok := res["selfManaged"].(map[string]interface{}); ok { + sm["pemCertificate"] = res["pemCertificate"] + sm["pemPrivateKey"] = "hidden" +} +if vStr, ok := res["scope"].(string); ok && vStr == "DEFAULT" { + // Omit the default value. + delete(res, "scope") +} +return res, nil diff --git a/mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl b/mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl new file mode 100644 index 000000000000..940af8041c71 --- /dev/null +++ b/mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl @@ -0,0 +1,7 @@ +if _, ok := d.GetOk("self_managed"); ok { + // self_managed.pem_certificate goes in root level of cai asset data. 
+ selfManagedPemCertificateProp := d.Get("self_managed.0.pem_certificate") + obj["pemCertificate"] = selfManagedPemCertificateProp.(string) +} + +return obj, nil From 3ec900635e3ff89e0a326555c6623f5ed91f9266 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 7 Aug 2025 14:30:57 -0700 Subject: [PATCH 709/884] fix plural formatting (#14769) --- mmv1/google/string_utils.go | 3 ++- mmv1/google/string_utils_test.go | 5 +++++ mmv1/products/dialogflowcx/SecuritySettings.yaml | 1 + 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/mmv1/google/string_utils.go b/mmv1/google/string_utils.go index 338f9b9f4d8a..13fb90f1a469 100644 --- a/mmv1/google/string_utils.go +++ b/mmv1/google/string_utils.go @@ -83,7 +83,8 @@ func Plural(source string) string { } // mesh -> meshes - if strings.HasSuffix(source, "esh") { + // messageBus -> messageBuses + if strings.HasSuffix(source, "esh") || strings.HasSuffix(source, "s") { return fmt.Sprintf("%ses", source) } diff --git a/mmv1/google/string_utils_test.go b/mmv1/google/string_utils_test.go index b2f570ab2137..153bc8191fa3 100644 --- a/mmv1/google/string_utils_test.go +++ b/mmv1/google/string_utils_test.go @@ -101,6 +101,11 @@ func TestStringPlural(t *testing.T) { term: "gateway", expected: "gateways", }, + { + description: "Plural camelcase string ending with s", + term: "messageBus", + expected: "messageBuses", + }, } for _, tc := range cases { diff --git a/mmv1/products/dialogflowcx/SecuritySettings.yaml b/mmv1/products/dialogflowcx/SecuritySettings.yaml index 1dc22a343a54..80054ed2b1dd 100644 --- a/mmv1/products/dialogflowcx/SecuritySettings.yaml +++ b/mmv1/products/dialogflowcx/SecuritySettings.yaml @@ -13,6 +13,7 @@ --- name: 'SecuritySettings' +collection_url_key: 'securitySettings' description: | Represents the settings related to security issues, such as data redaction and data retention. 
It may take hours for updates on the settings to propagate to all the related components and take effect. Multiple security settings can be configured in each location. Each agent can specify the security settings to apply, and each setting can be applied to multiple agents in the same project and location. From 1647dcd72917149dad6f6f995cf355593c2ed3e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Taneli=20Lepp=C3=A4?= Date: Fri, 8 Aug 2025 00:24:27 +0200 Subject: [PATCH 710/884] Add advanced_options_config to regional security policies. (#14726) --- .../compute/RegionSecurityPolicy.yaml | 52 ++++++++++++++ ...urity_policy_with_advanced_options.tf.tmpl | 14 ++++ ...ompute_region_security_policy_test.go.tmpl | 72 +++++++++++++++++++ 3 files changed, 138 insertions(+) create mode 100644 mmv1/templates/terraform/examples/region_security_policy_with_advanced_options.tf.tmpl diff --git a/mmv1/products/compute/RegionSecurityPolicy.yaml b/mmv1/products/compute/RegionSecurityPolicy.yaml index 48131b3db496..4e0c2bf7e254 100644 --- a/mmv1/products/compute/RegionSecurityPolicy.yaml +++ b/mmv1/products/compute/RegionSecurityPolicy.yaml @@ -132,6 +132,58 @@ properties: - 'ADVANCED' - 'ADVANCED_PREVIEW' - 'STANDARD' + - name: "advancedOptionsConfig" + type: NestedObject + description: | + Advanced Options Config of this security policy. + properties: + - name: "jsonParsing" + type: Enum + description: | + JSON body parsing. Supported values include: "DISABLED", "STANDARD", "STANDARD_WITH_GRAPHQL". + enum_values: + - "DISABLED" + - "STANDARD" + - "STANDARD_WITH_GRAPHQL" + - name: "jsonCustomConfig" + type: NestedObject + description: | + Custom configuration to apply the JSON parsing. Only applicable when JSON parsing is set to STANDARD. + properties: + - name: "contentTypes" + type: Array + description: | + A list of custom Content-Type header values to apply the JSON parsing. 
+ item_type: + type: String + is_set: true + required: true + - name: "logLevel" + type: Enum + description: | + Logging level. Supported values include: "NORMAL", "VERBOSE". + enum_values: + - "NORMAL" + - "VERBOSE" + - name: "userIpRequestHeaders" + type: Array + description: | + An optional list of case-insensitive request header names to use for resolving the callers client IP address. + item_type: + type: String + is_set: true + - name: requestBodyInspectionSize + type: Enum + description: | + The maximum request size chosen by the customer with Waf enabled. Values supported are "8KB", "16KB, "32KB", "48KB" and "64KB". + Values are case insensitive. + enum_values: + - "8KB" + - "16KB" + - "32KB" + - "48KB" + - "64KB" + min_version: beta - name: 'selfLink' type: String description: | diff --git a/mmv1/templates/terraform/examples/region_security_policy_with_advanced_options.tf.tmpl b/mmv1/templates/terraform/examples/region_security_policy_with_advanced_options.tf.tmpl new file mode 100644 index 000000000000..6292b08d28bd --- /dev/null +++ b/mmv1/templates/terraform/examples/region_security_policy_with_advanced_options.tf.tmpl @@ -0,0 +1,14 @@ +resource "google_compute_region_security_policy" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "sec_policy_name"}}" + description = "with advanced config" + type = "CLOUD_ARMOR" + + advanced_options_config { + json_parsing = "STANDARD_WITH_GRAPHQL" + json_custom_config { + content_types = ["application/json"] + } + log_level = "VERBOSE" + user_ip_request_headers = ["x-forwarded-for"] + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl index b20b84882513..32f060468122 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl @@ -1115,4 +1115,76 @@ func testAccComputeRegionSecurityPolicy_withNetworkMatch_update(context map[stri } `, context) } + +func TestAccComputeRegionSecurityPolicy_withAdvancedOptions(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionSecurityPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionSecurityPolicy_withAdvancedOptions(context), + }, + { + ResourceName: "google_compute_region_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeRegionSecurityPolicy_withAdvancedOptionsUpdate(context), + }, + { + ResourceName: "google_compute_region_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeRegionSecurityPolicy_withAdvancedOptions(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_region_security_policy" "policy" { + name = "tf-test%{random_suffix}" + description = "basic region security policy" + type = "CLOUD_ARMOR" + + advanced_options_config { + json_parsing = "STANDARD_WITH_GRAPHQL" + json_custom_config { + content_types = ["application/json"] + } + log_level = "VERBOSE" + user_ip_request_headers = ["x-forwarded-for"] + request_body_inspection_size = "8KB" + } +} +`, context) +} + +func testAccComputeRegionSecurityPolicy_withAdvancedOptionsUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_region_security_policy" "policy" { + name = "tf-test%{random_suffix}" + description = "basic region security policy" + type = "CLOUD_ARMOR" + + advanced_options_config { + 
json_parsing = "STANDARD" + json_custom_config { + content_types = ["text/json"] + } + log_level = "NORMAL" + user_ip_request_headers = ["x-real-ip"] + request_body_inspection_size = "16KB" + } +} +`, context) +} {{- end }} From ec7a688a76ccbdc5a6843553495a930172b39e9b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 7 Aug 2025 15:28:26 -0700 Subject: [PATCH 711/884] tgc-revival: Automatically check if false value should be set in CAI assets (#14783) --- mmv1/api/type.go | 25 ++++++++++++++++++- mmv1/products/bigquery/Dataset.yaml | 5 ++++ mmv1/products/compute/BackendService.yaml | 2 -- mmv1/products/compute/Disk.yaml | 1 - mmv1/products/compute/Subnetwork.yaml | 2 -- .../terraform/expand_property_method.go.tmpl | 6 ++--- .../tfplan2cai/resource_converter.go.tmpl | 2 +- 7 files changed, 33 insertions(+), 10 deletions(-) diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 3156d021b149..690687db966d 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -311,8 +311,17 @@ type Type struct { // just as they are in the standard flattener template. CustomTgcFlatten string `yaml:"custom_tgc_flatten,omitempty"` - // If true, we will include the empty value of this attribute in CAI asset. + // If true, the empty value of this attribute in CAI asset is included. IncludeEmptyValueInCai bool `yaml:"include_empty_value_in_cai,omitempty"` + + // If the property is type of bool and has `defaul_from_api: true`, + // include empty value in CAI asset by default during tfplan2cai conversion. + // Use `exclude_false_in_cai` to override the default behavior + // when the default value on API side is true. 
+ // + // If a property is missing in CAI asset, use `is_missing_in_cai: true` + // and `exclude_false_in_cai: true` is not needed + ExcludeFalseInCai bool `yaml:"exclude_false_in_cai,omitempty"` } const MAX_NAME = 20 @@ -1284,3 +1293,17 @@ func (t Type) IsJsonField() bool { } return false } + +// Checks if the empty value should be set in CAI assets during tfplan2cai conversion +func (t Type) TGCSendEmptyValue() bool { + if t.IncludeEmptyValueInCai { + return true + } + + // Automatically check if false value should be set in CAI assets + if t.IsA("Boolean") { + return t.Required || (t.DefaultFromApi && !t.IsMissingInCai && !t.ExcludeFalseInCai) + } + + return false +} diff --git a/mmv1/products/bigquery/Dataset.yaml b/mmv1/products/bigquery/Dataset.yaml index e541c8f0b63d..15a545f81a6b 100644 --- a/mmv1/products/bigquery/Dataset.yaml +++ b/mmv1/products/bigquery/Dataset.yaml @@ -401,6 +401,11 @@ properties: By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references. default_from_api: true + # This property has default value `true` in the real CAI asset. + # The bool property with `default_from_api: true` will have false value in converted CAI asset + # by default during tfplan2cai. + # Use exclude_false_in_cai: true to override the default behavior during tfplan2cai conversion. + exclude_false_in_cai: true - name: 'defaultCollation' type: String description: | diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index e776ee67648e..7ddc4ee360b4 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -337,7 +337,6 @@ properties: description: | If true, the metric data is collected and reported to Cloud Monitoring, but is not used for load balancing. 
- include_empty_value_in_cai: true - name: 'maxUtilization' type: Double description: | @@ -1119,7 +1118,6 @@ properties: required: true description: | If true, the metric data is not used for load balancing. - include_empty_value_in_cai: true - name: 'name' type: String description: | diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index a31d894097f9..d4e18c02ea62 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -460,7 +460,6 @@ properties: Note: Only supported on hyperdisk skus, disk_encryption_key is required when setting to true required: false default_from_api: true - include_empty_value_in_cai: true - name: 'multiWriter' type: Boolean description: | diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index ec5b99889f65..bcbd5fb2fe0d 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -327,7 +327,6 @@ properties: default_from_api: true update_url: 'projects/{{project}}/regions/{{region}}/subnetworks/{{name}}/setPrivateIpGoogleAccess' update_verb: 'POST' - include_empty_value_in_cai: true - name: 'privateIpv6GoogleAccess' type: String description: The private IPv6 google access type for the VMs in this subnet. @@ -520,7 +519,6 @@ properties: org policy, if there is no org policy specified, then it will default to disabled. This field isn't supported if the subnet purpose field is set to REGIONAL_MANAGED_PROXY. default_from_api: true - include_empty_value_in_cai: true deprecation_message: 'This field is being removed in favor of log_config. If log_config is present, flow logs are enabled.' 
- name: 'state' type: Enum diff --git a/mmv1/templates/terraform/expand_property_method.go.tmpl b/mmv1/templates/terraform/expand_property_method.go.tmpl index 21845c284957..1f28f3b01997 100644 --- a/mmv1/templates/terraform/expand_property_method.go.tmpl +++ b/mmv1/templates/terraform/expand_property_method.go.tmpl @@ -31,7 +31,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T transformed{{$prop.TitlelizeProperty}}, err := expand{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ underscore $prop.Name }}"], d, config) if err != nil { return nil, err - {{- if or ($prop.SendEmptyValue) (and $prop.IncludeEmptyValueInCai $.ResourceMetadata.IsTgcCompiler) }} + {{- if or ($prop.SendEmptyValue) (and $prop.TGCSendEmptyValue $.ResourceMetadata.IsTgcCompiler) }} } else { transformed["{{$prop.ApiName}}"] = transformed{{$prop.TitlelizeProperty}} {{- else }} @@ -69,7 +69,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T transformed{{$prop.TitlelizeProperty}}, err := expand{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"), d, config) if err != nil { return nil, err - {{- if or ($prop.SendEmptyValue) (and $prop.IncludeEmptyValueInCai $.ResourceMetadata.IsTgcCompiler) }} + {{- if or ($prop.SendEmptyValue) (and $prop.TGCSendEmptyValue $.ResourceMetadata.IsTgcCompiler) }} } else { transformed["{{$prop.ApiName}}"] = transformed{{$prop.TitlelizeProperty}} {{- else }} @@ -122,7 +122,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T transformed{{$prop.TitlelizeProperty}}, err := expand{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ underscore $prop.Name }}"], d, config) if err != nil { return nil, err - {{- if or ($prop.SendEmptyValue) (and $prop.IncludeEmptyValueInCai $.ResourceMetadata.IsTgcCompiler) }} + {{- if or 
($prop.SendEmptyValue) (and $prop.TGCSendEmptyValue $.ResourceMetadata.IsTgcCompiler) }} } else { transformed["{{$prop.ApiName}}"] = transformed{{$prop.TitlelizeProperty}} {{- else }} diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 9347e52fe96c..be1a1e4e3eef 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -90,7 +90,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf {{- end}} if err != nil { return nil, err -{{- if and (not $prop.SendEmptyValue) (not $prop.IncludeEmptyValueInCai) }} +{{- if and (not $prop.SendEmptyValue) (not $prop.TGCSendEmptyValue) }} } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { {{- else }} } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { From 2bcf2b9af05df4091987ccd3ca123125c78c685a Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 7 Aug 2025 16:29:45 -0700 Subject: [PATCH 712/884] tgc-revival: add google_alloydb_cluster (#14768) --- mmv1/products/alloydb/Cluster.yaml | 7 ++++++ ...alloydb_cluster_input_user_flatten.go.tmpl | 24 +++++++++++++++++++ 2 files changed, 31 insertions(+) create mode 100644 mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index 6af6de90285c..9a1341808086 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -61,6 +61,7 @@ custom_code: pre_delete: 'templates/terraform/pre_delete/alloydb_cluster.go.tmpl' # Skipping the sweeper because we need to force-delete clusters. 
exclude_sweeper: true +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'alloydb_cluster_basic' primary_resource_id: 'default' @@ -302,17 +303,20 @@ properties: Initial user to setup during cluster creation. ignore_read: true custom_flatten: 'templates/terraform/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl' + custom_tgc_flatten: 'templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl' properties: - name: 'user' type: String description: | The database username. + is_missing_in_cai: true - name: 'password' type: String description: | The initial password for the user. required: true sensitive: true + is_missing_in_cai: true - name: 'restoreBackupSource' type: NestedObject description: | @@ -447,14 +451,17 @@ properties: Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. - name: 'minutes' type: Integer + is_missing_in_cai: true description: | Minutes of hour of day. Currently, only the value 0 is supported. - name: 'seconds' type: Integer + is_missing_in_cai: true description: | Seconds of minutes of the time. Currently, only the value 0 is supported. - name: 'nanos' type: Integer + is_missing_in_cai: true description: | Fractions of seconds in nanoseconds. Currently, only the value 0 is supported. - name: 'timeBasedRetention' diff --git a/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl new file mode 100644 index 000000000000..fd462707e678 --- /dev/null +++ b/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl @@ -0,0 +1,24 @@ +{{/* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil || len(v.([]interface{})) == 0 { + return nil + } + + return []interface{}{ + map[string]interface{}{ + "user": d.Get("initial_user.0.user"), + "password": d.Get("initial_user.0.password"), + }, + } +} From 749ae8b7fa58387a89d03b31316176624596e20e Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 8 Aug 2025 09:29:04 -0700 Subject: [PATCH 713/884] Update membership_data.go (#14787) --- .ci/magician/github/membership_data.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index efddefdb5b98..390d48b868f4 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -135,7 +135,12 @@ var ( }, }, "trodge": { - vacations: []Vacation{}, + vacations: []Vacation{ + { + startDate: newDate(2025, 8, 7), + endDate: newDate(2025, 8, 10), + }, + }, }, "zli82016": { vacations: []Vacation{ From edfafe7fb66324fdeb05ff5a347bdd220f1607b2 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Fri, 8 Aug 2025 16:16:50 -0400 Subject: [PATCH 714/884] Lakshman single datasource generation (#14598) --- mmv1/api/resource.go | 41 +++++++++ mmv1/products/iap/Client.yaml | 1 + mmv1/provider/template_data.go | 8 ++ mmv1/provider/terraform.go | 15 +++ mmv1/templates/terraform/datasource.go.tmpl | 92 +++++++++++++++++++ .../provider/provider_mmv1_resources.go.tmpl | 2 +- 
 .../services/iap/data_source_iap_client.go    | 39 --------
 7 files changed, 158 insertions(+), 40 deletions(-)
 create mode 100644 mmv1/templates/terraform/datasource.go.tmpl
 delete mode 100644 mmv1/third_party/terraform/services/iap/data_source_iap_client.go

diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go
index f4d8f34bd3b9..478dd484d696 100644
--- a/mmv1/api/resource.go
+++ b/mmv1/api/resource.go
@@ -226,6 +226,9 @@ type Resource struct {
 	// If true, resource is not importable
 	ExcludeImport bool `yaml:"exclude_import,omitempty"`
 
+	// If true, resource should be autogenerated as a data source
+	GenerateDatasource bool `yaml:"generate_datasource,omitempty"`
+
 	// If true, skip sweeper generation for this resource
 	ExcludeSweeper bool `yaml:"exclude_sweeper,omitempty"`
 
@@ -1993,6 +1996,44 @@ func urlContainsOnlyAllowedKeys(templateURL string, allowedKeys []string) bool {
 	return true
 }
 
+func (r Resource) ShouldGenerateSingularDataSource() bool {
+	return r.GenerateDatasource
+}
+
+// DatasourceRequiredFields returns a list of fields from the resource's URI
+// that should be marked as "Required".
+func (r Resource) DatasourceRequiredFields() []string {
+	requiredFields := []string{}
+	uriParts := strings.Split(r.SelfLink, "/")
+
+	for _, part := range uriParts {
+		if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") {
+			field := strings.TrimSuffix(strings.TrimPrefix(part, "{{"), "}}")
+			if field != "region" && field != "project" && field != "zone" {
+				requiredFields = append(requiredFields, field)
+			}
+		}
+	}
+	return requiredFields
+}
+
+// DatasourceOptionalFields returns a list of fields from the resource's URI
+// that should be marked as "Optional". 
+func (r Resource) DatasourceOptionalFields() []string { + optionalFields := []string{} + uriParts := strings.Split(r.SelfLink, "/") + + for _, part := range uriParts { + if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { + field := strings.TrimSuffix(strings.TrimPrefix(part, "{{"), "}}") + if field == "region" || field == "project" || field == "zone" { + optionalFields = append(optionalFields, field) + } + } + } + return optionalFields +} + func (r Resource) ShouldGenerateSweepers() bool { if !r.ExcludeSweeper && !utils.IsEmpty(r.Sweeper) { return true diff --git a/mmv1/products/iap/Client.yaml b/mmv1/products/iap/Client.yaml index 41ae2acea2dd..b3604a627a00 100644 --- a/mmv1/products/iap/Client.yaml +++ b/mmv1/products/iap/Client.yaml @@ -33,6 +33,7 @@ self_link: '{{brand}}/identityAwareProxyClients/{{client_id}}' immutable: true import_format: - '{{brand}}/identityAwareProxyClients/{{client_id}}' +generate_datasource: true timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index f70a50486a81..645da0d1463f 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -93,6 +93,14 @@ func (td *TemplateData) GenerateMetadataFile(filePath string, resource api.Resou td.GenerateFile(filePath, templatePath, resource, false, templates...) } +func (td *TemplateData) GenerateDataSourceFile(filePath string, resource api.Resource) { + templatePath := "templates/terraform/datasource.go.tmpl" + templates := []string{ + templatePath, + } + td.GenerateFile(filePath, templatePath, resource, true, templates...) 
+} + func (td *TemplateData) GenerateProductFile(filePath string, product api.Product) { templatePath := "templates/terraform/product.go.tmpl" templates := []string{ diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 1c0779ec1f57..17abcb4d4def 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -107,6 +107,7 @@ func (t *Terraform) GenerateObject(object api.Resource, outputFolder, productPat // log.Printf("Generating %s tests", object.Name) t.GenerateResourceTests(object, *templateData, outputFolder) t.GenerateResourceSweeper(object, *templateData, outputFolder) + t.GenerateSingularDataSource(object, *templateData, outputFolder) // log.Printf("Generating %s metadata", object.Name) t.GenerateResourceMetadata(object, *templateData, outputFolder) } @@ -188,6 +189,20 @@ func (t *Terraform) GenerateResourceSweeper(object api.Resource, templateData Te templateData.GenerateSweeperFile(targetFilePath, object) } +func (t *Terraform) GenerateSingularDataSource(object api.Resource, templateData TemplateData, outputFolder string) { + if !object.ShouldGenerateSingularDataSource() { + return + } + + productName := t.Product.ApiName + targetFolder := path.Join(outputFolder, t.FolderName(), "services", productName) + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + targetFilePath := path.Join(targetFolder, fmt.Sprintf("data_source_%s.go", t.ResourceGoFilename(object))) + templateData.GenerateDataSourceFile(targetFilePath, object) +} + // GenerateProduct creates the product.go file for a given service directory. // This will be used to seed the directory and add a package-level comment // specific to the product. 
diff --git a/mmv1/templates/terraform/datasource.go.tmpl b/mmv1/templates/terraform/datasource.go.tmpl new file mode 100644 index 000000000000..89bdf90c72d3 --- /dev/null +++ b/mmv1/templates/terraform/datasource.go.tmpl @@ -0,0 +1,92 @@ +{{/* The license inside this block applies to this file + Copyright 2024 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */ -}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +{{$.CodeHeader TemplatePath}} +package {{ lower $.ProductMetadata.Name }} + +import ( + + "fmt" + "log" + "net/http" + "reflect" +{{- if $.SupportsIndirectUserProjectOverride }} + "regexp" +{{- end }} +{{- if or (and (not $.Immutable) ($.UpdateMask)) $.LegacyLongFormProject }} + "strings" +{{- end }} + "time" + +{{/* # We list all the v2 imports here, because we run 'goimports' to guess the correct */}} +{{/* # set of imports, which will never guess the major version correctly. 
*/}} + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" + "github.com/hashicorp/go-cty/cty" + + "{{ $.ImportPath }}/tpgresource" + transport_tpg "{{ $.ImportPath }}/transport" + "{{ $.ImportPath }}/verify" + +{{ if $.FlattenedProperties }} + "google.golang.org/api/googleapi" +{{- end}} +) + +func DataSource{{ .ResourceName -}}() *schema.Resource { + rs := Resource{{ .ResourceName -}}().Schema + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(rs) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, {{range $index, $field := .DatasourceRequiredFields}}{{if gt $index 0}}, {{end}}{{printf "%q" $field}}{{end}}) + + // Set 'Optional' schema elements + tpgresource.AddOptionalFieldsToSchema(dsSchema, {{range $index, $field := .DatasourceOptionalFields}}{{if gt $index 0}}, {{end}}{{printf "%q" $field}}{{end}}) + + return &schema.Resource{ + Read: dataSource{{ $.ResourceName -}}Read, + Schema: dsSchema, + } +} + +func dataSource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{$.SelfLinkUri}}{{$.ReadQueryParams}}") + if err != nil { + return err + } + d.SetId(id) + + err = resource{{ $.ResourceName -}}Read(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 08de3a35013f..9d58d8d16024 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -151,7 +151,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_iam_testable_permissions": resourcemanager.DataSourceGoogleIamTestablePermissions(), "google_iam_workload_identity_pool": iambeta.DataSourceIAMBetaWorkloadIdentityPool(), "google_iam_workload_identity_pool_provider": iambeta.DataSourceIAMBetaWorkloadIdentityPoolProvider(), - "google_iap_client": iap.DataSourceGoogleIapClient(), + "google_iap_client": iap.DataSourceIapClient(), "google_kms_crypto_key": kms.DataSourceGoogleKmsCryptoKey(), "google_kms_crypto_keys": kms.DataSourceGoogleKmsCryptoKeys(), "google_kms_crypto_key_version": kms.DataSourceGoogleKmsCryptoKeyVersion(), diff --git a/mmv1/third_party/terraform/services/iap/data_source_iap_client.go b/mmv1/third_party/terraform/services/iap/data_source_iap_client.go deleted file mode 100644 index 736e18da0fe3..000000000000 --- a/mmv1/third_party/terraform/services/iap/data_source_iap_client.go +++ /dev/null @@ -1,39 +0,0 @@ -package iap - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleIapClient() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceIapClient().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "brand", "client_id") - - return &schema.Resource{ - Read: dataSourceGoogleIapClientRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleIapClientRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - 
- id, err := tpgresource.ReplaceVars(d, config, "{{brand}}/identityAwareProxyClients/{{client_id}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - err = resourceIapClientRead(d, meta) - if err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - return nil -} From 1bad78721ba8adac14525c60cc1300b9b2bafef5 Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Fri, 8 Aug 2025 17:10:09 -0400 Subject: [PATCH 715/884] Add tags to contactcenterinsights resources (#14792) --- mmv1/products/contactcenterinsights/AnalysisRule.yaml | 1 + mmv1/products/contactcenterinsights/View.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/mmv1/products/contactcenterinsights/AnalysisRule.yaml b/mmv1/products/contactcenterinsights/AnalysisRule.yaml index 8b378d9add16..cd540ad80c8c 100644 --- a/mmv1/products/contactcenterinsights/AnalysisRule.yaml +++ b/mmv1/products/contactcenterinsights/AnalysisRule.yaml @@ -32,6 +32,7 @@ update_mask: true id_format: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} import_format: - projects/{{project}}/locations/{{location}}/analysisRules/{{name}} +autogen_status: QW5hbHlzaXNSdWxl examples: - name: 'contact_center_insights_analysis_rule_basic' primary_resource_id: 'analysis_rule_basic' diff --git a/mmv1/products/contactcenterinsights/View.yaml b/mmv1/products/contactcenterinsights/View.yaml index f89f3e8abd56..c5e9fc90aa4a 100644 --- a/mmv1/products/contactcenterinsights/View.yaml +++ b/mmv1/products/contactcenterinsights/View.yaml @@ -26,6 +26,7 @@ update_mask: true id_format: projects/{{project}}/locations/{{location}}/views/{{name}} import_format: - projects/{{project}}/locations/{{location}}/views/{{name}} +autogen_status: Vmlldw== examples: - name: 'contact_center_insights_view_basic' primary_resource_id: 'basic_view' From e610fd0a978fa452634dfdb74668eecb86b9bd04 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 8 Aug 2025 23:57:51 
+0200 Subject: [PATCH 716/884] provider: eliminated the need to manually add `*_wo` and `*_wo_version` for write-only properties (#14230) Co-authored-by: Stephen Lewis (Burrows) --- docs/content/reference/field.md | 9 +- mmv1/api/resource.go | 86 ++++- mmv1/api/resource_test.go | 330 ++++++++++++++++++ mmv1/api/type.go | 49 ++- mmv1/api/type_test.go | 2 +- mmv1/main.go | 4 +- .../products/bigquerydatatransfer/Config.yaml | 22 +- .../monitoring/UptimeCheckConfig.yaml | 21 +- .../products/secretmanager/SecretVersion.yaml | 16 - .../terraform/flatten_property_method.go.tmpl | 2 +- 10 files changed, 474 insertions(+), 67 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index 1f24ec8abc06..c07098e02f0c 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -108,10 +108,11 @@ sensitive: true ``` ### `write_only` -If true, the field is considered "write-only", which means that its value will -be obscured in Terraform output as well as not be stored in state. This field is meant to replace `sensitive` as it doesn't store the value in state. -See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) -for more information. +Set to true to enable write-only functionality for this field. +If true, the write-only fields will be automatically generated by the code generator (`_wo` and `_wo_version`). +When the write-only variant of a field is used, it means that its value will be obscured in Terraform output as well as not be stored in state. +This field is meant to replace `sensitive` as it doesn't store the value in state. +See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) for more information. Write-only fields are only supported in Terraform v1.11+. 
Because the provider supports earlier Terraform versions, write only fields must be paired with (mutually exclusive) `sensitive` fields covering the same functionality for compatibility with those older versions. This field cannot be used in conjuction with `immutable` or `sensitive`. diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 478dd484d696..db45e65aeda5 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -737,14 +737,94 @@ func (r Resource) GetIdentity() []*Type { }) } -func (r *Resource) AddLabelsRelatedFields(props []*Type, parent *Type) []*Type { +func buildWriteOnlyField(name string, versionFieldName string, originalField *Type, originalFieldLineage string) *Type { + description := fmt.Sprintf("%s Note: This property is write-only and will not be read from the API. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", originalField.Description) + fieldPathOriginalField := originalFieldLineage + fieldPathCurrentField := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(name)) + requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(versionFieldName)) + + apiName := originalField.ApiName + if apiName == "" { + apiName = originalField.Name + } + + options := []func(*Type){ + propertyWithType("String"), + propertyWithRequired(false), + propertyWithDescription(description), + propertyWithWriteOnly(true), + propertyWithApiName(apiName), + propertyWithIgnoreRead(true), + propertyWithRequiredWith([]string{requiredWith}), + } + + if originalField.Required { + exactlyOneOf := append(originalField.ExactlyOneOf, fieldPathOriginalField, fieldPathCurrentField) + options = append(options, propertyWithExactlyOneOf(exactlyOneOf)) + } else { + conflicts := append(originalField.Conflicts, fieldPathOriginalField) + options = append(options, 
propertyWithConflicts(conflicts)) + } + + if len(originalField.AtLeastOneOf) > 0 { + atLeastOneOf := append(originalField.AtLeastOneOf, fieldPathCurrentField) + options = append(options, propertyWithAtLeastOneOf(atLeastOneOf)) + } + + return NewProperty(name, originalField.ApiName, options) +} + +func buildWriteOnlyVersionField(name string, originalField *Type, writeOnlyField *Type, originalFieldLineage string) *Type { + description := fmt.Sprintf("Triggers update of %s write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", google.Underscore(writeOnlyField.Name)) + requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(writeOnlyField.Name)) + + options := []func(*Type){ + propertyWithType("String"), + propertyWithImmutable(originalField.Immutable), + propertyWithDescription(description), + propertyWithRequiredWith([]string{requiredWith}), + propertyWithClientSide(true), + } + + return NewProperty(name, name, options) +} + +func (r *Resource) addWriteOnlyFields(props []*Type, propWithWoConfigured *Type, propWithWoConfiguredLineagePath string) []*Type { + if len(propWithWoConfigured.RequiredWith) > 0 { + log.Fatalf("WriteOnly property '%s' in resource '%s' cannot have RequiredWith set. 
This combination is not supported.", propWithWoConfigured.Name, r.Name) + } + woFieldName := fmt.Sprintf("%sWo", propWithWoConfigured.Name) + woVersionFieldName := fmt.Sprintf("%sVersion", woFieldName) + writeOnlyField := buildWriteOnlyField(woFieldName, woVersionFieldName, propWithWoConfigured, propWithWoConfiguredLineagePath) + writeOnlyVersionField := buildWriteOnlyVersionField(woVersionFieldName, propWithWoConfigured, writeOnlyField, propWithWoConfiguredLineagePath) + props = append(props, writeOnlyField, writeOnlyVersionField) + return props +} + +func (r *Resource) buildCurrentPropLineage(p *Type, lineage string) string { + underscoreName := google.Underscore(p.Name) + if lineage == "" { + return underscoreName + } + return fmt.Sprintf("%s.0.%s", lineage, underscoreName) +} + +// AddExtraFields processes properties and adds supplementary fields based on property types. +// It handles write-only properties, labels, and annotations. +func (r *Resource) AddExtraFields(props []*Type, parent *Type, lineage string) []*Type { for _, p := range props { + currentPropLineage := r.buildCurrentPropLineage(p, lineage) + if p.WriteOnly && !strings.HasSuffix(p.Name, "Wo") { + props = r.addWriteOnlyFields(props, p, currentPropLineage) + p.WriteOnly = false + p.Required = false + } if p.IsA("KeyValueLabels") { props = r.addLabelsFields(props, parent, p) } else if p.IsA("KeyValueAnnotations") { props = r.addAnnotationsFields(props, parent, p) } else if p.IsA("NestedObject") && len(p.AllProperties()) > 0 { - p.Properties = r.AddLabelsRelatedFields(p.AllProperties(), p) + p.Properties = r.AddExtraFields(p.AllProperties(), p, currentPropLineage) } } return props @@ -763,6 +843,7 @@ func (r *Resource) addLabelsFields(props []*Type, parent *Type, labels *Type) [] terraformLabelsField := buildTerraformLabelsField("labels", parent, labels) effectiveLabelsField := buildEffectiveLabelsField("labels", labels) + props = append(props, terraformLabelsField, effectiveLabelsField) // The 
effective_labels field is used to write to API, instead of the labels field. @@ -799,6 +880,7 @@ func (r *Resource) addAnnotationsFields(props []*Type, parent *Type, annotations } effectiveAnnotationsField := buildEffectiveLabelsField("annotations", annotations) + props = append(props, effectiveAnnotationsField) return props } diff --git a/mmv1/api/resource_test.go b/mmv1/api/resource_test.go index ad7dd327b288..46a34b8b8036 100644 --- a/mmv1/api/resource_test.go +++ b/mmv1/api/resource_test.go @@ -4,6 +4,7 @@ import ( "os" "path/filepath" "reflect" + "slices" "strings" "testing" @@ -503,3 +504,332 @@ func TestHasPostCreateComputedFields(t *testing.T) { }) } } + +func TestResourceAddExtraFields(t *testing.T) { + t.Parallel() + + createTestResource := func(name string) *Resource { + return &Resource{ + Name: name, + ProductMetadata: &Product{ + Name: "testproduct", + }, + } + } + + createTestType := func(name, typeStr string, options ...func(*Type)) *Type { + t := &Type{ + Name: name, + Type: typeStr, + } + for _, option := range options { + option(t) + } + return t + } + + withWriteOnly := func(writeOnly bool) func(*Type) { + return func(t *Type) { t.WriteOnly = writeOnly } + } + withRequired := func(required bool) func(*Type) { + return func(t *Type) { t.Required = required } + } + withDescription := func(desc string) func(*Type) { + return func(t *Type) { t.Description = desc } + } + withProperties := func(props []*Type) func(*Type) { + return func(t *Type) { t.Properties = props } + } + + t.Run("WriteOnly property adds companion fields", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + writeOnlyProp := createTestType("password", "String", + withWriteOnly(true), + withRequired(true), + withDescription("A password field"), + ) + + props := []*Type{writeOnlyProp} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 3 { + t.Errorf("Expected 3 properties after adding WriteOnly fields, got %d", len(result)) 
+ } + + if writeOnlyProp.WriteOnly { + t.Error("Original WriteOnly property should have WriteOnly set to false after processing") + } + if writeOnlyProp.Required { + t.Error("Original WriteOnly property should have Required set to false after processing") + } + + var foundWoField, foundVersionField bool + for _, prop := range result { + if prop.Name == "passwordWo" { + foundWoField = true + if !prop.WriteOnly { + t.Error("passwordWo field should have WriteOnly=true") + } + } + if prop.Name == "passwordWoVersion" { + foundVersionField = true + if !prop.ClientSide { + t.Error("passwordWoVersion field should have ClientSide=true") + } + } + } + + if !foundWoField { + t.Error("Expected to find passwordWo field") + } + if !foundVersionField { + t.Error("Expected to find passwordWoVersion field") + } + }) + + t.Run("KeyValueLabels property adds terraform and effective labels", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + labelsType := &Type{ + Name: "labels", + Type: "KeyValueLabels", + Description: "Resource labels", + } + + props := []*Type{labelsType} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 3 { + t.Errorf("Expected 3 properties after adding labels fields, got %d", len(result)) + } + + if !labelsType.IgnoreWrite { + t.Error("Original labels field should have IgnoreWrite=true after processing") + } + if !strings.Contains(labelsType.Description, "**Note**") { + t.Error("Original labels field description should contain note after processing") + } + + var foundTerraformLabels, foundEffectiveLabels bool + for _, prop := range result { + if prop.Name == "terraformLabels" { + foundTerraformLabels = true + if prop.Type != "KeyValueTerraformLabels" { + t.Errorf("terraformLabels should have type KeyValueTerraformLabels, got %s", prop.Type) + } + } + if prop.Name == "effectiveLabels" { + foundEffectiveLabels = true + if prop.Type != "KeyValueEffectiveLabels" { + t.Errorf("effectiveLabels should have 
type KeyValueEffectiveLabels, got %s", prop.Type) + } + } + } + + if !foundTerraformLabels { + t.Error("Expected to find terraformLabels field") + } + if !foundEffectiveLabels { + t.Error("Expected to find effectiveLabels field") + } + + expectedDiff := "tpgresource.SetLabelsDiff" + if !slices.Contains(resource.CustomDiff, expectedDiff) { + t.Errorf("Expected CustomDiff to contain %s", expectedDiff) + } + }) + + t.Run("KeyValueLabels with ExcludeAttributionLabel adds different CustomDiff", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + resource.ExcludeAttributionLabel = true + + labelsType := &Type{ + Name: "labels", + Type: "KeyValueLabels", + } + + props := []*Type{labelsType} + resource.AddExtraFields(props, nil, "") + + expectedDiff := "tpgresource.SetLabelsDiffWithoutAttributionLabel" + if !slices.Contains(resource.CustomDiff, expectedDiff) { + t.Errorf("Expected CustomDiff to contain %s", expectedDiff) + } + }) + + t.Run("KeyValueLabels with metadata parent adds metadata CustomDiff", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + parent := &Type{Name: "metadata"} + + labelsType := &Type{ + Name: "labels", + Type: "KeyValueLabels", + } + + props := []*Type{labelsType} + resource.AddExtraFields(props, parent, "") + + expectedDiff := "tpgresource.SetMetadataLabelsDiff" + if !slices.Contains(resource.CustomDiff, expectedDiff) { + t.Errorf("Expected CustomDiff to contain %s", expectedDiff) + } + }) + + t.Run("KeyValueAnnotations property adds effective annotations", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + annotationsType := &Type{ + Name: "annotations", + Type: "KeyValueAnnotations", + Description: "Resource annotations", + } + + props := []*Type{annotationsType} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 2 { + t.Errorf("Expected 2 properties after adding annotations fields, got %d", len(result)) + } + 
+ if !annotationsType.IgnoreWrite { + t.Error("Original annotations field should have IgnoreWrite=true after processing") + } + + var foundEffectiveAnnotations bool + for _, prop := range result { + if prop.Name == "effectiveAnnotations" { + foundEffectiveAnnotations = true + if prop.Type != "KeyValueEffectiveLabels" { + t.Errorf("effectiveAnnotations should have type KeyValueEffectiveLabels, got %s", prop.Type) + } + } + } + + if !foundEffectiveAnnotations { + t.Error("Expected to find effectiveAnnotations field") + } + + expectedDiff := "tpgresource.SetAnnotationsDiff" + if !slices.Contains(resource.CustomDiff, expectedDiff) { + t.Errorf("Expected CustomDiff to contain %s", expectedDiff) + } + }) + + t.Run("NestedObject with properties processes recursively", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + + nestedWriteOnly := createTestType("nestedPassword", "String", withWriteOnly(true)) + nestedObject := createTestType("config", "NestedObject", withProperties([]*Type{nestedWriteOnly})) + + props := []*Type{nestedObject} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 1 { + t.Errorf("Expected 1 top-level property, got %d", len(result)) + } + + if len(nestedObject.Properties) != 3 { + t.Errorf("Expected 3 nested properties after recursive processing, got %d", len(nestedObject.Properties)) + } + + if nestedWriteOnly.WriteOnly { + t.Error("Nested WriteOnly property should have WriteOnly=false after processing") + } + }) + + t.Run("Empty NestedObject properties are not processed", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + emptyNestedObject := createTestType("config", "NestedObject", withProperties([]*Type{})) + + props := []*Type{emptyNestedObject} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 1 { + t.Errorf("Expected 1 property, got %d", len(result)) + } + if len(emptyNestedObject.Properties) != 0 { + t.Errorf("Expected 0 
nested properties, got %d", len(emptyNestedObject.Properties)) + } + }) + + t.Run("WriteOnly property already ending with Wo is skipped", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + woProperty := createTestType("passwordWo", "String", withWriteOnly(true)) + + props := []*Type{woProperty} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 1 { + t.Errorf("Expected 1 property for Wo-suffixed field, got %d", len(result)) + } + + if !woProperty.WriteOnly { + t.Error("Wo-suffixed property should remain WriteOnly=true") + } + }) + + t.Run("Regular properties are passed through unchanged", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + regularProp := createTestType("name", "String", withRequired(true)) + + props := []*Type{regularProp} + result := resource.AddExtraFields(props, nil, "") + + if len(result) != 1 { + t.Errorf("Expected 1 property for regular field, got %d", len(result)) + } + + if result[0] != regularProp { + t.Error("Regular property should be passed through unchanged") + } + if !regularProp.Required { + t.Error("Regular property Required should be unchanged") + } + }) + + t.Run("Multiple property types processed correctly", func(t *testing.T) { + t.Parallel() + + resource := createTestResource("testresource") + + regularProp := createTestType("name", "String") + writeOnlyProp := createTestType("password", "String", withWriteOnly(true)) + labelsType := &Type{Name: "labels", Type: "KeyValueLabels"} + + props := []*Type{regularProp, writeOnlyProp, labelsType} + result := resource.AddExtraFields(props, nil, "") + + // Should have: name + password + passwordWo + passwordWoVersion + labels + terraformLabels + effectiveLabels = 7 + if len(result) != 7 { + t.Errorf("Expected 7 properties total, got %d", len(result)) + } + + names := make(map[string]bool) + for _, prop := range result { + names[prop.Name] = true + } + + expectedNames := []string{"name", 
"password", "passwordWo", "passwordWoVersion", "labels", "terraformLabels", "effectiveLabels"} + for _, expected := range expectedNames { + if !names[expected] { + t.Errorf("Expected to find property named %s", expected) + } + } + }) +} diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 690687db966d..078775772585 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -624,7 +624,6 @@ func (t Type) ExactlyOneOfList() []string { if t.ResourceMetadata == nil { return []string{} } - return t.ExactlyOneOf } @@ -1014,6 +1013,54 @@ func propertyWithIgnoreWrite(ignoreWrite bool) func(*Type) { } } +func propertyWithRequired(required bool) func(*Type) { + return func(p *Type) { + p.Required = required + } +} + +func propertyWithWriteOnly(writeOnly bool) func(*Type) { + return func(p *Type) { + p.WriteOnly = writeOnly + } +} + +func propertyWithIgnoreRead(ignoreRead bool) func(*Type) { + return func(p *Type) { + p.IgnoreRead = ignoreRead + } +} + +func propertyWithConflicts(conflicts []string) func(*Type) { + return func(p *Type) { + p.Conflicts = conflicts + } +} + +func propertyWithRequiredWith(requiredWith []string) func(*Type) { + return func(p *Type) { + p.RequiredWith = requiredWith + } +} + +func propertyWithExactlyOneOf(exactlyOneOf []string) func(*Type) { + return func(p *Type) { + p.ExactlyOneOf = exactlyOneOf + } +} + +func propertyWithAtLeastOneOf(atLeastOneOf []string) func(*Type) { + return func(p *Type) { + p.AtLeastOneOf = atLeastOneOf + } +} + +func propertyWithApiName(apiName string) func(*Type) { + return func(p *Type) { + p.ApiName = apiName + } +} + func (t *Type) validateLabelsField() { productName := t.ResourceMetadata.ProductMetadata.Name resourceName := t.ResourceMetadata.Name diff --git a/mmv1/api/type_test.go b/mmv1/api/type_test.go index 3d46d120a31f..f6a738d248f2 100644 --- a/mmv1/api/type_test.go +++ b/mmv1/api/type_test.go @@ -361,7 +361,7 @@ func TestProviderOnly(t *testing.T) { }, }, } - labeled.Properties = 
labeled.AddLabelsRelatedFields(labeled.PropertiesWithExcluded(), nil) + labeled.Properties = labeled.AddExtraFields(labeled.PropertiesWithExcluded(), nil, "") labeled.SetDefault(nil) cases := []struct { diff --git a/mmv1/main.go b/mmv1/main.go index ef62e7ba5f88..9e7c0fc88612 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -235,7 +235,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod resource.SourceYamlFile = resourceYamlPath resource.TargetVersionName = version - resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) + resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) @@ -268,7 +268,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod } resource.TargetVersionName = version - resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) + resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index df88222fc7b7..b9bb9cf4b137 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,30 +222,10 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: - - name: 'secretAccessKeyWoVersion' - type: Integer - url_param_only: true - required_with: - - 'sensitive_params.0.secretAccessKeyWo' - description: | - The version of the sensitive params - used to trigger updates of the write-only params. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | The Secret Access Key of the AWS account transferring data from. sensitive: true - at_least_one_of: - - 'sensitive_params.0.secretAccessKey' - - 'sensitive_params.0.secretAccessKeyWo' - conflicts: - - 'sensitive_params.0.secretAccessKeyWo' - - name: 'secretAccessKeyWo' # Wo is convention for write-only properties - type: String - description: | - The Secret Access Key of the AWS account transferring data from. write_only: true - at_least_one_of: - - 'sensitive_params.0.secretAccessKeyWo' - - 'sensitive_params.0.secretAccessKey' - conflicts: - - 'sensitive_params.0.secretAccessKey' + required: true diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 67f7fdd0e791..9affd60788a1 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -246,27 +246,10 @@ properties: - name: 'password' type: String description: The password to authenticate. - exactly_one_of: - - 'password' - - 'password_wo' + required: true + write_only: true sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' - - name: 'passwordWo' - type: String - description: The password to authenticate. - exactly_one_of: - - 'passwordWo' - - 'password' - required_with: - - 'http_check.0.auth_info.0.password_wo_version' - write_only: true - - name: 'passwordWoVersion' - type: String - immutable: true - ignore_read: true - description: The password write-only version. - required_with: - - 'http_check.0.auth_info.0.password_wo' - name: 'username' type: String description: The username to authenticate. 
diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index d3e0335ee2bd..ac840f29e772 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -160,22 +160,6 @@ properties: type: String description: The secret data. Must be no larger than 64KiB. api_name: data - conflicts: - - 'secretDataWo' immutable: true sensitive: true - - name: 'secretDataWo' - type: String - description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - api_name: data - required_with: - - 'SecretDataWoVersion' - conflicts: - - 'payload.0.secretData' write_only: true - - name: 'SecretDataWoVersion' - type: Integer - default_value: 0 - url_param_only: true - description: Triggers update of secret data write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - immutable: true diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index cc31bb5b190b..fdc86f83f27f 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -18,7 +18,7 @@ {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - {{- if and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler) }} + {{- if or (and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler)) $.ClientSide }} return d.Get("{{ $.TerraformLineage }}") {{- else if $.IsA "NestedObject" }} if v == nil { From d7bdeec6c2eef5a809e4201c81c0861ccbf1fd85 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> 
Date: Fri, 8 Aug 2025 15:21:18 -0700 Subject: [PATCH 717/884] Update images for notebooks instance tests (#14719) --- mmv1/products/notebooks/Instance.yaml | 7 +++++++ .../examples/notebook_instance_basic.tf.tmpl | 4 ++-- .../examples/notebook_instance_basic_gpu.tf.tmpl | 4 ++-- .../examples/notebook_instance_basic_stopped.tf.tmpl | 4 ++-- .../examples/notebook_instance_full.tf.tmpl | 6 +++--- .../resource_notebooks_instance_gpu_test.go.tmpl | 4 ++-- .../resource_notebooks_instance_state_test.go.tmpl | 8 ++++---- .../notebooks/resource_notebooks_instance_test.go | 12 ++++++------ 8 files changed, 28 insertions(+), 21 deletions(-) diff --git a/mmv1/products/notebooks/Instance.yaml b/mmv1/products/notebooks/Instance.yaml index c412d9f3c9f6..2cd080c9abfa 100644 --- a/mmv1/products/notebooks/Instance.yaml +++ b/mmv1/products/notebooks/Instance.yaml @@ -80,6 +80,7 @@ examples: instance_name: 'notebooks-instance' ignore_read_extra: - 'desired_state' + - 'update_time' skip_test: https://github.com/hashicorp/terraform-provider-google/issues/17593#issuecomment-2888583933 - name: 'notebook_instance_basic_container' primary_resource_id: 'instance' @@ -87,12 +88,16 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'notebooks-instance' + ignore_read_extra: + - 'update_time' - name: 'notebook_instance_basic_gpu' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' region_override: 'us-west1-a' vars: instance_name: 'notebooks-instance' + ignore_read_extra: + - 'update_time' - name: 'notebook_instance_full' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' @@ -104,6 +109,8 @@ examples: service_account: 'SERVICE_ACCT' test_vars_overrides: 'key_name': 'acctest.BootstrapKMSKeyInLocation(t, "global").CryptoKey.Name' + ignore_read_extra: + - 'update_time' virtual_fields: - name: 'desired_state' description: | diff --git 
a/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl index 52ef097a32ec..c9d9073769ff 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl @@ -3,7 +3,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } } diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl index e6a90631481a..9670dd3d90bd 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl @@ -9,7 +9,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { core_count = 1 } vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } } diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl index b8187eddcb73..9ad592402e6a 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl @@ -3,8 +3,8 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } desired_state = "STOPPED" } diff --git 
a/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl index 7c0f23128e54..55817aff86bf 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl @@ -4,8 +4,8 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } instance_owners = [ "{{index $.TestEnvVars "service_account"}}"] @@ -13,7 +13,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { install_gpu_driver = true boot_disk_type = "PD_SSD" - boot_disk_size_gb = 110 + boot_disk_size_gb = 150 no_public_ip = true no_proxy_access = true diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl index 570f8122f460..a435f330b1ad 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl @@ -44,8 +44,8 @@ resource "google_notebooks_instance" "test" { terraform = "true" } vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } install_gpu_driver = true accelerator_config { diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl index 18167fb4b4b9..8c872bc37fc2 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl @@ -59,8 +59,8 @@ resource "google_notebooks_instance" "test" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } desired_state = "ACTIVE" } @@ -74,8 +74,8 @@ resource "google_notebooks_instance" "test" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } desired_state = "STOPPED" } diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go index fcac24848d0d..bb7da4791158 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go @@ -91,8 +91,8 @@ resource "google_notebooks_instance" "test" { } vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } } `, name) @@ -106,8 +106,8 @@ resource "google_notebooks_instance" "instance" { machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } metadata = { @@ -132,8 +132,8 @@ resource "google_notebooks_instance" "instance" { machine_type = "e2-medium" vm_image { - project = "deeplearning-platform-release" - image_family = "pytorch-latest-cu124" + project = "cloud-notebooks-managed" + image_family = "workbench-instances" } metadata = { From 2d2db6b49cb57e4a3e4e5a1d3180844e3eec9a2f Mon Sep 17 
00:00:00 2001 From: maayanbeltzer Date: Sun, 10 Aug 2025 23:40:35 +0000 Subject: [PATCH 718/884] Update VpcFlowLogs to support subnet and vpc on Project Level (#14381) --- .../networkmanagement/VpcFlowLogsConfig.yaml | 58 ++++-- ...flow_logs_config_interconnect_full.tf.tmpl | 35 ---- ...vpc_flow_logs_config_network_basic.tf.tmpl | 15 ++ ..._vpc_flow_logs_config_subnet_basic.tf.tmpl | 24 +++ ...ment_vpc_flow_logs_config_vpn_full.tf.tmpl | 71 ------- ...agement_vpc_flow_logs_config_test.go.tmpl} | 188 ++++++++++++++++++ 6 files changed, 264 insertions(+), 127 deletions(-) delete mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl rename mmv1/third_party/terraform/services/networkmanagement/{resource_network_management_vpc_flow_logs_config_test.go => resource_network_management_vpc_flow_logs_config_test.go.tmpl} (57%) diff --git a/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml b/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml index f03e3ca43683..aeaad17276f0 100644 --- a/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml +++ b/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml @@ -15,7 +15,7 @@ name: 'VpcFlowLogsConfig' description: VPC Flow Logs Config is a resource that lets you configure - Flow Logs for VPC, Interconnect attachments or VPN Tunnels. + Flow Logs for Networks, Subnets, Interconnect attachments or VPN Tunnels. 
id_format: 'projects/{{project}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' base_url: 'projects/{{project}}/locations/{{location}}/vpcFlowLogsConfigs' self_link: 'projects/{{project}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' @@ -41,13 +41,6 @@ sweeper: url_substitutions: - region: "global" examples: - - name: 'network_management_vpc_flow_logs_config_interconnect_full' - primary_resource_id: 'interconnect-test' - vars: - network_name: 'full-interconnect-test-network' - router_name: 'full-interconnect-test-router' - vpc_flow_logs_config_id: 'full-interconnect-test-id' - interconnect_attachment_name: 'full-interconnect-test-id' - name: 'network_management_vpc_flow_logs_config_interconnect_basic' primary_resource_id: 'interconnect-test' vars: @@ -67,18 +60,19 @@ examples: esp_forwarding_rule_name: 'basic-test-fresp' route_name: 'basic-test-route' vpc_flow_logs_config_id: 'basic-test-id' - - name: 'network_management_vpc_flow_logs_config_vpn_full' - primary_resource_id: 'vpn-test' + - name: 'network_management_vpc_flow_logs_config_network_basic' + primary_resource_id: 'network-test' + min_version: 'beta' vars: - network_name: 'full-test-network' - vpn_tunnel_name: 'full-test-tunnel' - target_vpn_gateway_name: 'full-test-gateway' - address_name: 'full-test-address' - udp500_forwarding_rule_name: 'full-test-fr500' - udp4500_forwarding_rule_name: 'full-test-fr4500' - esp_forwarding_rule_name: 'full-test-fresp' - route_name: 'full-test-route' - vpc_flow_logs_config_id: 'full-test-id' + network_name: 'basic-network-test-network' + vpc_flow_logs_config_id: 'basic-network-test-id' + - name: 'network_management_vpc_flow_logs_config_subnet_basic' + primary_resource_id: 'subnet-test' + min_version: 'beta' + vars: + network_name: 'basic-subnet-test-network' + subnetwork_name: 'basic-subnet-test-subnetwork' + vpc_flow_logs_config_id: 'basic-subnet-test-id' parameters: - name: 'location' type: String @@ -112,13 +106,14 @@ 
properties: default_from_api: true description: | Optional. The state of the VPC Flow Log configuration. Default value - is ENABLED. When creating a new configuration, it must be enabled. Possible + is ENABLED. When creating a new configuration, it must be enabled. + Possible values: STATE_UNSPECIFIED ENABLED DISABLED - name: 'aggregationInterval' type: String default_from_api: true description: | Optional. The aggregation interval for the logs. Default value is - INTERVAL_5_SEC. Possible values: AGGREGATION_INTERVAL_UNSPECIFIED INTERVAL_5_SEC INTERVAL_30_SEC INTERVAL_1_MIN INTERVAL_5_MIN INTERVAL_10_MIN INTERVAL_15_MIN" + INTERVAL_5_SEC. Possible values: AGGREGATION_INTERVAL_UNSPECIFIED INTERVAL_5_SEC INTERVAL_30_SEC INTERVAL_1_MIN INTERVAL_5_MIN INTERVAL_10_MIN INTERVAL_15_MIN - name: 'flowSampling' type: Double default_from_api: true @@ -153,6 +148,17 @@ properties: type: String description: | Traffic will be logged from the VPN Tunnel. Format: projects/{project_id}/regions/{region}/vpnTunnels/{name} + - name: 'subnet' + min_version: 'beta' + type: String + description: | + Traffic will be logged from VMs within the subnetwork. Format: projects/{project_id}/regions/{region}/subnetworks/{name} + - name: 'network' + min_version: 'beta' + type: String + description: | + Traffic will be logged from VMs, VPN tunnels and Interconnect Attachments within the network. Format: projects/{project_id}/global/networks/{name} + - name: 'labels' type: KeyValueLabels description: | @@ -167,3 +173,13 @@ properties: description: | Output only. The time the config was updated. output: true + - name: 'targetResourceState' + type: String + description: |- + Describes the state of the configured target resource for diagnostic + purposes. 
+ Possible values: + TARGET_RESOURCE_STATE_UNSPECIFIED + TARGET_RESOURCE_EXISTS + TARGET_RESOURCE_DOES_NOT_EXIST + output: true diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl deleted file mode 100644 index fbc87c248f6e..000000000000 --- a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl +++ /dev/null @@ -1,35 +0,0 @@ -data "google_project" "project" { -} - -resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { - vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" - location = "global" - interconnect_attachment = "projects/${data.google_project.project.number}/regions/us-east4/interconnectAttachments/${google_compute_interconnect_attachment.attachment.name}" - state = "ENABLED" - aggregation_interval = "INTERVAL_5_SEC" - description = "VPC Flow Logs over a VPN Gateway." 
- flow_sampling = 0.5 - metadata = "INCLUDE_ALL_METADATA" -} - -resource "google_compute_network" "network" { - name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_router" "router" { - name = "{{index $.Vars "router_name"}}" - network = google_compute_network.network.name - bgp { - asn = 16550 - } -} - -resource "google_compute_interconnect_attachment" "attachment" { - name = "{{index $.Vars "interconnect_attachment_name"}}" - edge_availability_domain = "AVAILABILITY_DOMAIN_1" - type = "PARTNER" - router = google_compute_router.router.id - mtu = 1500 -} - - diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl new file mode 100644 index 000000000000..7fa3502839b5 --- /dev/null +++ b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl @@ -0,0 +1,15 @@ +data "google_project" "project" { + provider = google-beta +} + +resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { + provider = google-beta + vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" + location = "global" + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.network.name}" +} + +resource "google_compute_network" "network" { + provider = google-beta + name = "{{index $.Vars "network_name"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl new file mode 100644 index 000000000000..1e59a36cbfe7 --- /dev/null +++ b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl @@ -0,0 +1,24 @@ +data "google_project" "project" { + provider = google-beta +} + +resource 
"google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { + provider = google-beta + vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" + location = "global" + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/${google_compute_subnetwork.subnetwork.name}" +} + +resource "google_compute_network" "network" { + provider = google-beta + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "subnetwork" { + provider = google-beta + name = "{{index $.Vars "subnetwork_name"}}" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.network.id +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl deleted file mode 100644 index 90de9e6bddf8..000000000000 --- a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl +++ /dev/null @@ -1,71 +0,0 @@ -data "google_project" "project" { -} - -resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { - vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" - location = "global" - vpn_tunnel = "projects/${data.google_project.project.number}/regions/us-central1/vpnTunnels/${google_compute_vpn_tunnel.tunnel.name}" - state = "ENABLED" - aggregation_interval = "INTERVAL_5_SEC" - description = "VPC Flow Logs over a VPN Gateway." 
- flow_sampling = 0.5 - metadata = "INCLUDE_ALL_METADATA" -} - -resource "google_compute_vpn_tunnel" "tunnel" { - name = "{{index $.Vars "vpn_tunnel_name"}}" - peer_ip = "15.0.0.120" - shared_secret = "a secret message" - target_vpn_gateway = google_compute_vpn_gateway.target_gateway.id - - depends_on = [ - google_compute_forwarding_rule.fr_esp, - google_compute_forwarding_rule.fr_udp500, - google_compute_forwarding_rule.fr_udp4500, - ] -} - -resource "google_compute_vpn_gateway" "target_gateway" { - name = "{{index $.Vars "target_vpn_gateway_name"}}" - network = google_compute_network.network.id -} - -resource "google_compute_network" "network" { - name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_address" "vpn_static_ip" { - name = "{{index $.Vars "address_name"}}" -} - -resource "google_compute_forwarding_rule" "fr_esp" { - name = "{{index $.Vars "esp_forwarding_rule_name"}}" - ip_protocol = "ESP" - ip_address = google_compute_address.vpn_static_ip.address - target = google_compute_vpn_gateway.target_gateway.id -} - -resource "google_compute_forwarding_rule" "fr_udp500" { - name = "{{index $.Vars "udp500_forwarding_rule_name"}}" - ip_protocol = "UDP" - port_range = "500" - ip_address = google_compute_address.vpn_static_ip.address - target = google_compute_vpn_gateway.target_gateway.id -} - -resource "google_compute_forwarding_rule" "fr_udp4500" { - name = "{{index $.Vars "udp4500_forwarding_rule_name"}}" - ip_protocol = "UDP" - port_range = "4500" - ip_address = google_compute_address.vpn_static_ip.address - target = google_compute_vpn_gateway.target_gateway.id -} - -resource "google_compute_route" "route" { - name = "{{index $.Vars "route_name"}}" - network = google_compute_network.network.name - dest_range = "15.0.0.0/24" - priority = 1000 - next_hop_vpn_tunnel = google_compute_vpn_tunnel.tunnel.id -} - diff --git a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go 
b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl similarity index 57% rename from mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go rename to mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl index 2c00b5c634a9..12c083a79f4f 100644 --- a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go +++ b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl @@ -182,6 +182,194 @@ resource "google_network_management_vpc_flow_logs_config" "example" { return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) } +{{ if ne $.TargetVersionName "ga" -}} +func TestAccNetworkManagementVpcFlowLogsConfig_network(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkManagementVpcFlowLogsConfig_network(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.network-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, + }, + { + Config: testAccNetworkManagementVpcFlowLogsConfig_networkUpdate(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.network-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, + }, + }, + }) +} + +func 
testAccNetworkManagementVpcFlowLogsConfig_network(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_network" "network" { + provider = google-beta + name = "tf-test-flow-logs-network-%{random_suffix}" +} + +resource "google_network_management_vpc_flow_logs_config" "network-test" { + provider = google-beta + vpc_flow_logs_config_id = "tf-test-network-id-%{random_suffix}" + location = "global" + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.network.name}" + state = "ENABLED" +} +`, context) +} + +func testAccNetworkManagementVpcFlowLogsConfig_networkUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_network" "network" { + provider = google-beta + name = "tf-test-flow-logs-network-%{random_suffix}" +} + +resource "google_compute_network" "network_update" { + provider = google-beta + name = "tf-test-flow-logs-network-update-%{random_suffix}" +} + +resource "google_network_management_vpc_flow_logs_config" "network-test" { + provider = google-beta + vpc_flow_logs_config_id = "tf-test-network-id-%{random_suffix}" + location = "global" + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.network_update.name}" + state = "DISABLED" + aggregation_interval = "INTERVAL_10_MIN" + flow_sampling = 0.05 + metadata = "INCLUDE_ALL_METADATA" + description = "Updated description for network test" +} +`, context) +} + +func TestAccNetworkManagementVpcFlowLogsConfig_subnet(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: 
testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkManagementVpcFlowLogsConfig_subnet(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.subnet-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, + }, + { + Config: testAccNetworkManagementVpcFlowLogsConfig_subnetUpdate(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.subnet-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, + }, + }, + }) +} + +func testAccNetworkManagementVpcFlowLogsConfig_subnet(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_network" "network" { + name = "tf-test-subnet-network-%{random_suffix}" + auto_create_subnetworks = false + provider = google-beta +} + +resource "google_compute_subnetwork" "subnet" { + provider = google-beta + name = "tf-test-flow-logs-subnet-%{random_suffix}" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.network.id +} + +resource "google_network_management_vpc_flow_logs_config" "subnet-test" { + provider = google-beta + vpc_flow_logs_config_id = "tf-test-subnet-id-%{random_suffix}" + location = "global" + subnet = "projects/${data.google_project.project.number}/regions/${google_compute_subnetwork.subnet.region}/subnetworks/${google_compute_subnetwork.subnet.name}" +} +`, context) +} + +func testAccNetworkManagementVpcFlowLogsConfig_subnetUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} + +resource "google_compute_network" "network" { + provider = google-beta + name = "tf-test-subnet-network-%{random_suffix}" + auto_create_subnetworks = 
false +} + +resource "google_compute_subnetwork" "subnet" { + provider = google-beta + name = "tf-test-flow-logs-subnet-%{random_suffix}" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.network.id +} + +resource "google_compute_subnetwork" "subnet_update" { + provider = google-beta + name = "tf-test-flow-logs-subnet-update-%{random_suffix}" + ip_cidr_range = "10.3.0.0/16" + region = "us-central1" + network = google_compute_network.network.id +} + +resource "google_network_management_vpc_flow_logs_config" "subnet-test" { + provider = google-beta + vpc_flow_logs_config_id = "tf-test-subnet-id-%{random_suffix}" + location = "global" + subnet = "projects/${data.google_project.project.number}/regions/${google_compute_subnetwork.subnet_update.region}/subnetworks/${google_compute_subnetwork.subnet_update.name}" + state = "ENABLED" +} +`, context) +} +{{ end }} + func testAccNetworkManagementVpcFlowLogsConfig_baseResources(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_vpn_tunnel" "tunnel" { From 3d28d0bc3a88567e6aa49d29803dd5c98fe0c965 Mon Sep 17 00:00:00 2001 From: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Date: Mon, 11 Aug 2025 21:06:50 +0530 Subject: [PATCH 719/884] Make LinkedVpcNetwork mutable (#14757) --- mmv1/products/networkconnectivity/Spoke.yaml | 4 +++- .../resource_network_connectivity_spoke_test.go | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mmv1/products/networkconnectivity/Spoke.yaml b/mmv1/products/networkconnectivity/Spoke.yaml index 205ca6da5a8e..57b3bcb1c965 100644 --- a/mmv1/products/networkconnectivity/Spoke.yaml +++ b/mmv1/products/networkconnectivity/Spoke.yaml @@ -254,12 +254,14 @@ properties: - name: 'linkedVpcNetwork' type: NestedObject description: VPC network that is associated with the spoke. 
- immutable: true conflicts: - linked_interconnect_attachments - linked_router_appliance_instances - linked_vpn_tunnels - linked_producer_vpc_network + update_mask_fields: + - 'linkedVpcNetwork.excludeExportRanges' + - 'linkedVpcNetwork.includeExportRanges' properties: - name: 'uri' type: String diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go index ac8597ae627d..07b490cff4c5 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go @@ -649,11 +649,11 @@ resource "google_network_connectivity_spoke" "primary" { hub = google_network_connectivity_hub.basic_hub.id linked_vpc_network { exclude_export_ranges = [ - "198.51.100.0/24", + "198.51.110.0/24", "10.10.0.0/16" ] include_export_ranges = [ - "198.51.100.0/23", + "198.51.110.0/23", "10.0.0.0/8" ] uri = google_compute_network.network.self_link From 5bcf5e2eaa7dd68b590b1bee6636796eb056ba7a Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Mon, 11 Aug 2025 18:47:02 +0200 Subject: [PATCH 720/884] bigqueryreservation: added support for `scaling_mode` and `max_slots` properties on `google_bigquery_reservation` (beta) (#14056) --- .../bigqueryreservation/Reservation.yaml | 97 +++++++++++++++++++ ...esource_bigquery_reservation_test.go.tmpl} | 77 +++++++++++++++ 2 files changed, 174 insertions(+) rename mmv1/third_party/terraform/services/bigqueryreservation/{resource_bigquery_reservation_test.go => resource_bigquery_reservation_test.go.tmpl} (51%) diff --git a/mmv1/products/bigqueryreservation/Reservation.yaml b/mmv1/products/bigqueryreservation/Reservation.yaml index d6b973adba4e..fa6a7158d557 100644 --- a/mmv1/products/bigqueryreservation/Reservation.yaml +++ 
b/mmv1/products/bigqueryreservation/Reservation.yaml @@ -156,3 +156,100 @@ properties: replicated to the secondary. output: true output: true + - name: 'scalingMode' + type: Enum + min_version: beta + description: | + The scaling mode for the reservation. If the field is present but maxSlots is not present, + requests will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. + + Enum values: + + `SCALING_MODE_UNSPECIFIED`: Default value of ScalingMode. + + `AUTOSCALE_ONLY`: The reservation will scale up only using slots from autoscaling. It will + not use any idle slots even if there may be some available. The upper limit that autoscaling + can scale up to will be maxSlots - baseline. For example, if maxSlots is 1000, baseline is 200 + and customer sets ScalingMode to AUTOSCALE_ONLY, then autoscaler will scale up to 800 slots + and no idle slots will be used. Please note, in this mode, the ignoreIdleSlots field must be + set to true. Otherwise the request will be rejected with error code + google.rpc.Code.INVALID_ARGUMENT. + + `IDLE_SLOTS_ONLY`: The reservation will scale up using only idle slots contributed by other + reservations or from unassigned commitments. If no idle slots are available it will not scale + up further. If the idle slots which it is using are reclaimed by the contributing reservation(s) + it may be forced to scale down. The max idle slots the reservation can be maxSlots - baseline + capacity. For example, if maxSlots is 1000, baseline is 200 and customer sets ScalingMode to + IDLE_SLOTS_ONLY, 1. if there are 1000 idle slots available in other reservations, the + reservation will scale up to 1000 slots with 200 baseline and 800 idle slots. 2. if there are + 500 idle slots available in other reservations, the reservation will scale up to 700 slots with + 200 baseline and 300 idle slots. Please note, in this mode, the reservation might not be able to + scale up to maxSlots. 
Please note, in this mode, the ignoreIdleSlots field must be set to false. + Otherwise the request will be rejected with error code google.rpc.Code.INVALID_ARGUMENT + + `ALL_SLOTS`: The reservation will scale up using all slots available to it. It will use idle slots + contributed by other reservations or from unassigned commitments first. If no idle slots are + available it will scale up using autoscaling. For example, if maxSlots is 1000, baseline is 200 + and customer sets ScalingMode to ALL_SLOTS, 1. if there are 800 idle slots available in other + reservations, the reservation will scale up to 1000 slots with 200 baseline and 800 idle slots. 2. + if there are 500 idle slots available in other reservations, the reservation will scale up to 1000 + slots with 200 baseline, 500 idle slots and 300 autoscaling slots. 3. if there are no idle slots + available in other reservations, it will scale up to 1000 slots with 200 baseline and 800 + autoscaling slots. Please note, in this mode, the ignoreIdleSlots field must be set to false. + Otherwise the request will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. + enum_values: + - 'SCALING_MODE_UNSPECIFIED' + - 'AUTOSCALE_ONLY' + - 'IDLE_SLOTS_ONLY' + - 'ALL_SLOTS' + required_with: + - 'maxSlots' + conflicts: + - 'autoscale' + - name: 'maxSlots' + type: Integer + min_version: beta + description: | + The overall max slots for the reservation, covering slotCapacity (baseline), idle slots + (if ignoreIdleSlots is false) and scaled slots. If present, the reservation won't use + more than the specified number of slots, even if there is demand and supply (from idle + slots). NOTE: capping a reservation's idle slot usage is best effort and its usage may + exceed the maxSlots value. However, in terms of autoscale.current_slots (which accounts + for the additional added slots), it will never exceed the maxSlots - baseline. 
+ + This field must be set together with the scalingMode enum value, otherwise the request + will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. + + If the maxSlots and scalingMode are set, the autoscale or autoscale.max_slots field + must be unset. Otherwise the request will be rejected with error code + google.rpc.Code.INVALID_ARGUMENT. However, the autoscale field may still be in the + output. The autopscale.max_slots will always show as 0 and the autoscaler.current_slots + will represent the current slots from autoscaler excluding idle slots. For example, + if the maxSlots is 1000 and scalingMode is AUTOSCALE_ONLY, then in the output, the + autoscaler.max_slots will be 0 and the autoscaler.current_slots may be any value + between 0 and 1000. + + If the maxSlots is 1000, scalingMode is ALL_SLOTS, the baseline is 100 and idle slots + usage is 200, then in the output, the autoscaler.max_slots will be 0 and the + autoscaler.current_slots will not be higher than 700. + + If the maxSlots is 1000, scalingMode is IDLE_SLOTS_ONLY, then in the output, the + autoscaler field will be null. + + If the maxSlots and scalingMode are set, then the ignoreIdleSlots field must be + aligned with the scalingMode enum value.(See details in ScalingMode comments). + Otherwise the request will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. + + Please note, the maxSlots is for user to manage the part of slots greater than the + baseline. Therefore, we don't allow users to set maxSlots smaller or equal to the + baseline as it will not be meaningful. If the field is present and + slotCapacity>=maxSlots, requests will be rejected with error code + google.rpc.Code.INVALID_ARGUMENT. + + Please note that if maxSlots is set to 0, we will treat it as unset. Customers can set + maxSlots to 0 and set scalingMode to SCALING_MODE_UNSPECIFIED to disable the maxSlots + feature. 
+ required_with: + - 'scalingMode' + conflicts: + - 'autoscale' diff --git a/mmv1/third_party/terraform/services/bigqueryreservation/resource_bigquery_reservation_test.go b/mmv1/third_party/terraform/services/bigqueryreservation/resource_bigquery_reservation_test.go.tmpl similarity index 51% rename from mmv1/third_party/terraform/services/bigqueryreservation/resource_bigquery_reservation_test.go rename to mmv1/third_party/terraform/services/bigqueryreservation/resource_bigquery_reservation_test.go.tmpl index a7f5f15bee03..66fcc9c455e4 100644 --- a/mmv1/third_party/terraform/services/bigqueryreservation/resource_bigquery_reservation_test.go +++ b/mmv1/third_party/terraform/services/bigqueryreservation/resource_bigquery_reservation_test.go.tmpl @@ -38,6 +38,41 @@ func TestAccBigqueryReservation_withDisasterRecovery_update(t *testing.T) { }) } +{{ if ne $.TargetVersionName `ga` -}} + +func TestAccBigqueryReservation_withScalingMode_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccBigqueryReservation_withScalingMode_basic(context), + }, + { + ResourceName: "google_bigquery_reservation.reservation", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigqueryReservation_withScalingMode_update(context), + }, + { + ResourceName: "google_bigquery_reservation.reservation", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ end }} + func testAccBigqueryReservation_withDisasterRecovery_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_bigquery_reservation" "reservation" { @@ -78,3 +113,45 @@ resource "google_bigquery_reservation" "reservation" { } `, context) } + +{{ if ne $.TargetVersionName `ga` -}} + +func 
testAccBigqueryReservation_withScalingMode_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_reservation" "reservation" { + provider = google-beta + name = "tf-test-reservation-%{random_suffix}" + location = "us-west2" + + // Set to 0 for testing purposes + // In reality this would be larger than zero + slot_capacity = 0 + edition = "ENTERPRISE_PLUS" + ignore_idle_slots = true + concurrency = 0 + max_slots = 100 + scaling_mode = "AUTOSCALE_ONLY" +} +`, context) +} + +func testAccBigqueryReservation_withScalingMode_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_reservation" "reservation" { + provider = google-beta + name = "tf-test-reservation-%{random_suffix}" + location = "us-west2" + + // Set to 0 for testing purposes + // In reality this would be larger than zero + slot_capacity = 0 + edition = "ENTERPRISE_PLUS" + ignore_idle_slots = false + concurrency = 0 + max_slots = 50 + scaling_mode = "ALL_SLOTS" +} +`, context) +} + +{{ end }} \ No newline at end of file From 507c4826a1e6a7a8a0f951ce36a8c9f360adf81d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 11 Aug 2025 10:06:59 -0700 Subject: [PATCH 721/884] tgc-revival: set an object to nil if its all values are nil during cai2hcl (#14789) --- .../terraform/flatten_property_method.go.tmpl | 5 +++++ .../third_party/tgc_next/pkg/tgcresource/utils.go | 15 +++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index fdc86f83f27f..92387f432fff 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -47,6 +47,11 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config) 
{{- end }} {{- end }} + {{- if and $.ResourceMetadata.IsTgcCompiler (not $.AllowEmptyObject) }} + if tgcresource.AllValuesAreNil(transformed) { + return nil + } + {{- end }} return []interface{}{transformed} {{- else if and ($.IsA "Array") ($.ItemType.IsA "NestedObject") }} if v == nil { diff --git a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go index 07f42af30b4e..d0b3f9db3f64 100644 --- a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go +++ b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go @@ -61,3 +61,18 @@ func MergeFlattenedProperties(hclData map[string]interface{}, flattenedProp inte } return nil } + +// Checks if all values in the map are nil +func AllValuesAreNil(m map[string]interface{}) bool { + if len(m) == 0 { + return true + } + + for _, v := range m { + if v != nil { + return false + } + } + + return true +} From a5eb629466bace118f97a02f670a7884fa2333a8 Mon Sep 17 00:00:00 2001 From: Jaylon McShan Date: Mon, 11 Aug 2025 12:45:34 -0500 Subject: [PATCH 722/884] Make metadata required in Vertex AI Index resource (#14382) --- mmv1/products/vertexai/Index.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mmv1/products/vertexai/Index.yaml b/mmv1/products/vertexai/Index.yaml index 5ee3e4fa1170..dd7730740afe 100644 --- a/mmv1/products/vertexai/Index.yaml +++ b/mmv1/products/vertexai/Index.yaml @@ -84,7 +84,10 @@ properties: # https://cloud.google.com/vertex-ai/docs/matching-engine/configuring-indexes - name: 'metadata' type: NestedObject - description: An additional information about the Index + description: |- + Additional information about the Index. + Although this field is not marked as required in the API specification, it is currently required when creating an Index and must be provided. + Attempts to create an Index without this field will result in an API error. 
properties: - name: 'contentsDeltaUri' type: String From 6885be8e45459c85150f42425e5a855aea102a63 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 11 Aug 2025 11:37:35 -0700 Subject: [PATCH 723/884] Fix google_apigee_sharedflow_deployment import (#14811) --- .../services/apigee/resource_apigee_sharedflow_deployment.go | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go index d6c715152bbd..68f1c91d9ac2 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go @@ -229,6 +229,7 @@ func resourceApigeeSharedflowDeploymentImport(d *schema.ResourceData, meta inter config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)$", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)/deployments$", "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err From 40f4b368c9a7cc3a7096729f9a761d4636d0926d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 11 Aug 2025 13:39:46 -0700 Subject: [PATCH 724/884] tgc-revival: retry 3 times for missing fields error (#14793) Co-authored-by: Thomas Rodgers --- mmv1/third_party/tgc_next/go.mod | 1 + mmv1/third_party/tgc_next/go.sum | 2 + .../tgc_next/test/assert_test_files.go | 78 ++++++++++++------- mmv1/third_party/tgc_next/test/setup.go | 27 +++---- 4 files changed, 66 insertions(+), 42 deletions(-) diff --git a/mmv1/third_party/tgc_next/go.mod b/mmv1/third_party/tgc_next/go.mod index ce14728d8dbb..c5490885bad5 100644 --- a/mmv1/third_party/tgc_next/go.mod +++ b/mmv1/third_party/tgc_next/go.mod @@ -29,6 +29,7 @@ 
require ( github.com/hashicorp/go-cleanhttp v0.5.2 github.com/hashicorp/terraform-plugin-framework v1.13.0 github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 + github.com/sethvargo/go-retry v0.3.0 github.com/sirupsen/logrus v1.9.3 github.com/spf13/cobra v1.8.1 golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 diff --git a/mmv1/third_party/tgc_next/go.sum b/mmv1/third_party/tgc_next/go.sum index 2b625c7e5997..594c84f65387 100644 --- a/mmv1/third_party/tgc_next/go.sum +++ b/mmv1/third_party/tgc_next/go.sum @@ -235,6 +235,8 @@ github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1: github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE= +github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index f3c238028e06..5da0f9113751 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -11,6 +11,7 @@ import ( "strings" "sync" "testing" + "time" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl" cai2hclconverters "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters" @@ -18,6 +19,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai" tfplan2caiconverters "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters" + "github.com/sethvargo/go-retry" "go.uber.org/zap" "go.uber.org/zap/zaptest" @@ -32,38 +34,57 @@ var ( ) func BidirectionalConversion(t *testing.T, ignoredFields []string, ignoredAssetFields []string) { - resourceTestData, primaryResource, err := prepareTestData(t.Name()) - if err != nil { - t.Fatal("Error preparing the input data:", err) - } - - if resourceTestData == nil { - t.Skipf("The test data is unavailable.") - } - - // Create a temporary directory for running terraform. - tfDir, err := os.MkdirTemp(tmpDir, "terraform") - if err != nil { - log.Fatal(err) - } - defer os.RemoveAll(tfDir) + retries := 0 + flakyAction := func(ctx context.Context) error { + log.Printf("Starting the retry %d", retries) + resourceTestData, primaryResource, err := prepareTestData(t.Name(), retries) + retries++ + if err != nil { + return fmt.Errorf("error preparing the input data: %v", err) + } - logger := zaptest.NewLogger(t) + if resourceTestData == nil { + return retry.RetryableError(fmt.Errorf("fail: test data is unavailable")) + } - // If the primary resource is available, only test the primary resource. - // Otherwise, test all of the resources in the test. - if primaryResource != "" { - t.Logf("Test for the primary resource %s begins.", primaryResource) - err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, ignoredAssetFields, logger, true) + // Create a temporary directory for running terraform. 
+ tfDir, err := os.MkdirTemp(tmpDir, "terraform") if err != nil { - t.Fatal("Test fails:", err) + return err } - } else { - for _, testData := range resourceTestData { - err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, ignoredAssetFields, logger, false) + defer os.RemoveAll(tfDir) + + logger := zaptest.NewLogger(t) + + // If the primary resource is specified, only test the primary resource. + // Otherwise, test all of the resources in the test. + if primaryResource != "" { + t.Logf("Test for the primary resource %s begins.", primaryResource) + err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, ignoredAssetFields, logger, true) if err != nil { - t.Fatal("Test fails: ", err) + return err } + } else { + for _, testData := range resourceTestData { + err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, ignoredAssetFields, logger, false) + if err != nil { + return err + } + } + } + + return nil + } + + backoffPolicy := retry.WithMaxRetries(maxRetries, retry.NewConstant(50*time.Millisecond)) + + t.Log("Starting test with retry logic.") + + if err := retry.Do(context.Background(), backoffPolicy, flakyAction); err != nil { + if strings.Contains(err.Error(), "test data is unavailable") { + t.Skipf("Test skipped because data was unavailable after all retries: %v", err) + } else { + t.Fatalf("Failed after all retries %d: %v", retries, err) } } } @@ -151,8 +172,11 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData parsedExportConfig := exportResources[0].Attributes missingKeys := compareHCLFields(testData.ParsedRawConfig, parsedExportConfig, ignoredFieldSet) + + // Sometimes, the reason for missing fields could be CAI asset data issue. 
if len(missingKeys) > 0 { - return fmt.Errorf("missing fields in resource %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) + log.Printf("missing fields in resource %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) + return retry.RetryableError(fmt.Errorf("missing fields")) } log.Printf("Step 1 passes for resource %s. All of the fields in raw config are in export config", testData.ResourceAddress) diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index b6af12bf6cc6..07e8f20c209d 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -142,7 +142,7 @@ func readTestsDataFromGCSForRun(ctx context.Context, currentDate time.Time, buck return metadata, nil } -func prepareTestData(testName string) (map[string]ResourceTestData, string, error) { +func prepareTestData(testName string, retries int) (map[string]ResourceTestData, string, error) { var err error cacheMutex.Lock() defer cacheMutex.Unlock() @@ -153,23 +153,20 @@ func prepareTestData(testName string) (map[string]ResourceTestData, string, erro var testMetadata TgcMetadataPayload var resourceMetadata map[string]*ResourceMetadata - for _, run := range TestsMetadata { - var ok bool - testMetadata, ok = run.MetadataByTest[testName] - if ok { - log.Printf("Found metadata for %s from run on %s", testName, run.Date.Format(ymdFormat)) - resourceMetadata = testMetadata.ResourceMetadata - if len(resourceMetadata) > 0 { - break - } - } - log.Printf("Missing metadata for %s from run on %s, looking at previous run", testName, run.Date.Format(ymdFormat)) - } - if len(resourceMetadata) == 0 { + run := TestsMetadata[retries] + testMetadata, ok := run.MetadataByTest[testName] + if !ok { log.Printf("Data of test is unavailable: %s", testName) return nil, "", nil } + resourceMetadata = testMetadata.ResourceMetadata + if len(resourceMetadata) == 0 { + log.Printf("Data of resource is unavailable: %s", 
testName) + return nil, "", nil + } + + log.Printf("Found metadata for %s from run on %s", testName, run.Date.Format(ymdFormat)) rawTfFile := fmt.Sprintf("%s.tf", testName) err = os.WriteFile(rawTfFile, []byte(testMetadata.RawConfig), 0644) @@ -186,7 +183,7 @@ func prepareTestData(testName string) (map[string]ResourceTestData, string, erro } if len(rawResourceConfigs) == 0 { - return nil, "", fmt.Errorf("Test %s fails: raw config is unavailable", testName) + return nil, "", fmt.Errorf("test %s fails: raw config is unavailable", testName) } rawConfigMap := convertToConfigMap(rawResourceConfigs) From e7925fadccf657162782dddca9957c2819ca6eef Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Mon, 11 Aug 2025 14:18:43 -0700 Subject: [PATCH 725/884] Fix markdown lists in sql_instance_switchover.html.markdown (#14816) --- .../sql_instance_switchover.html.markdown | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown b/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown index eaa817f0de0c..af3b45ca3c77 100644 --- a/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown @@ -32,11 +32,11 @@ replica_configuration { } ``` -2. Invoke switchover on the replica \ -a. Change `instance_type` from `READ_REPLICA_INSTANCE` to `CLOUD_SQL_INSTANCE` \ -b. Remove `master_instance_name` \ -c. Remove `replica_configuration` \ -d. Add current primary's name to the replica's `replica_names` list +2. 
Invoke switchover on the replica + * Change `instance_type` from `READ_REPLICA_INSTANCE` to `CLOUD_SQL_INSTANCE` + * Remove `master_instance_name` + * Remove `replica_configuration` + * Add current primary's name to the replica's `replica_names` list ```diff resource "google_sql_database_instance" "original-replica" { @@ -54,13 +54,13 @@ resource "google_sql_database_instance" "original-replica" { } ``` -3. Update the old primary and run `terraform plan` \ -a. Change `instance_type` from `CLOUD_SQL_INSTANCE` to `READ_REPLICA_INSTANCE` \ -b. Set `master_instance_name` to the new primary (original replica) \ -c. Set `replica_configuration` and indicate this is a `cascadable-replica` \ -d. Remove old replica from `replica_names` \ - ~> **NOTE**: Do **not** delete the replica_names field, even if it has no replicas remaining. Set replica_names = [ ] to indicate it having no replicas. \ -e. Run `terraform plan` and verify that everything is done in-place (or data will be lost) +3. Update the old primary and run `terraform plan` + * Change `instance_type` from `CLOUD_SQL_INSTANCE` to `READ_REPLICA_INSTANCE` + * Set `master_instance_name` to the new primary (original replica) + * Set `replica_configuration` and indicate this is a `cascadable-replica` + * Remove old replica from `replica_names` + ~> **NOTE**: Do **not** delete the replica_names field, even if it has no replicas remaining. Set replica_names = [ ] to indicate it having no replicas. 
+ * Run `terraform plan` and verify that everything is done in-place (or data will be lost) ```diff resource "google_sql_database_instance" "original-primary" { From 724b055ffc333a133cc8b8e2de58bff80ebbd033 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Mon, 11 Aug 2025 15:45:39 -0700 Subject: [PATCH 726/884] Revert "Fix partial match regexes" (#14814) --- .../terraform/custom_import/extract_taxonomy.go.tmpl | 2 +- .../custom_import/vertex_ai_tensorboard_import.go.tmpl | 8 ++++---- .../terraform/services/apigee/resource_apigee_api.go | 4 ++-- .../terraform/services/apigee/resource_apigee_flowhook.go | 4 ++-- .../resource_apigee_keystores_aliases_key_cert_file.go | 4 ++-- .../apigee/resource_apigee_keystores_aliases_pkcs12.go | 4 ++-- .../services/apigee/resource_apigee_sharedflow.go | 4 ++-- .../apigee/resource_apigee_sharedflow_deployment.go | 5 ++--- .../services/bigquery/resource_bigquery_table.go.tmpl | 6 +++--- .../bigtable/resource_bigtable_authorized_view.go | 6 +++--- .../services/bigtable/resource_bigtable_instance.go | 6 +++--- .../services/bigtable/resource_bigtable_table.go | 6 +++--- .../cloudfunctions/resource_cloudfunctions_function.go | 6 +++--- .../services/compute/resource_compute_instance.go.tmpl | 6 +++--- .../compute/resource_compute_instance_group.go.tmpl | 6 +++--- .../resource_compute_project_metadata_item.go.tmpl | 4 ++-- .../services/compute/resource_compute_target_pool.go.tmpl | 8 ++++---- .../terraform/services/dns/resource_dns_record_set.go | 6 +++--- .../osconfig/resource_os_config_os_policy_assignment.go | 6 +++--- .../resource_google_folder_organization_policy.go | 6 +++--- .../resource_google_project_iam_custom_role.go | 6 +++--- .../resource_google_project_organization_policy.go | 6 +++--- .../resourcemanager/resource_google_service_account.go | 6 +++--- .../services/sql/resource_sql_database_instance.go.tmpl | 6 +++--- tpgtools/ignored_handwritten/custom_import.go | 8 ++++---- 25 files 
changed, 69 insertions(+), 70 deletions(-) diff --git a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl index 17d7a76b1bfb..c99cf546f939 100644 --- a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl +++ b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl @@ -1,7 +1,7 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)$"}, d, config); err != nil { + "(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)"}, d, config); err != nil { return nil, err } diff --git a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl index 7d834ceeba51..be6cd588a588 100644 --- a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl +++ b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl @@ -1,9 +1,9 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go index 2921e19e0caa..57375e59f2ae 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go @@ -324,8 +324,8 @@ func resourceApigeeApiDelete(d *schema.ResourceData, meta interface{}) error { func resourceApigeeApiImport(d *schema.ResourceData, meta interface{}) 
([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/apis/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/apis/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go index 635838873cd0..8a5d6ac30957 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go @@ -223,8 +223,8 @@ func resourceApigeeFlowhookDelete(d *schema.ResourceData, meta interface{}) erro func resourceApigeeFlowhookImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go index 03d842bfc305..48747a85d5b9 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go @@ -362,8 +362,8 @@ func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, met func resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - 
"^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go index ef6a2655cf39..12b1f85fc0ef 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go @@ -299,8 +299,8 @@ func ResourceApigeeKeystoresAliasesPkcs12Delete(d *schema.ResourceData, meta int func ResourceApigeeKeystoresAliasesPkcs12Import(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go index 3b0eba665a4c..4820b95768d7 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go @@ -324,8 +324,8 @@ func resourceApigeeSharedFlowDelete(d *schema.ResourceData, meta interface{}) er func resourceApigeeSharedFlowImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := 
tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/sharedflows/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/sharedflows/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go index 68f1c91d9ac2..7cd90e8676b0 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go @@ -228,9 +228,8 @@ func resourceApigeeSharedflowDeploymentDelete(d *schema.ResourceData, meta inter func resourceApigeeSharedflowDeploymentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)$", - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)/deployments$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index bebe08d56791..dce185ebebf6 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -3501,9 +3501,9 @@ func flattenSerDeInfo(si *bigquery.SerDeInfo) []map[string]interface{} { func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := 
meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go index b34ff48e2388..4a26b6e82a8b 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go @@ -355,9 +355,9 @@ func resourceBigtableAuthorizedViewDestroy(d *schema.ResourceData, meta interfac func resourceBigtableAuthorizedViewImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index c51d6b8c68ff..1487ce288b7e 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -829,9 +829,9 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo func resourceBigtableInstanceImport(d *schema.ResourceData, meta interface{}) 
([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index 91cddb12b30d..66df55fb8b2c 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -686,9 +686,9 @@ func FlattenColumnFamily(families []bigtable.FamilyInfo) ([]map[string]interface func resourceBigtableTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index 739b37d86a47..5fbab6b22280 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -65,9 +65,9 @@ func (s *CloudFunctionId) locationId() string { func parseCloudFunctionId(d *schema.ResourceData, config *transport_tpg.Config) (*CloudFunctionId, error) { if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)$", - 
"^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 718a857dab8d..46ea496e1388 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -3436,9 +3436,9 @@ func resourceComputeInstanceDelete(d *schema.ResourceData, meta interface{}) err func resourceComputeInstanceImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl index af13d60d0d6b..b5782de796cb 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl @@ -455,9 +455,9 @@ func resourceComputeInstanceGroupDelete(d *schema.ResourceData, meta interface{} func resourceComputeInstanceGroupImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - 
"^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl index 4f10ee9db007..fdf16116aa12 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl @@ -184,8 +184,8 @@ func resourceComputeProjectMetadataItemDelete(d *schema.ResourceData, meta inter func resourceComputeProjectMetadataItemImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/meta-data/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/meta-data/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl index 612ec7399dd1..d5617b8e3e96 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl @@ -571,10 +571,10 @@ func resourceComputeTargetPoolDelete(d *schema.ResourceData, meta interface{}) e func resourceTargetPoolStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + 
"projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go index a8e74f92a90d..a9f713075b73 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go +++ b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go @@ -648,9 +648,9 @@ func resourceDnsRecordSetUpdate(d *schema.ResourceData, meta interface{}) error func resourceDnsRecordSetImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go index ad832cd01496..5ae751ff3792 100644 --- a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go +++ b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go @@ -1445,9 +1445,9 @@ func resourceOSConfigOSPolicyAssignmentDelete(d *schema.ResourceData, meta inter func resourceOSConfigOSPolicyAssignmentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)$", - 
"^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go index 6e17b8d7603b..18ec055b54d1 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go @@ -47,9 +47,9 @@ func resourceFolderOrgPolicyImporter(d *schema.ResourceData, meta interface{}) ( config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^folders/(?P[^/]+)/constraints/(?P[^/]+)$", - "^folders/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$"}, + "folders/(?P[^/]+)/constraints/(?P[^/]+)", + "folders/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go index 9b4828db0ab8..ef27f2aacb9a 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go @@ -238,9 +238,9 @@ func resourceGoogleProjectIamCustomRoleDelete(d *schema.ResourceData, meta inter func resourceGoogleProjectIamCustomRoleImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/roles/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + 
"projects/(?P[^/]+)/roles/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go index 88ae902d0783..503117edf5b8 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go @@ -47,9 +47,9 @@ func resourceProjectOrgPolicyImporter(d *schema.ResourceData, meta interface{}) config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+):constraints/(?P[^/]+)$", - "^(?P[^/]+):constraints/(?P[^/]+)$", - "^(?P[^/]+):(?P[^/]+)$"}, + "projects/(?P[^/]+):constraints/(?P[^/]+)", + "(?P[^/]+):constraints/(?P[^/]+)", + "(?P[^/]+):(?P[^/]+)"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index 21e829a6374a..fd50b35de26e 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -321,9 +321,9 @@ func resourceGoogleServiceAccountUpdate(d *schema.ResourceData, meta interface{} func resourceGoogleServiceAccountImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$"}, d, config); err != nil { + "projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)"}, d, config); err != nil { return 
nil, err } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 6ed39b1db93c..bd3c036f73f3 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -2367,9 +2367,9 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$"}, d, config); err != nil { + "projects/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)"}, d, config); err != nil { return nil, err } diff --git a/tpgtools/ignored_handwritten/custom_import.go b/tpgtools/ignored_handwritten/custom_import.go index b50236160e00..be5cfbc9c300 100644 --- a/tpgtools/ignored_handwritten/custom_import.go +++ b/tpgtools/ignored_handwritten/custom_import.go @@ -10,8 +10,8 @@ import ( func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/repos/(?P.+)$", - "^(?P.+)$", + "projects/(?P[^/]+)/repos/(?P.+)", + "(?P.+)", }, d, config); err != nil { return err } @@ -28,8 +28,8 @@ func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) erro func runtimeconfigVariableImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)$", - "^(?P[^/]+)/(?P.+)$", + "projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)", + "(?P[^/]+)/(?P.+)", }, d, config); err != nil { return err } From c1a0a2bc4c8079b3294e2507430bb000f7b080f8 Mon Sep 17 
00:00:00 2001 From: zoeyai-google Date: Mon, 11 Aug 2025 16:57:24 -0700 Subject: [PATCH 727/884] Deprecate Cloud TPU tpu_tensorflow_versions resoruce (#14803) Co-authored-by: Stephen Lewis (Burrows) --- .../services/tpu/data_source_tpu_tensorflow_versions.go | 3 +++ .../website/docs/d/tpu_tensorflow_versions.html.markdown | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go index f17ddc36d101..96e983f57ac1 100644 --- a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go +++ b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go @@ -30,6 +30,9 @@ func DataSourceTpuTensorflowVersions() *schema.Resource { Elem: &schema.Schema{Type: schema.TypeString}, }, }, + DeprecationMessage: "`google_tpu_node` is deprecated and will be removed in a future major release. " + + "Use `google_tpu_v2_vm` instead. " + + "For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm.", } } diff --git a/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown b/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown index 4978589a5d7b..b4e66c8ab27b 100644 --- a/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown @@ -3,6 +3,10 @@ subcategory: "Cloud TPU" description: |- Get available TensorFlow versions. --- +~> **Warning:** +`google_tpu_tensorflow_versions` is deprecated and will be removed in a future major release. + Use `google_tpu_v2_runtime_versions` instead. For moving from TPU Node to TPU VM architecture, see + https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. 
# google_tpu_tensorflow_versions From feddc3c376cc17317610b864a96b4b49610d0232 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Tue, 12 Aug 2025 12:31:10 -0700 Subject: [PATCH 728/884] resolve sync conflicts --- mmv1/products/compute/Subnetwork.yaml | 10 -- ... => resource_storage_transfer_job.go.tmpl} | 0 .../data_source_tpu_tensorflow_versions.go | 96 ------------------- 3 files changed, 106 deletions(-) rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job.go => resource_storage_transfer_job.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index bcbd5fb2fe0d..f3ebd6ada0c9 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -359,7 +359,6 @@ properties: fingerprint_name: 'fingerprint' custom_flatten: 'templates/terraform/custom_flatten/subnetwork_log_config.go.tmpl' custom_expand: 'templates/terraform/custom_expand/subnetwork_log_config.go.tmpl' - diff_suppress_func: 'subnetworkLogConfigDiffSuppress' properties: - name: 'aggregationInterval' type: Enum @@ -511,15 +510,6 @@ properties: update_verb: 'PATCH' fingerprint_name: 'fingerprint' is_missing_in_cai: true - - name: 'enableFlowLogs' - type: Boolean - description: | - Whether to enable flow logging for this subnetwork. If this field is not explicitly set, - it will not appear in get listings. If not set the default behavior is determined by the - org policy, if there is no org policy specified, then it will default to disabled. - This field isn't supported if the subnet purpose field is set to REGIONAL_MANAGED_PROXY. - default_from_api: true - deprecation_message: 'This field is being removed in favor of log_config. If log_config is present, flow logs are enabled.' 
- name: 'state' type: Enum description: | diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go deleted file mode 100644 index 96e983f57ac1..000000000000 --- a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go +++ /dev/null @@ -1,96 +0,0 @@ -package tpu - -import ( - "fmt" - "log" - "sort" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceTpuTensorflowVersions() *schema.Resource { - return &schema.Resource{ - Read: dataSourceTpuTensorFlowVersionsRead, - Schema: map[string]*schema.Schema{ - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "zone": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "versions": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - DeprecationMessage: "`google_tpu_node` is deprecated and will be removed in a future major release. " + - "Use `google_tpu_v2_vm` instead. 
" + - "For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm.", - } -} - -func dataSourceTpuTensorFlowVersionsRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - zone, err := tpgresource.GetZone(d, config) - if err != nil { - return err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{TPUBasePath}}projects/{{project}}/locations/{{zone}}/tensorflowVersions") - if err != nil { - return err - } - - versionsRaw, err := tpgresource.PaginatedListRequest(project, url, userAgent, config, flattenTpuTensorflowVersions) - if err != nil { - return fmt.Errorf("Error listing TPU Tensorflow versions: %s", err) - } - - versions := make([]string, len(versionsRaw)) - for i, ver := range versionsRaw { - versions[i] = ver.(string) - } - sort.Strings(versions) - - log.Printf("[DEBUG] Received Google TPU Tensorflow Versions: %q", versions) - - if err := d.Set("versions", versions); err != nil { - return fmt.Errorf("Error setting versions: %s", err) - } - if err := d.Set("zone", zone); err != nil { - return fmt.Errorf("Error setting zone: %s", err) - } - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - d.SetId(fmt.Sprintf("projects/%s/zones/%s", project, zone)) - - return nil -} - -func flattenTpuTensorflowVersions(resp map[string]interface{}) []interface{} { - verObjList := resp["tensorflowVersions"].([]interface{}) - versions := make([]interface{}, len(verObjList)) - for i, v := range verObjList { - verObj := v.(map[string]interface{}) - versions[i] = verObj["version"] - } - return versions -} From e7e62c630e9a44d5f7fff59c90ab696167754158 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: 
Mon, 28 Apr 2025 10:03:19 -0700 Subject: [PATCH 729/884] add upgrade guide to 7.0.0 branch (#13790) --- .../guides/version_7_upgrade.html.markdown | 109 ++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown new file mode 100644 index 000000000000..2b7ba1125ecb --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -0,0 +1,109 @@ +--- +page_title: "Terraform provider for Google Cloud 7.0.0 Upgrade Guide" +description: |- + Terraform provider for Google Cloud 7.0.0 Upgrade Guide +--- + +# Terraform Google Provider 7.0.0 Upgrade Guide + +The `7.0.0` release of the Google provider for Terraform is a major version and +includes some changes that you will need to consider when upgrading. This guide +is intended to help with that process and focuses only on the changes necessary +to upgrade from the final `6.X` series release to `7.0.0`. + +Most of the changes outlined in this guide have been previously marked as +deprecated in the Terraform `plan`/`apply` output throughout previous provider +releases, up to and including the final `6.X` series release. These changes, +such as deprecation notices, can always be found in the CHANGELOG of the +affected providers. [google](https://github.com/hashicorp/terraform-provider-google/blob/main/CHANGELOG.md) +[google-beta](https://github.com/hashicorp/terraform-provider-google-beta/blob/main/CHANGELOG.md) + +## I accidentally upgraded to 7.0.0, how do I downgrade to `6.X`? 
+ +If you've inadvertently upgraded to `7.0.0`, first see the +[Provider Version Configuration Guide](#provider-version-configuration) to lock +your provider version; if you've constrained the provider to a lower version +such as shown in the previous version example in that guide, Terraform will pull +in a `6.X` series release on `terraform init`. + +If you've only ran `terraform init` or `terraform plan`, your state will not +have been modified and downgrading your provider is sufficient. + +If you've ran `terraform refresh` or `terraform apply`, Terraform may have made +state changes in the meantime. + +* If you're using a local state, or a remote state backend that does not support +versioning, `terraform refresh` with a downgraded provider is likely sufficient +to revert your state. The Google provider generally refreshes most state +information from the API, and the properties necessary to do so have been left +unchanged. + +* If you're using a remote state backend that supports versioning such as +[Google Cloud Storage](https://developer.hashicorp.com/terraform/language/settings/backends/gcs), +you can revert the Terraform state file to a previous version. If you do +so and Terraform had created resources as part of a `terraform apply` in the +meantime, you'll need to either delete them by hand or `terraform import` them +so Terraform knows to manage them. + +## Provider Version Configuration + +-> Before upgrading to version 7.0.0, it is recommended to upgrade to the most +recent `6.X` series release of the provider, make the changes noted in this guide, +and ensure that your environment successfully runs +[`terraform plan`](https://developer.hashicorp.com/terraform/cli/commands/plan) +without unexpected changes or deprecation notices. + +It is recommended to use [version constraints](https://developer.hashicorp.com/terraform/language/providers/requirements#requiring-providers) +when configuring Terraform providers. 
If you are following that recommendation, +update the version constraints in your Terraform configuration and run +[`terraform init`](https://developer.hashicorp.com/terraform/cli/commands/init) to download +the new version. + +If you aren't using version constraints, you can use `terraform init -upgrade` +in order to upgrade your provider to the latest released version. + +For example, given this previous configuration: + +```hcl +terraform { + required_providers { + google = { + version = "~> 5.30.0" + } + } +} +``` + +An updated configuration: + +```hcl +terraform { + required_providers { + google = { + version = "~> 7.0.0" + } + } +} +``` + +## Provider + +### Provider-level change example header + +Description of the change and how users should adjust their configuration (if needed). + +## Datasources + +## Datasource: `google_product_datasource` + +### Datasource-level change example header + +Description of the change and how users should adjust their configuration (if needed). + +## Resources + +## Resource: `google_product_resource` + +### Resource-level change example header + +Description of the change and how users should adjust their configuration (if needed). 
From 24480c882ab5fdb2d93d47007e82d7b431e4363d Mon Sep 17 00:00:00 2001 From: Ron Gal <125445217+ron-gal@users.noreply.github.com> Date: Wed, 25 Jun 2025 13:33:43 -0400 Subject: [PATCH 730/884] feat(bigtable): rename instance to instance_name for table_iam resource (#14350) --- .../services/bigtable/iam_bigtable_table.go | 2 +- .../resource_bigtable_table_iam_test.go | 34 +++++++++---------- .../d/bigtable_table_iam_policy.html.markdown | 4 +-- .../guides/version_7_upgrade.html.markdown | 18 ++++++++++ .../docs/r/bigtable_table_iam.html.markdown | 26 +++++++------- 5 files changed, 51 insertions(+), 33 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go index 774efb4f761c..03c647546453 100644 --- a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go @@ -14,7 +14,7 @@ import ( ) var IamBigtableTableSchema = map[string]*schema.Schema{ - "instance": { + "instance_name": { Type: schema.TypeString, Required: true, ForceNew: true, diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go index e729f0a49a81..dd31e17eb757 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go @@ -140,10 +140,10 @@ resource "google_service_account" "test-account2" { } resource "google_bigtable_table_iam_binding" "binding" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - members = [ + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + members = [ "serviceAccount:${google_service_account.test-account1.email}", ] } @@ -163,10 +163,10 
@@ resource "google_service_account" "test-account2" { } resource "google_bigtable_table_iam_binding" "binding" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - members = [ + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + members = [ "serviceAccount:${google_service_account.test-account1.email}", "serviceAccount:${google_service_account.test-account2.email}", ] @@ -182,10 +182,10 @@ resource "google_service_account" "test-account" { } resource "google_bigtable_table_iam_member" "member" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - member = "serviceAccount:${google_service_account.test-account.email}" + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + member = "serviceAccount:${google_service_account.test-account.email}" } `, instance, cluster, cluster, account, role) } @@ -205,14 +205,14 @@ data "google_iam_policy" "policy" { } resource "google_bigtable_table_iam_policy" "policy" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - policy_data = data.google_iam_policy.policy.policy_data + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + policy_data = data.google_iam_policy.policy.policy_data } data "google_bigtable_table_iam_policy" "policy" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name } `, instance, cluster, cluster, account, role) diff --git a/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown b/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown index e44bd2b3e6f4..bfec2bb723b4 100644 --- 
a/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown @@ -12,8 +12,8 @@ Retrieves the current IAM policy data for a Bigtable Table. ```hcl data "google_bigtable_table_iam_policy" "policy" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name } ``` diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 2b7ba1125ecb..e469a53221da 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -107,3 +107,21 @@ Description of the change and how users should adjust their configuration (if ne ### Resource-level change example header Description of the change and how users should adjust their configuration (if needed). + +## Resource: `google_bigtable_table_iam_policy` + +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. + +## Resource: `google_bigtable_table_iam_binding` + +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. + +## Resource: `google_bigtable_table_iam_member` + +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. 
diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown index 7007e82bb986..b8dd500407fb 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown @@ -29,10 +29,10 @@ data "google_iam_policy" "admin" { } resource "google_bigtable_table_iam_policy" "editor" { - project = "your-project" - instance = "your-bigtable-instance" - table = "your-bigtable-table" - policy_data = data.google_iam_policy.admin.policy_data + project = "your-project" + instance_name = "your-bigtable-instance" + table = "your-bigtable-table" + policy_data = data.google_iam_policy.admin.policy_data } ``` @@ -40,10 +40,10 @@ resource "google_bigtable_table_iam_policy" "editor" { ```hcl resource "google_bigtable_table_iam_binding" "editor" { - table = "your-bigtable-table" - instance = "your-bigtable-instance" - role = "roles/bigtable.user" - members = [ + table = "your-bigtable-table" + instance_name = "your-bigtable-instance" + role = "roles/bigtable.user" + members = [ "user:jane@example.com", ] } @@ -53,10 +53,10 @@ resource "google_bigtable_table_iam_binding" "editor" { ```hcl resource "google_bigtable_table_iam_member" "editor" { - table = "your-bigtable-table" - instance = "your-bigtable-instance" - role = "roles/bigtable.user" - member = "user:jane@example.com" + table = "your-bigtable-table" + instance_name = "your-bigtable-instance" + role = "roles/bigtable.user" + member = "user:jane@example.com" } ``` @@ -64,7 +64,7 @@ resource "google_bigtable_table_iam_member" "editor" { The following arguments are supported: -* `instance` - (Required) The name or relative resource id of the instance that owns the table. +* `instance_name` - (Required) The name or relative resource id of the instance that owns the table. 
* `table` - (Required) The name or relative resource id of the table to manage IAM policies for. From f47fd9bf889613256e8ea2d8affee1f681f4fcc7 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Wed, 2 Jul 2025 18:28:57 +0000 Subject: [PATCH 731/884] fix: (storagetransfer) path validation for GCS path source and sink (#14377) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: drfaust92 Signed-off-by: James Alseth Signed-off-by: Cezary Sobczak Signed-off-by: Misha Efimov Signed-off-by: David Xia Signed-off-by: pcrao Signed-off-by: Eric Bode Co-authored-by: Nick Elliot Co-authored-by: Cameron Thornton Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Co-authored-by: tulika-aakriti Co-authored-by: Scott Suarez Co-authored-by: anthonyrtong Co-authored-by: Zhenhua Li Co-authored-by: NA2047 <12290725+NA2047@users.noreply.github.com> Co-authored-by: Chris Hawk Co-authored-by: Ilia Lazebnik Co-authored-by: Ramon Vermeulen Co-authored-by: Sam Levenick Co-authored-by: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Co-authored-by: Shrishty Chandra Co-authored-by: Sharan Teja M Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: James Alseth Co-authored-by: Riley Karson Co-authored-by: stevenyang72 Co-authored-by: oferhandel-google Co-authored-by: Jatin Miglani Co-authored-by: translucens Co-authored-by: Sing Co-authored-by: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Co-authored-by: Ronson Xaviour <50081163+ronsonx@users.noreply.github.com> Co-authored-by: Ronson Xaviour Co-authored-by: Thomas Rodgers Co-authored-by: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Co-authored-by: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Co-authored-by: Iris Chen <10179943+iyabchen@users.noreply.github.com> Co-authored-by: Or Sela Co-authored-by: Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Co-authored-by: Mauricio Alvarez Leon 
<65101411+BBBmau@users.noreply.github.com> Co-authored-by: kigesui Co-authored-by: Meng Yang Co-authored-by: Ashwin G Co-authored-by: Allison Fisher Co-authored-by: mihhalj Co-authored-by: Guy Bidkar <5646214+gbidkar@users.noreply.github.com> Co-authored-by: Dawid212 Co-authored-by: Michael Lopez Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: sahil-mahajan-google Co-authored-by: kautikdk <144651627+kautikdk@users.noreply.github.com> Co-authored-by: harshithpatte-g Co-authored-by: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Co-authored-by: ML Co-authored-by: Marek Lipert Co-authored-by: James Alseth Co-authored-by: Madhura Phadnis Co-authored-by: YashTayal04 <47032845+YashTayal04@users.noreply.github.com> Co-authored-by: Misha Efimov Co-authored-by: Aiden Grossman Co-authored-by: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Co-authored-by: Wiktor Niesiobędzki Co-authored-by: MatthewVu-dev Co-authored-by: Madhu Suraj Co-authored-by: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Co-authored-by: Jun Luo Co-authored-by: Raj Anand <88097156+raazanand@users.noreply.github.com> Co-authored-by: Tommy Reddad Co-authored-by: palramanathan <117597159+palramanathan@users.noreply.github.com> Co-authored-by: Michał Wiatrowski Co-authored-by: rlapin-pl <114071972+rlapin-pl@users.noreply.github.com> Co-authored-by: rlapin-pl Co-authored-by: tonybayvas Co-authored-by: Ryan Oaks Co-authored-by: David Xia Co-authored-by: sachin purohit Co-authored-by: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Co-authored-by: Jack Weinbender Co-authored-by: Balanagu Harsha Vardhan Co-authored-by: porky256 <61063240+porky256@users.noreply.github.com> Co-authored-by: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Co-authored-by: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Co-authored-by: Ashwin G Co-authored-by: Xian-Ji Chen <68801742+XianJiChen@users.noreply.github.com> 
Co-authored-by: Nithin Daniel <55326622+nithindaniel@users.noreply.github.com> Co-authored-by: Nithin Daniel Co-authored-by: Michał Wiatrowski Co-authored-by: veraz0818 Co-authored-by: DavinaRen Co-authored-by: dishaagarwal03-google Co-authored-by: Margubur Rahman <150442997+googlyrahman@users.noreply.github.com> Co-authored-by: Nandini Agrawal Co-authored-by: Ziting Co-authored-by: Taneli Leppä Co-authored-by: martin-guillen Co-authored-by: FilipKubawskiOkta Co-authored-by: Calvin Liu Co-authored-by: Sepehr Javid <32390553+sepehrjavid@users.noreply.github.com> Co-authored-by: Niharika <35183015+niharika-98@users.noreply.github.com> Co-authored-by: Arnav Dham Co-authored-by: Daniel Rieske Co-authored-by: Luca Prete Co-authored-by: Luca Prete Co-authored-by: echiugoog Co-authored-by: Justin Scofield <47263509+scawful@users.noreply.github.com> Co-authored-by: liaoaohaha Co-authored-by: Wonje Kang <96211823+wonjekang@users.noreply.github.com> Co-authored-by: Pradeep Rao <84025829+pradeepcrao@users.noreply.github.com> Co-authored-by: Tlaquetzal Co-authored-by: StealthyCoder Co-authored-by: animeshnandanwar Co-authored-by: Nandini Agrawal Co-authored-by: Stephane Charite Co-authored-by: Steven Davidovitz <13248+steved@users.noreply.github.com> Co-authored-by: vmiglani <142545940+vmiglani@users.noreply.github.com> Co-authored-by: xuebaoZ Co-authored-by: zhihaos Co-authored-by: Hoang Pham Co-authored-by: vbhadoriaB <150216360+vbhadoriaB@users.noreply.github.com> Co-authored-by: Lakshman Swaminathan Co-authored-by: luckyswaminathan Co-authored-by: iamkonohamaru Co-authored-by: Brad Fisher Co-authored-by: panerorenn9541 <36008213+panerorenn9541@users.noreply.github.com> Co-authored-by: Keith Jordy <6444028+kjordy@users.noreply.github.com> Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Yanwei Guo Co-authored-by: coder-221 <185867912+coder-221@users.noreply.github.com> Co-authored-by: Naga Bodepudi 
Co-authored-by: Yuval Brik Co-authored-by: Ron Gal <125445217+ron-gal@users.noreply.github.com> --- .../resource_storage_transfer_job.go.tmpl | 16 ++- .../resource_storage_transfer_job_test.go | 104 ++++++++++++++++++ .../guides/version_7_upgrade.html.markdown | 10 ++ 3 files changed, 128 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl index e437d31ea2c6..8187246ffdd2 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl @@ -4,6 +4,7 @@ import ( "fmt" "log" "reflect" + "regexp" "strings" "time" @@ -694,14 +695,25 @@ func gcsDataSchema() *schema.Resource { }, "path": { Optional: true, - Computed: true, Type: schema.TypeString, - Description: `Google Cloud Storage path in bucket to transfer`, + Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. 
As such, it should not begin with a '/'.`, + ValidateFunc: validateGCSDataPath, }, }, } } +func validateGCSDataPath(v interface{}, k string) (ws []string, errors []error) { + value := v.(string) + value = strings.TrimSpace(value) + // checks if path not started with "/" + regex, err := regexp.Compile("^/+") + if err == nil && len(value) > 0 && regex.Match([]byte(value)) { + errors = append(errors, fmt.Errorf("%q cannot start with /", k)) + } + return +} + func awsS3DataSchema() *schema.Resource { return &schema.Resource{ Schema: map[string]*schema.Schema{ diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 1d50924e8060..09647d6ff895 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -573,6 +573,32 @@ func TestAccStorageTransferJob_hdfsSource(t *testing.T) { }) } +func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { + t.Parallel() + + testDataSourceBucketName := acctest.RandString(t, 10) + testDataSinkName := acctest.RandString(t, 10) + testTransferJobDescription := acctest.RandString(t, 10) + testTransferJobName := fmt.Sprintf("tf-test-transfer-job-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageTransferJobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), + }, + { + Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, 
testDataSinkName, testTransferJobDescription, testTransferJobName, ""), + }, + { + Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), + }, + }, + }) +} + func testAccStorageTransferJobDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -2399,3 +2425,81 @@ resource "google_storage_transfer_job" "transfer_job" { } `, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project) } + +func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string, testTransferJobName string, gcsPath string) string { + return fmt.Sprintf(` + data "google_storage_transfer_project_service_account" "default" { + project = "%s" + } + + resource "google_storage_bucket" "data_source" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true + } + + resource "google_storage_bucket_iam_member" "data_source" { + bucket = google_storage_bucket.data_source.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + } + + resource "google_storage_bucket" "data_sink" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true + } + + resource "google_storage_bucket_iam_member" "data_sink" { + bucket = google_storage_bucket.data_sink.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + } + + resource "google_storage_transfer_job" "transfer_job" { + name = "transferJobs/%s" + description = "%s" + project = "%s" + + transfer_spec { + gcs_data_source { + bucket_name = 
google_storage_bucket.data_source.name + path = "foo/" + } + gcs_data_sink { + bucket_name = google_storage_bucket.data_sink.name + path = "%s" + } + } + + schedule { + schedule_start_date { + year = 2018 + month = 10 + day = 1 + } + schedule_end_date { + year = 2019 + month = 10 + day = 1 + } + start_time_of_day { + hours = 0 + minutes = 30 + seconds = 0 + nanos = 0 + } + repeat_interval = "604800s" + } + + depends_on = [ + google_storage_bucket_iam_member.data_source, + google_storage_bucket_iam_member.data_sink, + ] + } + `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project, gcsPath) +} diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index e469a53221da..da0230b6ab05 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -125,3 +125,13 @@ Description of the change and how users should adjust their configuration (if ne ### `instance` is now removed `instance` has been removed in favor of `instance_name`. + +## Resource: `google_storage_transfer_job` + +### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +### `transfer_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +### `replication_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +### `replication_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." 
From 92b2524e6341cfa471d97ee59c4ba999ed53bd46 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 8 Jul 2025 17:03:09 +0000 Subject: [PATCH 732/884] fix: (storage) data type change for retention_period (#14442) Co-authored-by: Riley Karson --- .../storage/resource_storage_bucket.go.tmpl | 26 +- .../resource_storage_bucket_600_migration.go | 530 ++++++++++++++++++ .../storage/resource_storage_bucket_test.go | 6 +- .../guides/version_7_upgrade.html.markdown | 10 +- .../docs/r/storage_bucket.html.markdown | 2 +- .../tgc/services/storage/storage_bucket.go | 5 +- 6 files changed, 564 insertions(+), 15 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index 796af9a55efd..b7cb4ec0857f 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -6,7 +6,6 @@ import ( "errors" "fmt" "log" - "math" "regexp" "runtime" "strconv" @@ -46,7 +45,7 @@ func ResourceStorageBucket() *schema.Resource { Read: schema.DefaultTimeout(4 * time.Minute), }, - SchemaVersion: 3, + SchemaVersion: 4, StateUpgraders: []schema.StateUpgrader{ { Type: resourceStorageBucketV0().CoreConfigSchema().ImpliedType(), @@ -63,6 +62,11 @@ func ResourceStorageBucket() *schema.Resource { Upgrade: ResourceStorageBucketStateUpgradeV2, Version: 2, }, + { + Type: resourceStorageBucketV3().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV3, + Version: 3, + }, }, Schema: map[string]*schema.Schema{ @@ -407,9 +411,8 @@ func ResourceStorageBucket() *schema.Resource { Description: `If set to true, the bucket will be locked and permanently restrict edits to the bucket's retention policy. 
Caution: Locking a bucket is an irreversible action.`, }, "retention_period": { - Type: schema.TypeInt, + Type: schema.TypeString, Required: true, - ValidateFunc: validation.IntBetween(1, math.MaxInt32), Description: `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds.`, }, }, @@ -862,7 +865,11 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error retentionPolicy := retention_policies[0].(map[string]interface{}) if v, ok := retentionPolicy["retention_period"]; ok { - sb.RetentionPolicy.RetentionPeriod = int64(v.(int)) + value, err := strconv.ParseInt(v.(string), 10, 64) + if err != nil { + return err + } + sb.RetentionPolicy.RetentionPeriod = value } } } @@ -1470,9 +1477,14 @@ func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentio } retentionPolicy := retentionPolicies[0].(map[string]interface{}) + var retentionPeriod int64 + if v, ok := retentionPolicy["retention_period"]; ok { + retentionPeriod, _ = strconv.ParseInt(v.(string), 10, 64) + } + bucketRetentionPolicy := &storage.BucketRetentionPolicy{ IsLocked: retentionPolicy["is_locked"].(bool), - RetentionPeriod: int64(retentionPolicy["retention_period"].(int)), + RetentionPeriod: retentionPeriod, } return bucketRetentionPolicy @@ -1487,7 +1499,7 @@ func flattenBucketRetentionPolicy(bucketRetentionPolicy *storage.BucketRetention retentionPolicy := map[string]interface{}{ "is_locked": bucketRetentionPolicy.IsLocked, - "retention_period": bucketRetentionPolicy.RetentionPeriod, + "retention_period": fmt.Sprintf("%d", bucketRetentionPolicy.RetentionPeriod), } bucketRetentionPolicies = append(bucketRetentionPolicies, retentionPolicy) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index 
3f953333d804..fad2e44bd970 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -4,6 +4,7 @@ import ( "context" "log" "math" + "strconv" "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -1050,3 +1051,532 @@ func ResourceStorageBucketStateUpgradeV2(_ context.Context, rawState map[string] log.Printf("[DEBUG] Attributes after migration: %#v", rawState) return rawState, nil } + +func resourceStorageBucketV3() *schema.Resource { + return &schema.Resource{ + StateUpgraders: []schema.StateUpgrader{ + { + Type: resourceStorageBucketV0().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV0, + Version: 0, + }, + { + Type: resourceStorageBucketV1().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV1, + Version: 1, + }, + { + Type: resourceStorageBucketV2().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV1, + Version: 2, + }, + }, + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, + ValidateFunc: verify.ValidateGCSName, + }, + + "encryption": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "default_kms_key_name": { + Type: schema.TypeString, + Required: true, + Description: `A Cloud KMS key that will be used to encrypt objects inserted into this bucket, if no encryption method is specified. You must pay attention to whether the crypto key is available in the location that this bucket is created in. 
See the docs for more details.`, + }, + }, + }, + Description: `The bucket's encryption configuration.`, + }, + + "requester_pays": { + Type: schema.TypeBool, + Optional: true, + Description: `Enables Requester Pays on a storage bucket.`, + }, + + "force_destroy": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run.`, + }, + + "labels": { + Type: schema.TypeMap, + ValidateFunc: labelKeyValidator, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `A set of key/value label pairs to assign to the bucket.`, + }, + + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "location": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + StateFunc: func(s interface{}) string { + return strings.ToUpper(s.(string)) + }, + Description: `The Google Cloud Storage location`, + }, + + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The ID of the project in which the resource belongs. 
If it is not provided, the provider project is used.`, + }, + + "project_number": { + Type: schema.TypeInt, + Computed: true, + Description: `The project number of the project in which the resource belongs.`, + }, + + "self_link": { + Type: schema.TypeString, + Computed: true, + Description: `The URI of the created resource.`, + }, + + "url": { + Type: schema.TypeString, + Computed: true, + Description: `The base URL of the bucket, in the format gs://.`, + }, + + "storage_class": { + Type: schema.TypeString, + Optional: true, + Default: "STANDARD", + Description: `The Storage Class of the new bucket. Supported values include: STANDARD, MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`, + }, + + "lifecycle_rule": { + Type: schema.TypeList, + Optional: true, + MaxItems: 100, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "action": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + MaxItems: 1, + Set: resourceGCSBucketLifecycleRuleActionHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + Description: `The type of the action of this Lifecycle Rule. Supported values include: Delete, SetStorageClass and AbortIncompleteMultipartUpload.`, + }, + "storage_class": { + Type: schema.TypeString, + Optional: true, + Description: `The target Storage Class of objects affected by this Lifecycle Rule. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`, + }, + }, + }, + Description: `The Lifecycle Rule's action configuration. 
A single block of this type is supported.`, + }, + "condition": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + MaxItems: 1, + Set: resourceGCSBucketLifecycleRuleConditionHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "age": { + Type: schema.TypeInt, + Optional: true, + Description: `Minimum age of an object in days to satisfy this condition.`, + }, + "created_before": { + Type: schema.TypeString, + Optional: true, + Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, + }, + "custom_time_before": { + Type: schema.TypeString, + Optional: true, + Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, + }, + "days_since_custom_time": { + Type: schema.TypeInt, + Optional: true, + Description: `Number of days elapsed since the user-specified timestamp set on an object.`, + }, + "days_since_noncurrent_time": { + Type: schema.TypeInt, + Optional: true, + Description: `Number of days elapsed since the noncurrent timestamp of an object. This + condition is relevant only for versioned objects.`, + }, + "noncurrent_time_before": { + Type: schema.TypeString, + Optional: true, + Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, + }, + "no_age": { + Type: schema.TypeBool, + Deprecated: "`no_age` is deprecated and will be removed in a future major release. Use `send_age_if_zero` instead.", + Optional: true, + Description: `While set true, age value will be omitted.Required to set true when age is unset in the config file.`, + }, + "with_state": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"LIVE", "ARCHIVED", "ANY", ""}, false), + Description: `Match to live and/or archived objects. Unversioned buckets have only live objects. 
Supported values include: "LIVE", "ARCHIVED", "ANY".`, + }, + "matches_storage_class": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `Storage Class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE, STANDARD, DURABLE_REDUCED_AVAILABILITY.`, + }, + "num_newer_versions": { + Type: schema.TypeInt, + Optional: true, + Description: `Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition.`, + }, + "matches_prefix": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `One or more matching name prefixes to satisfy this condition.`, + }, + "matches_suffix": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `One or more matching name suffixes to satisfy this condition.`, + }, + "send_age_if_zero": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: `While set true, age value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the age field. It can be used alone or together with age.`, + }, + "send_days_since_noncurrent_time_if_zero": { + Type: schema.TypeBool, + Optional: true, + Description: `While set true, days_since_noncurrent_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_noncurrent_time field. It can be used alone or together with days_since_noncurrent_time.`, + }, + "send_days_since_custom_time_if_zero": { + Type: schema.TypeBool, + Optional: true, + Description: `While set true, days_since_custom_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_custom_time field. 
It can be used alone or together with days_since_custom_time.`, + }, + "send_num_newer_versions_if_zero": { + Type: schema.TypeBool, + Optional: true, + Description: `While set true, num_newer_versions value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the num_newer_versions field. It can be used alone or together with num_newer_versions.`, + }, + }, + }, + Description: `The Lifecycle Rule's condition configuration.`, + }, + }, + }, + Description: `The bucket's Lifecycle Rules configuration.`, + }, + + "enable_object_retention": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: `Enables each object in the bucket to have its own retention policy, which prevents deletion until stored for a specific length of time.`, + }, + + "versioning": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `While set to true, versioning is fully enabled for this bucket.`, + }, + }, + }, + Description: `The bucket's Versioning configuration.`, + }, + + "autoclass": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `While set to true, autoclass automatically transitions objects in your bucket to appropriate storage classes based on each object's access pattern.`, + }, + "terminal_storage_class": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The storage class that objects in the bucket eventually transition to if they are not read for a certain length of time. 
Supported values include: NEARLINE, ARCHIVE.`, + }, + }, + }, + Description: `The bucket's autoclass configuration.`, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + _, n := d.GetChange(strings.TrimSuffix(k, ".#")) + if !strings.HasSuffix(k, ".#") { + return false + } + var l []interface{} + if new == "1" && old == "0" { + l = n.([]interface{}) + contents, ok := l[0].(map[string]interface{}) + if !ok { + return false + } + if contents["enabled"] == false { + return true + } + } + if new == "0" && old == "1" { + n := d.Get(strings.TrimSuffix(k, ".#")) + l = n.([]interface{}) + contents := l[0].(map[string]interface{}) + if contents["enabled"] == false { + return true + } + } + return false + }, + }, + "website": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "main_page_suffix": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: []string{"website.0.not_found_page", "website.0.main_page_suffix"}, + Description: `Behaves as the bucket's directory index where missing objects are treated as potential directories.`, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + return old != "" && new == "" + }, + }, + "not_found_page": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: []string{"website.0.main_page_suffix", "website.0.not_found_page"}, + Description: `The custom object to return when a requested resource is not found.`, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + return old != "" && new == "" + }, + }, + }, + }, + Description: `Configuration if the bucket acts as a website.`, + }, + + "retention_policy": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "is_locked": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `If set to true, the bucket will be locked 
and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action.`, + }, + "retention_period": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, math.MaxInt32), + Description: `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds.`, + }, + }, + }, + Description: `Configuration of the bucket's data retention policy for how long objects in the bucket should be retained.`, + }, + + "cors": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "origin": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `The list of Origins eligible to receive CORS response headers. Note: "*" is permitted in the list of origins, and means "any Origin".`, + }, + "method": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `The list of HTTP methods on which to include CORS response headers, (GET, OPTIONS, POST, etc) Note: "*" is permitted in the list of methods, and means "any method".`, + }, + "response_header": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `The list of HTTP headers other than the simple response headers to give permission for the user-agent to share across domains.`, + }, + "max_age_seconds": { + Type: schema.TypeInt, + Optional: true, + Description: `The value, in seconds, to return in the Access-Control-Max-Age header used in preflight responses.`, + }, + }, + }, + Description: `The bucket's Cross-Origin Resource Sharing (CORS) configuration.`, + }, + + "default_event_based_hold": { + Type: schema.TypeBool, + Optional: true, + Description: `Whether or not to automatically apply an eventBasedHold to new objects 
added to the bucket.`, + }, + + "logging": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "log_bucket": { + Type: schema.TypeString, + Required: true, + Description: `The bucket that will receive log objects.`, + }, + "log_object_prefix": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The object prefix for log objects. If it's not provided, by default Google Cloud Storage sets this to this bucket's name.`, + }, + }, + }, + Description: `The bucket's Access & Storage Logs configuration.`, + }, + "uniform_bucket_level_access": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + Description: `Enables uniform bucket-level access on a bucket.`, + }, + "custom_placement_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "data_locations": { + Type: schema.TypeSet, + Required: true, + ForceNew: true, + MaxItems: 2, + MinItems: 2, + Elem: &schema.Schema{ + Type: schema.TypeString, + StateFunc: func(s interface{}) string { + return strings.ToUpper(s.(string)) + }, + }, + Description: `The list of individual regions that comprise a dual-region bucket. See the docs for a list of acceptable regions. Note: If any of the data_locations changes, it will recreate the bucket.`, + }, + }, + }, + Description: `The bucket's custom location configuration, which specifies the individual regions that comprise a dual-region bucket. If the bucket is designated a single or multi-region, the parameters are empty.`, + }, + "rpo": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Specifies the RPO setting of bucket. If set 'ASYNC_TURBO', The Turbo Replication will be enabled for the dual-region bucket. Value 'DEFAULT' will set RPO setting to default. 
Turbo Replication is only for buckets in dual-regions.See the docs for more details.`, + }, + "public_access_prevention": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Prevents public access to a bucket.`, + }, + "soft_delete_policy": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + Description: `The bucket's soft delete policy, which defines the period of time that soft-deleted objects will be retained, and cannot be permanently deleted. If it is not provided, by default Google Cloud Storage sets this to default soft delete policy`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "retention_duration_seconds": { + Type: schema.TypeInt, + Default: 604800, + Optional: true, + Description: `The duration in seconds that soft-deleted objects in the bucket will be retained and cannot be permanently deleted. Default value is 604800.`, + }, + "effective_time": { + Type: schema.TypeString, + Computed: true, + Description: `Server-determined value that indicates the time from which the policy, or one with a greater retention, was effective. 
This value is in RFC 3339 format.`, + }, + }, + }, + }, + }, + UseJSONNumber: true, + } +} + +func ResourceStorageBucketStateUpgradeV3(_ context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + log.Printf("[DEBUG] Attributes before migration: %#v", rawState) + if rawState["retention_policy"] != nil { + retentionPolicies := rawState["retention_policy"].([]interface{}) + if len(retentionPolicies) > 0 { + retentionPolicy := retentionPolicies[0].(map[string]interface{}) + if v, ok := retentionPolicy["retention_period"]; ok { + retentionPolicy["retention_period"] = strconv.Itoa(v.(int)) + } + } + } + log.Printf("[DEBUG] Attributes after migration: %#v", rawState) + return rawState, nil +} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go index b7d31bac9b85..40aec7f963d9 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go @@ -2680,7 +2680,7 @@ resource "google_storage_bucket" "bucket" { force_destroy = true retention_policy { - retention_period = 10 + retention_period = "10" } } `, bucketName) @@ -2695,7 +2695,7 @@ resource "google_storage_bucket" "bucket" { retention_policy { is_locked = true - retention_period = 10 + retention_period = "10" } } `, bucketName) @@ -2788,7 +2788,7 @@ resource "google_storage_bucket" "bucket" { force_destroy = true retention_policy { - retention_period = 3600 + retention_period = "3600" } } `, bucketName) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index da0230b6ab05..5df81a98b0df 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,11 +102,15 @@ Description of the change and how users should adjust their configuration (if ne ## Resources -## Resource: `google_product_resource` +## Resource: `google_storage_bucket` -### Resource-level change example header +### `retention_period` changed to `string` data type -Description of the change and how users should adjust their configuration (if needed). +`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. + +Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. + +To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. ## Resource: `google_bigtable_table_iam_policy` diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index 88a1f361665c..0b4f4b9bd5d9 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -255,7 +255,7 @@ The following arguments are supported: * `is_locked` - (Optional) If set to `true`, the bucket will be [locked](https://cloud.google.com/storage/docs/using-bucket-lock#lock-bucket) and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action. -* `retention_period` - (Required) The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 2,147,483,647 seconds. 
+* `retention_period` - (Required) The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds. The `logging` block supports: diff --git a/mmv1/third_party/tgc/services/storage/storage_bucket.go b/mmv1/third_party/tgc/services/storage/storage_bucket.go index 48ecbd64aa3a..b7f83e34d92f 100644 --- a/mmv1/third_party/tgc/services/storage/storage_bucket.go +++ b/mmv1/third_party/tgc/services/storage/storage_bucket.go @@ -10,6 +10,7 @@ package storage import ( "fmt" + "strconv" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -213,8 +214,10 @@ func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentio } retentionPolicy := retentionPolicies[0].(map[string]interface{}) + value, _ := strconv.ParseInt(retentionPolicy["retention_period"].(string), 10, 64) + bucketRetentionPolicy := &storage.BucketRetentionPolicy{ - RetentionPeriod: int64(retentionPolicy["retention_period"].(int)), + RetentionPeriod: value, } return bucketRetentionPolicy From 46e2711b7ed78b2ff97ef7860ac3cc7cee74661c Mon Sep 17 00:00:00 2001 From: haiyanmeng Date: Fri, 11 Jul 2025 17:11:40 -0400 Subject: [PATCH 733/884] Update beta api endpoint from v1beta1 to v1beta as v1beta1 will be deprecated soon (#14495) --- mmv1/products/gkehub/Membership.yaml | 6 ------ mmv1/products/gkehub/product.yaml | 2 +- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mmv1/products/gkehub/Membership.yaml b/mmv1/products/gkehub/Membership.yaml index ef242bb3e385..f599106c5b9f 100644 --- a/mmv1/products/gkehub/Membership.yaml +++ b/mmv1/products/gkehub/Membership.yaml @@ -122,12 +122,6 @@ properties: description: | The unique identifier of the membership. output: true - - name: 'description' - type: String - description: | - The name of this entity type to be displayed on the console. 
This field is unavailable in v1 of the API. - min_version: 'beta' - deprecation_message: '`description` is deprecated and will be removed in a future major release.' - name: 'labels' type: KeyValueLabels description: | diff --git a/mmv1/products/gkehub/product.yaml b/mmv1/products/gkehub/product.yaml index eb701f7c8923..db104a135598 100644 --- a/mmv1/products/gkehub/product.yaml +++ b/mmv1/products/gkehub/product.yaml @@ -17,7 +17,7 @@ legacy_name: 'gke_hub' display_name: 'GKEHub' versions: - name: 'beta' - base_url: 'https://gkehub.googleapis.com/v1beta1/' + base_url: 'https://gkehub.googleapis.com/v1beta/' - name: 'ga' base_url: 'https://gkehub.googleapis.com/v1/' scopes: diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 5df81a98b0df..179f5e244b12 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -130,6 +130,12 @@ To reflect the new type explicitly, surround the current integer value in quotes `instance` has been removed in favor of `instance_name`. +## Resource: `google_gke_hub_membership` + +### `description` is now removed + +Remove `description` from your configuration after upgrade. + ## Resource: `google_storage_transfer_job` ### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." 
From 271a9d16cfb0e5594f1327b6a45efbc7ea9f3a1f Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 14 Jul 2025 12:58:19 -0700 Subject: [PATCH 734/884] Delete notebooks location (#14479) --- mmv1/products/notebooks/Location.yaml | 43 ------------------- .../guides/version_7_upgrade.html.markdown | 26 ++++++----- 2 files changed, 15 insertions(+), 54 deletions(-) delete mode 100644 mmv1/products/notebooks/Location.yaml diff --git a/mmv1/products/notebooks/Location.yaml b/mmv1/products/notebooks/Location.yaml deleted file mode 100644 index 1438575441d3..000000000000 --- a/mmv1/products/notebooks/Location.yaml +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Location' -kind: 'compute#zone' -description: 'Represents a Location resource.' -deprecation_message: >- - `google_notebooks_location` is deprecated and will be removed in a future major release. - This resource is not functional. -readonly: true -docs: -base_url: 'projects/{{project}}/locations' -has_self_link: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -collection_url_key: 'items' -custom_code: -parameters: -properties: - - name: 'name' - type: String - description: 'Name of the Location resource.' 
- custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 179f5e244b12..7714d280b547 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,15 +102,17 @@ Description of the change and how users should adjust their configuration (if ne ## Resources -## Resource: `google_storage_bucket` +## Resource: `google_bigtable_table_iam_binding` -### `retention_period` changed to `string` data type +### `instance` is now removed -`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. +`instance` has been removed in favor of `instance_name`. -Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. +## Resource: `google_bigtable_table_iam_member` -To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. ## Resource: `google_bigtable_table_iam_policy` @@ -118,17 +120,19 @@ To reflect the new type explicitly, surround the current integer value in quotes `instance` has been removed in favor of `instance_name`. -## Resource: `google_bigtable_table_iam_binding` +## Resource: `google_notebooks_location` is now removed -### `instance` is now removed +This resource is not functional. -`instance` has been removed in favor of `instance_name`. 
+## Resource: `google_storage_bucket` -## Resource: `google_bigtable_table_iam_member` +### `retention_period` changed to `string` data type -### `instance` is now removed +`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. -`instance` has been removed in favor of `instance_name`. +Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. + +To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. ## Resource: `google_gke_hub_membership` From 641acc6c6c2316fde6a8ab7cd6dd87eed6d772ad Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Thu, 24 Jul 2025 09:23:58 -0700 Subject: [PATCH 735/884] Remove enable_flow_logs from google_compute_subnetwork (#14612) --- mmv1/templates/terraform/constants/subnetwork.tmpl | 12 ------------ .../docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/mmv1/templates/terraform/constants/subnetwork.tmpl b/mmv1/templates/terraform/constants/subnetwork.tmpl index 78697330c8e6..9969642edc58 100644 --- a/mmv1/templates/terraform/constants/subnetwork.tmpl +++ b/mmv1/templates/terraform/constants/subnetwork.tmpl @@ -48,15 +48,3 @@ func sendSecondaryIpRangeIfEmptyDiff(_ context.Context, diff *schema.ResourceDif return nil } - -// DiffSuppressFunc for `log_config`. 
-func subnetworkLogConfigDiffSuppress(k, old, new string, d *schema.ResourceData) bool { - // If enable_flow_logs is enabled and log_config is not set, ignore the diff - if enable_flow_logs := d.Get("enable_flow_logs"); enable_flow_logs.(bool) { - logConfig := d.GetRawConfig().GetAttr("log_config") - logConfigIsEmpty := logConfig.IsNull() || logConfig.LengthInt() == 0 - return logConfigIsEmpty - } - - return false -} diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 7714d280b547..f467ebc1ae73 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -120,6 +120,12 @@ Description of the change and how users should adjust their configuration (if ne `instance` has been removed in favor of `instance_name`. +## Resource: `google_compute_subnetwork` + +### `enable_flow_logs`is now removed + +`enable_flow_logs` has been removed in favor of `log_config`. + ## Resource: `google_notebooks_location` is now removed This resource is not functional. From 38b103cc6296a0147853ff7a38201211f9b006ba Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Mon, 28 Jul 2025 10:56:18 -0700 Subject: [PATCH 736/884] Mark `load_balancing_scheme` field required. 
(#14624) --- mmv1/products/networkservices/LbTrafficExtension.yaml | 1 + .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/mmv1/products/networkservices/LbTrafficExtension.yaml b/mmv1/products/networkservices/LbTrafficExtension.yaml index a71a99c345c2..d5ddcdd1d34e 100644 --- a/mmv1/products/networkservices/LbTrafficExtension.yaml +++ b/mmv1/products/networkservices/LbTrafficExtension.yaml @@ -201,6 +201,7 @@ properties: For more information, refer to [Choosing a load balancer](https://cloud.google.com/load-balancing/docs/backend-service) and [Supported application load balancers](https://cloud.google.com/service-extensions/docs/callouts-overview#supported-lbs). immutable: true + required: true enum_values: - 'INTERNAL_MANAGED' - 'EXTERNAL_MANAGED' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index f467ebc1ae73..753d9042c173 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -146,6 +146,12 @@ To reflect the new type explicitly, surround the current integer value in quotes Remove `description` from your configuration after upgrade. +## Resource: `google_network_services_lb_traffic_extension` + +### `load_balancing_scheme` is now required + +`load_balancing_scheme` is now a required field. + ## Resource: `google_storage_transfer_job` ### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." 
From 21e4ab9ac5370fd33757385507fd450f18b63222 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 28 Jul 2025 12:55:43 -0700 Subject: [PATCH 737/884] syncing removal of validation to v3 schema --- .../storage/resource_storage_bucket_600_migration.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index fad2e44bd970..bf4a561b50ea 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -1073,11 +1073,10 @@ func resourceStorageBucketV3() *schema.Resource { }, Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - ValidateFunc: verify.ValidateGCSName, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, }, "encryption": { From d5d263618c704af31572c6b744f4f30683c536ee Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 29 Jul 2025 23:10:43 +0200 Subject: [PATCH 738/884] cloudfunctions2: changed `service` argument in `service_config` of `google_cloudfunctions2_function` to attribute (#14648) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- mmv1/products/cloudfunctions2/Function.yaml | 2 +- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 51c8b30033eb..e3534fabce79 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -503,7 +503,7 @@ properties: type: String description: | Name of the service associated with a Function. 
- default_from_api: true + output: true - name: 'timeoutSeconds' type: Integer description: | diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 753d9042c173..df921efce378 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -161,3 +161,9 @@ Remove `description` from your configuration after upgrade. ### `replication_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." ### `replication_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +## Resource: `google_cloudfunctions2_function` + +### `service_config.service` is changed from `Argument` to `Attribute` + +Remove `service_config.service` from your configuration after upgrade. From 6a153946415922d364139f353fa25b8c5d521e08 Mon Sep 17 00:00:00 2001 From: Paridhi Shah <166548459+paridhishah18@users.noreply.github.com> Date: Thu, 31 Jul 2025 11:04:05 -0700 Subject: [PATCH 739/884] remove dependsOn field as it is not supported for workerpools and fix failing test. (#14295) --- mmv1/products/cloudrunv2/WorkerPool.yaml | 6 ------ .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml index 13f7c75a8974..4f6958c4aa1e 100644 --- a/mmv1/products/cloudrunv2/WorkerPool.yaml +++ b/mmv1/products/cloudrunv2/WorkerPool.yaml @@ -478,12 +478,6 @@ properties: type: String description: |- Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. 
- - name: 'dependsOn' - type: Array - description: |- - Containers which should be started before this container. If specified the container will wait to start until all containers with the listed names are healthy. - item_type: - type: String - name: 'volumes' type: Array description: |- diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index df921efce378..a487a1526410 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -167,3 +167,9 @@ Remove `description` from your configuration after upgrade. ### `service_config.service` is changed from `Argument` to `Attribute` Remove `service_config.service` from your configuration after upgrade. + +## Resource: `google_cloud_run_v2_worker_pool` + +### `template.containers.depends_on` is reomved as it is not supported. + +Remove `template.containers.depends_on` from your configuration after upgrade. 
From 557c7dc932837347a0e61aadb97db323b18fc718 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 1 Aug 2025 21:49:57 +0200 Subject: [PATCH 740/884] vertexai: marked `enable_secure_private_service_connect` in `google_vertex_ai_endpoint` as beta isntead of GA (beta) (#14665) --- mmv1/products/vertexai/Endpoint.yaml | 3 ++- .../vertex_ai_endpoint_private_service_connect.tf.tmpl | 1 - .../website/docs/guides/version_7_upgrade.html.markdown | 6 +++++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/mmv1/products/vertexai/Endpoint.yaml b/mmv1/products/vertexai/Endpoint.yaml index 30046d585e5d..7dd64bb13d28 100644 --- a/mmv1/products/vertexai/Endpoint.yaml +++ b/mmv1/products/vertexai/Endpoint.yaml @@ -21,7 +21,7 @@ description: references: guides: 'Official Documentation': 'https://cloud.google.com/vertex-ai/docs' - api: 'https://cloud.google.com/vertex-ai/docs/reference/rest/v1beta1/projects.locations.endpoints' + api: 'https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints' docs: base_url: 'projects/{{project}}/locations/{{location}}/endpoints' self_link: 'projects/{{project}}/locations/{{location}}/endpoints/{{name}}' @@ -458,6 +458,7 @@ properties: description: 'A list of Projects from which the forwarding rule will target the service attachment.' - name: 'enableSecurePrivateServiceConnect' + min_version: 'beta' type: Boolean description: 'If set to true, enable secure private service connect with IAM authorization. Otherwise, private service connect will be done without authorization. Note latency will be slightly increased if authorization is enabled.' 
diff --git a/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl index f8dd51f3f1eb..eea9532d043b 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl +++ b/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl @@ -12,7 +12,6 @@ resource "google_vertex_ai_endpoint" "{{$.PrimaryResourceId}}" { project_allowlist = [ "${data.google_project.project.project_id}" ] - enable_secure_private_service_connect = false } } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index a487a1526410..50e9d1b4a456 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -170,6 +170,10 @@ Remove `service_config.service` from your configuration after upgrade. ## Resource: `google_cloud_run_v2_worker_pool` -### `template.containers.depends_on` is reomved as it is not supported. +### `template.containers.depends_on` is removed as it is not supported. Remove `template.containers.depends_on` from your configuration after upgrade. + +## Resource: `google_vertex_ai_endpoint` + +### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. 
From 66933ee3ada997ff62a99c9503e9c494b4e5a24a Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Fri, 8 Aug 2025 13:49:21 -0500 Subject: [PATCH 741/884] make event_type required in `google_cloudfunctions2_function` resource (#14791) --- mmv1/products/cloudfunctions2/Function.yaml | 1 + .../website/docs/guides/version_7_upgrade.html.markdown | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index e3534fabce79..b7381c6cfda7 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -678,6 +678,7 @@ properties: default_from_api: true - name: 'eventType' type: String + required: true description: 'Required. The type of event to observe.' - name: 'eventFilters' type: Array diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 50e9d1b4a456..7b33a23fe275 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -164,6 +164,10 @@ Remove `description` from your configuration after upgrade. ## Resource: `google_cloudfunctions2_function` +### `event_trigger.event_type` is now required + +The `event_type` field is now required when `event_trigger` is configured. + ### `service_config.service` is changed from `Argument` to `Attribute` Remove `service_config.service` from your configuration after upgrade. 
From bc5738c9c78c38dec5f0cf73f889ae1491d5d5a4 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Mon, 11 Aug 2025 11:30:27 -0500 Subject: [PATCH 742/884] Changed default on disable_on_destroy to false (#14790) --- ...dbuild_bitbucket_server_config_peered_network.tf.tmpl | 1 - .../terraform/examples/eventarc_basic_tf.tf.tmpl | 2 -- .../terraform/examples/eventarc_workflows.tf.tmpl | 3 --- ...firebase_app_check_play_integrity_config_full.tf.tmpl | 1 - ...ebase_app_check_play_integrity_config_minimal.tf.tmpl | 3 --- ...e_app_check_recaptcha_enterprise_config_basic.tf.tmpl | 3 --- .../firebase_app_check_service_config_enforced.tf.tmpl | 1 - .../firebase_app_check_service_config_off.tf.tmpl | 1 - .../firebase_app_check_service_config_unenforced.tf.tmpl | 1 - .../examples/firebase_app_hosting_backend_full.tf.tmpl | 2 -- .../firebase_app_hosting_backend_minimal.tf.tmpl | 2 -- .../examples/firebase_app_hosting_build_full.tf.tmpl | 2 -- .../examples/firebase_app_hosting_build_minimal.tf.tmpl | 2 -- .../firebase_app_hosting_traffic_rollout_policy.tf.tmpl | 2 -- ...e_app_hosting_traffic_rollout_policy_disabled.tf.tmpl | 2 -- .../examples/firebase_app_hosting_traffic_target.tf.tmpl | 2 -- .../firebase_database_instance_default_database.tf.tmpl | 4 ---- .../examples/firebasedataconnect_service_basic.tf.tmpl | 1 - ...rebasedataconnect_service_with_force_deletion.tf.tmpl | 1 - .../terraform/examples/kms_autokey_config_all.tf.tmpl | 1 - .../terraform/examples/kms_key_handle_basic.tf.tmpl | 1 - .../terraform/examples/shared_future_reservation.tf.tmpl | 1 - .../terraform/examples/shared_reservation_basic.tf.tmpl | 1 - .../terraform/examples/shared_reservation_beta.tf.tmpl | 1 - ..._featureonlinestore_featureview_cross_project.tf.tmpl | 1 - .../terraform/provider/provider_billing_project_test.go | 3 --- ...e_bigquery_analytics_hub_listing_subscription_test.go | 1 - .../resource_compute_shared_reservation_update_test.go | 8 -------- 
.../container/resource_container_cluster_test.go.tmpl | 2 -- ...esource_document_ai_warehouse_document_schema_test.go | 1 - ...source_firebase_app_check_service_config_test.go.tmpl | 3 --- .../resource_firebase_data_connect_service_test.go | 1 - .../resource_gke_hub_feature_membership_test.go.tmpl | 4 ---- .../services/gkehub2/iam_gke_hub_feature_test.go | 5 ----- .../gkehub2/resource_gke_hub_feature_test.go.tmpl | 9 --------- .../services/gkehub2/resource_gke_hub_fleet_test.go.tmpl | 2 -- .../resource_gke_hub_scope_rbac_role_binding_test.go | 1 - .../resource_google_project_service.go.tmpl | 2 -- .../resource_google_project_service_test.go.tmpl | 6 ++++-- .../guides/external_credentials_stacks.html.markdown | 1 - .../website/docs/guides/version_7_upgrade.html.markdown | 8 ++++++++ .../website/docs/r/cloudbuild_worker_pool.html.markdown | 1 - .../website/docs/r/google_project_service.html.markdown | 7 ++----- 43 files changed, 14 insertions(+), 93 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl index 190cf5ebd566..15e1351804e7 100644 --- a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl @@ -2,7 +2,6 @@ data "google_project" "project" {} resource "google_project_service" "servicenetworking" { service = "servicenetworking.googleapis.com" - disable_on_destroy = false } resource "google_compute_network" "vpc_network" { diff --git a/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl b/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl index 2aa6cd00729a..d7db6749e22e 100644 --- a/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl +++ b/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl @@ -7,14 +7,12 @@ data "google_project" "project" 
{ resource "google_project_service" "run" { provider = google-beta service = "run.googleapis.com" - disable_on_destroy = false } # Enable Eventarc API resource "google_project_service" "eventarc" { provider = google-beta service = "eventarc.googleapis.com" - disable_on_destroy = false } # Deploy Cloud Run service diff --git a/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl b/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl index ea7f916116fd..50edabb245f4 100644 --- a/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl +++ b/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl @@ -7,21 +7,18 @@ data "google_project" "project" { resource "google_project_service" "eventarc" { provider = google-beta service = "eventarc.googleapis.com" - disable_on_destroy = false } # Enable Pub/Sub API resource "google_project_service" "pubsub" { provider = google-beta service = "pubsub.googleapis.com" - disable_on_destroy = false } # Enable Workflows API resource "google_project_service" "workflows" { provider = google-beta service = "workflows.googleapis.com" - disable_on_destroy = false } diff --git a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl index db5a93b8a4ec..eba66461acce 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl @@ -6,7 +6,6 @@ resource "google_project_service" "play_integrity" { service = "playintegrity.googleapis.com" # Don't disable the service if the resource block is removed by accident. 
- disable_on_destroy = false } resource "google_firebase_android_app" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl index bbe75253a126..62c7717d0815 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl @@ -4,9 +4,6 @@ resource "google_project_service" "play_integrity" { project = "{{index $.TestEnvVars "project_id"}}" service = "playintegrity.googleapis.com" - - # Don't disable the service if the resource block is removed by accident. - disable_on_destroy = false } resource "google_firebase_android_app" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl index ee2abef0f7eb..2fad49e48fc6 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl @@ -4,9 +4,6 @@ resource "google_project_service" "recaptcha_enterprise" { project = "{{index $.TestEnvVars "project_id"}}" service = "recaptchaenterprise.googleapis.com" - - # Don't disable the service if the resource block is removed by accident. 
- disable_on_destroy = false } resource "google_firebase_web_app" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl index e29fb3518c28..e886b85db32b 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl @@ -1,7 +1,6 @@ resource "google_project_service" "appcheck" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false } resource "google_firebase_app_check_service_config" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl index ff000e5a3b42..be56b53643ce 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl @@ -1,7 +1,6 @@ resource "google_project_service" "appcheck" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false } resource "google_firebase_app_check_service_config" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl index 11f85955c357..3ece67283265 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl @@ -1,7 +1,6 @@ resource "google_project_service" "appcheck" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false } resource 
"google_firebase_app_check_service_config" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl index a6e4a28da553..c50660acf7fe 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl @@ -60,7 +60,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl index 1b98329068f9..1c25f3967af4 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl @@ -34,7 +34,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl index 801f28a88a0a..9a1524b62f24 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl @@ -56,7 +56,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git 
a/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl index 23a7c8d83914..2871a6dfb35b 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl @@ -47,7 +47,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl index e8048229281c..6caa5b35e3e1 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl @@ -44,7 +44,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl index 0c9c43112824..8194f2b02f29 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl @@ -45,7 +45,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git 
a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl index 98fa778f879d..4606bfc1111d 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl @@ -60,7 +60,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl b/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl index 2ded65e3bcd0..db506733e11d 100644 --- a/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl @@ -13,8 +13,6 @@ resource "google_project_service" "firebase" { provider = google-beta project = google_project.default.project_id service = "firebase.googleapis.com" - - disable_on_destroy = false } resource "google_firebase_project" "default" { @@ -28,8 +26,6 @@ resource "google_project_service" "firebase_database" { provider = google-beta project = google_firebase_project.default.project service = "firebasedatabase.googleapis.com" - - disable_on_destroy = false } resource "time_sleep" "wait_60_seconds" { diff --git a/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl b/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl index ca2a62c09e80..228b973ec3f5 100644 --- a/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl @@ -2,7 +2,6 @@ resource "google_project_service" "fdc" { project = "{{index 
$.TestEnvVars "project_id"}}" service = "firebasedataconnect.googleapis.com" - disable_on_destroy = false } # Create a Firebase Data Connect service diff --git a/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl b/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl index 4d35b5c4278d..fd81b92dd5f7 100644 --- a/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl @@ -2,7 +2,6 @@ resource "google_project_service" "fdc" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebasedataconnect.googleapis.com" - disable_on_destroy = false } # Create a Firebase Data Connect service diff --git a/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl b/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl index aa3ad3661cc6..fb2d32eb7aa9 100644 --- a/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl +++ b/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl @@ -22,7 +22,6 @@ resource "google_project_service" "kms_api_service" { provider = google-beta service = "cloudkms.googleapis.com" project = google_project.key_project.project_id - disable_on_destroy = false disable_dependent_services = true depends_on = [google_project.key_project] } diff --git a/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl b/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl index b67930b88d11..20988805fc79 100644 --- a/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl @@ -33,7 +33,6 @@ resource "google_project_service" "kms_api_service" { provider = google-beta service = "cloudkms.googleapis.com" project = google_project.key_project.project_id - disable_on_destroy = false disable_dependent_services = true depends_on = 
[google_project.key_project] } diff --git a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl index 44ffa16ff14a..1cc7fd2342c5 100644 --- a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl @@ -9,7 +9,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { diff --git a/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl index 645287066b3e..886579825f7d 100644 --- a/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl @@ -10,7 +10,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { diff --git a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl index 1d20092bb008..9148cb898094 100644 --- a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl @@ -12,7 +12,6 @@ resource "google_project_service" "compute" { provider = google-beta project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { diff --git a/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl 
b/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl index e81d0081edc8..786e75321265 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl +++ b/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl @@ -28,7 +28,6 @@ resource "google_project_service" "vertexai" { create = "30m" update = "40m" } - disable_on_destroy = false # Needed for CI tests for permissions to propagate, should not be needed for actual usage depends_on = [time_sleep.wait_60_seconds] } diff --git a/mmv1/third_party/terraform/provider/provider_billing_project_test.go b/mmv1/third_party/terraform/provider/provider_billing_project_test.go index 452ddbfa2b0b..e9f36c187931 100644 --- a/mmv1/third_party/terraform/provider/provider_billing_project_test.go +++ b/mmv1/third_party/terraform/provider/provider_billing_project_test.go @@ -265,8 +265,6 @@ resource "google_project" "project" { resource "google_project_service" "serviceusage" { project = google_project.project.project_id service = "serviceusage.googleapis.com" - - disable_on_destroy = false # Need it enabled in the project when the test disables services in post-test cleanup } `, context) } @@ -310,7 +308,6 @@ resource "google_project_service" "pubsub" { resource "google_project_service" "cloudresourcemanager" { project = google_project.project.project_id service = "cloudresourcemanager.googleapis.com" - disable_on_destroy = false # Need it enabled in the project when the test deletes the project resource in post-test cleanup } `, context) } diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go index 155ecb59f738..e4e4ee291689 100644 --- 
a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go @@ -74,7 +74,6 @@ resource "google_project" "project" { resource "google_project_service" "analyticshub" { project = google_project.project.project_id service = "analyticshub.googleapis.com" - disable_on_destroy = false # Need it enabled in the project when the test disables services in post-test cleanup } resource "google_bigquery_analytics_hub_data_exchange" "subscription" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go index 704f1fa067dd..c13210c1ff96 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go @@ -68,7 +68,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { @@ -109,19 +108,16 @@ resource "google_org_policy_policy" "shared_reservation_org_policy" { resource "google_project_service" "compute_second_project" { project = google_project.guest_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_third_project" { project = google_project.guest_project_second.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_fourth_project" { project = google_project.guest_project_third.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource 
"google_compute_reservation" "gce_reservation" { @@ -161,7 +157,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { @@ -202,19 +197,16 @@ resource "google_org_policy_policy" "shared_reservation_org_policy" { resource "google_project_service" "compute_second_project" { project = google_project.guest_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_third_project" { project = google_project.guest_project_second.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_fourth_project" { project = google_project.guest_project_third.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_compute_reservation" "gce_reservation" { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index b0675ba761b7..eed554ce3320 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -8668,12 +8668,10 @@ func testAccContainerCluster_withNodeConfigReservationAffinitySpecific(reservati resource "google_project_service" "compute" { service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container" { service = "container.googleapis.com" - disable_on_destroy = false depends_on = [google_project_service.compute] } diff --git a/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go 
b/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go index e2521f2b5b33..1a3a5568b955 100644 --- a/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go +++ b/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go @@ -132,7 +132,6 @@ resource "google_project" "project" { resource "google_project_service" "contentwarehouse" { project = google_project.project.project_id service = "contentwarehouse.googleapis.com" - disable_on_destroy = false } resource "time_sleep" "wait_120s" { diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl index dfd7901eff82..9a518159c6f5 100644 --- a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl @@ -77,14 +77,12 @@ resource "google_project_service" "firebase" { provider = google-beta project = google_project.default.project_id service = "firebase.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "database" { provider = google-beta project = google_project.default.project_id service = "firebasedatabase.googleapis.com" - disable_on_destroy = false depends_on = [ google_project_service.firebase, ] @@ -94,7 +92,6 @@ resource "google_project_service" "appcheck" { provider = google-beta project = google_project.default.project_id service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false depends_on = [ google_project_service.database, ] diff --git a/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go 
b/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go index 26c5bad11741..8dea93b53594 100644 --- a/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go +++ b/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go @@ -61,7 +61,6 @@ func testAccFirebaseDataConnectService_update(context map[string]interface{}, di resource "google_project_service" "fdc" { project = "%{project_id}" service = "firebasedataconnect.googleapis.com" - disable_on_destroy = false } # Create an FDC service diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl index 2cea9bd3eff9..8f2b784f4b21 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl @@ -1308,7 +1308,6 @@ resource "google_project" "project" { resource "google_project_service" "anthos" { project = google_project.project.project_id service = "anthos.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "mesh" { @@ -1339,19 +1338,16 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } // It needs waiting until the API services are really activated. 
diff --git a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go index 48eb09f11440..c9133ec25c35 100644 --- a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go @@ -137,7 +137,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -174,7 +173,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -224,7 +222,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -262,7 +259,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -299,7 +295,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl 
b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 5bbf840d3edf..37e510333765 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -178,21 +178,18 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" - disable_on_destroy = false provider = google-beta } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" - disable_on_destroy = false provider = google-beta } resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false provider = google-beta } `, context) @@ -1086,13 +1083,11 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "anthos" { @@ -1103,7 +1098,6 @@ resource "google_project_service" "anthos" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_project" "project_2" { @@ -1117,19 +1111,16 @@ resource "google_project" "project_2" { resource "google_project_service" "compute_2" { project = google_project.project_2.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container_2" { project = google_project.project_2.project_id service = "container.googleapis.com" - disable_on_destroy = false } 
resource "google_project_service" "gkehub_2" { project = google_project.project_2.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } `, context) } diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl index 7fb8d5170a94..8127202e4e94 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl @@ -124,14 +124,12 @@ resource "google_project" "project" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false depends_on = [google_project_service.anthos] } resource "google_project_service" "anthos" { project = google_project.project.project_id service = "anthos.googleapis.com" - disable_on_destroy = false } resource "time_sleep" "wait_for_gkehub_enablement" { diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go index cae802710262..ae358d84ebb7 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go @@ -207,7 +207,6 @@ resource "google_project_service" "anthos" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } `, context) } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl index bf174baf2c47..b496cd7f1643 100644 --- 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl @@ -119,13 +119,11 @@ func ResourceGoogleProjectService() *schema.Resource { "disable_on_destroy": { Type: schema.TypeBool, Optional: true, - Default: true, }, {{- if ne $.TargetVersionName "ga" }} "check_if_service_has_usage_on_destroy": { Type: schema.TypeBool, Optional: true, - Default: false, }, {{- end }} }, diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl index c5fdaded7489..8fcb603673f0 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl @@ -282,14 +282,16 @@ resource "google_project" "acceptance" { } resource "google_project_service" "test" { - project = google_project.acceptance.project_id - service = "%s" + project = google_project.acceptance.project_id + service = "%s" + disable_on_destroy = true } resource "google_project_service" "test2" { project = google_project.acceptance.project_id service = "%s" disable_dependent_services = %s + disable_on_destroy = true } `, pid, pid, org, billing, services[0], services[1], disableDependentServices) } diff --git a/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown b/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown index f214d75715de..4bd5c4c8a879 100644 --- a/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown @@ -69,7 +69,6 @@ resource "google_project_service" "services" { project = var.project_id 
service = each.key disable_dependent_services = false - disable_on_destroy = false } # Create Workload Identity Pool (reference google_project_service to ensure APIs are enabled) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 7b33a23fe275..3a7d56e0ed4f 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -181,3 +181,11 @@ Remove `template.containers.depends_on` from your configuration after upgrade. ## Resource: `google_vertex_ai_endpoint` ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. + +## Resource: `google_project_service` + +### `disable_on_destroy` now defaults to `false` + +The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. + +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. 
\ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown index 0ad57f5ed54b..bd9e48c1904b 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown @@ -27,7 +27,6 @@ resource "google_cloudbuild_worker_pool" "pool" { ```hcl resource "google_project_service" "servicenetworking" { service = "servicenetworking.googleapis.com" - disable_on_destroy = false } resource "google_compute_network" "network" { diff --git a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown index 657f874cf3f2..f5a53f913637 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown @@ -33,8 +33,6 @@ resource "google_project_service" "project" { create = "30m" update = "40m" } - - disable_on_destroy = false } ``` @@ -49,9 +47,8 @@ is used. * `disable_on_destroy` - (Optional) If `true` or unset, disable the service when the Terraform resource is destroyed. If `false`, the service will be left enabled when -the Terraform resource is destroyed. Defaults to `true`. Most configurations should -set this to `false`; it should generally only be `true` or unset in configurations -that manage the `google_project` resource itself. +the Terraform resource is destroyed. Defaults to `false`. It should generally only +be `true` or unset in configurations that manage the `google_project` resource itself. 
* `disable_dependent_services` - (Optional) If `true`, services that are enabled and which depend on this service should also be disabled when this service is From fbc3d5de75662659a5edba4a47b02e3b16ed9ac2 Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Tue, 12 Aug 2025 12:46:41 -0400 Subject: [PATCH 743/884] Set SecureSourceManager Instance and Repository deletion_policy default to PREVENT (#14781) --- .../securesourcemanager/BranchRule.yaml | 4 ++++ .../securesourcemanager/Instance.yaml | 8 ++++++- .../securesourcemanager/Repository.yaml | 6 ++++- ..._source_manager_branch_rule_update_test.go | 24 ++++++++----------- ...e_source_manager_repository_update_test.go | 22 ++++++----------- 5 files changed, 33 insertions(+), 31 deletions(-) diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml index 4eb27bf14d76..8f2f83fb9a2a 100644 --- a/mmv1/products/securesourcemanager/BranchRule.yaml +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -52,6 +52,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' - name: 'secure_source_manager_branch_rule_with_fields' primary_resource_id: 'default' vars: @@ -63,6 +65,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' parameters: - name: 'branch_rule_id' type: String diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index c154cda42263..b03875eee254 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -66,6 +66,7 @@ examples: 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_cmek' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' 
@@ -80,6 +81,7 @@ examples: 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_private' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -95,6 +97,7 @@ examples: external_providers: ["time"] ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_private_psc_backend' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -118,6 +121,7 @@ examples: external_providers: ["time"] ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_private_psc_endpoint' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -138,6 +142,7 @@ examples: external_providers: ["time"] ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_workforce_identity_federation' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -150,6 +155,7 @@ examples: 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' + - 'deletion_policy' parameters: - name: 'location' type: String @@ -177,7 +183,7 @@ virtual_fields: * DELETE * PREVENT * ABANDON - default_value: 'DELETE' + default_value: 'PREVENT' properties: - name: 'name' type: String diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index d9d15ff238b5..49b481949da9 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -60,6 +60,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' - name: 'secure_source_manager_repository_initial_config' primary_resource_id: 
'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-repository%s", context["random_suffix"])' @@ -71,6 +73,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' parameters: - name: 'location' type: String @@ -98,7 +102,7 @@ virtual_fields: * DELETE * PREVENT * ABANDON - default_value: 'DELETE' + default_value: 'PREVENT' properties: - name: 'name' type: String diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go index 3e219e5659b7..ed1397c981a5 100644 --- a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go @@ -11,7 +11,7 @@ func TestAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField t.Parallel() context := map[string]interface{}{ - "prevent_destroy": false, + "deletion_policy": "DELETE", "random_suffix": acctest.RandString(t, 10), } @@ -46,20 +46,18 @@ func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "tf-test-my-initial-instance%{random_suffix}" + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "repository" { repository_id = "tf-test-my-initial-repository%{random_suffix}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_branch_rule" "default" { @@ -83,20 +81,18 @@ func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "tf-test-my-initial-instance%{random_suffix}" - # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + + # Prevent accidental deletions. + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "repository" { repository_id = "tf-test-my-initial-repository%{random_suffix}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_branch_rule" "default" { diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go index 4a5264e2c8b7..a035b2094130 100644 --- a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go @@ -12,7 +12,7 @@ func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExam t.Parallel() context := map[string]interface{}{ - "prevent_destroy": false, + "deletion_policy": "DELETE", "random_suffix": acctest.RandString(t, 10), } @@ -27,7 +27,7 @@ func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExam ResourceName: "google_secure_source_manager_repository.default", ImportState: true, 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id"}, + ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id", "deletion_policy"}, }, { Config: testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(context), @@ -41,7 +41,7 @@ func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExam ResourceName: "google_secure_source_manager_repository.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id"}, + ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id", "deletion_policy"}, }, }, }) @@ -54,9 +54,7 @@ resource "google_secure_source_manager_instance" "instance" { instance_id = "tf-test-my-instance%{random_suffix}" # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "default" { @@ -65,9 +63,7 @@ resource "google_secure_source_manager_repository" "default" { instance = google_secure_source_manager_instance.instance.name # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } `, context) } @@ -79,9 +75,7 @@ resource "google_secure_source_manager_instance" "instance" { instance_id = "tf-test-my-instance%{random_suffix}" # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "default" { @@ -92,9 +86,7 @@ resource "google_secure_source_manager_repository" "default" { description = "new description" # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } `, context) } From f35919e402e30978337f55e3a1a12502ef71c620 Mon Sep 17 00:00:00 2001 From: zoeyai-google Date: Tue, 12 Aug 2025 09:51:57 -0700 Subject: [PATCH 744/884] Deprecate Cloud TPU google_tpu_node resoruce (#14794) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/tpu/Node.yaml | 189 ------------------ mmv1/products/tpu/product.yaml | 27 --- .../terraform/examples/tpu_node_basic.tf.tmpl | 15 -- .../terraform/examples/tpu_node_full.tf.tmpl | 50 ----- .../examples/tpu_node_full_test.tf.tmpl | 29 --- .../components/inputs/services_beta.kt | 5 - .../components/inputs/services_ga.kt | 5 - .../provider/provider_mmv1_resources.go.tmpl | 1 - ...ata_source_tpu_tensorflow_versions_test.go | 68 ------- .../guides/version_7_upgrade.html.markdown | 6 +- 10 files changed, 5 insertions(+), 390 deletions(-) delete mode 100644 mmv1/products/tpu/Node.yaml delete mode 100644 mmv1/products/tpu/product.yaml delete mode 100644 mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl delete mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go diff --git a/mmv1/products/tpu/Node.yaml b/mmv1/products/tpu/Node.yaml deleted file mode 100644 index 6e48ab9ff558..000000000000 --- a/mmv1/products/tpu/Node.yaml +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Node' -description: | - A Cloud TPU instance. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/tpu/docs/' - api: 'https://cloud.google.com/tpu/docs/reference/rest/v1/projects.locations.nodes' -deprecation_message: >- - `google_tpu_node` is deprecated and will be removed in a future major release. - Use `google_tpu_v2_vm` instead. For moving from TPU Node to TPU VM architecture, see - https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. -base_url: 'projects/{{project}}/locations/{{zone}}/nodes' -self_link: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}' -create_url: 'projects/{{project}}/locations/{{zone}}/nodes?nodeId={{name}}' -immutable: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -autogen_async: true -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -custom_code: - constants: 'templates/terraform/constants/tpu_node.tmpl' -custom_diff: - - 'tpuNodeCustomizeDiff' -sweeper: - url_substitutions: - - zone: "us-central1-b" -examples: - - name: 'tpu_node_basic' - primary_resource_id: 'tpu' - vars: - node_name: 'test-tpu' - # resource is deprecated - exclude_test: true - - name: 'tpu_node_full' - primary_resource_id: 'tpu' - vars: - node_name: 'test-tpu' - global_address_name: 'my-global-address' - network_name: 'tpu-node-network' - exclude_test: true - - name: 'tpu_node_full_test' - primary_resource_id: 'tpu' - vars: - node_name: 'test-tpu' - 
network_name: 'tpu-node-network' - test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "vpc-network-1")' - # resource is deprecated - exclude_test: true - exclude_docs: true -parameters: - # TODO: resourceref? - - name: 'zone' - type: String - description: | - The GCP location for the TPU. If it is not provided, the provider zone is used. - url_param_only: true - immutable: true - default_from_api: true -properties: - - name: 'name' - type: String - description: | - The immutable name of the TPU. - required: true - immutable: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: 'description' - type: String - description: | - The user-supplied description of the TPU. Maximum of 512 characters. - immutable: true - - name: 'acceleratorType' - type: String - description: | - The type of hardware accelerators associated with this node. - required: true - immutable: true - - name: 'tensorflowVersion' - type: String - description: | - The version of Tensorflow running in the Node. - required: true - update_url: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}:reimage' - update_verb: 'POST' - - name: 'network' - type: String - description: | - The name of a network to peer the TPU node to. It must be a - preexisting Compute Engine network inside of the project on which - this API has been activated. If none is provided, "default" will be - used. - immutable: true - default_from_api: true - diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - - name: 'cidrBlock' - type: String - description: | - The CIDR block that the TPU node will use when selecting an IP - address. This CIDR block must be a /29 block; the Compute Engine - networks API forbids a smaller block, and using a larger block would - be wasteful (a node can only consume one IP address). 
- - Errors will occur if the CIDR block has already been used for a - currently existing TPU node, the CIDR block conflicts with any - subnetworks in the user's provided network, or the provided network - is peered with another network that is using that CIDR block. - immutable: true - default_from_api: true - conflicts: - - use_service_networking - - name: 'serviceAccount' - type: String - description: | - The service account used to run the tensor flow services within the - node. To share resources, including Google Cloud Storage data, with - the Tensorflow job running in the Node, this account must have - permissions to that data. - output: true - - name: 'useServiceNetworking' - type: Boolean - description: | - Whether the VPC peering for the node is set up through Service Networking API. - The VPC Peering should be set up before provisioning the node. If this field is set, - cidr_block field should not be specified. If the network that you want to peer the - TPU Node to is a Shared VPC network, the node must be created with this this field enabled. - immutable: true - conflicts: - - cidr_block - default_value: false - - name: 'schedulingConfig' - type: NestedObject - description: | - Sets the scheduling options for this TPU instance. - immutable: true - diff_suppress_func: 'compareTpuNodeSchedulingConfig' - properties: - - name: 'preemptible' - type: Boolean - description: | - Defines whether the TPU instance is preemptible. - required: true - diff_suppress_func: 'compareTpuNodeSchedulingConfig' - - name: 'networkEndpoints' - type: Array - description: | - The network endpoints where TPU workers can be accessed and sent work. - It is recommended that Tensorflow clients of the node first reach out - to the first (index 0) entry. - output: true - item_type: - type: NestedObject - properties: - - name: 'ipAddress' - type: String - description: | - The IP address of this network endpoint. 
- output: true - - name: 'port' - type: Integer - description: | - The port of this network endpoint. - output: true - - name: 'labels' - type: KeyValueLabels - description: Resource labels to represent user provided metadata. - immutable: true diff --git a/mmv1/products/tpu/product.yaml b/mmv1/products/tpu/product.yaml deleted file mode 100644 index a9302af0cd89..000000000000 --- a/mmv1/products/tpu/product.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'TPU' -display_name: 'Cloud TPU' -versions: - - name: 'ga' - base_url: 'https://tpu.googleapis.com/v1/' -scopes: - - 'https://www.googleapis.com/auth/cloud-platform' -async: - type: "OpAsync" - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true diff --git a/mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl b/mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl deleted file mode 100644 index 9f516fd9ad65..000000000000 --- a/mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -{{/* WARNING: cidr_block must not overlap with other existing TPU blocks - Make sure if you change this value that it does not overlap with the - autogenerated examples. 
*/ -}} - -data "google_tpu_tensorflow_versions" "available" { -} - -resource "google_tpu_node" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "node_name"}}" - zone = "us-central1-b" - - accelerator_type = "v3-8" - tensorflow_version = data.google_tpu_tensorflow_versions.available.versions[0] - cidr_block = "10.2.0.0/29" -} diff --git a/mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl b/mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl deleted file mode 100644 index af0c39fd7ef5..000000000000 --- a/mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl +++ /dev/null @@ -1,50 +0,0 @@ -data "google_tpu_tensorflow_versions" "available" { -} - -{{/* WARNING: cidr_block must not overlap with other existing TPU blocks - Make sure if you change this value that it does not overlap with the - autogenerated examples. */ -}} - -resource "google_tpu_node" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "node_name"}}" - zone = "us-central1-b" - - accelerator_type = "v3-8" - - tensorflow_version = data.google_tpu_tensorflow_versions.available.versions[0] - - description = "Terraform Google Provider test TPU" - use_service_networking = true -{{/* We previously used a separate network resource here, but TPUs only allow using 50 - different network names, ever. This caused our tests to start failing, so just - use the default network in order to still demonstrate using as many fields as - possible on the resource. 
*/ -}} - - network = google_service_networking_connection.private_service_connection.network - - labels = { - foo = "bar" - } - - scheduling_config { - preemptible = true - } -} - -resource "google_compute_network" "network" { - name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_global_address" "service_range" { - name = "{{index $.Vars "global_address_name"}}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.network.id -} - -resource "google_service_networking_connection" "private_service_connection" { - network = google_compute_network.network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.service_range.name] -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl b/mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl deleted file mode 100644 index 01ce73d6c2cf..000000000000 --- a/mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl +++ /dev/null @@ -1,29 +0,0 @@ -{{/* WARNING: cidr_block must not overlap with other existing TPU blocks - Make sure if you change this value that it does not overlap with the - autogenerated examples. 
*/ -}} - -resource "google_tpu_node" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "node_name"}}" - zone = "us-central1-b" - - accelerator_type = "v3-8" - - tensorflow_version = "2.10.0" - - description = "Terraform Google Provider test TPU" - use_service_networking = true - - network = data.google_compute_network.network.id - - labels = { - foo = "bar" - } - - scheduling_config { - preemptible = true - } -} - -data "google_compute_network" "network" { - name = "{{index $.Vars "network_name"}}" -} diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index e16d98f34deb..fac5c46661cf 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -811,11 +811,6 @@ var ServicesListBeta = mapOf( "displayName" to "Tags", "path" to "./google-beta/services/tags" ), - "tpu" to mapOf( - "name" to "tpu", - "displayName" to "Tpu", - "path" to "./google-beta/services/tpu" - ), "tpuv2" to mapOf( "name" to "tpuv2", "displayName" to "Tpuv2", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 2c32d877bc36..175f9e8a5d15 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -806,11 +806,6 @@ var ServicesListGa = mapOf( "displayName" to "Tags", "path" to "./google/services/tags" ), - "tpu" to mapOf( - "name" to "tpu", - "displayName" to "Tpu", - "path" to "./google/services/tpu" - ), "tpuv2" to mapOf( "name" to "tpuv2", "displayName" to "Tpuv2", diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 9d58d8d16024..cc4864f51210 100644 --- 
a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -266,7 +266,6 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_tags_tag_keys": tags.DataSourceGoogleTagsTagKeys(), "google_tags_tag_value": tags.DataSourceGoogleTagsTagValue(), "google_tags_tag_values": tags.DataSourceGoogleTagsTagValues(), - "google_tpu_tensorflow_versions": tpu.DataSourceTpuTensorflowVersions(), {{- if ne $.TargetVersionName "ga" }} "google_tpu_v2_runtime_versions": tpuv2.DataSourceTpuV2RuntimeVersions(), "google_tpu_v2_accelerator_types": tpuv2.DataSourceTpuV2AcceleratorTypes(), diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go deleted file mode 100644 index 78661db237e8..000000000000 --- a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package tpu_test - -import ( - "errors" - "fmt" - "strconv" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccTPUTensorflowVersions_basic(t *testing.T) { - t.Parallel() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccTpuTensorFlowVersionsConfig, - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleTpuTensorflowVersions("data.google_tpu_tensorflow_versions.available"), - ), - }, - }, - }) -} - -func testAccCheckGoogleTpuTensorflowVersions(n string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[n] - if !ok { - return fmt.Errorf("Can't find TPU Tensorflow 
versions data source: %s", n) - } - - if rs.Primary.ID == "" { - return errors.New("data source id not set") - } - - count, ok := rs.Primary.Attributes["versions.#"] - if !ok { - return errors.New("can't find 'versions' attribute") - } - - cnt, err := strconv.Atoi(count) - if err != nil { - return errors.New("failed to read number of version") - } - if cnt < 2 { - return fmt.Errorf("expected at least 2 versions, received %d, this is most likely a bug", cnt) - } - - for i := 0; i < cnt; i++ { - idx := fmt.Sprintf("versions.%d", i) - _, ok := rs.Primary.Attributes[idx] - if !ok { - return fmt.Errorf("expected %q, version not found", idx) - } - } - return nil - } -} - -var testAccTpuTensorFlowVersionsConfig = ` -data "google_tpu_tensorflow_versions" "available" {} -` diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 3a7d56e0ed4f..b8bdc4306444 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -182,10 +182,14 @@ Remove `template.containers.depends_on` from your configuration after upgrade. ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. +## Resource: `google_tpu_node` is now removed + +`google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. + ## Resource: `google_project_service` ### `disable_on_destroy` now defaults to `false` The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. 
-Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. \ No newline at end of file +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. From 012e4fc552a93b344aea1af05faf68b8e301927e Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Tue, 12 Aug 2025 13:37:53 -0700 Subject: [PATCH 745/884] remove default vals and custom code for `publicRepository` in artifact registry repository (#14795) --- mmv1/products/artifactregistry/Repository.yaml | 13 ------------- ...artifact_registry_remote_repository.go.tmpl | 18 ------------------ .../guides/version_7_upgrade.html.markdown | 6 ++++++ 3 files changed, 6 insertions(+), 31 deletions(-) delete mode 100644 mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index 3b9ffebd2bff..1241e4cac2e8 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -53,7 +53,6 @@ iam_policy: custom_code: constants: 'templates/terraform/constants/artifact_registry_repository.go.tmpl' encoder: 'templates/terraform/encoders/location_from_region.go.tmpl' - pre_create: 'templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl' sweeper: url_substitutions: - region: "us-central1" @@ -517,9 +516,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.docker_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "DOCKER_HUB" 
enum_values: - 'DOCKER_HUB' - name: 'customRepository' @@ -556,9 +552,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.maven_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "MAVEN_CENTRAL" enum_values: - 'MAVEN_CENTRAL' - name: 'customRepository' @@ -595,9 +588,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.npm_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "NPMJS" enum_values: - 'NPMJS' - name: 'customRepository' @@ -634,9 +624,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.python_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "PYPI" enum_values: - 'PYPI' - name: 'customRepository' diff --git a/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl b/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl deleted file mode 100644 index bad5d0a9599b..000000000000 --- a/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -// This file should be deleted in the next major terraform release, alongside -// the default values for 'publicRepository'. - -// deletePublicRepoIfCustom deletes the publicRepository key for a given -// pkg type from the remote repository config if customRepository is set. 
-deletePublicRepoIfCustom := func(pkgType string) { - if _, ok := d.GetOk(fmt.Sprintf("remote_repository_config.0.%s_repository.0.custom_repository", pkgType)); ok { - rrcfg := obj["remoteRepositoryConfig"].(map[string]interface{}) - repo := rrcfg[fmt.Sprintf("%sRepository", pkgType)].(map[string]interface{}) - delete(repo, "publicRepository") - } -} - -// Call above func for all pkg types that support custom remote repos. -deletePublicRepoIfCustom("docker") -deletePublicRepoIfCustom("maven") -deletePublicRepoIfCustom("npm") -deletePublicRepoIfCustom("python") \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b8bdc4306444..128b9253d649 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,6 +102,12 @@ Description of the change and how users should adjust their configuration (if ne ## Resources +## Resource: `google_artifact_registry_repository` + +### `public_repository` fields have had their default values removed. + +`public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now. 
+ ## Resource: `google_bigtable_table_iam_binding` ### `instance` is now removed From c6974b3e92a35130c503e72116c58f1aa3043d6e Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Tue, 12 Aug 2025 13:57:08 -0700 Subject: [PATCH 746/884] `google_vertex_ai_index` fields made required (#14786) --- mmv1/products/vertexai/Index.yaml | 3 ++- .../website/docs/guides/version_7_upgrade.html.markdown | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/mmv1/products/vertexai/Index.yaml b/mmv1/products/vertexai/Index.yaml index dd7730740afe..7edadb67ef44 100644 --- a/mmv1/products/vertexai/Index.yaml +++ b/mmv1/products/vertexai/Index.yaml @@ -109,6 +109,7 @@ properties: - name: 'config' type: NestedObject description: The configuration of the Matching Engine Index. + required: true immutable: true properties: - name: 'dimensions' @@ -153,7 +154,7 @@ properties: type: NestedObject description: The configuration with regard to the algorithms used for efficient - search. + search. This field may be required based on your configuration. properties: - name: 'treeAhConfig' type: NestedObject diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 128b9253d649..de5158a684dc 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -188,6 +188,10 @@ Remove `template.containers.depends_on` from your configuration after upgrade. ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. +## Resource: `google_vertex_ai_index` + +### `metadata`, and `metadata.config` are now required. Resource creation would fail without these attributes already, so no change is necessary to existing configurations. 
+ ## Resource: `google_tpu_node` is now removed `google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. From 1d3866638c5540b34c47c5f0d7d4fda705b1142d Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 14 Aug 2025 12:46:00 -0700 Subject: [PATCH 747/884] remove google_beyondcorp_application (#14798) --- mmv1/products/beyondcorp/Application.yaml | 152 ------------------ .../beyondcorp_application_basic.tf.tmpl | 13 -- .../beyondcorp_application_vpc.tf.tmpl | 23 --- .../resource_beyondcorp_application_test.go | 85 ---------- .../guides/version_7_upgrade.html.markdown | 6 + 5 files changed, 6 insertions(+), 273 deletions(-) delete mode 100644 mmv1/products/beyondcorp/Application.yaml delete mode 100644 mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl delete mode 100644 mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml deleted file mode 100644 index a4d4e862bbaf..000000000000 --- a/mmv1/products/beyondcorp/Application.yaml +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: Application -deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' -description: Specifies application endpoint(s) to protect behind a Security Gateway. -base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications -update_mask: true -self_link: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} -create_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications?applicationId={{application_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} -import_format: - - projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} -iam_policy: - method_name_separator: ':' - iam_conditions_request_type: 'QUERY_PARAM_NESTED' - allowed_iam_role: 'roles/beyondcorp.securityGatewayUser' - parent_resource_attribute: 'application_id' - import_format: - - 'projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}}' - - '{{application_id}}' -examples: - - name: beyondcorp_application_basic - primary_resource_id: example - primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' - vars: - security_gateway_name: default - application_name: google - - name: beyondcorp_application_vpc - primary_resource_id: example - primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' - vars: - security_gateway_name: default - application_name: my-vm-service -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: '{{op_id}}' - actions: - - create - - delete - - 
update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: QXBwbGljYXRpb24= -parameters: - - name: securityGatewaysId - type: String - description: Part of `parent`. See documentation of `projectsId`. - immutable: true - url_param_only: true - required: true - - name: applicationId - type: String - description: |- - Optional. User-settable Application resource ID. - * Must start with a letter. - * Must contain between 4-63 characters from `/a-z-/`. - * Must end with a number or letter. - immutable: true - url_param_only: true - required: true -properties: - - name: createTime - type: String - description: Output only. Timestamp when the resource was created. - output: true - - name: displayName - type: String - description: |- - Optional. An arbitrary user-provided name for the Application resource. - Cannot exceed 64 characters. - - name: endpointMatchers - type: Array - description: |- - Required. Endpoint matchers associated with an application. - A combination of hostname and ports as endpoint matcher is used to match - the application. - Match conditions for OR logic. - An array of match conditions to allow for multiple matching criteria. - The rule is considered a match if one the conditions are met. - The conditions can be one of the following combination - (Hostname), (Hostname & Ports) - - EXAMPLES: - Hostname - ("*.abc.com"), ("xyz.abc.com") - Hostname and Ports - ("abc.com" and "22"), ("abc.com" and "22,33") etc - required: true - item_type: - type: NestedObject - properties: - - name: hostname - type: String - description: Required. Hostname of the application. - required: true - - name: ports - type: Array - description: Optional. Ports of the application. - item_type: - type: Integer - - name: upstreams - type: Array - description: Optional. List of which upstream resource(s) to forward traffic to. 
- item_type: - type: NestedObject - properties: - - name: egressPolicy - type: NestedObject - description: Optional. Routing policy information. - properties: - - name: regions - type: Array - description: Required. List of regions where the application sends traffic to. - required: true - item_type: - type: String - - name: network - type: NestedObject - description: Network to forward traffic to. - properties: - - name: name - type: string - description: |- - Required. Network name is of the format: - `projects/{project}/global/networks/{network}` - required: true - - name: name - type: String - description: Identifier. Name of the resource. - output: true - - name: updateTime - type: String - description: Output only. Timestamp when the resource was last modified. - output: true diff --git a/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl deleted file mode 100644 index 264aadfd924d..000000000000 --- a/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl +++ /dev/null @@ -1,13 +0,0 @@ -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "{{index $.Vars "security_gateway_name"}}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "{{$.PrimaryResourceId}}" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "{{index $.Vars "application_name"}}" - endpoint_matchers { - hostname = "google.com" - } -} diff --git a/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl deleted file mode 100644 index a54a36989bf5..000000000000 --- a/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl +++ /dev/null @@ -1,23 +0,0 @@ -data "google_project" "project" {} - -resource 
"google_beyondcorp_security_gateway" "default" { - security_gateway_id = "{{index $.Vars "security_gateway_name"}}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "{{$.PrimaryResourceId}}" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "{{index $.Vars "application_name"}}" - endpoint_matchers { - hostname = "my-vm-service.com" - } - upstreams { - egress_policy { - regions = ["us-central1"] - } - network { - name = "projects/${data.google_project.project.project_id}/global/networks/default" - } - } -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go deleted file mode 100644 index 28086bc32df4..000000000000 --- a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package beyondcorp_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context), - }, - { - ResourceName: "google_beyondcorp_application.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_id", "security_gateways_id"}, 
- }, - { - Config: testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_beyondcorp_application.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_beyondcorp_application.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_id", "security_gateways_id"}, - }, - }, - }) -} - -func testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "default%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "example" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "google%{random_suffix}" - endpoint_matchers { - hostname = "google.com" - } -} -`, context) -} - -func testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "default%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "example" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - display_name = "Updated Name" - application_id = "google%{random_suffix}" - endpoint_matchers { - hostname = "google.com" - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index de5158a684dc..b9048301b65d 100644 --- 
a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,6 +102,12 @@ Description of the change and how users should adjust their configuration (if ne ## Resources + +## Resource: `google_beyondcorp_application` is now removed + +`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. +Use `google_beyondcorp_security_gateway_application` instead. + ## Resource: `google_artifact_registry_repository` ### `public_repository` fields have had their default values removed. From 913d96da701500cbb61cf17615ca79b40ff17571 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 14 Aug 2025 12:53:31 -0700 Subject: [PATCH 748/884] Remove `project` from `google_service_account_key` (#14784) --- .../data_source_google_service_account_key.go | 5 ----- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go index 6c5002ab40c0..e83244935d5e 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go @@ -29,11 +29,6 @@ func DataSourceGoogleServiceAccountKey() *schema.Resource { Optional: true, ValidateFunc: validation.StringInSlice([]string{"TYPE_NONE", "TYPE_X509_PEM_FILE", "TYPE_RAW_PUBLIC_KEY"}, false), }, - "project": { - Type: schema.TypeString, - Optional: true, - Deprecated: "`project` is deprecated and will be removed in a future major 
release. This field is non-functional and can be removed from your configuration safely.", - }, "key_algorithm": { Type: schema.TypeString, Computed: true, diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b9048301b65d..9bb435d9169c 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -100,6 +100,12 @@ Description of the change and how users should adjust their configuration (if ne Description of the change and how users should adjust their configuration (if needed). +## Datasource: `google_service_account_key` + +### `project` is now removed + +`project` has been removed. It can be safely removed from your configuration. + ## Resources From bb66d8cdc7dc2e66ac7a467684f0af0986cdeca5 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Thu, 14 Aug 2025 16:33:07 -0500 Subject: [PATCH 749/884] fix bigtable Table IAM instance_name panic (#14864) --- .../services/bigtable/iam_bigtable_table.go | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go index 03c647546453..8afaf2e8d5ed 100644 --- a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go @@ -33,11 +33,11 @@ var IamBigtableTableSchema = map[string]*schema.Schema{ } type BigtableTableIamUpdater struct { - project string - instance string - table string - d tpgresource.TerraformResourceData - Config *transport_tpg.Config + project string + instanceName string + table string + d tpgresource.TerraformResourceData + Config *transport_tpg.Config } func NewBigtableTableUpdater(d tpgresource.TerraformResourceData, 
config *transport_tpg.Config) (tpgiamresource.ResourceIamUpdater, error) { @@ -51,18 +51,18 @@ func NewBigtableTableUpdater(d tpgresource.TerraformResourceData, config *transp } return &BigtableTableIamUpdater{ - project: project, - instance: d.Get("instance").(string), - table: d.Get("table").(string), - d: d, - Config: config, + project: project, + instanceName: d.Get("instance_name").(string), + table: d.Get("table").(string), + d: d, + Config: config, }, nil } func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Config) error { values := make(map[string]string) - m, err := tpgresource.GetImportIdQualifiers([]string{"projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P
[^/]+)"}, d, config, d.Id()) + m, err := tpgresource.GetImportIdQualifiers([]string{"projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P
[^/]+)"}, d, config, d.Id()) if err != nil { return err } @@ -77,7 +77,7 @@ func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Conf return fmt.Errorf("Error setting project: %s", err) } - if err := d.Set("instance", values["instance"]); err != nil { + if err := d.Set("instance_name", values["instance_name"]); err != nil { return fmt.Errorf("Error setting instance: %s", err) } @@ -86,7 +86,7 @@ func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Conf } // Explicitly set the id so imported resources have the same ID format as non-imported ones. - d.SetId(fmt.Sprintf("projects/%s/instances/%s/tables/%s", project, values["instance"], values["table"])) + d.SetId(fmt.Sprintf("projects/%s/instances/%s/tables/%s", project, values["instance_name"], values["table"])) return nil } @@ -133,13 +133,13 @@ func (u *BigtableTableIamUpdater) SetResourceIamPolicy(policy *cloudresourcemana } func (u *BigtableTableIamUpdater) GetResourceId() string { - return fmt.Sprintf("projects/%s/instances/%s/tables/%s", u.project, u.instance, u.table) + return fmt.Sprintf("projects/%s/instances/%s/tables/%s", u.project, u.instanceName, u.table) } func (u *BigtableTableIamUpdater) GetMutexKey() string { - return fmt.Sprintf("iam-bigtable-instance-%s-%s-%s", u.project, u.instance, u.table) + return fmt.Sprintf("iam-bigtable-instance-%s-%s-%s", u.project, u.instanceName, u.table) } func (u *BigtableTableIamUpdater) DescribeResource() string { - return fmt.Sprintf("Bigtable Table %s/%s-%s", u.project, u.instance, u.table) + return fmt.Sprintf("Bigtable Table %s/%s-%s", u.project, u.instanceName, u.table) } From 4f9f8ad081a6f86035efb0fa7ea4666e3b2f05bf Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 15 Aug 2025 09:54:54 -0700 Subject: [PATCH 750/884] Fix partial match regexes (#14866) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- .../terraform/custom_import/extract_taxonomy.go.tmpl | 2 +- 
.../custom_import/vertex_ai_tensorboard_import.go.tmpl | 8 ++++---- .../terraform/services/apigee/resource_apigee_api.go | 4 ++-- .../terraform/services/apigee/resource_apigee_flowhook.go | 4 ++-- .../resource_apigee_keystores_aliases_key_cert_file.go | 4 ++-- .../apigee/resource_apigee_keystores_aliases_pkcs12.go | 4 ++-- .../services/apigee/resource_apigee_sharedflow.go | 4 ++-- .../apigee/resource_apigee_sharedflow_deployment.go | 5 +++-- .../services/bigquery/resource_bigquery_table.go.tmpl | 6 +++--- .../bigtable/resource_bigtable_authorized_view.go | 6 +++--- .../services/bigtable/resource_bigtable_instance.go | 6 +++--- .../services/bigtable/resource_bigtable_table.go | 6 +++--- .../cloudfunctions/resource_cloudfunctions_function.go | 6 +++--- .../services/compute/resource_compute_instance.go.tmpl | 6 +++--- .../compute/resource_compute_instance_group.go.tmpl | 6 +++--- .../resource_compute_project_metadata_item.go.tmpl | 4 ++-- .../services/compute/resource_compute_target_pool.go.tmpl | 8 ++++---- .../terraform/services/dns/resource_dns_record_set.go | 6 +++--- .../osconfig/resource_os_config_os_policy_assignment.go | 6 +++--- .../resource_google_folder_organization_policy.go | 6 +++--- .../resource_google_project_iam_custom_role.go | 6 +++--- .../resource_google_project_organization_policy.go | 6 +++--- .../resourcemanager/resource_google_service_account.go | 6 +++--- .../services/sql/resource_sql_database_instance.go.tmpl | 6 +++--- .../website/docs/guides/version_7_upgrade.html.markdown | 4 ++++ tpgtools/ignored_handwritten/custom_import.go | 8 ++++---- 26 files changed, 74 insertions(+), 69 deletions(-) diff --git a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl index c99cf546f939..17d7a76b1bfb 100644 --- a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl +++ b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl @@ -1,7 +1,7 @@ config := 
meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)"}, d, config); err != nil { + "^(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl index be6cd588a588..7d834ceeba51 100644 --- a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl +++ b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl @@ -1,9 +1,9 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go index 57375e59f2ae..2921e19e0caa 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go @@ -324,8 +324,8 @@ func resourceApigeeApiDelete(d *schema.ResourceData, meta interface{}) error { func resourceApigeeApiImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/apis/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/apis/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go 
b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go index 8a5d6ac30957..635838873cd0 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go @@ -223,8 +223,8 @@ func resourceApigeeFlowhookDelete(d *schema.ResourceData, meta interface{}) erro func resourceApigeeFlowhookImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go index 48747a85d5b9..03d842bfc305 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go @@ -362,8 +362,8 @@ func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, met func resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git 
a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go index 12b1f85fc0ef..ef6a2655cf39 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go @@ -299,8 +299,8 @@ func ResourceApigeeKeystoresAliasesPkcs12Delete(d *schema.ResourceData, meta int func ResourceApigeeKeystoresAliasesPkcs12Import(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go index 4820b95768d7..3b0eba665a4c 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go @@ -324,8 +324,8 @@ func resourceApigeeSharedFlowDelete(d *schema.ResourceData, meta interface{}) er func resourceApigeeSharedFlowImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/sharedflows/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/sharedflows/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go 
b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go index 7cd90e8676b0..68f1c91d9ac2 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go @@ -228,8 +228,9 @@ func resourceApigeeSharedflowDeploymentDelete(d *schema.ResourceData, meta inter func resourceApigeeSharedflowDeploymentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)$", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)/deployments$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index dce185ebebf6..bebe08d56791 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -3501,9 +3501,9 @@ func flattenSerDeInfo(si *bigquery.SerDeInfo) []map[string]interface{} { func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, 
err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go index 4a26b6e82a8b..b34ff48e2388 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go @@ -355,9 +355,9 @@ func resourceBigtableAuthorizedViewDestroy(d *schema.ResourceData, meta interfac func resourceBigtableAuthorizedViewImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index 1487ce288b7e..c51d6b8c68ff 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -829,9 +829,9 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo func resourceBigtableInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git 
a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index 66df55fb8b2c..91cddb12b30d 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -686,9 +686,9 @@ func FlattenColumnFamily(families []bigtable.FamilyInfo) ([]map[string]interface func resourceBigtableTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index 5fbab6b22280..739b37d86a47 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -65,9 +65,9 @@ func (s *CloudFunctionId) locationId() string { func parseCloudFunctionId(d *schema.ResourceData, config *transport_tpg.Config) (*CloudFunctionId, error) { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 46ea496e1388..718a857dab8d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -3436,9 +3436,9 @@ func resourceComputeInstanceDelete(d *schema.ResourceData, meta interface{}) err func resourceComputeInstanceImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl index b5782de796cb..af13d60d0d6b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl @@ -455,9 +455,9 @@ func resourceComputeInstanceGroupDelete(d *schema.ResourceData, meta interface{} func resourceComputeInstanceGroupImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl index fdf16116aa12..4f10ee9db007 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl @@ -184,8 +184,8 @@ func resourceComputeProjectMetadataItemDelete(d *schema.ResourceData, meta inter func resourceComputeProjectMetadataItemImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/meta-data/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/meta-data/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl index d5617b8e3e96..612ec7399dd1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl @@ -571,10 +571,10 @@ func resourceComputeTargetPoolDelete(d *schema.ResourceData, meta interface{}) e func resourceTargetPoolStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go index 
a9f713075b73..a8e74f92a90d 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go +++ b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go @@ -648,9 +648,9 @@ func resourceDnsRecordSetUpdate(d *schema.ResourceData, meta interface{}) error func resourceDnsRecordSetImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go index 5ae751ff3792..ad832cd01496 100644 --- a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go +++ b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go @@ -1445,9 +1445,9 @@ func resourceOSConfigOSPolicyAssignmentDelete(d *schema.ResourceData, meta inter func resourceOSConfigOSPolicyAssignmentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go 
b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go index 18ec055b54d1..6e17b8d7603b 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go @@ -47,9 +47,9 @@ func resourceFolderOrgPolicyImporter(d *schema.ResourceData, meta interface{}) ( config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "folders/(?P[^/]+)/constraints/(?P[^/]+)", - "folders/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)"}, + "^folders/(?P[^/]+)/constraints/(?P[^/]+)$", + "^folders/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go index ef27f2aacb9a..9b4828db0ab8 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go @@ -238,9 +238,9 @@ func resourceGoogleProjectIamCustomRoleDelete(d *schema.ResourceData, meta inter func resourceGoogleProjectIamCustomRoleImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/roles/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/roles/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go index 
503117edf5b8..88ae902d0783 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go @@ -47,9 +47,9 @@ func resourceProjectOrgPolicyImporter(d *schema.ResourceData, meta interface{}) config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+):constraints/(?P[^/]+)", - "(?P[^/]+):constraints/(?P[^/]+)", - "(?P[^/]+):(?P[^/]+)"}, + "^projects/(?P[^/]+):constraints/(?P[^/]+)$", + "^(?P[^/]+):constraints/(?P[^/]+)$", + "^(?P[^/]+):(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index fd50b35de26e..21e829a6374a 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -321,9 +321,9 @@ func resourceGoogleServiceAccountUpdate(d *schema.ResourceData, meta interface{} func resourceGoogleServiceAccountImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)"}, d, config); err != nil { + "^projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index bd3c036f73f3..6ed39b1db93c 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -2367,9 +2367,9 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)"}, d, config); err != nil { + "^projects/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 9bb435d9169c..47707bfd4b45 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -92,6 +92,10 @@ terraform { Description of the change and how users should adjust their configuration (if needed). +### Resource import formats have improved validation + +Throughout the provider there were many resources which erroneously gave false positives to poorly formatted import input if a subset of the provided input was valid to their configured import formats. All GCP resource IDs supplied to "terraform import" must match the documentation specified import formats exactly. 
+ ## Datasources ## Datasource: `google_product_datasource` diff --git a/tpgtools/ignored_handwritten/custom_import.go b/tpgtools/ignored_handwritten/custom_import.go index be5cfbc9c300..b50236160e00 100644 --- a/tpgtools/ignored_handwritten/custom_import.go +++ b/tpgtools/ignored_handwritten/custom_import.go @@ -10,8 +10,8 @@ import ( func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/repos/(?P.+)", - "(?P.+)", + "^projects/(?P[^/]+)/repos/(?P.+)$", + "^(?P.+)$", }, d, config); err != nil { return err } @@ -28,8 +28,8 @@ func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) erro func runtimeconfigVariableImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)", - "(?P[^/]+)/(?P.+)", + "^projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)$", + "^(?P[^/]+)/(?P.+)$", }, d, config); err != nil { return err } From 0e91a8ec279b5864d50b63955f90557d52191f51 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 15 Aug 2025 10:30:14 -0700 Subject: [PATCH 751/884] convert storage transfer job file from .tmpl (#14867) --- ...ransfer_job.go.tmpl => resource_storage_transfer_job.go} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job.go.tmpl => resource_storage_transfer_job.go} (99%) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go similarity index 99% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 8187246ffdd2..38fbf29048af 100644 --- 
a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -694,9 +694,9 @@ func gcsDataSchema() *schema.Resource { Description: `Google Cloud Storage bucket name.`, }, "path": { - Optional: true, - Type: schema.TypeString, - Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should not begin with a '/'.`, + Optional: true, + Type: schema.TypeString, + Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should not begin with a '/'.`, ValidateFunc: validateGCSDataPath, }, }, From ab550cbd249e09ca545c2869af0c61b02af0562e Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 18 Aug 2025 09:45:40 -0700 Subject: [PATCH 752/884] changed mirrored_resources nested objects to set (#14871) Co-authored-by: Jonathan Greger <43762185+jmgreger@users.noreply.github.com> --- mmv1/products/compute/PacketMirroring.yaml | 6 ++++ .../compute_packet_mirroring_full.tf.tmpl | 3 ++ .../tpgresource/self_link_helpers.go | 30 +++++++++++++++++++ .../guides/version_7_upgrade.html.markdown | 6 ++++ 4 files changed, 45 insertions(+) diff --git a/mmv1/products/compute/PacketMirroring.yaml b/mmv1/products/compute/PacketMirroring.yaml index 123fcc99c203..d4bf994a3a73 100644 --- a/mmv1/products/compute/PacketMirroring.yaml +++ b/mmv1/products/compute/PacketMirroring.yaml @@ -144,6 +144,8 @@ properties: properties: - name: 'subnetworks' type: Array + is_set: true + set_hash_func: tpgresource.NestedUrlSetHashFunc description: | All instances in one of these subnetworks will be mirrored. at_least_one_of: @@ -161,10 +163,13 @@ properties: The URL of the subnetwork where this rule should be active. 
required: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' resource: 'Subnetwork' imports: 'selfLink' - name: 'instances' type: Array + is_set: true + set_hash_func: tpgresource.NestedUrlSetHashFunc description: | All the listed instances will be mirrored. Specify at most 50. at_least_one_of: @@ -181,6 +186,7 @@ properties: The URL of the instances where this rule should be active. required: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' resource: 'Instance' imports: 'selfLink' - name: 'tags' diff --git a/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl index 4a82a492c4cd..054f849bf010 100644 --- a/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl @@ -68,6 +68,9 @@ resource "google_compute_packet_mirroring" "{{$.PrimaryResourceId}}" { instances { url = google_compute_instance.mirror.id } + subnetworks { + url = google_compute_subnetwork.default.id + } } filter { ip_protocols = ["tcp"] diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 0a1f3d9ff8be..93982ff5c0d1 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -1,8 +1,10 @@ package tpgresource import ( + "bytes" "errors" "fmt" + "log" "net/url" "regexp" "strings" @@ -92,6 +94,34 @@ func SelfLinkNameHash(selfLink interface{}) int { return Hashcode(name) } +// Hash based on relative url for a nested object containing a URL field. 
+func NestedUrlSetHashFunc(v interface{}) int { + if v == nil { + return 0 + } + + var buf bytes.Buffer + m := v.(map[string]interface{}) + log.Printf("[DEBUG] hashing %v", m) + + if v, ok := m["url"]; ok { + if v == nil { + v = "" + } else { + if relUrl, err := GetRelativePath(v.(string)); err != nil { + log.Printf("[WARN] Error on retrieving relative path of network url: %s", err) + } else { + v = relUrl + } + } + + buf.WriteString(fmt.Sprintf("%v-", v)) + } + + log.Printf("[DEBUG] computed hash value of %v from %v", Hashcode(buf.String()), buf.String()) + return Hashcode(buf.String()) +} + func ConvertSelfLinkToV1(link string) string { reg := regexp.MustCompile("/compute/[a-zA-Z0-9]*/projects/") return reg.ReplaceAllString(link, "/compute/v1/projects/") diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 47707bfd4b45..a913d137eefc 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -142,6 +142,12 @@ Use `google_beyondcorp_security_gateway_application` instead. `instance` has been removed in favor of `instance_name`. +## Resource: `google_compute_packet_mirroring` + +### `subnetworks` and `instances` fields have been converted to sets + +`subnetworks` and `instances` fields have been converted to sets. If you need to access values in their nested objects, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration. 
+ ## Resource: `google_compute_subnetwork` ### `enable_flow_logs`is now removed From e4cc53cd070a24c4f80f955232811564d5367633 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 18 Aug 2025 12:05:10 -0700 Subject: [PATCH 753/884] add deletion protection to alloydb cluster (#14796) --- mmv1/products/alloydb/Cluster.yaml | 21 +++ .../examples/alloydb_backup_basic.tf.tmpl | 2 + .../alloydb_backup_basic_test.tf.tmpl | 2 + .../examples/alloydb_backup_full.tf.tmpl | 2 + .../examples/alloydb_backup_full_test.tf.tmpl | 2 + .../alloydb_cluster_after_upgrade.tf.tmpl | 2 + .../examples/alloydb_cluster_basic.tf.tmpl | 2 + .../alloydb_cluster_before_upgrade.tf.tmpl | 2 + .../examples/alloydb_cluster_full.tf.tmpl | 2 + .../examples/alloydb_cluster_restore.tf.tmpl | 6 + .../examples/alloydb_instance_basic.tf.tmpl | 2 + .../alloydb_instance_basic_test.tf.tmpl | 2 + .../alloydb_instance_psc_test.tf.tmpl | 2 + .../alloydb_secondary_cluster_basic.tf.tmpl | 3 + ...loydb_secondary_cluster_basic_test.tf.tmpl | 3 + .../alloydb_secondary_instance_basic.tf.tmpl | 3 + ...oydb_secondary_instance_basic_test.tf.tmpl | 3 + .../examples/alloydb_user_builtin.tf.tmpl | 2 + .../alloydb_user_builtin_test.tf.tmpl | 2 + .../examples/alloydb_user_iam.tf.tmpl | 2 + .../examples/alloydb_user_iam_test.tf.tmpl | 2 + ...onnection_profile_existing_alloydb.tf.tmpl | 2 + ..._migration_job_postgres_to_alloydb.tf.tmpl | 2 + .../pre_delete/alloydb_cluster.go.tmpl | 4 + .../alloydb/data_source_alloydb_cluster.go | 5 + .../data_source_alloydb_cluster_test.go | 2 + ...a_source_alloydb_database_instance_test.go | 2 + .../alloydb/resource_alloydb_backup_test.go | 8 + .../resource_alloydb_cluster_restore_test.go | 44 ++++- .../alloydb/resource_alloydb_cluster_test.go | 165 ++++++++++++------ .../alloydb/resource_alloydb_instance_test.go | 34 ++++ ...resource_alloydb_secondary_cluster_test.go | 132 +++++++++++--- ...esource_alloydb_secondary_instance_test.go | 28 +++ .../alloydb/resource_alloydb_user_test.go | 6 + 
.../guides/version_7_upgrade.html.markdown | 7 + .../tests/data/example_alloydb_instance.tf | 2 + 36 files changed, 429 insertions(+), 83 deletions(-) diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index 9a1341808086..d1754852121c 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -67,6 +67,8 @@ examples: primary_resource_id: 'default' vars: alloydb_cluster_name: 'alloydb-cluster' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_before_upgrade' primary_resource_id: 'default' vars: @@ -75,6 +77,8 @@ examples: network_name: 'alloydb-network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_after_upgrade' primary_resource_id: 'default' vars: @@ -83,10 +87,14 @@ examples: network_name: 'alloydb-network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_full' primary_resource_id: 'full' vars: alloydb_cluster_name: 'alloydb-cluster-full' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_restore' primary_resource_id: 'source' vars: @@ -99,6 +107,7 @@ examples: test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-instance-basic")' ignore_read_extra: + - 'deletion_protection' - 'reconciling' - 'update_time' exclude_test: true @@ -108,6 +117,8 @@ examples: alloydb_primary_cluster_name: 'alloydb-primary-cluster' alloydb_primary_instance_name: 'alloydb-primary-instance' alloydb_secondary_cluster_name: 'alloydb-secondary-cluster' + ignore_read_extra: + - 'deletion_protection' exclude_test: true - name: 'alloydb_secondary_cluster_basic_test' primary_resource_id: 'secondary' @@ -118,6 +129,8 @@ examples: network_name: 'alloydb-network' test_vars_overrides: 'network_name': 
'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + ignore_read_extra: + - 'deletion_protection' exclude_docs: true virtual_fields: - name: 'deletion_policy' @@ -128,6 +141,14 @@ virtual_fields: Possible values: DEFAULT, FORCE type: String default_value: "DEFAULT" + - name: 'deletion_protection' + description: | + Whether Terraform will be prevented from destroying the cluster. + When the field is set to true or unset in Terraform state, a `terraform apply` + or `terraform destroy` that would delete the cluster will fail. + When the field is set to false, deleting the cluster is allowed. + type: Boolean + default_value: true - name: 'skip_await_major_version_upgrade' type: Boolean default_value: true diff --git a/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl index a78ce7463f6a..933c5ec28e04 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl @@ -12,6 +12,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl index b09d5a4be7ef..aedf0a53d9ab 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl @@ -12,6 +12,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl 
b/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl index 563fdcbac298..6b4f0385861e 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl @@ -17,6 +17,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl index 218bc7ee3bed..abb4e8618b2e 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl @@ -17,6 +17,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl index 672301dfc794..cb829f13414f 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl @@ -20,6 +20,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl index c9bab8098b43..86f8d92fe2f9 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl @@ -4,6 +4,8 @@ resource 
"google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl index 9cc7adf3c6aa..319e346fd914 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl @@ -20,6 +20,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl index 2b0c9e4c9f63..44f8e9faea1f 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl @@ -44,6 +44,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { labels = { test = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl index 7713276440e6..bd1ff8d2668d 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl @@ -6,6 +6,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { @@ -37,6 +39,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { restore_backup_source { backup_name = 
google_alloydb_backup.{{$.PrimaryResourceId}}.name } + + deletion_protection = false } resource "google_alloydb_cluster" "restored_via_pitr" { @@ -49,6 +53,8 @@ resource "google_alloydb_cluster" "restored_via_pitr" { cluster = google_alloydb_cluster.{{$.PrimaryResourceId}}.name point_in_time = "2023-08-03T19:19:00.094Z" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl index 575c2a6ed790..31b0d9e40a0b 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl @@ -20,6 +20,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl index 0ca5146f0ac8..dd6c485736fe 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl @@ -17,6 +17,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl index d2d4712d0ae7..78b6cdf4d91e 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl @@ -18,4 +18,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { psc_config { psc_enabled = true } + + deletion_protection = false } diff 
--git a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl index 6911d955d778..6d48cfa10e94 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -34,6 +36,7 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl index 9b04de5953d4..87fa2ba37399 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -32,6 +34,7 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl index 8e2eeb44e88d..a10d35521051 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl @@ -4,6 +4,8 @@ resource 
"google_alloydb_cluster" "primary" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -43,6 +45,7 @@ resource "google_alloydb_cluster" "secondary" { ignore_changes = [instance_type] } + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl index 432fd4d91e80..f3c959d2baf0 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -34,6 +36,7 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl index b91e92abddce..5e3480f68a38 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl @@ -15,6 +15,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl index 01d0f92dd77c..2ccac4ee4918 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl @@ -13,6 
+13,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl index 107cdaf25b3f..62a938a57ee8 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl @@ -16,6 +16,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl index eebb03454761..1db1157c4b9f 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl @@ -13,6 +13,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl b/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl index 10b99197cb48..5e272f5be9d4 100644 --- a/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl +++ b/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl @@ -13,6 +13,8 @@ resource "google_alloydb_cluster" "destination_alloydb" { user = "{{index $.Vars "destination_alloydb"}}" password = "{{index $.Vars "destination_alloydb"}}" } + + deletion_protection = false } resource "google_alloydb_instance" "destination_alloydb_primary" { diff 
--git a/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl b/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl index 2e66858dec05..1b8b97fb8209 100644 --- a/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl +++ b/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl @@ -62,6 +62,8 @@ resource "google_alloydb_cluster" "destination_alloydb" { user = "{{index $.Vars "destination_alloydb"}}" password = "{{index $.Vars "destination_alloydb"}}" } + + deletion_protection = false } resource "google_alloydb_instance" "destination_alloydb_primary" { diff --git a/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl b/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl index 743d7e857912..b1543d5758c4 100644 --- a/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl +++ b/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl @@ -1,3 +1,7 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy cluster without setting deletion_protection=false and running `terraform apply`") +} + // Forcefully delete the secondary cluster and the dependent instances because deletion of secondary instance is not supported. 
if deletionPolicy := d.Get("deletion_policy"); deletionPolicy == "FORCE" { url = url + "?force=true" diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go index 4ccc3db083c3..3e289182ae8d 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go @@ -55,5 +55,10 @@ func dataSourceAlloydbDatabaseClusterRead(d *schema.ResourceData, meta interface if d.Id() == "" { return fmt.Errorf("%s not found", id) } + + if err := d.Set("deletion_protection", nil); err != nil { + return fmt.Errorf("Error setting deletion_protection: %s", err) + } + return nil } diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go index e520602a99f2..c70906077b38 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go @@ -38,6 +38,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go index 5220263365cd..805a1cb8c1d3 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go @@ -48,6 +48,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" 
"default" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go index 271676fdc51e..4bff41abc781 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go @@ -63,6 +63,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -99,6 +101,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -149,6 +153,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" { } @@ -218,6 +224,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go index 9d12fba70b1b..b3ce46da17df 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go @@ -34,7 +34,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.source", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { // Invalid input 
check - cannot pass in both sources @@ -54,7 +54,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.restored_from_backup", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "restore_backup_source"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "restore_backup_source"}, }, { // Validate PITR succeeds @@ -64,7 +64,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.restored_from_point_in_time", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "restore_continuous_backup_source"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "restore_continuous_backup_source"}, }, { // Make sure updates work without recreating the clusters @@ -89,6 +89,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -122,6 +124,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -155,6 +159,8 @@ resource "google_alloydb_cluster" "restored" { lifecycle { prevent_destroy = true } + + deletion_protection = false } data "google_project" "project" {} @@ -174,6 +180,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -201,6 +209,8 @@ resource "google_alloydb_cluster" "restored" { cluster = google_alloydb_cluster.source.name } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -222,6 +232,8 @@ resource 
"google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -248,6 +260,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { backup_name = google_alloydb_backup.default.name } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -271,6 +285,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -297,6 +313,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { backup_name = google_alloydb_backup.default.name } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -313,6 +331,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { point_in_time = google_alloydb_backup.default.update_time } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -336,6 +356,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -367,6 +389,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -388,6 +412,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -411,6 +437,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -450,6 +478,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -473,6 +503,8 @@ resource 
"google_alloydb_cluster" "restored_from_point_in_time" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -496,6 +528,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -521,6 +555,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { restore_backup_source { backup_name = google_alloydb_backup.default.name } + + deletion_protection = false } resource "google_alloydb_cluster" "restored_from_point_in_time" { @@ -533,6 +569,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { cluster = google_alloydb_cluster.source.name point_in_time = google_alloydb_backup.default.update_time } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go index 3c7d9f433cae..d3f141b83b84 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go @@ -30,7 +30,7 @@ func TestAccAlloydbCluster_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels"}, }, { Config: testAccAlloydbCluster_update(context), @@ -39,7 +39,7 @@ func TestAccAlloydbCluster_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels"}, + 
ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -56,6 +56,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" { @@ -87,7 +89,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, }, { Config: testAccAlloydbCluster_afterUpgrade(context), @@ -96,7 +98,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, }, }, }) @@ -112,6 +114,8 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id } database_version = "POSTGRES_14" + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -140,6 +144,8 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id } database_version = "POSTGRES_15" + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -192,6 +198,8 @@ resource "google_alloydb_cluster" 
"default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" { @@ -235,6 +243,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" { @@ -269,7 +279,7 @@ func TestAccAlloydbCluster_addAutomatedBackupPolicyAndInitialUser(t *testing.T) ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withInitialUserAndAutomatedBackupPolicy(context), @@ -278,7 +288,7 @@ func TestAccAlloydbCluster_addAutomatedBackupPolicyAndInitialUser(t *testing.T) ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -310,7 +320,7 @@ func TestAccAlloydbCluster_deleteAutomatedBackupPolicyAndInitialUser(t *testing. ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withoutInitialUserAndAutomatedBackupPolicy(context), @@ -319,7 +329,7 @@ func TestAccAlloydbCluster_deleteAutomatedBackupPolicyAndInitialUser(t *testing. 
ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -350,7 +360,7 @@ func TestAccAlloydbCluster_AutomatedBackupPolicyHandlesMidnight(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -397,6 +407,9 @@ resource "google_alloydb_cluster" "default" { test = "tf-test-alloydb-cluster%{random_suffix}" } } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -419,6 +432,9 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -450,9 +466,10 @@ func TestAccAlloydbCluster_missingWeeklySchedule(t *testing.T) { Config: testAccAlloydbCluster_missingWeeklySchedule(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -474,9 +491,11 @@ resource "google_alloydb_cluster" "default" { count = 1 } labels = { - test = "tf-test-alloydb-cluster%{random_suffix}" - } + test = "tf-test-alloydb-cluster%{random_suffix}" + } } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -542,17 
+561,19 @@ func TestAccAlloydbCluster_deleteTimeBasedRetentionPolicy(t *testing.T) { Config: testAccAlloydbCluster_withTimeBasedRetentionPolicy(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccAlloydbCluster_withoutTimeBasedRetentionPolicy(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -588,6 +609,9 @@ resource "google_alloydb_cluster" "default" { retention_period = "4.5s" } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -628,6 +652,9 @@ resource "google_alloydb_cluster" "default" { } } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -663,7 +690,7 @@ func TestAccAlloydbCluster_usingCMEK(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, }, }) @@ -680,6 +707,8 @@ resource "google_alloydb_cluster" "default" { encryption_config { kms_key_name = "%{kms_key_name}" } + + deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } resource "google_compute_network" "default" { @@ -715,7 +744,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_updateCMEKInAutomatedBackup(context), @@ -724,7 +753,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_usingCMEKallowDeletion(context), @@ -733,7 +762,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, }, }) @@ -761,8 +790,11 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } + + deletion_protection = false + lifecycle { - prevent_destroy = true + prevent_destroy = true } depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -803,6 +835,9 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -851,6 +886,8 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } + + deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -898,7 +935,7 @@ func TestAccAlloydbCluster_continuousBackup_enabledByDefault(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: 
testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -934,7 +971,7 @@ func TestAccAlloydbCluster_continuousBackup_update_noChangeIfDefaultsSet(t *test ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context), @@ -947,7 +984,7 @@ func TestAccAlloydbCluster_continuousBackup_update_noChangeIfDefaultsSet(t *test ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -983,7 +1020,7 @@ func TestAccAlloydbCluster_continuousBackup_noChangeIfRemoved(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1028,7 +1065,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context), @@ -1041,7 +1078,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: 
true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context2), @@ -1054,7 +1091,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1071,6 +1108,9 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -1098,6 +1138,9 @@ resource "google_alloydb_cluster" "default" { enabled = %{enabled} recovery_window_days = %{recovery_window_days} } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -1142,7 +1185,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_usingCMEKInClusterAndContinuousBackup(context2), @@ -1151,7 +1194,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: 
testAccAlloydbCluster_continuousBackupUsingCMEKAllowDeletion(context2), @@ -1160,7 +1203,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, }, }) @@ -1181,8 +1224,11 @@ resource "google_alloydb_cluster" "default" { kms_key_name = "%{key_name}" } } + + deletion_protection = false + lifecycle { - prevent_destroy = true + prevent_destroy = true } depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -1216,6 +1262,8 @@ resource "google_alloydb_cluster" "default" { kms_key_name = "%{key_name}" } } + + deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -1250,9 +1298,10 @@ func TestAccAlloydbCluster_withNetworkConfig(t *testing.T) { Config: testAccAlloydbCluster_withNetworkConfig(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1266,6 +1315,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1291,9 +1342,10 @@ func TestAccAlloydbCluster_withNetworkConfigAndAllocatedIPRange(t *testing.T) { Config: testAccAlloydbCluster_withNetworkConfigAndAllocatedIPRange(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1308,6 +1360,8 @@ resource "google_alloydb_cluster" "default" { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" allocated_ip_range = google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1341,9 +1395,10 @@ func TestAccAlloydbCluster_withMaintenanceWindows(t *testing.T) { Config: testAccAlloydbCluster_withMaintenanceWindows(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1368,6 +1423,8 @@ resource "google_alloydb_cluster" "default" { } } } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1416,6 +1473,8 @@ resource "google_alloydb_cluster" "default" { day = "WEDNESDAY" } } + + deletion_protection = false } resource "google_compute_network" "default" { @@ -1445,6 +1504,8 @@ resource "google_alloydb_cluster" "default" { } } } + + deletion_protection = false } resource "google_compute_network" "default" { @@ -1487,6 +1548,8 @@ resource "google_alloydb_cluster" "default" { psc_config { psc_enabled = true } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1515,7 +1578,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ 
-1524,7 +1587,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1533,7 +1596,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, }, }) @@ -1559,7 +1622,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeTrial(context), @@ -1568,7 +1631,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1577,7 +1640,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", 
"cluster_id", "location"}, }, }, }) diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index 72cec88fa08c..9280badf632b 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -64,6 +64,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { @@ -99,6 +101,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { @@ -211,6 +215,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -235,6 +241,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -362,6 +370,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -445,6 +455,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -496,6 +508,8 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } data "google_compute_network" "default" { @@ -635,6 +649,8 @@ resource "google_alloydb_cluster" "default" { network_config 
{ network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -666,6 +682,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -797,6 +815,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -835,6 +855,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -890,6 +912,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -917,6 +941,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -972,6 +998,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1054,6 +1082,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1078,6 +1108,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1132,6 +1164,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data 
"google_project" "project" {} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go index 23c9340b6dc0..e3010a2c8454 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go @@ -28,7 +28,7 @@ func TestAccAlloydbCluster_secondaryClusterMandatoryFields(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -42,6 +42,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -70,6 +72,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -111,6 +115,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -135,6 +141,8 @@ resource "google_alloydb_cluster" "secondary" { enabled = false } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -176,6 +184,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id 
} + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -203,6 +213,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -244,6 +256,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -272,6 +286,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -304,7 +320,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterUpdate(context), @@ -313,7 +329,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, 
}) @@ -327,6 +343,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -359,6 +377,8 @@ resource "google_alloydb_cluster" "secondary" { foo = "bar" } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -391,7 +411,7 @@ func TestAccAlloydbCluster_secondaryClusterUsingCMEK(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -405,6 +425,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -437,6 +459,8 @@ resource "google_alloydb_cluster" "secondary" { kms_key_name = "%{kms_key_name}" } + deletion_protection = false + depends_on = [ google_alloydb_instance.primary, google_kms_crypto_key_iam_member.crypto_key @@ -478,7 +502,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfig(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", 
"labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -492,6 +516,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -520,6 +546,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -553,7 +581,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfigAndAllocatedIPRange( ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -568,6 +596,8 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -597,6 +627,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -634,7 +666,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", 
"restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -643,7 +675,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -657,6 +689,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -687,6 +721,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -720,6 +756,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -743,6 +781,8 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -789,7 +829,7 @@ func 
TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(context), @@ -798,7 +838,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -812,6 +852,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -839,6 +881,8 @@ resource "google_alloydb_cluster" "secondary" { labels = { foo = "bar" } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -885,7 +929,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -894,7 +938,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(context), @@ -903,7 +947,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -922,6 +966,8 @@ resource "google_alloydb_cluster" "secondary" { 
continuous_backup_config { enabled = false } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -968,7 +1014,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -977,7 +1023,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndUpdate(context), @@ -986,7 +1032,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + 
ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1000,6 +1046,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1028,6 +1076,7 @@ resource "google_alloydb_cluster" "secondary" { foo = "bar" } + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1074,7 +1123,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedIPRange(context), @@ -1083,7 +1132,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", 
"labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1098,6 +1147,8 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1129,6 +1180,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -1167,6 +1220,8 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1191,6 +1246,8 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1242,7 +1299,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1251,7 +1308,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: 
"google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndAddAutomatedBackupPolicyAndInitialUser(context), @@ -1260,7 +1317,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1269,7 +1326,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", 
"terraform_labels", "reconciling"}, }, }, }) @@ -1283,6 +1340,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1336,6 +1395,8 @@ resource "google_alloydb_cluster" "secondary" { test = "tf-test-alloydb-secondary-cluster%{random_suffix}" } } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1382,7 +1443,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1391,7 +1452,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithTimeBasedRetentionPolicy(context), @@ -1400,7 +1461,7 @@ func 
TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithoutTimeBasedRetentionPolicy(context), @@ -1409,7 +1470,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1423,6 +1484,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1471,6 +1534,9 @@ resource "google_alloydb_cluster" "secondary" { retention_period = "4.5s" } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -1508,6 +1574,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } 
resource "google_alloydb_instance" "primary" { @@ -1553,6 +1621,9 @@ resource "google_alloydb_cluster" "secondary" { } } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -1604,7 +1675,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1613,7 +1684,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(context), @@ -1622,7 +1693,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", 
"restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1636,6 +1707,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1661,6 +1734,7 @@ resource "google_alloydb_cluster" "secondary" { recovery_window_days = 14 } + deletion_protection = false } resource "google_alloydb_instance" "secondary" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go index 9f57d3cfb9fd..4554df032a82 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go @@ -51,6 +51,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -81,6 +83,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -110,6 +114,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -140,6 +146,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -196,6 
+204,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -226,6 +236,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -294,6 +306,8 @@ resource "google_alloydb_cluster" "primary" { network = data.google_compute_network.default.id allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -325,6 +339,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -394,6 +410,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -424,6 +442,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -493,6 +513,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -523,6 +545,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -586,6 +610,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -616,6 +642,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] 
} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go index 846a0bc53f02..f7a41a81df3f 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go @@ -60,6 +60,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -132,6 +134,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -202,6 +206,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index a913d137eefc..0995605c9c1b 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -112,6 +112,13 @@ Description of the change and how users should adjust their configuration (if ne ## Resources +## Resource: `google_alloydb_cluster` + +### Cluster deletion now prevented by default with `deletion_protection` + +The field `deletion_protection` has been added with a default value of `true`. This field prevents +Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing clusters will have +`deletion_protection` set to `true` during the next refresh unless otherwise set in configuration. 
## Resource: `google_beyondcorp_application` is now removed diff --git a/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf b/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf index b6803e2986af..fbff8402bac3 100644 --- a/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf +++ b/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf @@ -22,6 +22,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "alloydb-cluster" } + + deletion_protection = false } resource "google_alloydb_instance" "default" { From 03c415e15cedd5f43bffa491ce8e38105f096c6b Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 18 Aug 2025 12:49:00 -0700 Subject: [PATCH 754/884] Remove post_startup_script_config field from google_colab_runtime_template resource (#14873) --- mmv1/products/colab/RuntimeTemplate.yaml | 18 ------------------ .../guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 18 deletions(-) diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index 02ee8dadfa26..e4008c309467 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -209,21 +209,3 @@ properties: If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not.' - - name: 'postStartupScriptConfig' - deprecation_message: '`post_startup_script_config` is deprecated and will be removed in a future major release. New resource creation with this field is unavailable at this time.' - type: NestedObject - description: 'Post startup script config.' - properties: - - name: 'postStartupScript' - type: String - description: 'Post startup script to run after runtime is started.' 
- - name: 'postStartupScriptUrl' - type: String - description: 'Post startup script url to download. Example: https://bucket/script.sh.' - - name: 'postStartupScriptBehavior' - type: Enum - description: 'Post startup script behavior that defines download and execution behavior.' - enum_values: - - 'RUN_ONCE' - - 'RUN_EVERY_START' - - 'DOWNLOAD_AND_RUN_EVERY_START' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 0995605c9c1b..a2d922354caa 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -181,6 +181,12 @@ To reflect the new type explicitly, surround the current integer value in quotes Remove `description` from your configuration after upgrade. +## Resource: `google_colab_runtime_template` + +### `post_startup_script_config` is now removed. + +Remove `post_startup_script_config` from your configuration after upgrade. 
+ ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required From 37c369958cb66d3b608a4c3111e3741dab9b4cca Mon Sep 17 00:00:00 2001 From: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Date: Tue, 19 Aug 2025 05:35:41 +0530 Subject: [PATCH 755/884] Convert advertised_ip_ranges from List to Set (#14854) Co-authored-by: Chris Hawk --- mmv1/products/compute/Router.yaml | 2 +- .../terraform/constants/router.go.tmpl | 2 +- .../compute_router_range.go.tmpl | 42 ------------------- .../resource_compute_router_peer.go.tmpl | 41 ++++++++++-------- 4 files changed, 25 insertions(+), 62 deletions(-) delete mode 100644 mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl diff --git a/mmv1/products/compute/Router.yaml b/mmv1/products/compute/Router.yaml index 846c992203d5..a800a8b0a04b 100644 --- a/mmv1/products/compute/Router.yaml +++ b/mmv1/products/compute/Router.yaml @@ -171,7 +171,7 @@ properties: ranges will be advertised in addition to any specified groups. Leave this field blank to advertise no custom IP ranges. 
send_empty_value: true - custom_flatten: 'templates/terraform/custom_flatten/compute_router_range.go.tmpl' + is_set: true item_type: type: NestedObject properties: diff --git a/mmv1/templates/terraform/constants/router.go.tmpl b/mmv1/templates/terraform/constants/router.go.tmpl index f4f93dfb5b2f..c4c387b890d6 100644 --- a/mmv1/templates/terraform/constants/router.go.tmpl +++ b/mmv1/templates/terraform/constants/router.go.tmpl @@ -16,7 +16,7 @@ func resourceComputeRouterCustomDiff(_ context.Context, diff *schema.ResourceDif block := diff.Get("bgp.0").(map[string]interface{}) advertiseMode := block["advertise_mode"] advertisedGroups := block["advertised_groups"].([]interface{}) - advertisedIPRanges := block["advertised_ip_ranges"].([]interface{}) + advertisedIPRanges := block["advertised_ip_ranges"].(*schema.Set).List() if advertiseMode == "DEFAULT" && len(advertisedGroups) != 0 { return fmt.Errorf("Error in bgp: advertised_groups cannot be specified when using advertise_mode DEFAULT") diff --git a/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl b/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl deleted file mode 100644 index a4afe3f63d8a..000000000000 --- a/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl +++ /dev/null @@ -1,42 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - apiData := make([]map[string]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - apiData = append(apiData, map[string]interface{}{ - "description": original["description"], - "range": original["range"], - }) - } - configData := []map[string]interface{}{} - if v, ok := d.GetOk("bgp.0.advertised_ip_ranges"); ok { - for _, item := range v.([]interface{}) { - configData = append(configData, item.(map[string]interface{})) - } - } - sorted, err := tpgresource.SortMapsByConfigOrder(configData, apiData, "range") - if err != nil { - log.Printf("[ERROR] Could not support API response for advertisedIpRanges.0.range: %s", err) - return apiData - } - return sorted -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl index c5755139504b..197639163cd3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl @@ -112,28 +112,14 @@ Leave this field blank to advertise no custom groups.`, }, }, "advertised_ip_ranges": { - Type: schema.TypeList, + Type: schema.TypeSet, Optional: true, Description: `User-specified list of individual IP ranges to advertise in custom mode. This field can only be populated if advertiseMode is 'CUSTOM' and is advertised to all peers of the router. These IP ranges will be advertised in addition to any specified groups. 
Leave this field blank to advertise no custom IP ranges.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "range": { - Type: schema.TypeString, - Required: true, - Description: `The IP range to advertise. The value must be a -CIDR-formatted string.`, - }, - "description": { - Type: schema.TypeString, - Optional: true, - Description: `User-specified description for the IP range.`, - }, - }, - }, + Elem: computeRouterBgpPeerAdvertisedIpRangesSchema(), }, "advertised_route_priority": { Type: schema.TypeInt, @@ -390,6 +376,24 @@ Must be unique within a router. Must be referenced by exactly one bgpPeer. Must } } +func computeRouterBgpPeerAdvertisedIpRangesSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "range": { + Type: schema.TypeString, + Required: true, + Description: `The IP range to advertise. The value must be a +CIDR-formatted string.`, + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: `User-specified description for the IP range.`, + }, + }, + } +} + func resourceComputeRouterBgpPeerCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) @@ -1150,14 +1154,14 @@ func flattenNestedComputeRouterBgpPeerAdvertisedIpRanges(v interface{}, d *schem return v } l := v.([]interface{}) - transformed := make([]interface{}, 0, len(l)) + transformed := schema.NewSet(schema.HashResource(computeRouterBgpPeerAdvertisedIpRangesSchema()), []interface{}{}) for _, raw := range l { original := raw.(map[string]interface{}) if len(original) < 1 { // Do not include empty json objects coming back from the api continue } - transformed = append(transformed, map[string]interface{}{ + transformed.Add(map[string]interface{}{ "range": flattenNestedComputeRouterBgpPeerAdvertisedIpRangesRange(original["range"], d, config), "description": 
flattenNestedComputeRouterBgpPeerAdvertisedIpRangesDescription(original["description"], d, config), }) @@ -1388,6 +1392,7 @@ func expandNestedComputeRouterBgpPeerAdvertisedGroups(v interface{}, d tpgresour } func expandNestedComputeRouterBgpPeerAdvertisedIpRanges(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + v = v.(*schema.Set).List() l := v.([]interface{}) req := make([]interface{}, 0, len(l)) for _, raw := range l { From 16a70cb4d6d749aadf200cb317f08fb6b78fd3f2 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Tue, 12 Aug 2025 15:25:25 -0400 Subject: [PATCH 756/884] Remove use of MODIFIED_FILE_PATH in EAP VCR (#14823) --- .ci/magician/cmd/test_eap_vcr.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.ci/magician/cmd/test_eap_vcr.go b/.ci/magician/cmd/test_eap_vcr.go index 9fe0ece0900f..43a2834ab1de 100644 --- a/.ci/magician/cmd/test_eap_vcr.go +++ b/.ci/magician/cmd/test_eap_vcr.go @@ -31,7 +31,6 @@ var tevRequiredEnvironmentVariables = [...]string{ "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT", "KOKORO_ARTIFACTS_DIR", "HOME", - "MODIFIED_FILE_PATH", "PATH", "USER", } @@ -125,7 +124,7 @@ The following environment variables are required: return fmt.Errorf("wrong number of arguments %d, expected 1", len(args)) } - return execTestEAPVCR(args[0], env["GEN_PATH"], env["KOKORO_ARTIFACTS_DIR"], env["MODIFIED_FILE_PATH"], rnr, vt) + return execTestEAPVCR(args[0], env["GEN_PATH"], env["KOKORO_ARTIFACTS_DIR"], rnr, vt) }, } @@ -137,7 +136,7 @@ func listTEVEnvironmentVariables() string { return result } -func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath string, rnr ExecRunner, vt *vcr.Tester) error { +func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRunner, vt *vcr.Tester) error { vt.SetRepoPath(provider.Private, genPath) if err := rnr.PushDir(genPath); err != nil { return fmt.Errorf("error changing to gen path: %w", err) From 
66d686b72e3766f9002f4eaefc704cf8208696a1 Mon Sep 17 00:00:00 2001 From: zhuolinliu-csql Date: Tue, 12 Aug 2025 12:26:19 -0700 Subject: [PATCH 757/884] Update cloud sql instance tier from db-f1-micro to db-custom-2-3840 in tests (#14713) --- mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index 9df43a962d27..6b8fbb575510 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -1098,7 +1098,7 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { PointInTimeRecoveryEnabled: true, } settings := &sqladmin.Settings{ - Tier: "db-f1-micro", + Tier: "db-custom-2-3840", BackupConfiguration: backupConfig, } bootstrapInstance = &sqladmin.DatabaseInstance{ From 1d952026a617c756f8cb42a56305ae83c2a6c528 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Tue, 12 Aug 2025 22:01:37 +0200 Subject: [PATCH 758/884] Add plural data source for retrieving Artifact Registry tags (#14702) --- .../provider/provider_mmv1_resources.go.tmpl | 3 +- .../data_source_artifact_registry_tags.go | 169 ++++++++++++++++++ ...data_source_artifact_registry_tags_test.go | 44 +++++ .../d/artifact_registry_tags.html.markdown | 47 +++++ 4 files changed, 262 insertions(+), 1 deletion(-) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl 
b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index cc4864f51210..5c3c2c7f50eb 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -29,9 +29,10 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), + "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), - "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), + "google_artifact_registry_tags": artifactregistry.DataSourceArtifactRegistryTags(), "google_artifact_registry_version": artifactregistry.DataSourceArtifactRegistryVersion(), "google_apphub_discovered_workload": apphub.DataSourceApphubDiscoveredWorkload(), "google_app_engine_default_service_account": appengine.DataSourceGoogleAppEngineDefaultServiceAccount(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go new file mode 100644 index 000000000000..0160bc2f3f4e --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go @@ -0,0 +1,169 @@ +package artifactregistry + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceArtifactRegistryTags() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArtifactRegistryTagsRead, + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + }, + "package_name": { + Type: schema.TypeString, + Required: true, + }, + "filter": { + Type: schema.TypeString, + Optional: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + }, + "tags": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "version": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceArtifactRegistryTagsRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") + if err != nil { + return fmt.Errorf("Error setting Artifact Registry base path: %s", err) + } + + resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{package_name}}/tags")) + if err != nil { + return fmt.Errorf("Error setting resource path: %s", err) + } + + urlRequest := basePath + resourcePath + + filter := "" + if v, ok := d.GetOk("filter"); ok { + filter = v.(string) + + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + q := u.Query() + q.Set("filter", filter) + u.RawQuery = q.Encode() + urlRequest = u.String() + } + + headers := make(http.Header) + tags := 
make([]map[string]interface{}, 0) + pageToken := "" + + for { + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + q := u.Query() + if pageToken != "" { + q.Set("pageToken", pageToken) + } + u.RawQuery = q.Encode() + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: u.String(), + UserAgent: userAgent, + Headers: headers, + }) + + if err != nil { + return fmt.Errorf("Error listing Artifact Registry tags: %s", err) + } + + if items, ok := res["tags"].([]interface{}); ok { + for _, item := range items { + tag := item.(map[string]interface{}) + + annotations := make(map[string]string) + if anno, ok := tag["annotations"].(map[string]interface{}); ok { + for k, v := range anno { + if val, ok := v.(string); ok { + annotations[k] = val + } + } + } + + getString := func(m map[string]interface{}, key string) string { + if v, ok := m[key].(string); ok { + return v + } + return "" + } + + tags = append(tags, map[string]interface{}{ + "name": getString(tag, "name"), + "version": getString(tag, "version"), + }) + } + } + + if nextToken, ok := res["nextPageToken"].(string); ok && nextToken != "" { + pageToken = nextToken + } else { + break + } + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + + if err := d.Set("tags", tags); err != nil { + return fmt.Errorf("Error setting Artifact Registry tags: %s", err) + } + + d.SetId(resourcePath) + + return nil +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go new file mode 100644 index 000000000000..ecd0fe7ed322 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go @@ -0,0 +1,44 @@ +package artifactregistry_test + +import ( + 
"testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryTags_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryTagsConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "project"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "location"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "repository_id"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "package_name"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "tags.0.name"), + resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "tags.0.version"), + ), + }, + }, + }) +} + +// Test the data source against the public AR repos +// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container +// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io +const testAccDataSourceArtifactRegistryTagsConfig = ` +data "google_artifact_registry_tags" "this" { + project = "go-containerregistry" + location = "us" + repository_id = "gcr.io" + package_name = "gcrane" + # Filter doesn't work with gcr.io + # filter = "name=\"projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane/tags/latest\"" +} +` diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown new file mode 100644 index 000000000000..3345eeb9c98e --- /dev/null +++ 
b/mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about tags within a Google Artifact Registry package. +--- + +# google_artifact_registry_tags + +Get information about Artifact Registry tags. +See [the official documentation](https://cloud.google.com/artifact-registry/docs/overview) +and [API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.packages.tags/list). + +## Example Usage + +```hcl +data "google_artifact_registry_tags" "my_tags" { + location = "us-central1" + repository_id = "example-repo" + package_name = "example-package" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` - (Required) The location of the Artifact Registry repository. + +* `repository_id` - (Required) The last part of the repository name to fetch from. + +* `package_name` - (Required) The name of the package. + +* `filter` - (Optional) An expression for filtering the results of the request. Filter rules are case insensitive. The fields eligible for filtering are `name` and `version`. Further information can be found in the [REST API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.packages.tags/list#query-parameters). + +* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +The following attributes are exported: + +* `tags` - A list of all retrieved Artifact Registry tags. Structure is [defined below](#nested_tags). + +The `tags` block supports: + +* `name` - The name of the tag, for example: `projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/tags/tag1`. If the package part contains slashes, the slashes are escaped. + +* `version` - The version of the tag. 
From 932fcae1a4c8a5593dd877f7bc53cf835bf828fd Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 12 Aug 2025 14:00:47 -0700 Subject: [PATCH 759/884] Marked dashboard_json field as a json field (#14818) --- .../services/monitoring/resource_monitoring_dashboard_meta.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml index 8008b75c43f5..ca764c6567b1 100644 --- a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml +++ b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml @@ -5,4 +5,5 @@ api_version: 'v1' api_resource_type_kind: 'Dashboard' fields: - field: 'dashboard_json' + json: true - field: 'project' From 9614a09044e3903bd062bf4bfb3911bed6919579 Mon Sep 17 00:00:00 2001 From: g-dreva Date: Tue, 12 Aug 2025 21:16:25 +0000 Subject: [PATCH 760/884] Adds saasservicemgmt.googleapis.com to CI bootstrap config (#14829) --- .ci/infra/terraform/main.tf | 1 + 1 file changed, 1 insertion(+) diff --git a/.ci/infra/terraform/main.tf b/.ci/infra/terraform/main.tf index 6d0943a6ea07..5bdd941a1d67 100644 --- a/.ci/infra/terraform/main.tf +++ b/.ci/infra/terraform/main.tf @@ -334,6 +334,7 @@ module "project-services" { "resourceviews.googleapis.com", "run.googleapis.com", "runtimeconfig.googleapis.com", + "saasservicemgmt.googleapis.com", "secretmanager.googleapis.com", "securesourcemanager.googleapis.com", "securetoken.googleapis.com", From 157acaecc8c0cfd221442065c671871d47eafded Mon Sep 17 00:00:00 2001 From: VeraQin <31418633+VeraQin@users.noreply.github.com> Date: Tue, 12 Aug 2025 15:56:58 -0700 Subject: [PATCH 761/884] Add support for new flags in Container node kubelet and linux config. 
(#14685) --- .../services/container/node_config.go.tmpl | 424 +++++++++++++++--- .../resource_container_cluster_test.go.tmpl | 223 ++++++++- .../resource_container_node_pool_test.go.tmpl | 39 +- .../docs/r/container_cluster.html.markdown | 61 ++- 4 files changed, 666 insertions(+), 81 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 29ffe7e5dd81..9525e93f6c99 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -631,47 +631,178 @@ func schemaNodeConfig() *schema.Schema { Optional: true, Description: `Controls the maximum number of processes allowed to run in a pod.`, }, + "max_parallel_image_pulls": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: `Set the maximum number of image pulls in parallel.`, + }, "container_log_max_size": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the maximum size of the container log file before it is rotated.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the maximum size of the container log file before it is rotated.`, + }, "container_log_max_files": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the maximum number of container log files that can be present for a container.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the maximum number of container log files that can be present for a container.`, + }, "image_gc_low_threshold_percent": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the percent of disk usage before which image garbage collection is never run. Lowest disk usage to garbage collect to.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the percent of disk usage before which image garbage collection is never run. 
Lowest disk usage to garbage collect to.`, + }, "image_gc_high_threshold_percent": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the percent of disk usage after which image garbage collection is always run.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the percent of disk usage after which image garbage collection is always run.`, + }, "image_minimum_gc_age": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the minimum age for an unused image before it is garbage collected.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the minimum age for an unused image before it is garbage collected.`, + }, "image_maximum_gc_age": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the maximum age an image can be unused before it is garbage collected.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the maximum age an image can be unused before it is garbage collected.`, + }, "allowed_unsafe_sysctls": { - Type: schema.TypeList, - Optional: true, - Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods.`, + Type: schema.TypeList, + Optional: true, + Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods.`, Elem: &schema.Schema{Type: schema.TypeString}, - }, - "single_process_oom_kill": { - Type: schema.TypeBool, - Optional: true, - Description: `Defines whether to enable single process OOM killer.`, - }, + }, + "single_process_oom_kill": { + Type: schema.TypeBool, + Optional: true, + Description: `Defines whether to enable single process OOM killer.`, + }, + "eviction_max_pod_grace_period_seconds": { + Type: schema.TypeInt, + Optional: true, + Description: `Defines the maximum allowed grace period (in seconds) to use when terminating pods in response to a soft eviction threshold being met.`, + }, + "eviction_soft": { 
+ Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Defines a map of signal names to quantities or percentage that defines soft eviction thresholds.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "memory_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines quantity of soft eviction threshold for memory.available.`, + }, + "nodefs_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of soft eviction threshold for nodefs.available.`, + }, + "nodefs_inodes_free": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of soft eviction threshold for nodefs.inodesFree.`, + }, + "imagefs_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of soft eviction threshold for imagefs.available.`, + }, + "imagefs_inodes_free": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of soft eviction threshold for imagefs.inodesFree.`, + }, + "pid_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of soft eviction threshold for pid.available.`, + }, + }, + }, + }, + "eviction_soft_grace_period": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Defines a map of signal names to durations that defines grace periods for soft eviction thresholds. 
Each soft eviction threshold must have a corresponding grace period.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "memory_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines grace period for the memory.available soft eviction threshold.`, + }, + "nodefs_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines grace period for the nodefs.available soft eviction threshold.`, + }, + "nodefs_inodes_free": { + Type: schema.TypeString, + Optional: true, + Description: `Defines grace period for the nodefs.inodesFree soft eviction threshold.`, + }, + "imagefs_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines grace period for the imagefs.available soft eviction threshold`, + }, + "imagefs_inodes_free": { + Type: schema.TypeString, + Optional: true, + Description: `Defines grace period for the imagefs.inodesFree soft eviction threshold.`, + }, + "pid_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines grace period for the pid.available soft eviction threshold.`, + }, + }, + }, + }, + "eviction_minimum_reclaim": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Defines a map of signal names to percentage that defines minimum reclaims. 
It describes the minimum amount of a given resource the kubelet will reclaim when performing a pod eviction.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "memory_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of minimum reclaim for memory.available.`, + }, + "nodefs_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of minimum reclaim for nodefs.available.`, + }, + "nodefs_inodes_free": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of minimum reclaim for nodefs.inodesFree.`, + }, + "imagefs_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of minimum reclaim for imagefs.available.`, + }, + "imagefs_inodes_free": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of minimum reclaim for imagefs.inodesFree.`, + }, + "pid_available": { + Type: schema.TypeString, + Optional: true, + Description: `Defines percentage of minimum reclaim for pid.available.`, + }, + }, + }, + }, }, }, }, @@ -679,6 +810,7 @@ func schemaNodeConfig() *schema.Schema { Type: schema.TypeList, Optional: true, MaxItems: 1, + Computed: true, Description: `Parameters that can be configured on Linux nodes.`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -696,6 +828,21 @@ func schemaNodeConfig() *schema.Schema { Description: `cgroupMode specifies the cgroup mode to be used on the node.`, DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("CGROUP_MODE_UNSPECIFIED"), }, + "transparent_hugepage_enabled": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{"TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS", "TRANSPARENT_HUGEPAGE_ENABLED_MADVISE", "TRANSPARENT_HUGEPAGE_ENABLED_NEVER", "TRANSPARENT_HUGEPAGE_ENABLED_UNSPECIFIED"}, false), + Description: `The Linux kernel transparent hugepage setting.`, + 
DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("TRANSPARENT_HUGEPAGE_ENABLED_UNSPECIFIED"), + }, + "transparent_hugepage_defrag": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS", "TRANSPARENT_HUGEPAGE_DEFRAG_DEFER", "TRANSPARENT_HUGEPAGE_DEFRAG_DEFER_WITH_MADVISE", "TRANSPARENT_HUGEPAGE_DEFRAG_MADVISE", "TRANSPARENT_HUGEPAGE_DEFRAG_NEVER", "TRANSPARENT_HUGEPAGE_DEFRAG_UNSPECIFIED"}, false), + Description: `The Linux kernel transparent hugepage defrag setting.`, + DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("TRANSPARENT_HUGEPAGE_DEFRAG_UNSPECIFIED"), + }, "hugepages_config": { Type: schema.TypeList, Optional: true, @@ -811,6 +958,11 @@ func schemaNodeConfig() *schema.Schema { }, }, }, + "min_node_cpus": { + Type: schema.TypeInt, + Optional: true, + Description: `Specifies the minimum number of vCPUs that each sole tenant node must have to use CPU overcommit. If not specified, the CPU overcommit feature is disabled.`, + }, }, }, }, @@ -1458,34 +1610,109 @@ func expandKubeletConfig(v interface{}) *container.NodeKubeletConfig { if podPidsLimit, ok := cfg["pod_pids_limit"]; ok { kConfig.PodPidsLimit = int64(podPidsLimit.(int)) } + if maxParallelImagePulls, ok := cfg["max_parallel_image_pulls"]; ok { + kConfig.MaxParallelImagePulls = int64(maxParallelImagePulls.(int)) + } if containerLogMaxSize, ok := cfg["container_log_max_size"]; ok { - kConfig.ContainerLogMaxSize = containerLogMaxSize.(string) - } + kConfig.ContainerLogMaxSize = containerLogMaxSize.(string) + } if containerLogMaxFiles, ok := cfg["container_log_max_files"]; ok { - kConfig.ContainerLogMaxFiles = int64(containerLogMaxFiles.(int)) - } + kConfig.ContainerLogMaxFiles = int64(containerLogMaxFiles.(int)) + } if imageGcLowThresholdPercent, ok := cfg["image_gc_low_threshold_percent"]; ok { - kConfig.ImageGcLowThresholdPercent = int64(imageGcLowThresholdPercent.(int)) - } + 
kConfig.ImageGcLowThresholdPercent = int64(imageGcLowThresholdPercent.(int)) + } if imageGcHighThresholdPercent, ok := cfg["image_gc_high_threshold_percent"]; ok { - kConfig.ImageGcHighThresholdPercent = int64(imageGcHighThresholdPercent.(int)) - } + kConfig.ImageGcHighThresholdPercent = int64(imageGcHighThresholdPercent.(int)) + } if imageMinimumGcAge, ok := cfg["image_minimum_gc_age"]; ok { - kConfig.ImageMinimumGcAge = imageMinimumGcAge.(string) - } + kConfig.ImageMinimumGcAge = imageMinimumGcAge.(string) + } if imageMaximumGcAge, ok := cfg["image_maximum_gc_age"]; ok { - kConfig.ImageMaximumGcAge = imageMaximumGcAge.(string) - } + kConfig.ImageMaximumGcAge = imageMaximumGcAge.(string) + } if allowedUnsafeSysctls, ok := cfg["allowed_unsafe_sysctls"]; ok { - sysctls := allowedUnsafeSysctls.([]interface{}) + sysctls := allowedUnsafeSysctls.([]interface{}) kConfig.AllowedUnsafeSysctls = make([]string, len(sysctls)) for i, s := range sysctls { kConfig.AllowedUnsafeSysctls[i] = s.(string) } - } + } if singleProcessOomKill, ok := cfg["single_process_oom_kill"]; ok { - kConfig.SingleProcessOomKill = singleProcessOomKill.(bool) - } + kConfig.SingleProcessOomKill = singleProcessOomKill.(bool) + } + if evictionMaxPodGracePeriodSeconds, ok := cfg["eviction_max_pod_grace_period_seconds"]; ok { + kConfig.EvictionMaxPodGracePeriodSeconds = int64(evictionMaxPodGracePeriodSeconds.(int)) + } + if v, ok := cfg["eviction_soft"]; ok && len(v.([]interface{})) > 0 { + es := v.([]interface{})[0].(map[string]interface{}) + evictionSoft := &container.EvictionSignals{} + if val, ok := es["memory_available"]; ok { + evictionSoft.MemoryAvailable = val.(string) + } + if val, ok := es["nodefs_available"]; ok { + evictionSoft.NodefsAvailable = val.(string) + } + if val, ok := es["imagefs_available"]; ok { + evictionSoft.ImagefsAvailable = val.(string) + } + if val, ok := es["imagefs_inodes_free"]; ok { + evictionSoft.ImagefsInodesFree = val.(string) + } + if val, ok := 
es["nodefs_inodes_free"]; ok { + evictionSoft.NodefsInodesFree = val.(string) + } + if val, ok := es["pid_available"]; ok { + evictionSoft.PidAvailable = val.(string) + } + kConfig.EvictionSoft = evictionSoft + } + if v, ok := cfg["eviction_soft_grace_period"]; ok && len(v.([]interface{})) > 0 { + es := v.([]interface{})[0].(map[string]interface{}) + periods := &container.EvictionGracePeriod{} + if val, ok := es["memory_available"]; ok { + periods.MemoryAvailable = val.(string) + } + if val, ok := es["nodefs_available"]; ok { + periods.NodefsAvailable = val.(string) + } + if val, ok := es["imagefs_available"]; ok { + periods.ImagefsAvailable = val.(string) + } + if val, ok := es["imagefs_inodes_free"]; ok { + periods.ImagefsInodesFree = val.(string) + } + if val, ok := es["nodefs_inodes_free"]; ok { + periods.NodefsInodesFree = val.(string) + } + if val, ok := es["pid_available"]; ok { + periods.PidAvailable = val.(string) + } + kConfig.EvictionSoftGracePeriod = periods + } + if v, ok := cfg["eviction_minimum_reclaim"]; ok && len(v.([]interface{})) > 0 { + es := v.([]interface{})[0].(map[string]interface{}) + reclaim := &container.EvictionMinimumReclaim{} + if val, ok := es["memory_available"]; ok { + reclaim.MemoryAvailable = val.(string) + } + if val, ok := es["nodefs_available"]; ok { + reclaim.NodefsAvailable = val.(string) + } + if val, ok := es["imagefs_available"]; ok { + reclaim.ImagefsAvailable = val.(string) + } + if val, ok := es["imagefs_inodes_free"]; ok { + reclaim.ImagefsInodesFree = val.(string) + } + if val, ok := es["nodefs_inodes_free"]; ok { + reclaim.NodefsInodesFree = val.(string) + } + if val, ok := es["pid_available"]; ok { + reclaim.PidAvailable = val.(string) + } + kConfig.EvictionMinimumReclaim = reclaim + } return kConfig } @@ -1512,6 +1739,13 @@ func expandLinuxNodeConfig(v interface{}) *container.LinuxNodeConfig { linuxNodeConfig.CgroupMode = cgroupMode } + if v, ok := cfg["transparent_hugepage_enabled"]; ok { + 
linuxNodeConfig.TransparentHugepageEnabled = v.(string) + } + if v, ok := cfg["transparent_hugepage_defrag"]; ok { + linuxNodeConfig.TransparentHugepageDefrag = v.(string) + } + if v, ok := cfg["hugepages_config"]; ok { linuxNodeConfig.Hugepages = expandHugepagesConfig(v) } @@ -1678,24 +1912,24 @@ func expandSoleTenantConfig(v interface{}) *container.SoleTenantConfig { if len(ls) == 0 { return nil } + stConfig := &container.SoleTenantConfig{} cfg := ls[0].(map[string]interface{}) - affinitiesRaw, ok := cfg["node_affinity"] - if !ok { - return nil - } - affinities := make([]*container.NodeAffinity, 0) - for _, v := range affinitiesRaw.(*schema.Set).List() { - na := v.(map[string]interface{}) - - affinities = append(affinities, &container.NodeAffinity{ - Key: na["key"].(string), - Operator: na["operator"].(string), - Values: tpgresource.ConvertStringArr(na["values"].([]interface{})), - }) + if affinitiesRaw, ok := cfg["node_affinity"]; ok { + affinities := make([]*container.NodeAffinity, 0) + for _, v := range affinitiesRaw.(*schema.Set).List() { + na := v.(map[string]interface{}) + affinities = append(affinities, &container.NodeAffinity{ + Key: na["key"].(string), + Operator: na["operator"].(string), + Values: tpgresource.ConvertStringArr(na["values"].([]interface{})), + }) + } + stConfig.NodeAffinities = affinities } - return &container.SoleTenantConfig{ - NodeAffinities: affinities, + if v, ok := cfg["min_node_cpus"]; ok { + stConfig.MinNodeCpus = int64(v.(int)) } + return stConfig } {{ if ne $.TargetVersionName `ga` -}} @@ -2139,7 +2373,12 @@ func flattenKubeletConfig(c *container.NodeKubeletConfig) []map[string]interface "image_minimum_gc_age": c.ImageMinimumGcAge, "image_maximum_gc_age": c.ImageMaximumGcAge, "allowed_unsafe_sysctls": c.AllowedUnsafeSysctls, - "single_process_oom_kill": c.SingleProcessOomKill, + "single_process_oom_kill": c.SingleProcessOomKill, + "max_parallel_image_pulls": c.MaxParallelImagePulls, + "eviction_max_pod_grace_period_seconds": 
c.EvictionMaxPodGracePeriodSeconds, + "eviction_soft": flattenEvictionSignals(c.EvictionSoft), + "eviction_soft_grace_period": flattenEvictionGracePeriod(c.EvictionSoftGracePeriod), + "eviction_minimum_reclaim": flattenEvictionMinimumReclaim(c.EvictionMinimumReclaim), }) } return result @@ -2155,13 +2394,61 @@ func flattenNodePoolAutoConfigNodeKubeletConfig(c *container.NodeKubeletConfig) return result } + +func flattenEvictionSignals(c *container.EvictionSignals) []map[string]interface{} { + result := []map[string]interface{}{} + if c != nil { + result = append(result, map[string]interface{}{ + "memory_available": c.MemoryAvailable, + "nodefs_available": c.NodefsAvailable, + "nodefs_inodes_free": c.NodefsInodesFree, + "imagefs_available": c.ImagefsAvailable, + "imagefs_inodes_free": c.ImagefsInodesFree, + "pid_available": c.PidAvailable, + }) + } + return result +} + +func flattenEvictionGracePeriod(c *container.EvictionGracePeriod) []map[string]interface{} { + result := []map[string]interface{}{} + if c != nil { + result = append(result, map[string]interface{}{ + "memory_available": c.MemoryAvailable, + "nodefs_available": c.NodefsAvailable, + "nodefs_inodes_free": c.NodefsInodesFree, + "imagefs_available": c.ImagefsAvailable, + "imagefs_inodes_free": c.ImagefsInodesFree, + "pid_available": c.PidAvailable, + }) + } + return result +} + +func flattenEvictionMinimumReclaim(c *container.EvictionMinimumReclaim) []map[string]interface{} { + result := []map[string]interface{}{} + if c != nil { + result = append(result, map[string]interface{}{ + "memory_available": c.MemoryAvailable, + "nodefs_available": c.NodefsAvailable, + "nodefs_inodes_free": c.NodefsInodesFree, + "imagefs_available": c.ImagefsAvailable, + "imagefs_inodes_free": c.ImagefsInodesFree, + "pid_available": c.PidAvailable, + }) + } + return result +} + func flattenLinuxNodeConfig(c *container.LinuxNodeConfig) []map[string]interface{} { result := []map[string]interface{}{} if c != nil { result = 
append(result, map[string]interface{}{ - "sysctls": c.Sysctls, - "cgroup_mode": c.CgroupMode, - "hugepages_config": flattenHugepagesConfig(c.Hugepages), + "sysctls": c.Sysctls, + "cgroup_mode": c.CgroupMode, + "hugepages_config": flattenHugepagesConfig(c.Hugepages), + "transparent_hugepage_enabled": c.TransparentHugepageEnabled, + "transparent_hugepage_defrag": c.TransparentHugepageDefrag, }) } return result @@ -2282,6 +2569,7 @@ func flattenSoleTenantConfig(c *container.SoleTenantConfig) []map[string]interfa } return append(result, map[string]interface{}{ "node_affinity": affinities, + "min_node_cpus": c.MinNodeCpus, }) } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index eed554ce3320..41ed9e9f6119 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -1940,7 +1940,7 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { Steps: []resource.TestStep{ // First test with empty `node_config.linux_node_config` (should result in "CGROUP_MODE_UNSPECIFIED") { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, ""), + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "", false), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -1951,11 +1951,11 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { ResourceName: "google_container_cluster.with_linux_node_config", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, }, // Then add a config and make sure it 
updates. { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V2"), + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V2", false), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_container_cluster.with_linux_node_config", @@ -1972,11 +1972,11 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { ResourceName: "google_container_cluster.with_linux_node_config", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, }, // Lastly, update the setting in-place. V1 since UNSPECIFIED is default { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V1"), + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V1", false), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_container_cluster.with_linux_node_config", @@ -1993,8 +1993,33 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { ResourceName: "google_container_cluster.with_linux_node_config", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, }, + // Update linux config transparent hugepage + { + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "", true), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_cluster.with_linux_node_config", + "node_config.0.linux_node_config.0.transparent_hugepage_enabled", "TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS", + ), + resource.TestCheckResourceAttr( + 
"google_container_cluster.with_linux_node_config", + "node_config.0.linux_node_config.0.transparent_hugepage_defrag", "TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS", + ), + ), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + acctest.ExpectNoDelete(), + }, + }, + }, + { + ResourceName: "google_container_cluster.with_linux_node_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + }, }, }) } @@ -2153,6 +2178,31 @@ func TestAccContainerCluster_withNodeConfigKubeletConfigSettingsUpdates(t *testi }) } +func TestAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool(t *testing.T) { + t.Parallel() + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + nodePoolName := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool(clusterName, nodePoolName, networkName, subnetworkName, "TRANSPARENT_HUGEPAGE_DEFRAG_NEVER", "TRANSPARENT_HUGEPAGE_ENABLED_MADVISE"), + }, + { + ResourceName: "google_container_cluster.with_node_config_kubelet_config_settings_in_node_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + }, + }, + }) +} + func TestAccContainerCluster_withInsecureKubeletReadonlyPortEnabledInNodePool(t *testing.T) { t.Parallel() clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) @@ -8268,7 +8318,35 @@ resource "google_container_cluster" 
"with_node_config_kubelet_config_settings" { node_config { kubelet_config { - pod_pids_limit = 1024 + pod_pids_limit = 1024 + container_log_max_files = 4 + single_process_oom_kill = true + max_parallel_image_pulls = 5 + eviction_max_pod_grace_period_seconds = 200 + eviction_soft { + memory_available = "200Mi" + nodefs_available = "10%%" + nodefs_inodes_free = "20%%" + imagefs_available = "30%%" + imagefs_inodes_free = "40%%" + pid_available = "50%%" + } + eviction_soft_grace_period { + memory_available = "4m" + nodefs_available = "3m30s" + nodefs_inodes_free = "3m" + imagefs_available = "5m" + imagefs_inodes_free = "2.5m" + pid_available = "10s" + } + eviction_minimum_reclaim { + memory_available = "5%%" + nodefs_available = "6%%" + nodefs_inodes_free = "4%%" + imagefs_available = "2.5%%" + imagefs_inodes_free = "9.0%%" + pid_available = "1.5%%" + } } } network = "%s" @@ -8293,6 +8371,33 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings" { cpu_cfs_quota_period = "%s" insecure_kubelet_readonly_port_enabled = "%s" pod_pids_limit = %v + single_process_oom_kill = true + max_parallel_image_pulls = 5 + eviction_max_pod_grace_period_seconds = 200 + eviction_soft { + memory_available = "100Mi" + nodefs_available = "50%%" + nodefs_inodes_free = "40%%" + imagefs_available = "30%%" + imagefs_inodes_free = "20%%" + pid_available = "10%%" + } + eviction_soft_grace_period { + memory_available = "5m" + nodefs_available = "4m30s" + nodefs_inodes_free = "3.6m" + imagefs_available = "100s" + imagefs_inodes_free = "2m" + pid_available = "3m2.6s" + } + eviction_minimum_reclaim { + memory_available = "10%%" + nodefs_available = "8.5%%" + nodefs_inodes_free = "5.0%%" + imagefs_available = "3%%" + imagefs_inodes_free = "9%%" + pid_available = "5%%" + } } } network = "%s" @@ -8303,6 +8408,87 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings" { `, clusterName, cpuManagerPolicy, cpuCfsQuota, cpuCfsQuotaPeriod, 
insecureKubeletReadonlyPortEnabled, podPidsLimit, networkName, subnetworkName) } +func testAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool(clusterName, nodePoolName, networkName, subnetworkName, thpDefrag, thpEnabled string) string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1f" { + location = "us-central1-f" +} +resource "google_compute_node_template" "soletenant-tmpl" { + name = "%s" + region = "us-central1" + node_type = "n1-node-96-624" + cpu_overcommit_type = "ENABLED" +} +resource "google_compute_node_group" "group" { + name = "%s" + zone = "us-central1-f" + description = "example google_compute_node_group for Terraform Google Provider" + initial_size = 1 + node_template = google_compute_node_template.soletenant-tmpl.id +} +resource "google_container_cluster" "with_node_config_kubelet_config_settings_in_node_pool" { + name = "%s" + location = "us-central1-f" + min_master_version = data.google_container_engine_versions.central1f.latest_master_version + + node_pool { + name = "%s" + initial_node_count = 1 + machine_type = "n1-standard-1" + node_config { + kubelet_config { + max_parallel_image_pulls = 5 + eviction_max_pod_grace_period_seconds = 200 + eviction_soft { + memory_available = "200Mi" + nodefs_available = "10%%" + nodefs_inodes_free = "20%%" + imagefs_available = "30%%" + imagefs_inodes_free = "40%%" + pid_available = "50%%" + } + eviction_soft_grace_period { + memory_available = "1m" + nodefs_available = "2s" + nodefs_inodes_free = "3m" + imagefs_available = "100s" + imagefs_inodes_free = "2m" + pid_available = "3m2.6s" + } + eviction_minimum_reclaim { + memory_available = "10%%" + nodefs_available = "8.5%%" + nodefs_inodes_free = "5.0%%" + imagefs_available = "3%%" + imagefs_inodes_free = "9%%" + pid_available = "5%%" + } + } + disk_size_gb = 15 + disk_type = "pd-ssd" + node_group = google_compute_node_group.group.name + sole_tenant_config { + node_affinity { + key = 
"compute.googleapis.com/node-group-name" + operator = "IN" + values = [google_compute_node_group.group.name] + } + min_node_cpus = 1 + } + linux_node_config { + transparent_hugepage_defrag = %s + transparent_hugepage_enabled = %s + } + } + } + network = "%s" + subnetwork = "%s" + + deletion_protection = false +} +`, clusterName, clusterName, clusterName, nodePoolName, thpDefrag, thpEnabled, networkName, subnetworkName) +} + func testAccContainerCluster_withInsecureKubeletReadonlyPortEnabledInNodePool(clusterName, nodePoolName, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled string) string { return fmt.Sprintf(` resource "google_container_cluster" "with_insecure_kubelet_readonly_port_enabled_in_node_pool" { @@ -8524,7 +8710,7 @@ resource "google_container_cluster" "with_node_config" { `, clusterName, networkName, subnetworkName) } -func testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, cgroupMode string) string { +func testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, cgroupMode string, thpEnabled bool) string { // Empty block inside node_config if cgroupMode is empty linuxNodeConfig := "" @@ -8536,11 +8722,23 @@ func testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkN `, cgroupMode) } + if cgroupMode== "" && thpEnabled { + linuxNodeConfig = ` + linux_node_config { + transparent_hugepage_defrag = "TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS" + transparent_hugepage_enabled = "TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS" + }` + } + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + location = "us-central1-a" +} resource "google_container_cluster" "with_linux_node_config" { name = "%s" location = "us-central1-f" initial_node_count = 1 + min_master_version = data.google_container_engine_versions.central1a.latest_master_version node_config { disk_size_gb = 15 @@ -11854,6 +12052,7 @@ resource "google_compute_node_template" 
"soletenant-tmpl" { name = "%s" region = "us-central1" node_type = "n1-node-96-624" + cpu_overcommit_type = "ENABLED" } resource "google_compute_node_group" "group" { @@ -11874,6 +12073,14 @@ resource "google_container_cluster" "primary" { disk_size_gb = 15 disk_type = "pd-ssd" node_group = google_compute_node_group.group.name + sole_tenant_config { + node_affinity { + key = "compute.googleapis.com/node-group-name" + operator = "IN" + values = [google_compute_node_group.group.name] + } + min_node_cpus = 1 + } } network = "%s" subnetwork = "%s" diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index dcdd1d4c7085..d03b5db01290 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -1965,6 +1965,7 @@ func TestAccContainerNodePool_withSoleTenantConfig(t *testing.T) { np := fmt.Sprintf("tf-test-np-%s", acctest.RandString(t, 10)) networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + minNodeCpus := 1 acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -1972,7 +1973,7 @@ func TestAccContainerNodePool_withSoleTenantConfig(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName), + Config: testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName, minNodeCpus), }, { ResourceName: "google_container_node_pool.with_sole_tenant_config", @@ -3892,8 +3893,34 @@ resource "google_container_node_pool" "with_kubelet_config" { image_gc_high_threshold_percent = %d image_minimum_gc_age = %q 
image_maximum_gc_age = %q - allowed_unsafe_sysctls = ["kernel.shm*", "kernel.msg*", "kernel.sem", "fs.mqueue.*", "net.*"] + allowed_unsafe_sysctls = ["kernel.shm*", "kernel.msg*", "kernel.sem", "fs.mqueue.*", "net.*"] single_process_oom_kill = %v + max_parallel_image_pulls = 5 + eviction_max_pod_grace_period_seconds = 200 + eviction_soft { + memory_available = "100Mi" + nodefs_available = "50%%" + nodefs_inodes_free = "40%%" + imagefs_available = "30%%" + imagefs_inodes_free = "20%%" + pid_available = "10%%" + } + eviction_soft_grace_period { + memory_available = "5m" + nodefs_available = "4m30s" + nodefs_inodes_free = "3.6m" + imagefs_available = "100s" + imagefs_inodes_free = "2m" + pid_available = "3m2.6s" + } + eviction_minimum_reclaim { + memory_available = "10%%" + nodefs_available = "8.5%%" + nodefs_inodes_free = "5.0%%" + imagefs_available = "3%%" + imagefs_inodes_free = "9%%" + pid_available = "5%%" + } } oauth_scopes = [ "https://www.googleapis.com/auth/logging.write", @@ -3926,6 +3953,8 @@ func testAccContainerNodePool_withLinuxNodeConfig(cluster, np, tcpMem, networkNa "net.ipv4.tcp_tw_reuse" = 1 "kernel.shmmni" = 8192 } + transparent_hugepage_enabled = "TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS" + transparent_hugepage_defrag = "TRANSPARENT_HUGEPAGE_DEFRAG_DEFER_WITH_MADVISE" } `, tcpMem, tcpMem) } @@ -4855,7 +4884,7 @@ resource "google_container_node_pool" "np2" { `, cluster, networkName, subnetworkName, np1, np2) } -func testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName string) string { +func testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName string, minNodeCpus int) string { return fmt.Sprintf(` data "google_container_engine_versions" "central1a" { location = "us-central1-a" @@ -4865,6 +4894,7 @@ resource "google_compute_node_template" "soletenant-tmpl" { name = "tf-test-soletenant-tmpl" region = "us-central1" node_type = "n1-node-96-624" + cpu_overcommit_type = "ENABLED" } resource 
"google_compute_node_group" "nodes" { @@ -4897,6 +4927,7 @@ resource "google_container_node_pool" "with_sole_tenant_config" { operator = "IN" values = [google_compute_node_group.nodes.name] } + min_node_cpus = %d } oauth_scopes = [ "https://www.googleapis.com/auth/logging.write", @@ -4904,7 +4935,7 @@ resource "google_container_node_pool" "with_sole_tenant_config" { ] } } -`, cluster, networkName, subnetworkName, np) +`, cluster, networkName, subnetworkName, np, minNodeCpus) } func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 20490ff2b14f..c45437ea5f02 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1062,7 +1062,7 @@ windows_node_config { * `node_group` - (Optional) Setting this field will assign instances of this pool to run on the specified node group. This is useful for running workloads on [sole tenant nodes](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes). -* `sole_tenant_config` (Optional) Allows specifying multiple [node affinities](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes#node_affinity_and_anti-affinity) useful for running workloads on [sole tenant nodes](https://cloud.google.com/kubernetes-engine/docs/how-to/sole-tenancy). `node_affinity` structure is [documented below](#nested_node_affinity). +* `sole_tenant_config` - (Optional) Allows specifying multiple [node affinities](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes#node_affinity_and_anti-affinity) useful for running workloads on [sole tenant nodes](https://cloud.google.com/kubernetes-engine/docs/how-to/sole-tenancy). Structure is [documented below](#nested_sole_tenant_config). 
 ```hcl
 sole_tenant_config {
@@ -1097,6 +1097,12 @@ sole_tenant_config {
 
 * `confidential_instance_type` (Optional) - Defines the type of technology used by the confidential node.
 
+The `sole_tenant_config` block supports:
+
+* `node_affinity` (Required) - The node affinity settings for the sole tenant node pool. Structure is [documented below](#nested_node_affinity).
+
+* `min_node_cpus` - (Optional) Specifies the minimum number of vCPUs that each sole tenant node must have to use CPU overcommit. If not specified, the CPU overcommit feature is disabled. The value should be greater than or equal to half of the machine type's CPU count.
+
 The `node_affinity` block supports:
 
 * `key` (Required) - The default or custom node affinity label key name.
@@ -1485,6 +1491,43 @@ such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m",
 
 * `single_process_oom_kill` - (Optional) Defines whether to enable single process OOM killer. If true, the processes in the container will be OOM killed individually instead of as a group.
 
+* `max_parallel_image_pulls` - (Optional) Set the maximum number of image pulls in parallel. The integer must be between 2 and 5, inclusive.
+
+* `eviction_max_pod_grace_period_seconds` - (Optional) Defines the maximum allowed grace period (in seconds) to use when terminating pods in response to a soft eviction threshold being met. The integer must be positive and not exceed 300.
+
+* `eviction_soft` - (Optional) Defines a map of signal names to quantities or percentage that defines soft eviction thresholds. Structure is [documented below](#nested_eviction_soft).
+
+* `eviction_soft_grace_period` - (Optional) Defines a map of signal names to durations that defines grace periods for soft eviction thresholds. Each soft eviction threshold must have a corresponding grace period. Structure is [documented below](#nested_eviction_soft_grace_period). 
+ +* `eviction_minimum_reclaim` - (Optional) Defines a map of signal names to percentage that defines minimum reclaims. It describes the minimum amount of a given resource the kubelet will reclaim when performing a pod eviction. Structure is [documented below](#nested_eviction_minimum_reclaim). + +The `eviction_soft` block supports: + +* `memory_available` - (Optional) Defines quantity of soft eviction threshold for memory.available. The value must be a quantity, such as `"100Mi"`. The value must be greater than or equal to the GKE default hard eviction threshold of `"100Mi"` and less than 50% of machine memory. +* `nodefs_available` - (Optional) Defines percentage of soft eviction threshold for nodefs.available. The value must be a percentage between `10%` and `50%`, such as `"20%"`. +* `nodefs_inodes_free` - (Optional) Defines percentage of soft eviction threshold for nodefs.inodesFree. The value must be a percentage between `5%` and `50%`, such as `"20%"`. +* `imagefs_available` - (Optional) Defines percentage of soft eviction threshold for imagefs.available. The value must be a percentage between `15%` and `50%`, such as `"20%"`. +* `imagefs_inodes_free` - (Optional) Defines percentage of soft eviction threshold for imagefs.inodesFree. The value must be a percentage between `5%` and `50%`, such as `"20%"`. +* `pid_available` - (Optional) Defines percentage of soft eviction threshold for pid.available. The value must be a percentage between `10%` and `50%`, such as `"20%"`. + +The `eviction_soft_grace_period` block supports: + +* `memory_available` - (Optional) Defines grace period for the memory.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`, such as `"30s"`, `"1m30s"`, `"2.5m"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". +* `nodefs_available` - (Optional) Defines grace period for the nodefs.available soft eviction threshold. 
The value must be a positive duration string no more than `"5m"`. +* `nodefs_inodes_free` - (Optional) Defines grace period for the nodefs.inodesFree soft eviction threshold. The value must be a positive duration string no more than `"5m"`. +* `imagefs_available` - (Optional) Defines grace period for the imagefs.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`. +* `imagefs_inodes_free` - (Optional) Defines grace period for the imagefs.inodesFree soft eviction threshold. The value must be a positive duration string no more than `"5m"`. +* `pid_available` - (Optional) Defines grace period for the pid.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`. + +The `eviction_minimum_reclaim` block supports: + +* `memory_available` - (Optional) Defines percentage of minimum reclaim for memory.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. +* `nodefs_available` - (Optional) Defines percentage of minimum reclaim for nodefs.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. +* `nodefs_inodes_free` - (Optional) Defines percentage of minimum reclaim for nodefs.inodesFree. The value must be a percentage no more than `"10%"`, such as `"5%"`. +* `imagefs_available` - (Optional) Defines percentage of minimum reclaim for imagefs.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. +* `imagefs_inodes_free` - (Optional) Defines percentage of minimum reclaim for imagefs.inodesFree. The value must be a percentage no more than `"10%"`, such as `"5%"`. +* `pid_available` - (Optional) Defines percentage of minimum reclaim for pid.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. 
+ The `linux_node_config` block supports: * `sysctls` - (Optional) The Linux kernel parameters to be applied to the nodes @@ -1515,6 +1558,22 @@ linux_node_config { * `hugepage_size_1g` - (Optional) Amount of 1G hugepages. +* `transparent_hugepage_enabled` - (Optional) The Linux kernel transparent hugepage setting. + Accepted values are: + * `TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS`: Transparent hugepage is enabled system wide. + * `TRANSPARENT_HUGEPAGE_ENABLED_MADVISE`: Transparent hugepage is enabled inside MADV_HUGEPAGE regions. This is the default kernel configuration. + * `TRANSPARENT_HUGEPAGE_ENABLED_NEVER`: Transparent hugepage is disabled. + * `TRANSPARENT_HUGEPAGE_ENABLED_UNSPECIFIED`: Default value. GKE will not modify the kernel configuration. + +* `transparent_hugepage_defrag` - (Optional) The Linux kernel transparent hugepage defrag setting. + Accepted values are: + * `TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS`: An application requesting THP will stall on allocation failure and directly reclaim pages and compact memory in an effort to allocate a THP immediately. + * `TRANSPARENT_HUGEPAGE_DEFRAG_DEFER`: An application will wake kswapd in the background to reclaim pages and wake kcompactd to compact memory so that THP is available in the near future. It is the responsibility of khugepaged to then install the THP pages later. + * `TRANSPARENT_HUGEPAGE_DEFRAG_DEFER_WITH_MADVISE`: An application will enter direct reclaim and compaction like always, but only for regions that have used madvise(MADV_HUGEPAGE); all other regions will wake kswapd in the background to reclaim pages and wake kcompactd to compact memory so that THP is available in the near future. 
+ * `TRANSPARENT_HUGEPAGE_DEFRAG_MADVISE`: An application will enter direct reclaim and compaction like always, but only for regions that have used madvise(MADV_HUGEPAGE); all other regions will wake kswapd in the background to reclaim pages and wake kcompactd to compact memory so that THP is available in the near future. + * `TRANSPARENT_HUGEPAGE_DEFRAG_NEVER`: An application will never enter direct reclaim or compaction. + * `TRANSPARENT_HUGEPAGE_DEFRAG_UNSPECIFIED`: Default value. GKE will not modify the kernel configuration. + The `containerd_config` block supports: * `private_registry_access_config` (Optional) - Configuration for private container registries. There are two fields in this config: From 17d4fc4d6f4b1027628b52e5993524f1256e7bd9 Mon Sep 17 00:00:00 2001 From: Sudheer Vakati <50149932+s-vakati@users.noreply.github.com> Date: Wed, 13 Aug 2025 16:08:58 +0000 Subject: [PATCH 762/884] google_sql_user doc change to indicate password policy is supported for PG (#14799) --- .../third_party/terraform/website/docs/r/sql_user.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown index 5e973aba88fa..054666ef4fc1 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown @@ -141,7 +141,7 @@ The following arguments are supported: * `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. -The optional `password_policy` block is only supported by Mysql. The `password_policy` block supports: +The optional `password_policy` block is only supported for creating MySQL and Postgres users. The `password_policy` block supports: * `allowed_failed_attempts` - (Optional) Number of failed attempts allowed before the user get locked. 
From b354099fa8cf68d30cacd77f0b0e02d8178da5a5 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 13 Aug 2025 11:12:18 -0700 Subject: [PATCH 763/884] tgc-revival: fix the maximum number of retries (#14840) --- mmv1/third_party/tgc_next/test/assert_test_files.go | 3 ++- mmv1/third_party/tgc_next/test/setup.go | 10 +++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index 5da0f9113751..fd33ed62dc1b 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -76,7 +76,8 @@ func BidirectionalConversion(t *testing.T, ignoredFields []string, ignoredAssetF return nil } - backoffPolicy := retry.WithMaxRetries(maxRetries, retry.NewConstant(50*time.Millisecond)) + // Note maxAttempts-1 is retries, not attempts. + backoffPolicy := retry.WithMaxRetries(maxAttempts-1, retry.NewConstant(50*time.Millisecond)) t.Log("Starting test with retry logic.") diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go index 07e8f20c209d..50c2ecc5a9ed 100644 --- a/mmv1/third_party/tgc_next/test/setup.go +++ b/mmv1/third_party/tgc_next/test/setup.go @@ -51,12 +51,12 @@ type Resource struct { } const ( - ymdFormat = "2006-01-02" - maxRetries = 3 + ymdFormat = "2006-01-02" + maxAttempts = 3 ) var ( - TestsMetadata = make([]NightlyRun, maxRetries) + TestsMetadata = make([]NightlyRun, maxAttempts) setupDone = false ) @@ -88,8 +88,8 @@ func ReadTestsDataFromGcs() ([]NightlyRun, error) { // Keep looking until we find a date with metadata. i-- retries++ - if retries > maxRetries { - // Stop looking when we find maxRetries dates with no metadata. + if retries > maxAttempts { + // Stop looking when we find maxAttempts dates with no metadata. 
return nil, fmt.Errorf("too many retries, %v", allErrs) } } else { From 8720ee6c79ddfd92e9acb681a70a9720be2d9201 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Wed, 13 Aug 2025 14:23:59 -0400 Subject: [PATCH 764/884] updated datasource generation to handle certain other code blocks (#14800) --- mmv1/api/resource.go | 29 ++++++++++++++++++--- mmv1/api/resource/datasource.go | 19 ++++++++++++++ mmv1/products/iap/Client.yaml | 3 ++- mmv1/templates/terraform/datasource.go.tmpl | 12 +++++++++ 4 files changed, 59 insertions(+), 4 deletions(-) create mode 100644 mmv1/api/resource/datasource.go diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index db45e65aeda5..95c10c201f4f 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -227,7 +227,7 @@ type Resource struct { ExcludeImport bool `yaml:"exclude_import,omitempty"` // If true, resource should be autogenerated as a data source - GenerateDatasource bool `yaml:"generate_datasource,omitempty"` + Datasource *resource.Datasource `yaml:"datasource,omitempty"` // If true, skip sweeper generation for this resource ExcludeSweeper bool `yaml:"exclude_sweeper,omitempty"` @@ -2078,8 +2078,31 @@ func urlContainsOnlyAllowedKeys(templateURL string, allowedKeys []string) bool { return true } -func (r Resource) ShouldGenerateSingularDataSource() bool { - return r.GenerateDatasource +func (r *Resource) ShouldGenerateSingularDataSource() bool { + + if r.Datasource == nil { + return false + } + + return r.Datasource.Generate +} + +func (r Resource) ShouldDatasourceSetLabels() bool { + for _, p := range r.Properties { + if p.Name == "labels" && p.Type == "KeyValueLabels" { + return true + } + } + return false +} + +func (r Resource) ShouldDatasourceSetAnnotations() bool { + for _, p := range r.Properties { + if p.Name == "annotations" && p.Type == "KeyValueAnnotations" { + return true + } + } + return false } // DatasourceOptionalFields returns a list of fields from the resource's URI diff --git 
a/mmv1/api/resource/datasource.go b/mmv1/api/resource/datasource.go new file mode 100644 index 000000000000..ee87c1501f87 --- /dev/null +++ b/mmv1/api/resource/datasource.go @@ -0,0 +1,19 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +type Datasource struct { + // boolean to determine whether the datasource file should be generated + Generate bool `yaml:"generate"` +} diff --git a/mmv1/products/iap/Client.yaml b/mmv1/products/iap/Client.yaml index b3604a627a00..ff5a04384e51 100644 --- a/mmv1/products/iap/Client.yaml +++ b/mmv1/products/iap/Client.yaml @@ -33,7 +33,8 @@ self_link: '{{brand}}/identityAwareProxyClients/{{client_id}}' immutable: true import_format: - '{{brand}}/identityAwareProxyClients/{{client_id}}' -generate_datasource: true +datasource: + generate: true timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/templates/terraform/datasource.go.tmpl b/mmv1/templates/terraform/datasource.go.tmpl index 89bdf90c72d3..471ba248fd7f 100644 --- a/mmv1/templates/terraform/datasource.go.tmpl +++ b/mmv1/templates/terraform/datasource.go.tmpl @@ -84,6 +84,18 @@ func dataSource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{ return err } + {{if $.ShouldDatasourceSetLabels}} + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + {{end}} + + {{if $.ShouldDatasourceSetAnnotations}} + if err := tpgresource.SetDataSourceAnnotations(d); err != nil { + 
return err + } + {{end}} + if d.Id() == "" { return fmt.Errorf("%s not found", id) } From d4ccadb4dd8835f8b073c3c7d2a98a84536897fe Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 13 Aug 2025 13:01:05 -0700 Subject: [PATCH 765/884] tgc-revival: add apphub resources (#14843) --- mmv1/products/apphub/Application.yaml | 1 + mmv1/products/apphub/Service.yaml | 1 + mmv1/products/apphub/Workload.yaml | 1 + 3 files changed, 3 insertions(+) diff --git a/mmv1/products/apphub/Application.yaml b/mmv1/products/apphub/Application.yaml index 34f66b9ba088..8132bf8d0e5e 100644 --- a/mmv1/products/apphub/Application.yaml +++ b/mmv1/products/apphub/Application.yaml @@ -41,6 +41,7 @@ custom_code: constants: 'templates/terraform/constants/apphub_application.go.tmpl' custom_diff: - 'apphubApplicationCustomizeDiff' +include_in_tgc_next_DO_NOT_USE: true sweeper: url_substitutions: - region: "us-central1" diff --git a/mmv1/products/apphub/Service.yaml b/mmv1/products/apphub/Service.yaml index f62ff42e1611..d026e536e820 100644 --- a/mmv1/products/apphub/Service.yaml +++ b/mmv1/products/apphub/Service.yaml @@ -40,6 +40,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apphub_service_basic' primary_resource_id: 'example' diff --git a/mmv1/products/apphub/Workload.yaml b/mmv1/products/apphub/Workload.yaml index 27c9ab21c094..a538ceb880f6 100644 --- a/mmv1/products/apphub/Workload.yaml +++ b/mmv1/products/apphub/Workload.yaml @@ -40,6 +40,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apphub_workload_basic' primary_resource_id: 'example' From 4c523b99d348ce75b1bfe516f6e0ab5e05de752c Mon Sep 17 00:00:00 2001 From: Abhishek Roy Date: Thu, 14 Aug 2025 01:33:50 +0530 Subject: [PATCH 766/884] Add capability related fields to the folder schema (#14601) --- .../data_source_google_folder.go | 9 +++++++++ .../resourcemanager/resource_google_folder.go | 17 
+++++++++++++++++ ...rce_resource_manager_capability_test.go.tmpl | 12 ++++++++++++ .../website/docs/d/folder.html.markdown | 2 ++ .../website/docs/r/google_folder.html.markdown | 2 ++ 5 files changed, 42 insertions(+) diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go index f644f61c84c9..7c9fd0c32ab5 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go @@ -54,6 +54,15 @@ func DataSourceGoogleFolder() *schema.Resource { Type: schema.TypeBool, Computed: true, }, + "configured_capabilities": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "management_project": { + Type: schema.TypeString, + Computed: true, + }, }, } } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go index 37b89229e998..c1a472ca7a34 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go @@ -78,6 +78,17 @@ func ResourceGoogleFolder() *schema.Resource { Elem: &schema.Schema{Type: schema.TypeString}, Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored when empty. This field is only set at create time and modifying this field after creation will trigger recreation. 
To apply tags to an existing resource, see the google_tags_tag_value resource.`, }, + "configured_capabilities": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `A list of capabilities that are configured for this folder.`, + }, + "management_project": { + Type: schema.TypeString, + Computed: true, + Description: `The Management Project associated with the folder's configured capabilities.`, + }, }, UseJSONNumber: true, } @@ -179,6 +190,12 @@ func resourceGoogleFolderRead(d *schema.ResourceData, meta interface{}) error { if err := d.Set("create_time", folder.CreateTime); err != nil { return fmt.Errorf("Error setting create_time: %s", err) } + if err := d.Set("configured_capabilities", folder.ConfiguredCapabilities); err != nil { + return fmt.Errorf("Error setting configured_capabilities: %s", err) + } + if err := d.Set("management_project", folder.ManagementProject); err != nil { + return fmt.Errorf("Error setting management_project: %s", err) + } return nil } diff --git a/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl index 8277c542e255..35dbd2c77259 100644 --- a/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl @@ -3,6 +3,7 @@ package resourcemanager3_test import ( "testing" + "regexp" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -17,6 +18,7 @@ func TestAccResourceManagerCapability_resourceManagerCapabilityExample_basic(t * "org_id": envvar.GetTestOrgFromEnv(t), "random_suffix": acctest.RandString(t, 10), } + folderTFResourceName := "google_folder.folder" acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -28,6 +30,16 @@ func 
TestAccResourceManagerCapability_resourceManagerCapabilityExample_basic(t * { Config: testAccResourceManagerCapability_resourceManagerCapabilityExample_basic(context), }, + { + ResourceName: folderTFResourceName, + ImportState: true, + ImportStateVerify: false, + Check: resource.ComposeTestCheckFunc( + // Checks are now performed on the state *after* the import/refresh. + resource.TestCheckResourceAttr(folderTFResourceName, "configured_capabilities.#", "1"), + resource.TestMatchResourceAttr(folderTFResourceName, "management_project", regexp.MustCompile(".+")), + ), + }, { ResourceName: "google_resource_manager_capability.capability", ImportState: true, diff --git a/mmv1/third_party/terraform/website/docs/d/folder.html.markdown b/mmv1/third_party/terraform/website/docs/d/folder.html.markdown index 1a62e8ae5079..3f61f1e64a45 100644 --- a/mmv1/third_party/terraform/website/docs/d/folder.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/folder.html.markdown @@ -47,3 +47,5 @@ The following attributes are exported: * `create_time` - Timestamp when the Organization was created. A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z". * `lifecycle_state` - The Folder's current lifecycle state. * `organization` - If `lookup_organization` is enable, the resource name of the Organization that the folder belongs. +* `configured_capabilities` - Optional capabilities configured for this folder. +* `management_project` - Management Project associated with this folder (if capability is enabled). 
diff --git a/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown index 4bed99d434bd..e6ec9fa94014 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown @@ -69,6 +69,8 @@ exported: * `lifecycle_state` - The lifecycle state of the folder such as `ACTIVE` or `DELETE_REQUESTED`. * `create_time` - Timestamp when the Folder was created. Assigned by the server. A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z". +* `configured_capabilities` - Optional capabilities configured for this folder. +* `management_project` - Management Project associated with this folder (if capability is enabled). ## Import From 4b5fb82778a4af35008539741bf18f04a0b9fab5 Mon Sep 17 00:00:00 2001 From: Danny Qiu Date: Wed, 13 Aug 2025 14:48:48 -0700 Subject: [PATCH 767/884] Treat skip_initial_version_creation as a create-only parameter for Cloud KMS keys (#14802) --- mmv1/products/kms/CryptoKey.yaml | 3 +- .../custom_import/kms_crypto_key.go.tmpl | 4 -- .../kms/resource_kms_crypto_key_test.go.tmpl | 45 ++++++++++--------- 3 files changed, 27 insertions(+), 25 deletions(-) diff --git a/mmv1/products/kms/CryptoKey.yaml b/mmv1/products/kms/CryptoKey.yaml index f43efc93960e..9f00694f76a6 100644 --- a/mmv1/products/kms/CryptoKey.yaml +++ b/mmv1/products/kms/CryptoKey.yaml @@ -75,8 +75,9 @@ parameters: If set to true, the request will create a CryptoKey without any CryptoKeyVersions. You must use the `google_kms_crypto_key_version` resource to create a new CryptoKeyVersion or `google_kms_key_ring_import_job` resource to import the CryptoKeyVersion. + This field is only applicable during initial CryptoKey creation. 
url_param_only: true - immutable: true + ignore_read: true properties: - name: 'name' type: String diff --git a/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl b/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl index db6e302250d3..3776775b66d7 100644 --- a/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl +++ b/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl @@ -13,10 +13,6 @@ return nil, fmt.Errorf("Error setting name: %s", err) } - if err := d.Set("skip_initial_version_creation", false); err != nil { - return nil, fmt.Errorf("Error setting skip_initial_version_creation: %s", err) - } - id, err := tpgresource.ReplaceVars(d, config, "{{$.GetIdFormat}}") if err != nil { return nil, fmt.Errorf("Error constructing id: %s", err) diff --git a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl index f38115cdea1e..ccacec67295e 100644 --- a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl +++ b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl @@ -158,7 +158,7 @@ func TestAccKmsCryptoKey_basic(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, }, // Test importing with a short id { @@ -166,7 +166,7 @@ func TestAccKmsCryptoKey_basic(t *testing.T) { ImportState: true, ImportStateId: fmt.Sprintf("%s/%s/%s/%s", projectId, location, keyRingName, cryptoKeyName), ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still 
exist. { @@ -203,25 +203,28 @@ func TestAccKmsCryptoKey_rotation(t *testing.T) { Config: testGoogleKmsCryptoKey_rotation(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, rotationPeriod), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, }, { Config: testGoogleKmsCryptoKey_rotation(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, updatedRotationPeriod), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, }, { Config: testGoogleKmsCryptoKey_rotationRemoved(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. 
{ @@ -256,17 +259,19 @@ func TestAccKmsCryptoKey_template(t *testing.T) { Config: testGoogleKmsCryptoKey_template(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, algorithm), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, }, { Config: testGoogleKmsCryptoKey_template(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, updatedAlgorithm), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. { @@ -302,7 +307,7 @@ func TestAccKmsCryptoKey_destroyDuration(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. 
{ @@ -344,7 +349,7 @@ func TestAccKmsCryptoKey_keyAccessJustificationsPolicy(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, }, { Config: testGoogleKmsCryptoKey_keyAccessJustificationsPolicy(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, updatedAllowedAccessReason), @@ -353,7 +358,7 @@ func TestAccKmsCryptoKey_keyAccessJustificationsPolicy(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. { From a573a90e14ea3f6968553f376949b6f49a6e65cd Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 13 Aug 2025 18:05:05 -0700 Subject: [PATCH 768/884] tgc-revival: add google_alloydb_instance (#14773) --- mmv1/products/alloydb/Instance.yaml | 7 ++ .../pkg/cai2hcl/converters/utils/utils.go | 77 +++++++++++++++---- .../cai2hcl/converters/utils/utils_test.go | 75 ++++++++++++++++++ 3 files changed, 143 insertions(+), 16 deletions(-) diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index 069500936ff0..afc7ebf98c5f 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -52,6 +52,7 @@ custom_code: custom_import: 'templates/terraform/custom_import/alloydb_instance.go.tmpl' # Skipping the sweeper because instances will be deleted during cluster sweeps exclude_sweeper: true +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'alloydb_instance_basic' primary_resource_id: 'default' @@ -248,9 +249,11 @@ properties: - name: 
'recordApplicationTags' type: Boolean description: 'Record application tags for an instance. This flag is turned "on" by default.' + include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'recordClientAddress' type: Boolean description: 'Record client address for an instance. Client address is PII information. This flag is turned "on" by default.' + include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'queryPlansPerMinute' type: Integer description: 'Number of query execution plans captured by Insights per minute for all queries combined. The default value is 5. Any integer between 0 and 20 is considered valid.' @@ -263,9 +266,11 @@ properties: - name: 'enabled' type: Boolean description: 'Observability feature status for an instance.' + include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'preserveComments' type: Boolean description: 'Preserve comments in the query string.' + include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'trackWaitEvents' type: Boolean description: 'Record wait events during query execution for an instance.' @@ -278,12 +283,14 @@ properties: - name: 'recordApplicationTags' type: Boolean description: 'Record application tags for an instance. This flag is turned "on" by default.' + include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'queryPlansPerMinute' type: Integer description: 'Number of query execution plans captured by Insights per minute for all queries combined. The default value is 5. Any integer between 0 and 200 is considered valid.' - name: 'trackActiveQueries' type: Boolean description: 'Track actively running queries. If not set, default value is "off".' + include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'assistiveExperiencesEnabled' type: Boolean description: 'Whether assistive experiences are enabled for this AlloyDB instance.' 
diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go index f8947e065da4..6e74880ed6cb 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go @@ -3,6 +3,7 @@ package utils import ( "encoding/json" "fmt" + "log" "strings" hashicorpcty "github.com/hashicorp/go-cty/cty" @@ -28,29 +29,73 @@ func ParseFieldValue(url string, name string) string { template: //bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}} assetName: //bigquery.googleapis.com/projects/my-project/datasets/my-dataset hclData: [project:my-project dataset_id:my-dataset] + +It also handles multi-fragment fields. +template: {{cluster}}/instances/{{instance_id}} +assetName: //alloydb.googleapis.com/projects/ci-test-project/locations/us-central1/clusters/tf-test-cluster/instances/tf-test-instance +hclData: [cluster:projects/ci-test-project/locations/us-central1/clusters/tf-test-cluster instance_id:tf-test-instance] */ func ParseUrlParamValuesFromAssetName(assetName, template string, outputFields map[string]struct{}, hclData map[string]any) { - fragments := strings.Split(template, "/") - if len(fragments) < 2 { - // We need a field and a prefix. - return - } - fields := make(map[string]string) // keys are prefixes in URI, values are names of fields - for ix, item := range fragments[1:] { - if trimmed, ok := strings.CutPrefix(item, "{{"); ok { - if trimmed, ok = strings.CutSuffix(trimmed, "}}"); ok { - fields[fragments[ix]] = trimmed // ix is relative to the subslice + templateFragments := strings.Split(template, "/") + assetFragments := strings.Split(assetName, "/") + + // Iterate through the fragments and match fields. 
+ assetIx := 0 + for templateIx := 0; templateIx < len(templateFragments); templateIx++ { + templateFragment := templateFragments[templateIx] + + // Check if the template fragment is a field (e.g., {{project}}) + if fieldName, isField := strings.CutPrefix(templateFragment, "{{"); isField { + if fieldName, hasEnd := strings.CutSuffix(fieldName, "}}"); hasEnd { + // Find the end of this field in the template. The end is the next non-field fragment. + endTemplateIx := templateIx + 1 + for endTemplateIx < len(templateFragments) && strings.HasPrefix(templateFragments[endTemplateIx], "{{") { + endTemplateIx++ + } + + endAssetIx := getEndAssetIx(endTemplateIx, templateFragments, assetFragments) + + valueFragments := assetFragments[assetIx:endAssetIx] + value := strings.Join(valueFragments, "/") + + if _, isOutput := outputFields[fieldName]; !isOutput { + hclData[fieldName] = value + } + + assetIx = endAssetIx + templateIx = endTemplateIx - 1 + } else { + assetIx++ + } + } else { + // This is a literal fragment, just advance the asset index if it matches. + if assetIx < len(assetFragments) && assetFragments[assetIx] == templateFragment { + assetIx++ + } else { + log.Printf("Warning: Template literal '%s' does not match assetName at index %d.", templateFragment, assetIx) } } } - fragments = strings.Split(assetName, "/") - for ix, item := range fragments[:len(fragments)-1] { - if fieldName, ok := fields[item]; ok { - if _, isOutput := outputFields[fieldName]; !isOutput { - hclData[fieldName] = fragments[ix+1] - } +} + +// Finds the exclusive end index of a dynamic path segment within a Google Cloud asset name +// by searching for the next literal segment from a template. +func getEndAssetIx(endTemplateIx int, templateFragments []string, assetFragments []string) int { + if endTemplateIx >= len(templateFragments) { + return len(assetFragments) + } + + // Find the index of the next non-field fragment in the asset name. 
+ nextNonFieldFragment := templateFragments[endTemplateIx] + for ix, item := range assetFragments { + if item == nextNonFieldFragment { + return ix } } + + // If the next non-field fragment is not found in the asset name, + // it means the dynamic field goes to the end of the asset name. + return len(assetFragments) } // DecodeJSON decodes the map object into the target struct. diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go index bf0d6c5c80c5..afeebb1402ca 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go @@ -1,8 +1,11 @@ package utils_test import ( + "fmt" "testing" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "github.com/stretchr/testify/assert" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" @@ -186,3 +189,75 @@ func createSchema(name string) map[string]*schema.Schema { return provider.ResourcesMap[name].Schema } + +func TestParseUrlParamValuesFromAssetName(t *testing.T) { + compareMaps := func(m1, m2 map[string]any) error { + if diff := cmp.Diff(m1, m2, cmpopts.SortMaps(func(k1, k2 string) bool { return k1 < k2 })); diff != "" { + return fmt.Errorf("maps are not equal (-got +want):\n%s", diff) + } + return nil + } + + // Test cases for different scenarios + testCases := []struct { + name string + template string + assetName string + outputFields map[string]struct{} + want map[string]any + }{ + { + name: "ComputeUrlmap", + template: "//compute.googleapis.com/projects/{{project}}/global/urlMaps/{{name}}", + assetName: "//compute.googleapis.com/projects/my-project/global/urlMaps/urlmapibgtchooyo", + outputFields: make(map[string]struct{}), + want: map[string]any{"project": "my-project", "name": "urlmapibgtchooyo"}, + }, + { + name: "BigQueryDataset", + template: 
"//bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}}", + assetName: "//bigquery.googleapis.com/projects/my-project/datasets/my-dataset", + outputFields: make(map[string]struct{}), + want: map[string]any{"project": "my-project", "dataset_id": "my-dataset"}, + }, + { + name: "AlloyDBInstance", + template: "//alloydb.googleapis.com/{{cluster}}/instances/{{instance_id}}", + assetName: "//alloydb.googleapis.com/projects/ci-test/locations/us-central1/clusters/tf-test-cluster/instances/tf-test-instance", + outputFields: make(map[string]struct{}), + want: map[string]any{"cluster": "projects/ci-test/locations/us-central1/clusters/tf-test-cluster", "instance_id": "tf-test-instance"}, + }, + { + name: "WithOutputFieldsIgnored", + template: "//bigquery.googleapis.com/projects/{{project}}/location/{{location}}/datasets/{{dataset_id}}", + assetName: "//bigquery.googleapis.com/projects/my-project/location/abc/datasets/my-dataset", + outputFields: map[string]struct{}{"location": {}}, // 'location' should be ignored + want: map[string]any{"project": "my-project", "dataset_id": "my-dataset"}, + }, + { + name: "WithMissingSuffix", + template: "//bigquery.googleapis.com/projects/{{project/datasets/{{dataset_id}}", + assetName: "//bigquery.googleapis.com/projects/my-project/datasets/my-dataset", + outputFields: make(map[string]struct{}), + want: map[string]any{"dataset_id": "my-dataset"}, + }, + { + name: "EmptyTemplate", + template: "", + assetName: "//bigquery.googleapis.com/projects/my-project/datasets/my-dataset", + outputFields: make(map[string]struct{}), + want: map[string]any{}, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + hclData := make(map[string]any) + utils.ParseUrlParamValuesFromAssetName(tc.assetName, tc.template, tc.outputFields, hclData) + + if err := compareMaps(hclData, tc.want); err != nil { + t.Fatalf("map mismatch: %v", err) + } + }) + } +} From 4926183a6e026f10b73faca2acd3fa9387139157 Mon Sep 17 00:00:00 
2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Wed, 13 Aug 2025 21:05:16 -0700 Subject: [PATCH 769/884] fix TestAccContainerCluster_additional_ip_ranges_config_on_update (#14812) --- .../container/resource_container_cluster_test.go.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 41ed9e9f6119..c02fbad06340 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -14423,7 +14423,7 @@ func testAccContainerCluster_additional_ip_ranges_config(name string, additional resource "google_compute_subnetwork" "main" { ip_cidr_range = "10.2.0.0/24" - name = "main" + name = "%s" network = google_compute_network.main.self_link region = "us-central1" @@ -14455,7 +14455,7 @@ func testAccContainerCluster_additional_ip_ranges_config(name string, additional deletion_protection = false } - `, name, subnetStr, name, additionalIpRangesStr) + `, name, name, subnetStr, name, additionalIpRangesStr) } func testAccContainerCluster_withAnonymousAuthenticationConfig(name, networkName, subnetworkName string, mode string) string { From b318dbe9f3f7110c4ae665e2f74734994c6ed98f Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Wed, 13 Aug 2025 21:05:33 -0700 Subject: [PATCH 770/884] fix TestAccDataFusionInstance_dataFusionInstanceCmekExample (#14826) --- .../examples/data_fusion_instance_cmek.tf.tmpl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mmv1/templates/terraform/examples/data_fusion_instance_cmek.tf.tmpl b/mmv1/templates/terraform/examples/data_fusion_instance_cmek.tf.tmpl index 05ff63eed8aa..f410158db004 100644 --- 
a/mmv1/templates/terraform/examples/data_fusion_instance_cmek.tf.tmpl +++ b/mmv1/templates/terraform/examples/data_fusion_instance_cmek.tf.tmpl @@ -7,7 +7,7 @@ resource "google_data_fusion_instance" "{{$.PrimaryResourceId}}" { key_reference = google_kms_crypto_key.crypto_key.id } - depends_on = [google_kms_crypto_key_iam_member.crypto_key_member] + depends_on = [google_kms_crypto_key_iam_member.crypto_key_member_cdf_sa, google_kms_crypto_key_iam_member.crypto_key_member_gcs_sa] } resource "google_kms_crypto_key" "crypto_key" { @@ -20,11 +20,18 @@ resource "google_kms_key_ring" "key_ring" { location = "us-central1" } -resource "google_kms_crypto_key_iam_member" "crypto_key_member" { +resource "google_kms_crypto_key_iam_member" "crypto_key_member_cdf_sa" { crypto_key_id = google_kms_crypto_key.crypto_key.id role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-datafusion.iam.gserviceaccount.com" } +resource "google_kms_crypto_key_iam_member" "crypto_key_member_gcs_sa" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com" +} + data "google_project" "project" {} From af9f2f05b0b3008b3b56e638b13d625fc9f97663 Mon Sep 17 00:00:00 2001 From: olagacek Date: Thu, 14 Aug 2025 18:26:13 +0200 Subject: [PATCH 771/884] Update google.golang.org/api package to the 0.245.0 version. 
(#14848) --- mmv1/third_party/terraform/go.mod | 34 +++++++-------- mmv1/third_party/terraform/go.sum | 72 +++++++++++++++---------------- 2 files changed, 53 insertions(+), 53 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index d670d3a09c9e..c60b3db2b710 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,7 +3,7 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.2 + cloud.google.com/go/auth v0.16.3 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 @@ -32,18 +32,18 @@ require ( github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 - golang.org/x/net v0.41.0 + golang.org/x/net v0.42.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.242.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 - google.golang.org/grpc v1.73.0 + google.golang.org/api v0.245.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 + google.golang.org/grpc v1.74.2 google.golang.org/protobuf v1.36.6 gopkg.in/yaml.v2 v2.4.0 ) require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect - cel.dev/expr v0.23.0 // indirect + cel.dev/expr v0.24.0 // indirect cloud.google.com/go v0.121.0 // indirect cloud.google.com/go/compute/metadata v0.7.0 // indirect cloud.google.com/go/iam v1.5.2 // indirect @@ -55,22 +55,22 @@ require ( github.com/cenkalti/backoff v2.2.1+incompatible // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cloudflare/circl v1.6.0 // indirect - github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect + github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect 
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/fatih/color v1.16.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4 // indirect github.com/go-jose/go-jose/v4 v4.0.5 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/golang/glog v1.2.4 // indirect + github.com/golang/glog v1.2.5 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.14.2 // indirect + github.com/googleapis/gax-go/v2 v2.15.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect github.com/hashicorp/go-plugin v1.6.3 // indirect @@ -108,15 +108,15 @@ require ( go.opentelemetry.io/otel/sdk v1.36.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/crypto v0.39.0 // indirect + golang.org/x/crypto v0.40.0 // indirect golang.org/x/mod v0.25.0 // indirect - golang.org/x/sync v0.15.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/sync v0.16.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect golang.org/x/time v0.12.0 // indirect - golang.org/x/tools v0.33.0 // indirect + golang.org/x/tools v0.34.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect + google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect + 
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 1204e197fb26..8c5d2430b522 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -1,12 +1,12 @@ bitbucket.org/creachadair/stringset v0.0.8 h1:gQqe4vs8XWgMyijfyKE6K8o4TcyGGrRXe0JvHgx5H+M= bitbucket.org/creachadair/stringset v0.0.8/go.mod h1:AgthVMyMxC/6FK1KBJ2ALdqkZObGN8hOetgpwXyMn34= -cel.dev/expr v0.23.0 h1:wUb94w6OYQS4uXraxo9U+wUAs9jT47Xvl4iPgAwM2ss= -cel.dev/expr v0.23.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= +cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= -cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= -cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= +cloud.google.com/go/auth v0.16.3 h1:kabzoQ9/bobUmnseYnBO6qQG7q4a/CffFRlJSxv2wCc= +cloud.google.com/go/auth v0.16.3/go.mod h1:NucRGjaXfzP1ltpcQ7On/VTZ0H4kWB5Jy+Y9Dnm76fA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= @@ -46,8 +46,8 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk= github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go 
v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= -github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/creachadair/staticfile v0.1.2/go.mod h1:a3qySzCIXEprDGxk6tSxSI+dBBdLzqeBOMhZ+o2d3pM= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= @@ -90,8 +90,8 @@ github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JS github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= @@ -100,8 +100,8 @@ github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3a github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= -github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/glog v1.2.5 h1:DrW6hGnjIhtvhOIiAKT6Psh/Kd/ldepEa81DKeiRJ5I= +github.com/golang/glog v1.2.5/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= @@ -128,8 +128,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0= -github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= +github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= +github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= @@ -298,8 +298,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto 
v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= @@ -321,8 +321,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= @@ -332,8 +332,8 @@ golang.org/x/sync 
v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -351,18 +351,18 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.32.0 
h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.33.0 h1:NuFncQrRcaRvVmgRkvM3j/F00gWIAlcmlB8ACEKmGIg= +golang.org/x/term v0.33.0/go.mod h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -374,14 +374,14 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc= -golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI= +golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= +golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.242.0 h1:7Lnb1nfnpvbkCiZek6IXKdJ0MFuAZNAJKQfA1ws62xg= -google.golang.org/api v0.242.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= +google.golang.org/api v0.245.0 h1:YliGvz1rjXB+sTLNIST6Ffeji9WlRdLQ+LPl9ruSa5Y= +google.golang.org/api v0.245.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -389,19 +389,19 @@ google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78= -google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk= -google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 h1:vPV0tzlsK6EzEDHNNH5sa7Hs9bd7iXR7B1tSiPepkV0= -google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:pKLAc5OolXC3ViWGI62vvC0n10CpwAtRcTNCFwTKBEw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 
h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= +google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= +google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= -google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= From 
267ab70d54fe5ced1becedb02283f4612b93ca02 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 14 Aug 2025 13:00:54 -0400 Subject: [PATCH 772/884] Allow sending `start_time` with default values. (#14779) --- mmv1/products/clouddeploy/DeployPolicy.yaml | 50 ++++++++++--------- .../clouddeploy_deploy_policy_basic.tf.tmpl | 6 +-- .../clouddeploy_deploy_policy_full.tf.tmpl | 16 ++---- ...resource_clouddeploy_deploy_policy_test.go | 14 +++--- 4 files changed, 41 insertions(+), 45 deletions(-) diff --git a/mmv1/products/clouddeploy/DeployPolicy.yaml b/mmv1/products/clouddeploy/DeployPolicy.yaml index 5668011e78d3..56dc1bf0633a 100644 --- a/mmv1/products/clouddeploy/DeployPolicy.yaml +++ b/mmv1/products/clouddeploy/DeployPolicy.yaml @@ -72,7 +72,7 @@ properties: output: true - name: "description" type: String - description: "Optional. Description of the `DeployPolicy`. Max length is 255 characters." + description: "Description of the `DeployPolicy`. Max length is 255 characters." - name: "createTime" type: String description: "Output only. Time at which the DeployPolicy was created." @@ -83,21 +83,21 @@ properties: output: true - name: "annotations" type: KeyValueAnnotations - description: "Optional. User annotations. These attributes can only be set and used by the user, and not by Cloud Deploy. Annotations must meet the following constraints: * Annotations are key/value pairs. * Valid annotation keys have two segments: an optional prefix and name, separated by a slash (`/`). * The name segment is required and must be 63 characters or less, beginning and ending with an alphanumeric character (`[a-z0-9A-Z]`) with dashes (`-`), underscores (`_`), dots (`.`), and alphanumerics between. * The prefix is optional. If specified, the prefix must be a DNS subdomain: a series of DNS labels separated by dots(`.`), not longer than 253 characters in total, followed by a slash (`/`). 
See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set for more details." + description: "User annotations. These attributes can only be set and used by the user, and not by Cloud Deploy. Annotations must meet the following constraints: * Annotations are key/value pairs. * Valid annotation keys have two segments: an optional prefix and name, separated by a slash (`/`). * The name segment is required and must be 63 characters or less, beginning and ending with an alphanumeric character (`[a-z0-9A-Z]`) with dashes (`-`), underscores (`_`), dots (`.`), and alphanumerics between. * The prefix is optional. If specified, the prefix must be a DNS subdomain: a series of DNS labels separated by dots(`.`), not longer than 253 characters in total, followed by a slash (`/`). See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set for more details." - name: "labels" type: KeyValueLabels - description: "Optional. Labels are attributes that can be set and used by both the user and by Cloud Deploy. Labels must meet the following constraints: * Keys and values can contain only lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8 encoding, and international characters are allowed. * Keys must start with a lowercase letter or international character. * Each resource is limited to a maximum of 64 labels. Both keys and values are additionally constrained to be <= 63 characters." + description: "Labels are attributes that can be set and used by both the user and by Cloud Deploy. Labels must meet the following constraints: * Keys and values can contain only lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8 encoding, and international characters are allowed. * Keys must start with a lowercase letter or international character. * Each resource is limited to a maximum of 64 labels. 
Both keys and values are additionally constrained to be <= 63 characters." - name: "etag" type: String - description: "Optional. The weak etag of the `DeployPolicy` resource. This checksum is computed by the server based on the value of other fields, and may be sent on update and delete requests to ensure the client has an up-to-date value before proceeding." + description: "The weak etag of the `DeployPolicy` resource. This checksum is computed by the server based on the value of other fields, and may be sent on update and delete requests to ensure the client has an up-to-date value before proceeding." output: true - name: "suspended" type: Boolean - description: "Optional. When suspended, the policy will not prevent actions from occurring, even if the action violates the policy." + description: "When suspended, the policy will not prevent actions from occurring, even if the action violates the policy." send_empty_value: true - name: "selectors" type: Array - description: "Required. Selected resources to which the policy will be applied. At least one selector is required. If one selector matches the resource the policy applies. For example, if there are two selectors and the action being attempted matches one of them, the policy will apply to that action." + description: "Selected resources to which the policy will be applied. At least one selector is required. If one selector matches the resource the policy applies. For example, if there are two selectors and the action being attempted matches one of them, the policy will apply to that action." required: true item_type: type: NestedObject @@ -120,7 +120,7 @@ properties: - name: "id" type: String description: |- - Optional. ID of the DeliveryPipeline. The value of this field could be one of the following: + ID of the DeliveryPipeline. 
The value of this field could be one of the following: - The last segment of a pipeline name - "*", all delivery pipelines in a location - name: "labels" @@ -129,22 +129,22 @@ properties: default_from_api: true - name: "rules" type: Array - description: "Required. Rules to apply. At least one rule must be present." + description: "Rules to apply. At least one rule must be present." required: true item_type: type: NestedObject properties: - name: "rolloutRestriction" type: NestedObject - description: "Optional. Rollout restrictions." + description: "Rollout restrictions." properties: - name: "id" type: String - description: "Required. ID of the rule. This id must be unique in the `DeployPolicy` resource to which this rule belongs. The format is `a-z{0,62}`." + description: "ID of the rule. This id must be unique in the `DeployPolicy` resource to which this rule belongs. The format is `a-z{0,62}`." required: true - name: "invokers" type: Array - description: "Optional. What invoked the action. If left empty, all invoker types will be restricted." + description: "What invoked the action. If left empty, all invoker types will be restricted." item_type: type: Enum enum_values: @@ -152,7 +152,7 @@ properties: - "DEPLOY_AUTOMATION" - name: "actions" type: Array - description: "Optional. Rollout actions to be restricted as part of the policy. If left empty, all actions will be restricted." + description: "Rollout actions to be restricted as part of the policy. If left empty, all actions will be restricted." item_type: type: Enum enum_values: @@ -166,21 +166,21 @@ properties: - "TERMINATE_JOBRUN" - name: "timeWindows" type: NestedObject - description: "Required. Time window within which actions are restricted." + description: "Time window within which actions are restricted." properties: - name: "timeZone" type: String - description: "Required. The time zone in IANA format IANA Time Zone Database (e.g. America/New_York)." 
+ description: "The time zone in IANA format IANA Time Zone Database (e.g. America/New_York)." required: true - name: oneTimeWindows type: Array - description: "Optional. One-time windows within which actions are restricted." + description: "One-time windows within which actions are restricted." item_type: type: NestedObject properties: - name: "startDate" type: NestedObject - description: "Required. Start date." + description: "Start date." required: true properties: - name: "year" @@ -194,7 +194,7 @@ properties: description: "Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 to specify a year by itself or a year and month where the day isn't significant." - name: "endDate" type: NestedObject - description: "Required. End date." + description: "End date." required: true properties: - name: "year" @@ -208,7 +208,9 @@ properties: description: "Day of a month. Must be from 1 to 31 and valid for the year and month." - name: "startTime" type: NestedObject - description: "Required. Start time (inclusive). Use 00:00 for the beginning of the day." + send_empty_value: true + allow_empty_object: true + description: "Start time (inclusive). Use 00:00 for the beginning of the day." required: true properties: - name: "hours" @@ -225,7 +227,7 @@ properties: description: "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999." - name: "endTime" type: NestedObject - description: "Required. End time (exclusive). You may use 24:00 for the end of the day." + description: "End time (exclusive). You may use 24:00 for the end of the day." required: true properties: - name: "hours" @@ -242,13 +244,13 @@ properties: description: "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999." - name: weeklyWindows type: Array - description: "Optional. Recurring weekly windows within which actions are restricted." 
+ description: "Recurring weekly windows within which actions are restricted." item_type: type: NestedObject properties: - name: "daysOfWeek" type: Array - description: "Optional. Days of week. If left empty, all days of the week will be included." + description: "Days of week. If left empty, all days of the week will be included." item_type: type: Enum enum_values: @@ -261,7 +263,9 @@ properties: - "SUNDAY" - name: "startTime" type: NestedObject - description: "Optional. Start time (inclusive). Use 00:00 for the beginning of the day. If you specify startTime you must also specify endTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." + description: "Start time (inclusive). Use 00:00 for the beginning of the day. If you specify startTime you must also specify endTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." + send_empty_value: true + allow_empty_object: true properties: - name: "hours" type: Integer @@ -277,7 +281,7 @@ properties: description: "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999." - name: "endTime" type: NestedObject - description: "Optional. End time (exclusive). Use 24:00 to indicate midnight. If you specify endTime you must also specify startTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." + description: "End time (exclusive). Use 24:00 to indicate midnight. If you specify endTime you must also specify startTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." 
properties: - name: "hours" type: Integer diff --git a/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_basic.tf.tmpl b/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_basic.tf.tmpl index 513878b68ffe..a355ef601f72 100644 --- a/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_basic.tf.tmpl @@ -13,11 +13,11 @@ resource "google_clouddeploy_deploy_policy" "{{$.PrimaryResourceId}}" { time_zone = "America/Los_Angeles" weekly_windows { start_time { - hours = "12" - minutes = "00" + hours = 0 + minutes = 0 } end_time { - hours = "13" + hours = "24" minutes = "00" } } diff --git a/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_full.tf.tmpl b/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_full.tf.tmpl index 61e5590c0902..5495942b666c 100644 --- a/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/clouddeploy_deploy_policy_full.tf.tmpl @@ -34,16 +34,12 @@ resource "google_clouddeploy_deploy_policy" "{{$.PrimaryResourceId}}" { time_zone = "America/Los_Angeles" weekly_windows { start_time { - hours = "12" - minutes = "00" - seconds = "00" - nanos = "00" + hours = 0 + minutes = 0 } end_time { hours = "13" minutes = "00" - seconds = "00" - nanos = "00" } } } @@ -60,8 +56,6 @@ resource "google_clouddeploy_deploy_policy" "{{$.PrimaryResourceId}}" { start_time { hours = "13" minutes = "00" - seconds = "00" - nanos = "00" } end_time { hours = "14" @@ -74,16 +68,12 @@ resource "google_clouddeploy_deploy_policy" "{{$.PrimaryResourceId}}" { one_time_windows { start_time { - hours = "15" + hours = "00" minutes = "00" - seconds = "00" - nanos = "00" } end_time { hours = "16" minutes = "00" - seconds = "00" - nanos = "00" } start_date { year = "2019" diff --git a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go 
b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go index 3e37ca066579..eecdaf1a4fb1 100644 --- a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go +++ b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go @@ -60,8 +60,8 @@ resource "google_clouddeploy_deploy_policy" "deploy_policy" { time_zone = "America/Los_Angeles" weekly_windows { start_time { - hours = "12" - minutes = "00" + hours = 0 + minutes = 0 } end_time { hours = "13" @@ -112,12 +112,14 @@ resource "google_clouddeploy_deploy_policy" "deploy_policy" { time_zone = "America/Los_Angeles" weekly_windows { start_time { - hours = "13" - minutes = "00" + hours = 13 + minutes = 00 } end_time { - hours = "14" - minutes = "00" + hours = 24 + minutes = 0 + seconds = 0 + nanos = 0 } days_of_week = ["MONDAY"] } From 5ee06eb8e096f7ba6d727c72f566a548397522e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Thu, 14 Aug 2025 19:38:25 +0200 Subject: [PATCH 773/884] Add singular data source for retrieving an Artifact Registry tag (#14717) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + .../data_source_artifact_registry_tag.go | 122 ++++++++++++++++++ .../data_source_artifact_registry_tag_test.go | 38 ++++++ .../d/artifact_registry_tag.html.markdown | 41 ++++++ 4 files changed, 202 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 5c3c2c7f50eb..79a5701201bc 100644 --- 
a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -32,6 +32,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), + "google_artifact_registry_tag": artifactregistry.DataSourceArtifactRegistryTag(), "google_artifact_registry_tags": artifactregistry.DataSourceArtifactRegistryTags(), "google_artifact_registry_version": artifactregistry.DataSourceArtifactRegistryVersion(), "google_apphub_discovered_workload": apphub.DataSourceApphubDiscoveredWorkload(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go new file mode 100644 index 000000000000..7654a57d7973 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go @@ -0,0 +1,122 @@ +package artifactregistry + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceArtifactRegistryTag() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryTagRead, + + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + }, + "package_name": { + Type: schema.TypeString, + Required: true, + }, + "tag_name": { + Type: schema.TypeString, + Required: true, + }, + "project": { + Type: schema.TypeString, + 
Optional: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "version": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func DataSourceArtifactRegistryTagRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return fmt.Errorf("Error setting Artifact Registry user agent: %s", err) + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error setting Artifact Registry project: %s", err) + } + + basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") + if err != nil { + return fmt.Errorf("Error setting Artifact Registry base path: %s", err) + } + + resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{package_name}}/tags/{{tag_name}}")) + if err != nil { + return fmt.Errorf("Error setting resource path: %s", err) + } + + urlRequest := basePath + resourcePath + headers := make(http.Header) + + u, err := url.Parse(urlRequest) + if err != nil { + return fmt.Errorf("Error parsing URL: %s", err) + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: u.String(), + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error getting Artifact Registry tag: %s", err) + } + + annotations := make(map[string]string) + if anno, ok := res["annotations"].(map[string]interface{}); ok { + for k, v := range anno { + if val, ok := v.(string); ok { + annotations[k] = val + } + } + } + + getString := func(m map[string]interface{}, key string) string { + if v, ok := m[key].(string); ok { + return v + } + return "" + } + + name := getString(res, "name") + + if err := d.Set("project", project); err != nil { + return err + } + if err := 
d.Set("name", name); err != nil { + return err + } + if err := d.Set("version", res["version"].(string)); err != nil { + return err + } + + d.SetId(name) + + return nil +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go new file mode 100644 index 000000000000..b29fb4183b45 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go @@ -0,0 +1,38 @@ +package artifactregistry_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryTag_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryTagConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_artifact_registry_tag.this", "name", "projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane/tags/latest"), + ), + }, + }, + }) +} + +// Test the data source against the public AR repos +// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container +// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io +const testAccDataSourceArtifactRegistryTagConfig = ` +data "google_artifact_registry_tag" "this" { + project = "go-containerregistry" + location = "us" + repository_id = "gcr.io" + package_name = "gcrane" + tag_name = "latest" +} +` diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown new file mode 100644 index 000000000000..7898109ebd10 
--- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown @@ -0,0 +1,41 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about a tag within a Google Artifact Registry repository. +--- + +# google_artifact_registry_tag +This data source fetches information about a tag from a provided Artifact Registry repository. + +## Example Usage + +```hcl +data "google_artifact_registry_tag" "my_tag" { + location = "us-central1" + repository_id = "example-repo" + package_name = "example-package" + tag_name = "latest" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` - (Required) The location of the artifact registry. + +* `repository_id` - (Required) The last part of the repository name to fetch from. + +* `package_name` - (Required) The name of the package. + +* `tag_name` - (Required) The name of the tag. + +* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +The following computed attributes are exported: + +* `name` - The name of the tag, for example: `projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/tags/tag1`. If the package part contains slashes, the slashes are escaped. + +* `version` - The version of the tag.
From 25f8e35800d754dd90501605f458567ce30dcfeb Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 14 Aug 2025 11:38:02 -0700 Subject: [PATCH 774/884] beyondcorp iam deprecation (#14817) --- mmv1/api/resource/iam_policy.go | 3 + mmv1/products/beyondcorp/Application.yaml | 153 ++++++++++++++++++ .../datasource_iam.html.markdown.tmpl | 3 + .../terraform/resource_iam.html.markdown.tmpl | 4 + 4 files changed, 163 insertions(+) create mode 100644 mmv1/products/beyondcorp/Application.yaml diff --git a/mmv1/api/resource/iam_policy.go b/mmv1/api/resource/iam_policy.go index 37973ee462ae..fb6a7ab34705 100644 --- a/mmv1/api/resource/iam_policy.go +++ b/mmv1/api/resource/iam_policy.go @@ -114,6 +114,9 @@ type IamPolicy struct { // [Optional] Check to see if zone value should be replaced with GOOGLE_ZONE in iam tests // Defaults to true SubstituteZoneValue bool `yaml:"substitute_zone_value"` + + // Add a deprecation message for a resource that's been deprecated in the API. + DeprecationMessage string `yaml:"deprecation_message,omitempty"` } func (p *IamPolicy) UnmarshalYAML(unmarshal func(any) error) error { diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml new file mode 100644 index 000000000000..5b1992fe6894 --- /dev/null +++ b/mmv1/products/beyondcorp/Application.yaml @@ -0,0 +1,153 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: Application +deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' +description: Specifies application endpoint(s) to protect behind a Security Gateway. +base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications +update_mask: true +self_link: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} +create_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications?applicationId={{application_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} +import_format: + - projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} +iam_policy: + method_name_separator: ':' + iam_conditions_request_type: 'QUERY_PARAM_NESTED' + allowed_iam_role: 'roles/beyondcorp.securityGatewayUser' + parent_resource_attribute: 'application_id' + import_format: + - 'projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}}' + - '{{application_id}}' + deprecation_message: '`google_beyondcorp_application` and associated IAM resources are deprecated. Use `google_beyondcorp_security_gateway_application` instead.' 
+examples: + - name: beyondcorp_application_basic + primary_resource_id: example + primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' + vars: + security_gateway_name: default + application_name: google + - name: beyondcorp_application_vpc + primary_resource_id: example + primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' + vars: + security_gateway_name: default + application_name: my-vm-service +autogen_async: true +async: + operation: + timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 + base_url: '{{op_id}}' + actions: + - create + - delete + - update + type: OpAsync + result: + resource_inside_response: true + include_project: false +autogen_status: QXBwbGljYXRpb24= +parameters: + - name: securityGatewaysId + type: String + description: Part of `parent`. See documentation of `projectsId`. + immutable: true + url_param_only: true + required: true + - name: applicationId + type: String + description: |- + Optional. User-settable Application resource ID. + * Must start with a letter. + * Must contain between 4-63 characters from `/a-z-/`. + * Must end with a number or letter. + immutable: true + url_param_only: true + required: true +properties: + - name: createTime + type: String + description: Output only. Timestamp when the resource was created. + output: true + - name: displayName + type: String + description: |- + Optional. An arbitrary user-provided name for the Application resource. + Cannot exceed 64 characters. + - name: endpointMatchers + type: Array + description: |- + Required. Endpoint matchers associated with an application. + A combination of hostname and ports as endpoint matcher is used to match + the application. + Match conditions for OR logic. + An array of match conditions to allow for multiple matching criteria. 
+ The rule is considered a match if one the conditions are met. + The conditions can be one of the following combination + (Hostname), (Hostname & Ports) + + EXAMPLES: + Hostname - ("*.abc.com"), ("xyz.abc.com") + Hostname and Ports - ("abc.com" and "22"), ("abc.com" and "22,33") etc + required: true + item_type: + type: NestedObject + properties: + - name: hostname + type: String + description: Required. Hostname of the application. + required: true + - name: ports + type: Array + description: Optional. Ports of the application. + item_type: + type: Integer + - name: upstreams + type: Array + description: Optional. List of which upstream resource(s) to forward traffic to. + item_type: + type: NestedObject + properties: + - name: egressPolicy + type: NestedObject + description: Optional. Routing policy information. + properties: + - name: regions + type: Array + description: Required. List of regions where the application sends traffic to. + required: true + item_type: + type: String + - name: network + type: NestedObject + description: Network to forward traffic to. + properties: + - name: name + type: string + description: |- + Required. Network name is of the format: + `projects/{project}/global/networks/{network}` + required: true + - name: name + type: String + description: Identifier. Name of the resource. + output: true + - name: updateTime + type: String + description: Output only. Timestamp when the resource was last modified. 
+ output: true diff --git a/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl b/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl index 16dc380923bb..0e39d4cc3135 100644 --- a/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl @@ -43,6 +43,9 @@ description: |- # {{ $.IamTerraformName }}_policy +{{- if $.IamPolicy.DeprecationMessage }} +~> **Warning:** {{$.IamPolicy.DeprecationMessage}} +{{- end }} Retrieves the current IAM policy data for {{ lower $.Name }} {{- if or (eq $.MinVersionObj.Name "beta") (eq $.IamPolicy.MinVersion "beta") }} diff --git a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl index affc3c258c9a..f4a61aa9c5a0 100644 --- a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl @@ -42,6 +42,10 @@ description: |- --- # IAM policy for {{$.ProductMetadata.DisplayName}} {{$.Name}} +{{- if $.IamPolicy.DeprecationMessage }} +~> **Warning:** {{$.IamPolicy.DeprecationMessage}} +{{- end }} + Three different resources help you manage your IAM policy for {{$.ProductMetadata.DisplayName}} {{$.Name}}. Each of these resources serves a different use case: * `{{ $.IamTerraformName }}_policy`: Authoritative. Sets the IAM policy for the {{ lower $.Name }} and replaces any existing policy already attached. 
From ecab51fec328974f57dde7ef02351763333d9559 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Thu, 14 Aug 2025 13:51:11 -0500 Subject: [PATCH 775/884] google_dialogflow_conversation_profile (#14849) Co-authored-by: Spheny1 <118945261+Spheny1@users.noreply.github.com> --- .../dialogflow/ConversationProfile.yaml | 654 ++++++++++++++++++ ...on_profile_context_filter_settings.go.tmpl | 26 + ...rofile_suggestion_trigger_settings.go.tmpl | 24 + ...logflow_conversation_profile_basic.tf.tmpl | 18 + ...ce_dialogflow_conversation_profile_test.go | 411 +++++++++++ 5 files changed, 1133 insertions(+) create mode 100644 mmv1/products/dialogflow/ConversationProfile.yaml create mode 100644 mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl create mode 100644 mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl create mode 100644 mmv1/templates/terraform/examples/dialogflow_conversation_profile_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go diff --git a/mmv1/products/dialogflow/ConversationProfile.yaml b/mmv1/products/dialogflow/ConversationProfile.yaml new file mode 100644 index 000000000000..2ac0d248715e --- /dev/null +++ b/mmv1/products/dialogflow/ConversationProfile.yaml @@ -0,0 +1,654 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+---
+name: 'ConversationProfile'
+description: |
+  A conversation profile configures a set of parameters that control the suggestions made to an agent. These parameters control the suggestions that are surfaced during runtime. Each profile configures either a Dialogflow virtual agent or a human agent for a conversation.
+references:
+  guides:
+    'Official Documentation': 'https://cloud.google.com/dialogflow/docs/'
+  api: 'https://cloud.google.com/dialogflow/docs/reference/rest/v2beta1/projects/conversationProfiles'
+docs:
+id_format: '{{name}}'
+base_url: 'projects/{{project}}/locations/{{location}}/conversationProfiles'
+self_link: '{{name}}'
+update_verb: 'PATCH'
+update_mask: true
+import_format:
+  - '{{name}}'
+timeouts:
+  insert_minutes: 40
+  update_minutes: 40
+  delete_minutes: 20
+custom_code:
+  post_create: 'templates/terraform/post_create/set_computed_name.tmpl'
+  custom_import: 'templates/terraform/custom_import/self_link_as_name_set_project.go.tmpl'
+exclude_sweeper: true
+examples:
+  - name: 'dialogflow_conversation_profile_basic'
+    primary_resource_id: 'basic_profile'
+    vars:
+      profile_name: 'dialogflow-profile'
+parameters:
+  - name: 'location'
+    type: String
+    url_param_only: true
+    description: |
+      The location of the conversation profile.
+    required: true
+properties:
+  - name: 'name'
+    type: String
+    description: |
+      The unique identifier of this conversation profile.
+      Format: projects/<Project ID>/locations/<Location ID>/conversationProfiles/<Conversation Profile ID>
+    output: true
+  - name: 'displayName'
+    type: String
+    description: |
+      Required. Human readable name for this profile. Max length 1024 bytes.
+    required: true
+  - type: NestedObject
+    name: 'automatedAgentConfig'
+    description: |
+      Configuration for an automated agent to use with this profile
+    properties:
+      - type: String
+        name: 'agent'
+        description: |
+          ID of the Dialogflow agent environment to use.
+          Expects the format "projects//locations//agent/environments/"
+        required: true
+      - type: String
+        name: 'sessionTtl'
+        description: |
+          Configure lifetime of the Dialogflow session.
+ - type: NestedObject + name: 'humanAgentAssistantConfig' + description: | + Configuration for connecting to a live agent + properties: + - type: NestedObject + name: 'notificationConfig' + description: | + Pub/Sub topic on which to publish new agent assistant events. + Expects the format "projects//locations//topics/" + properties: + - type: String + name: 'topic' + description: | + Name of the Pub/Sub topic to publish conversation events + - type: Enum + name: 'messageFormat' + description: | + Format of the message + enum_values: + - MESSAGE_FORMAT_UNSPECIFIED + - PROTO + - JSON + - type: NestedObject + name: 'humanAgentSuggestionConfig' + description: | + Configuration for agent assistance of human agent participant. + properties: + - type: Array + name: 'featureConfigs' + description: | + Configuration of different suggestion features. One feature can have only one config. + item_type: + type: NestedObject + properties: + - type: NestedObject + name: 'suggestionFeature' + description: | + The suggestion feature. + properties: + - type: String + name: 'type' + description: | + Type of Human Agent Assistant API feature to request. + - type: Boolean + name: 'enableEventBasedSuggestion' + description: | + Automatically iterates all participants and tries to compile suggestions. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, DIALOGFLOW_ASSIST, KNOWLEDGE_ASSIST. + - type: Boolean + name: 'disableAgentQueryLogging' + description: | + Disable the logging of search queries sent by human agents. It can prevent those queries from being stored at answer records. + This feature is only supported for types: KNOWLEDGE_SEARCH. + - type: Boolean + name: 'enableQuerySuggestionWhenNoAnswer' + description: | + Enable query suggestion even if we can't find its answer. By default, queries are suggested only if we find its answer. + This feature is only supported for types: KNOWLEDGE_ASSIST. 
+ - type: Boolean + name: 'enableConversationAugmentedQuery' + description: | + Enable including conversation context during query answer generation. + This feature is only supported for types: KNOWLEDGE_SEARCH. + - type: Boolean + name: 'enableQuerySuggestionOnly' + description: | + Enable query suggestion only. + This feature is only supported for types: KNOWLEDGE_ASSIST + - type: NestedObject + name: 'suggestionTriggerSettings' + description: | + Settings of suggestion trigger. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ. + custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl' + properties: + - type: Boolean + name: 'noSmallTalk' + description: | + Do not trigger if last utterance is small talk. + - type: Boolean + name: 'onlyEndUser' + description: | + Only trigger suggestion if participant role of last utterance is END_USER. + - type: NestedObject + name: queryConfig + description: | + Configs of query. + properties: + - type: Integer + name: maxResults + default_value: 10 + description: | + Maximum number of results to return. + - type: Double + name: confidenceThreshold + description: | + Confidence threshold of query result. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, SMART_REPLY, SMART_COMPOSE, KNOWLEDGE_SEARCH, KNOWLEDGE_ASSIST, ENTITY_EXTRACTION. + - type: NestedObject + name: contextFilterSettings + description: | + Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped. + custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl' + properties: + - type: Boolean + name: 'dropHandoffMessages' + description: | + If set to true, the last message from virtual agent (hand off message) and the message before it (trigger message of hand off) are dropped. 
+ - type: Boolean + name: 'dropVirtualAgentMessages' + description: | + If set to true, all messages from virtual agent are dropped. + - type: Boolean + name: 'dropIvrMessages' + description: | + If set to true, all messages from ivr stage are dropped. + - type: NestedObject + name: 'sections' + description: | + he customized sections chosen to return when requesting a summary of a conversation. + properties: + - type: Array + name: 'sectionTypes' + description: | + The selected sections chosen to return when requesting a summary of a conversation + If not provided the default selection will be "{SITUATION, ACTION, RESULT}". + item_type: + type: Enum + name: 'sectionType' + description: | + The selected sections chosen to return when requesting a summary of a conversation. A duplicate selected section will be treated as a single selected section. + enum_values: + - SECTION_TYPE_UNSPECIFIED + - SITUATION + - ACTION + - RESOLUTION + - REASON_FOR_CANCELLATION + - CUSTOMER_SATISFACTION + - ENTITIES + - type: NestedObject + name: 'dialogflowQuerySource' + description: | + Query from Dialogflow agent. + This feature is supported for types: DIALOGFLOW_ASSIST. + properties: + - type: String + name: 'agent' + required: true + description: | + he name of a Dialogflow virtual agent used for end user side intent detection and suggestion. Format: projects//locations//agent. + - type: NestedObject + name: 'humanAgentSideConfig' + description: | + The Dialogflow assist configuration for human agent. + properties: + - type: String + name: 'agent' + description: | + The name of a dialogflow virtual agent used for intent detection and suggestion triggered by human agent. Format: projects//locations//agent. + - type: NestedObject + name: conversationModelConfig + description: | + Configs of custom conversation model. + properties: + - type: String + name: 'model' + description: | + Conversation model resource name. Format: projects//conversationModels/. 
+ - type: String + name: 'baselineModelVersion' + description: | + Version of current baseline model. It will be ignored if model is set. Valid versions are: Article Suggestion baseline model: - 0.9 - 1.0 (default) Summarization baseline model: - 1.0 + - type: NestedObject + name: 'conversationProcessConfig' + description: | + Config to process conversation. + properties: + - type: Integer + name: 'recentSentencesCount' + description: | + Number of recent non-small-talk sentences to use as context for article and FAQ suggestion + - type: Boolean + name: 'groupSuggestionResponses' + description: | + If groupSuggestionResponses is false, and there are multiple featureConfigs in event based suggestion or StreamingAnalyzeContent, we will try to deliver suggestions to customers as soon as we get new suggestion. Different type of suggestions based on the same context will be in separate Pub/Sub event or StreamingAnalyzeContentResponse. + + If groupSuggestionResponses set to true. All the suggestions to the same participant based on the same context will be grouped into a single Pub/Sub event or StreamingAnalyzeContentResponse. + - type: Array + name: 'generators' + description: | + List of various generator resource names used in the conversation profile. + item_type: + type: String + - type: Boolean + name: 'disableHighLatencyFeaturesSyncDelivery' + description: | + When disableHighLatencyFeaturesSyncDelivery is true and using the AnalyzeContent API, we will not deliver the responses from high latency features in the API response. The humanAgentAssistantConfig.notification_config must be configured and enableEventBasedSuggestion must be set to true to receive the responses from high latency features in Pub/Sub. High latency feature(s): KNOWLEDGE_ASSIST + - type: NestedObject + name: 'endUserSuggestionConfig' + description: | + Configuration for agent assistance of end user participant. 
+ properties: + - type: Array + name: 'featureConfigs' + description: | + Configuration of different suggestion features. One feature can have only one config. + item_type: + type: NestedObject + properties: + - type: NestedObject + name: 'suggestionFeature' + description: | + The suggestion feature. + properties: + - type: String + name: 'type' + description: | + Type of Human Agent Assistant API feature to request. + - type: Boolean + name: 'enableEventBasedSuggestion' + description: | + Automatically iterates all participants and tries to compile suggestions. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, DIALOGFLOW_ASSIST, KNOWLEDGE_ASSIST. + - type: Boolean + name: 'disableAgentQueryLogging' + description: | + Disable the logging of search queries sent by human agents. It can prevent those queries from being stored at answer records. + This feature is only supported for types: KNOWLEDGE_SEARCH. + - type: Boolean + name: 'enableQuerySuggestionWhenNoAnswer' + description: | + Enable query suggestion even if we can't find its answer. By default, queries are suggested only if we find its answer. + This feature is only supported for types: KNOWLEDGE_ASSIST. + - type: Boolean + name: 'enableConversationAugmentedQuery' + description: | + Enable including conversation context during query answer generation. + This feature is only supported for types: KNOWLEDGE_SEARCH. + - type: Boolean + name: 'enableQuerySuggestionOnly' + description: | + Enable query suggestion only. + This feature is only supported for types: KNOWLEDGE_ASSIST + - type: NestedObject + name: 'suggestionTriggerSettings' + description: | + Settings of suggestion trigger. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ. + custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl' + properties: + - type: Boolean + name: 'noSmallTalk' + description: | + Do not trigger if last utterance is small talk. 
+ - type: Boolean + name: 'onlyEndUser' + description: | + Only trigger suggestion if participant role of last utterance is END_USER. + - type: NestedObject + name: queryConfig + description: | + Configs of query. + properties: + - type: Integer + name: maxResults + default_value: 10 + description: | + Maximum number of results to return. + - type: Double + name: confidenceThreshold + description: | + Confidence threshold of query result. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, SMART_REPLY, SMART_COMPOSE, KNOWLEDGE_SEARCH, KNOWLEDGE_ASSIST, ENTITY_EXTRACTION. + - type: NestedObject + name: contextFilterSettings + description: | + Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped. + custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl' + properties: + - type: Boolean + name: 'dropHandoffMessages' + description: | + If set to true, the last message from virtual agent (hand off message) and the message before it (trigger message of hand off) are dropped. + - type: Boolean + name: 'dropVirtualAgentMessages' + description: | + If set to true, all messages from virtual agent are dropped. + - type: Boolean + name: 'dropIvrMessages' + description: | + If set to true, all messages from ivr stage are dropped. + - type: NestedObject + name: 'sections' + description: | + he customized sections chosen to return when requesting a summary of a conversation. + properties: + - type: Array + name: 'sectionTypes' + description: | + The selected sections chosen to return when requesting a summary of a conversation + If not provided the default selection will be "{SITUATION, ACTION, RESULT}". + item_type: + type: Enum + name: 'sectionType' + description: | + The selected sections chosen to return when requesting a summary of a conversation. A duplicate selected section will be treated as a single selected section. 
+ enum_values: + - SECTION_TYPE_UNSPECIFIED + - SITUATION + - ACTION + - RESOLUTION + - REASON_FOR_CANCELLATION + - CUSTOMER_SATISFACTION + - ENTITIES + - type: NestedObject + name: 'knowledgeBaseQuerySource' + description: | + Query from knowledgebase. + This feature is only supported for types: ARTICLE_SUGGESTION, FAQ. + properties: + - type: Array + name: 'knowledgeBases' + required: true + description: | + Knowledge bases to query. Format: projects//locations//knowledgeBases/. + item_type: + type: String + - type: NestedObject + name: 'documentQuerySource' + description: | + Query from knowledge base document. + This feature is supported for types: SMART_REPLY, SMART_COMPOSE. + properties: + - type: Array + name: 'documents' + required: true + description: | + Knowledge documents to query from. Format: projects//locations//knowledgeBases//documents/. + item_type: + type: String + - type: NestedObject + name: 'dialogflowQuerySource' + description: | + Query from Dialogflow agent. + This feature is supported for types: DIALOGFLOW_ASSIST. + properties: + - type: String + name: 'agent' + required: true + description: | + he name of a Dialogflow virtual agent used for end user side intent detection and suggestion. Format: projects//locations//agent. + - type: NestedObject + name: 'humanAgentSideConfig' + description: | + The Dialogflow assist configuration for human agent. + properties: + - type: String + name: 'agent' + description: | + The name of a dialogflow virtual agent used for intent detection and suggestion triggered by human agent. Format: projects//locations//agent. + - type: NestedObject + name: conversationModelConfig + description: | + Configs of custom conversation model. + properties: + - type: String + name: 'model' + description: | + Conversation model resource name. Format: projects//conversationModels/. + - type: String + name: 'baselineModelVersion' + description: | + Version of current baseline model. It will be ignored if model is set. 
Valid versions are: Article Suggestion baseline model: - 0.9 - 1.0 (default) Summarization baseline model: - 1.0 + - type: NestedObject + name: 'conversationProcessConfig' + description: | + Config to process conversation. + properties: + - type: Integer + name: 'recentSentencesCount' + description: | + Number of recent non-small-talk sentences to use as context for article and FAQ suggestion + - type: Boolean + name: 'groupSuggestionResponses' + description: | + If groupSuggestionResponses is false, and there are multiple featureConfigs in event based suggestion or StreamingAnalyzeContent, we will try to deliver suggestions to customers as soon as we get new suggestion. Different type of suggestions based on the same context will be in separate Pub/Sub event or StreamingAnalyzeContentResponse. + + If groupSuggestionResponses set to true. All the suggestions to the same participant based on the same context will be grouped into a single Pub/Sub event or StreamingAnalyzeContentResponse. + - type: Array + name: 'generators' + description: | + List of various generator resource names used in the conversation profile. + item_type: + type: String + - type: Boolean + name: 'disableHighLatencyFeaturesSyncDelivery' + description: | + When disableHighLatencyFeaturesSyncDelivery is true and using the AnalyzeContent API, we will not deliver the responses from high latency features in the API response. The humanAgentAssistantConfig.notification_config must be configured and enableEventBasedSuggestion must be set to true to receive the responses from high latency features in Pub/Sub. High latency feature(s): KNOWLEDGE_ASSIST + - type: NestedObject + name: 'messageAnalysisConfig' + description: | + desc + properties: + - type: Boolean + name: 'enableEntityExtraction' + default_value: false + description: | + Enable entity extraction in conversation messages on agent assist stage. 
+ - type: Boolean + name: 'enableSentimentAnalysis' + default_value: false + description: | + Enable sentiment analysis in conversation messages on agent assist stage. Sentiment analysis inspects user input and identifies the prevailing subjective opinion, especially to determine a user's attitude as positive, negative, or neutral. + - type: NestedObject + name: 'humanAgentHandoffConfig' + description: | + Defines the hand off to a live agent, typically on which external agent service provider to connect to a conversation. + properties: + - type: NestedObject + name: livePersonConfig + description: | + Config for using LivePerson. + properties: + - type: String + name: 'accountNumber' + required: true + description: | + Account number of the LivePerson account to connect. + - type: NestedObject + name: 'notificationConfig' + description: | + Pub/Sub topic on which to publish new agent assistant events. + Expects the format "projects//locations//topics/" + properties: + - type: String + name: 'topic' + description: | + Name of the Pub/Sub topic to publish conversation events + - type: Enum + name: 'messageFormat' + description: | + Format of the message + enum_values: + - MESSAGE_FORMAT_UNSPECIFIED + - PROTO + - JSON + - type: NestedObject + name: 'loggingConfig' + # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed + ignore_read: true + description: | + Defines logging behavior for conversation lifecycle events. + properties: + - type: Boolean + name: enableStackdriverLogging + description: | + Whether to log conversation events + - type: NestedObject + name: newMessageEventNotificationConfig + description: | + Pub/Sub topic on which to publish new agent assistant events. 
+      Expects the format "projects//locations//topics/"
+    properties:
+      - type: String
+        name: 'topic'
+        description: |
+          Name of the Pub/Sub topic to publish conversation events
+      - type: Enum
+        name: 'messageFormat'
+        description: |
+          Format of the message
+        enum_values:
+          - MESSAGE_FORMAT_UNSPECIFIED
+          - PROTO
+          - JSON
+  - type: NestedObject
+    name: sttConfig
+    description: |
+      Settings for speech transcription.
+    properties:
+      - type: Enum
+        name: speechModelVariant
+        description: |
+          The speech model used in speech to text.
+        enum_values:
+          - SPEECH_MODEL_VARIANT_UNSPECIFIED
+          - USE_BEST_AVAILABLE
+          - USE_STANDARD
+          - USE_ENHANCED
+      - type: String
+        name: 'model'
+        description: |
+          Which Speech model to select.
+          Leave this field unspecified to use Agent Speech settings for model selection.
+      - type: Enum
+        name: 'audioEncoding'
+        description: |
+          Audio encoding of the audio content to process.
+        enum_values:
+          - AUDIO_ENCODING_UNSPECIFIED
+          - AUDIO_ENCODING_LINEAR_16
+          - AUDIO_ENCODING_FLAC
+          - AUDIO_ENCODING_MULAW
+          - AUDIO_ENCODING_AMR
+          - AUDIO_ENCODING_AMR_WB
+          - AUDIO_ENCODING_OGG_OPUS
+          - AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE
+      - type: Integer
+        name: 'sampleRateHertz'
+        description: |
+          Sample rate (in Hertz) of the audio content sent in the query.
+      - type: String
+        name: 'languageCode'
+        description: |
+          The language of the supplied audio.
+        default_from_api: true
+      - type: Boolean
+        name: 'enableWordInfo'
+        description: |
+          If true, Dialogflow returns SpeechWordInfo in StreamingRecognitionResult with information about the recognized speech words.
+      - type: Boolean
+        name: 'useTimeoutBasedEndpointing'
+        description: |
+          Use timeout based endpointing, interpreting endpointer sensitivity as seconds of timeout value.
+  - type: String
+    name: 'languageCode'
+    description: |
+      Language code for the conversation profile. This should be a BCP-47 language tag.
+ default_from_api: true + - type: String + name: 'timeZone' + description: | + The time zone of this conversational profile. + - type: String + name: 'securitySettings' + description: | + Name of the CX SecuritySettings reference for the agent. + - type: NestedObject + name: 'ttsConfig' + description: | + Configuration for Text-to-Speech synthesization. If agent defines synthesization options as well, agent settings overrides the option here. + properties: + - type: Double + name: 'speakingRate' + description: | + Speaking rate/speed, in the range [0.25, 4.0]. + - type: Double + name: 'pitch' + description: | + Speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 semitones from the original pitch. -20 means decrease 20 semitones from the original pitch. + - type: Double + name: 'volumeGainDb' + description: | + Volume gain (in dB) of the normal native volume supported by the specific voice. + - type: Array + name: 'effectsProfileId' + description: | + An identifier which selects 'audio effects' profiles that are applied on (post synthesized) text to speech. Effects are applied on top of each other in the order they are given. + item_type: + type: String + - type: NestedObject + name: voice + description: | + The desired voice of the synthesized audio. + properties: + - type: String + name: 'name' + description: | + The name of the voice. + - type: Enum + name: 'ssmlGender' + description: | + The preferred gender of the voice. 
+ enum_values: + - SSML_VOICE_GENDER_UNSPECIFIED + - SSML_VOICE_GENDER_MALE + - SSML_VOICE_GENDER_FEMALE + - SSML_VOICE_GENDER_NEUTRAL diff --git a/mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl b/mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl new file mode 100644 index 000000000000..bbe17a79ac1e --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl @@ -0,0 +1,26 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + transformed := make(map[string]interface{}) + if v == nil { + transformed["drop_handoff_messages"] = false + transformed["drop_ivr_messages"] = false + transformed["drop_virtual_agent_messages"] = false + return []interface{}{transformed} + } + original := v.(map[string]interface{}) + transformed["drop_handoff_messages"] = original["dropHandoffMessages"] + transformed["drop_ivr_messages"] = original["dropIvrMessages"] + transformed["drop_virtual_agent_messages"] = original["dropVirtualAgentMessages"] + return []interface{}{transformed} +} diff --git a/mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl b/mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl new file mode 100644 index 000000000000..52fcce31b2b4 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl @@ -0,0 +1,24 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + transformed := make(map[string]interface{}) + if v == nil { + transformed["no_small_talk"] = false + transformed["only_end_user"] = false + return []interface{}{transformed} + } + original := v.(map[string]interface{}) + transformed["no_small_talk"] = original["noSmallTalk"] + transformed["only_end_user"] = original["onlyEndUser"] + return []interface{}{transformed} +} diff --git a/mmv1/templates/terraform/examples/dialogflow_conversation_profile_basic.tf.tmpl b/mmv1/templates/terraform/examples/dialogflow_conversation_profile_basic.tf.tmpl new file mode 100644 index 000000000000..5fb3596d06c5 --- /dev/null +++ b/mmv1/templates/terraform/examples/dialogflow_conversation_profile_basic.tf.tmpl @@ -0,0 +1,18 @@ +resource "google_dialogflow_agent" "basic_agent" { + display_name = "example_agent" + default_language_code = "en-us" + time_zone = "America/New_York" +} +resource "google_dialogflow_conversation_profile" "{{$.PrimaryResourceId}}" { + display_name = "{{index $.Vars "profile_name"}}" + location = "global" + automated_agent_config { + agent = "projects/${google_dialogflow_agent.basic_agent.id}/locations/global/agent/environments/draft" + } + human_agent_assistant_config { + message_analysis_config { + enable_entity_extraction = true + enable_sentiment_analysis = true + } + } +} diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go new file mode 100644 index 000000000000..f337597e4c24 --- /dev/null +++ b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go @@ -0,0 +1,411 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package dialogflow_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDialogflowConversationProfile_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccDialogflowConversationProfile_dialogflowAgentFull1(context), + }, + { + ResourceName: "google_dialogflow_conversation_profile.profile", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "logging_config", "logging_config.0", "logging_config.0.enable_stackdriver_logging"}, + }, + { + Config: testAccDialogflowConversationProfile_dialogflowAgentFull2(context), + }, + { + ResourceName: "google_dialogflow_conversation_profile.profile", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "logging_config", "logging_config.0", 
"logging_config.0.enable_stackdriver_logging"}, + }, + }, + }) +} + +func testAccDialogflowConversationProfile_dialogflowAgentFull1(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_project" "agent_project" { + name = "tf-test-dialogflow-%{random_suffix}" + project_id = "tf-test-dialogflow-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" + } + + resource "google_project_service" "agent_project" { + service = "dialogflow.googleapis.com" + disable_dependent_services = false + project = "${google_project.agent_project.id}" + } + + resource "google_service_account" "dialogflow_service_account" { + account_id = "tf-test-dialogflow-%{random_suffix}" + } + + resource "google_project_iam_member" "agent_create" { + role = "roles/dialogflow.admin" + member = "serviceAccount:${google_service_account.dialogflow_service_account.email}" + project = "${google_project.agent_project.id}" + } + + resource "google_dialogflow_agent" "agent" { + display_name = "tf-test-agent-%{random_suffix}" + default_language_code = "en-us" + time_zone = "America/New_York" + project = google_project.agent_project.name + } + + resource "google_pubsub_topic" "topic" { + name = "tf-test-topic-%{random_suffix}" + project = google_project.agent_project.project_id + depends_on = [google_project.agent_project, time_sleep.wait_120_seconds] + message_retention_duration = "8000s" + } + resource "google_dialogflow_cx_security_settings" "security_setting" { + display_name = "tf-test-setting-%{random_suffix}" + location = "global" + purge_data_types = [] + retention_window_days = 7 + project = google_project.agent_project.project_id + depends_on = [time_sleep.wait_120_seconds] + } + resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_dialogflow_agent.agent] + } + resource "google_dialogflow_conversation_profile" "profile" { + depends_on = [google_dialogflow_agent.agent, 
google_dialogflow_cx_security_settings.security_setting,time_sleep.wait_120_seconds] + project = google_project.agent_project.name + display_name = "tf-test-conversation-profile-%{random_suffix}" + location = "global" + language_code = "en-US" + automated_agent_config { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + session_ttl = "30s" + } + human_agent_assistant_config { + end_user_suggestion_config { + disable_high_latency_features_sync_delivery = true + feature_configs { + conversation_process_config { + recent_sentences_count = 1 + } + disable_agent_query_logging = false + enable_conversation_augmented_query = false + enable_event_based_suggestion = false + enable_query_suggestion_when_no_answer = false + enable_query_suggestion_only = false + query_config { + confidence_threshold = "1.0" + context_filter_settings { + drop_handoff_messages = true + drop_ivr_messages = true + drop_virtual_agent_messages = true + } + dialogflow_query_source { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + human_agent_side_config { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + } + } + max_results = 1 + sections { + section_types = ["SECTION_TYPE_UNSPECIFIED"] + } + } + suggestion_feature { + type = "CONVERSATION_SUMMARIZATION" + } + suggestion_trigger_settings { + no_small_talk = false + only_end_user = true + } + } + group_suggestion_responses = true + } + human_agent_suggestion_config { + disable_high_latency_features_sync_delivery = true + feature_configs { + conversation_process_config { + recent_sentences_count = 1 + } + disable_agent_query_logging = false + enable_conversation_augmented_query = false + enable_event_based_suggestion = false + enable_query_suggestion_when_no_answer = false + enable_query_suggestion_only = false + query_config { + confidence_threshold = 0.1 + context_filter_settings { + 
drop_handoff_messages = true + drop_ivr_messages = true + drop_virtual_agent_messages = true + } + dialogflow_query_source { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + human_agent_side_config { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + } + } + max_results = 1 + sections { + section_types = ["SECTION_TYPE_UNSPECIFIED"] + } + } + suggestion_feature { + type = "CONVERSATION_SUMMARIZATION" + } + suggestion_trigger_settings { + no_small_talk = false + only_end_user = true + } + } + group_suggestion_responses = true + } + notification_config { + message_format = "JSON" + topic = google_pubsub_topic.topic.id + } + } + human_agent_handoff_config { + live_person_config { + account_number = "00" + } + } + logging_config { + enable_stackdriver_logging = true + } + new_message_event_notification_config { + message_format = "JSON" + topic = google_pubsub_topic.topic.id + } + notification_config { + message_format = "JSON" + topic = google_pubsub_topic.topic.id + } + security_settings = google_dialogflow_cx_security_settings.security_setting.id + stt_config { + enable_word_info = true + language_code = "en-US" + model = "phone_call" + sample_rate_hertz = 1000 + speech_model_variant = "USE_ENHANCED" + use_timeout_based_endpointing = true + } + tts_config { + effects_profile_id = ["id"] + pitch = 1 + speaking_rate = 1 + voice { + name = "john" + ssml_gender = "SSML_VOICE_GENDER_MALE" + } + volume_gain_db = 5 + } + } +`, context) +} +func testAccDialogflowConversationProfile_dialogflowAgentFull2(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_project" "agent_project" { + name = "tf-test-dialogflow-%{random_suffix}" + project_id = "tf-test-dialogflow-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" + } + resource "google_project_service" "agent_project" { + project = 
"${google_project.agent_project.id}" + service = "dialogflow.googleapis.com" + disable_dependent_services = false + } + + resource "google_service_account" "dialogflow_service_account" { + account_id = "tf-test-dialogflow-%{random_suffix}" + } + + resource "google_project_iam_member" "agent_create" { + project = "${google_project.agent_project.id}" + role = "roles/dialogflow.admin" + member = "serviceAccount:${google_service_account.dialogflow_service_account.email}" + } + + resource "google_dialogflow_agent" "agent" { + display_name = "tf-test-agent-%{random_suffix}" + default_language_code = "en-us" + time_zone = "America/New_York" + project = google_project.agent_project.name + } + resource "google_pubsub_topic" "topic_diff" { + name = "tf-test-topic-%{random_suffix}-diff" + project = google_project.agent_project.project_id + depends_on = [google_project.agent_project, time_sleep.wait_120_seconds] + message_retention_duration = "8000s" + } + resource "google_dialogflow_cx_security_settings" "security_setting_diff" { + display_name = "tf-test-setting-%{random_suffix}-diff" + location = "global" + purge_data_types = [] + retention_window_days = 7 + project = google_project.agent_project.project_id + depends_on = [time_sleep.wait_120_seconds] + } + resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_dialogflow_agent.agent] + } + resource "google_dialogflow_conversation_profile" "profile" { + depends_on = [google_dialogflow_agent.agent, google_dialogflow_cx_security_settings.security_setting_diff, time_sleep.wait_120_seconds] + project = "${google_project.agent_project.name}" + display_name = "tf-test-conversation-profile-%{random_suffix}-new" + location = "global" + language_code = "fr" + automated_agent_config { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + session_ttl = "31s" + } + human_agent_assistant_config { + end_user_suggestion_config { + 
disable_high_latency_features_sync_delivery = false + feature_configs { + conversation_process_config { + recent_sentences_count = 2 + } + disable_agent_query_logging = false + enable_conversation_augmented_query = false + enable_event_based_suggestion = false + enable_query_suggestion_when_no_answer = false + enable_query_suggestion_only = false + query_config { + confidence_threshold = "0.9" + context_filter_settings { + drop_handoff_messages = false + drop_ivr_messages = false + drop_virtual_agent_messages = false + } + dialogflow_query_source { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + human_agent_side_config { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + } + } + max_results = 2 + sections { + section_types = ["SITUATION"] + } + } + suggestion_feature { + type = "CONVERSATION_SUMMARIZATION" + } + suggestion_trigger_settings { + no_small_talk = false + only_end_user = false + } + } + group_suggestion_responses = false + } + human_agent_suggestion_config { + disable_high_latency_features_sync_delivery = false + feature_configs { + conversation_process_config { + recent_sentences_count = 2 + } + disable_agent_query_logging = false + enable_conversation_augmented_query = false + enable_event_based_suggestion = false + enable_query_suggestion_when_no_answer = false + enable_query_suggestion_only = false + query_config { + confidence_threshold = 0.2 + context_filter_settings { + drop_handoff_messages = false + drop_ivr_messages = false + drop_virtual_agent_messages = false + } + dialogflow_query_source { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + human_agent_side_config { + agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" + } + } + max_results = 2 + sections { + section_types = ["SITUATION"] + } + } + suggestion_feature { + type = 
"CONVERSATION_SUMMARIZATION" + } + suggestion_trigger_settings { + no_small_talk = false + only_end_user = false + } + } + group_suggestion_responses = false + } + notification_config { + message_format = "PROTO" + topic = google_pubsub_topic.topic_diff.id + } + } + human_agent_handoff_config { + live_person_config { + account_number = "01" + } + } + logging_config { + enable_stackdriver_logging = false + } + new_message_event_notification_config { + message_format = "PROTO" + topic = google_pubsub_topic.topic_diff.id + } + notification_config { + message_format = "PROTO" + topic = google_pubsub_topic.topic_diff.id + } + security_settings = google_dialogflow_cx_security_settings.security_setting_diff.id + } +`, context) +} From 225a13af7b22989b5245fc2d57c4cb6fa8c258a2 Mon Sep 17 00:00:00 2001 From: Elijah Date: Thu, 14 Aug 2025 15:21:17 -0400 Subject: [PATCH 776/884] fix: Update Cloud Tasks queue name to required (#14846) --- mmv1/products/cloudtasks/Queue.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/cloudtasks/Queue.yaml b/mmv1/products/cloudtasks/Queue.yaml index 0170f4eabbe1..ed7b41ca2ec8 100644 --- a/mmv1/products/cloudtasks/Queue.yaml +++ b/mmv1/products/cloudtasks/Queue.yaml @@ -67,6 +67,7 @@ properties: - name: 'name' type: String description: The queue name. 
+ required: true immutable: true custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' custom_expand: 'templates/terraform/custom_expand/qualify_queue_name.go.tmpl' From c59d02c760e9c76012d2bd00d81858e8fd65b4b9 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 14 Aug 2025 12:44:25 -0700 Subject: [PATCH 777/884] add 7.0.0 guide to main (#14861) Co-authored-by: Cameron Thornton --- .../guides/version_7_upgrade.html.markdown | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index a2d922354caa..2143b16cd1a7 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -68,7 +68,11 @@ For example, given this previous configuration: terraform { required_providers { google = { +<<<<<<< HEAD version = "~> 5.30.0" +======= + version = "~> 6.48.0" +>>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) } } } @@ -86,6 +90,7 @@ terraform { } ``` +<<<<<<< HEAD ## Provider ### Provider-level change example header @@ -125,6 +130,10 @@ Terraform from destroying or recreating the cluster during `terraform apply`. In `google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. Use `google_beyondcorp_security_gateway_application` instead. +======= +## Resources + +>>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) ## Resource: `google_artifact_registry_repository` ### `public_repository` fields have had their default values removed. @@ -149,12 +158,15 @@ Use `google_beyondcorp_security_gateway_application` instead. 
`instance` has been removed in favor of `instance_name`. +<<<<<<< HEAD ## Resource: `google_compute_packet_mirroring` ### `subnetworks` and `instances` fields have been converted to sets `subnetworks` and `instances` fields have been converted to sets. If you need to access values in their nested objects, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration. +======= +>>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) ## Resource: `google_compute_subnetwork` ### `enable_flow_logs`is now removed @@ -181,12 +193,15 @@ To reflect the new type explicitly, surround the current integer value in quotes Remove `description` from your configuration after upgrade. +<<<<<<< HEAD ## Resource: `google_colab_runtime_template` ### `post_startup_script_config` is now removed. Remove `post_startup_script_config` from your configuration after upgrade. +======= +>>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required @@ -237,4 +252,8 @@ Remove `template.containers.depends_on` from your configuration after upgrade. The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. +<<<<<<< HEAD +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. +======= Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. 
+>>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) From 4ca33e7d0f73c3c3457f4586e396ad08ac2c844f Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Thu, 14 Aug 2025 15:57:10 -0500 Subject: [PATCH 778/884] upgrade dcl to 1.81.0 (#14852) --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 2 ++ tpgtools/go.mod | 2 +- tpgtools/go.sum | 2 ++ .../samples/key/service_account_key.tf.tmpl | 19 +++++++++++++++++++ .../samples/key/service_account_key.yaml | 11 +++++++++++ 6 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl create mode 100755 tpgtools/overrides/apikeys/samples/key/service_account_key.yaml diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index c60b3db2b710..9282c683f316 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/auth v0.16.3 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 8c5d2430b522..2ed259c385dc 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -24,6 +24,8 @@ dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= 
+github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= diff --git a/tpgtools/go.mod b/tpgtools/go.mod index ce165743cf36..7b2747548e1e 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index faba4095e176..e727756f3b61 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -8,6 +8,8 @@ cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2Aawl github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod 
h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= diff --git a/tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl b/tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl new file mode 100644 index 000000000000..a0b9a45815dd --- /dev/null +++ b/tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl @@ -0,0 +1,19 @@ +resource "google_apikeys_key" "primary" { + name = "{{key}}" + display_name = "sample-key" + project = google_project.project.project_id + service_account_email = google_service_account.key_service_account.email +} + +resource "google_project" "project" { + project_id = "{{app}}" + name = "{{app}}" + org_id = "{{org_id}}" + deletion_policy = "DELETE" +} + +resource "google_service_account" "key_service_account" { + account_id = "{{app}}" + project = google_project.project.project_id + display_name = "Test Service Account" +} \ No newline at end of file diff --git a/tpgtools/overrides/apikeys/samples/key/service_account_key.yaml b/tpgtools/overrides/apikeys/samples/key/service_account_key.yaml new file mode 100755 index 000000000000..6b60761db48e --- /dev/null +++ b/tpgtools/overrides/apikeys/samples/key/service_account_key.yaml @@ -0,0 +1,11 @@ +variables: + - name: "app" + type: "resource_name" + - name: "project" + type: "project" + - name: "key" + type: "resource_name" + - name: "org_id" + type: "org_id" + - name: "billing_account" + type: "billing_account" From a5098caa27773143c523f81231e2fd396f81fb03 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Thu, 14 Aug 2025 17:43:22 -0400 Subject: [PATCH 779/884] datasource migration for storagecontrol (#14844) --- .../FolderIntelligenceConfig.yaml | 3 ++ .../OrganizationIntelligenceConfig.yaml | 3 ++ .../ProjectIntelligenceConfig.yaml | 3 ++ .../provider/provider_mmv1_resources.go.tmpl | 6 +-- ...rage_control_folder_intelligence_config.go | 40 
------------------- ...ontrol_organization_intelligence_config.go | 40 ------------------- ...age_control_project_intelligence_config.go | 40 ------------------- 7 files changed, 12 insertions(+), 123 deletions(-) delete mode 100644 mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go delete mode 100644 mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go delete mode 100644 mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go diff --git a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml index fd3d9d2f0d2c..83e0f7a1b098 100644 --- a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml @@ -49,6 +49,9 @@ import_format: # the resource. If false, that code is not generated. autogen_async: false +datasource: + generate: true + examples: - name: storage_control_folder_intelligence_config_basic primary_resource_id: example diff --git a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml index 5f4374dc969f..edf604dac363 100644 --- a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml @@ -49,6 +49,9 @@ import_format: # the resource. If false, that code is not generated. 
autogen_async: false +datasource: + generate: true + examples: - name: storage_control_organization_intelligence_config_basic primary_resource_id: example diff --git a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml index e5eb0c840b33..d2c654e08b13 100644 --- a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml @@ -50,6 +50,9 @@ import_format: # the resource. If false, that code is not generated. autogen_async: false +datasource: + generate: true + examples: - name: storage_control_project_intelligence_config_basic primary_resource_id: example diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 79a5701201bc..df261f00d296 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -257,9 +257,9 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_storage_bucket_object": storage.DataSourceGoogleStorageBucketObject(), "google_storage_bucket_objects": storage.DataSourceGoogleStorageBucketObjects(), "google_storage_bucket_object_content": storage.DataSourceGoogleStorageBucketObjectContent(), - "google_storage_control_folder_intelligence_config": storagecontrol.DataSourceGoogleStorageControlFolderIntelligenceConfig(), - "google_storage_control_organization_intelligence_config": storagecontrol.DataSourceGoogleStorageControlOrganizationIntelligenceConfig(), - "google_storage_control_project_intelligence_config": storagecontrol.DataSourceGoogleStorageControlProjectIntelligenceConfig(), + "google_storage_control_folder_intelligence_config": storagecontrol.DataSourceStorageControlFolderIntelligenceConfig(), + "google_storage_control_organization_intelligence_config": 
storagecontrol.DataSourceStorageControlOrganizationIntelligenceConfig(), + "google_storage_control_project_intelligence_config": storagecontrol.DataSourceStorageControlProjectIntelligenceConfig(), "google_storage_insights_dataset_config": storageinsights.DataSourceGoogleStorageInsightsDatasetConfig(), "google_storage_object_signed_url": storage.DataSourceGoogleSignedUrl(), "google_storage_project_service_account": storage.DataSourceGoogleStorageProjectServiceAccount(), diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go deleted file mode 100644 index c87ba75ecbbf..000000000000 --- a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go +++ /dev/null @@ -1,40 +0,0 @@ -package storagecontrol - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleStorageControlFolderIntelligenceConfig() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageControlFolderIntelligenceConfig().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") - - return &schema.Resource{ - Read: dataSourceGoogleStorageControlFolderIntelligenceConfigRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleStorageControlFolderIntelligenceConfigRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "folders/{{name}}/locations/global/intelligenceConfig") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - err = resourceStorageControlFolderIntelligenceConfigRead(d, meta) - if err != nil 
{ - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go deleted file mode 100644 index 9730d488ded1..000000000000 --- a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go +++ /dev/null @@ -1,40 +0,0 @@ -package storagecontrol - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleStorageControlOrganizationIntelligenceConfig() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageControlOrganizationIntelligenceConfig().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") - - return &schema.Resource{ - Read: dataSourceGoogleStorageControlOrganizationIntelligenceConfigRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleStorageControlOrganizationIntelligenceConfigRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "organizations/{{name}}/locations/global/intelligenceConfig") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - err = resourceStorageControlOrganizationIntelligenceConfigRead(d, meta) - if err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go 
b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go deleted file mode 100644 index f74663d70839..000000000000 --- a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go +++ /dev/null @@ -1,40 +0,0 @@ -package storagecontrol - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleStorageControlProjectIntelligenceConfig() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageControlProjectIntelligenceConfig().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") - - return &schema.Resource{ - Read: dataSourceGoogleStorageControlProjectIntelligenceConfigRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleStorageControlProjectIntelligenceConfigRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "projects/{{name}}/locations/global/intelligenceConfig") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - err = resourceStorageControlProjectIntelligenceConfigRead(d, meta) - if err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} From df50c43c0175752b892fd880e2b76eaea3d02425 Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Fri, 15 Aug 2025 05:00:13 +0530 Subject: [PATCH 780/884] marked largeCapacity as immutable (#14841) --- mmv1/products/netapp/Volume.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index e3df1d42acdf..072e43425f6a 100644 --- a/mmv1/products/netapp/Volume.yaml +++ 
b/mmv1/products/netapp/Volume.yaml @@ -490,6 +490,7 @@ properties: type: Boolean description: | Optional. Flag indicating if the volume will be a large capacity volume or a regular volume. + immutable: true - name: 'multipleEndpoints' type: Boolean description: | From 5121d81d7af66b67cb320acd2c93e0fdf6190828 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 14 Aug 2025 18:41:44 -0700 Subject: [PATCH 781/884] Enable custom endpoints for all DCL-based resources (#14778) --- .../terraform/fwmodels/provider_model.go.tmpl | 3 - .../fwprovider/framework_provider.go.tmpl | 34 +++++- .../terraform/provider/provider.go.tmpl | 20 ++-- .../terraform/transport/config.go.tmpl | 61 +++++++++- .../provider_handwritten_endpoint.go.tmpl | 40 ++++++- mmv1/third_party/tgc/dcl.go | 3 - tpgtools/main.go | 1 - .../apikeys/beta/tpgtools_product.yaml | 3 + .../overrides/apikeys/tpgtools_product.yaml | 3 + .../beta/tpgtools_product.yaml | 3 + .../assuredworkloads/tpgtools_product.yaml | 3 + .../cloudbuild/beta/tpgtools_product.yaml | 3 +- .../cloudbuild/tpgtools_product.yaml | 3 +- .../beta/tpgtools_product.yaml | 3 + .../tpgtools_product.yaml | 3 + .../firebaserules/beta/tpgtools_product.yaml | 3 + .../firebaserules/tpgtools_product.yaml | 3 + .../gkehub/beta/tpgtools_product.yaml | 3 +- .../overrides/gkehub/tpgtools_product.yaml | 3 +- .../beta/tpgtools_product.yaml | 3 + .../recaptchaenterprise/tpgtools_product.yaml | 3 + .../templates/provider_dcl_endpoints.go.tmpl | 105 ------------------ 22 files changed, 169 insertions(+), 140 deletions(-) delete mode 100644 mmv1/third_party/tgc/dcl.go create mode 100644 tpgtools/overrides/apikeys/beta/tpgtools_product.yaml create mode 100644 tpgtools/overrides/apikeys/tpgtools_product.yaml create mode 100644 tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml create mode 100644 tpgtools/overrides/assuredworkloads/tpgtools_product.yaml create mode 100644 tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml 
create mode 100644 tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml create mode 100644 tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml create mode 100644 tpgtools/overrides/firebaserules/tpgtools_product.yaml create mode 100644 tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml create mode 100644 tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml delete mode 100644 tpgtools/templates/provider_dcl_endpoints.go.tmpl diff --git a/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl b/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl index 85f96fe7481f..306a95578244 100644 --- a/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl +++ b/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl @@ -59,12 +59,9 @@ type ProviderModel struct { // dcl generated ApikeysCustomEndpoint types.String `tfsdk:"apikeys_custom_endpoint"` AssuredWorkloadsCustomEndpoint types.String `tfsdk:"assured_workloads_custom_endpoint"` - CloudBuildWorkerPoolCustomEndpoint types.String `tfsdk:"cloud_build_worker_pool_custom_endpoint"` CloudResourceManagerCustomEndpoint types.String `tfsdk:"cloud_resource_manager_custom_endpoint"` FirebaserulesCustomEndpoint types.String `tfsdk:"firebaserules_custom_endpoint"` RecaptchaEnterpriseCustomEndpoint types.String `tfsdk:"recaptcha_enterprise_custom_endpoint"` - - GkehubFeatureCustomEndpoint types.String `tfsdk:"gkehub_feature_custom_endpoint"` } type ProviderBatching struct { diff --git a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl index 80c944261338..f72d12124f68 100644 --- a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl +++ b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl @@ -222,7 +222,7 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, }, }, - // dcl + // DCL "container_aws_custom_endpoint": &schema.StringAttribute{ 
Optional: true, Validators: []validator.String{ @@ -235,6 +235,36 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, transport_tpg.CustomEndpointValidator(), }, }, + "apikeys_custom_endpoint": &schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + transport_tpg.CustomEndpointValidator(), + }, + }, + "assured_workloads_custom_endpoint": &schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + transport_tpg.CustomEndpointValidator(), + }, + }, + "cloud_resource_manager_custom_endpoint": &schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + transport_tpg.CustomEndpointValidator(), + }, + }, + "firebaserules_custom_endpoint": &schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + transport_tpg.CustomEndpointValidator(), + }, + }, + "recaptcha_enterprise_custom_endpoint": &schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + transport_tpg.CustomEndpointValidator(), + }, + }, }, Blocks: map[string]schema.Block{ "batching": schema.ListNestedBlock{ @@ -278,8 +308,6 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, }, }, } - - transport_tpg.ConfigureDCLCustomEndpointAttributesFramework(&resp.Schema) } // Configure prepares the metadata/'meta' required for data sources and resources to function. 
diff --git a/mmv1/third_party/terraform/provider/provider.go.tmpl b/mmv1/third_party/terraform/provider/provider.go.tmpl index 828254a7b757..4ea3c6a9a0f9 100644 --- a/mmv1/third_party/terraform/provider/provider.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider.go.tmpl @@ -197,6 +197,11 @@ func Provider() *schema.Provider { // dcl transport_tpg.ContainerAwsCustomEndpointEntryKey: transport_tpg.ContainerAwsCustomEndpointEntry, transport_tpg.ContainerAzureCustomEndpointEntryKey: transport_tpg.ContainerAzureCustomEndpointEntry, + transport_tpg.ApikeysEndpointEntryKey: transport_tpg.ApikeysEndpointEntry, + transport_tpg.AssuredWorkloadsEndpointEntryKey: transport_tpg.AssuredWorkloadsEndpointEntry, + transport_tpg.CloudResourceManagerEndpointEntryKey: transport_tpg.CloudResourceManagerEndpointEntry, + transport_tpg.FirebaserulesEndpointEntryKey: transport_tpg.FirebaserulesEndpointEntry, + transport_tpg.RecaptchaEnterpriseEndpointEntryKey: transport_tpg.RecaptchaEnterpriseEndpointEntry, }, ProviderMetaSchema: map[string]*schema.Schema{ @@ -214,9 +219,6 @@ func Provider() *schema.Provider { provider.ConfigureContextFunc = func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { return ProviderConfigure(ctx, d, provider) } -{{ if ne $.Compiler "terraformgoogleconversion-codegen"}} - transport_tpg.ConfigureDCLProvider(provider) -{{ end }} return provider } {{ if ne $.Compiler "terraformgoogleconversion-codegen"}} @@ -328,10 +330,6 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if v, ok := d.GetOk("universe_domain"); ok { config.UniverseDomain = v.(string) } -{{ if ne $.Compiler "terraformgoogleconversion-codegen"}} - // Configure DCL basePath - transport_tpg.ProviderDCLConfigure(d, &config) -{{- end }} // Replace hostname by the universe_domain field. 
if config.UniverseDomain != "" && config.UniverseDomain != "googleapis.com" { @@ -344,9 +342,6 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if err != nil { return nil, diag.FromErr(err) } -{{- if ne $.Compiler "terraformgoogleconversion-codegen"}} - transport_tpg.HandleDCLCustomEndpointDefaults(d) -{{- end }} // Given that impersonate_service_account is a secondary auth method, it has // no conflicts to worry about. We pull the env var in a DefaultFunc. @@ -419,6 +414,11 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr // dcl config.ContainerAwsBasePath = d.Get(transport_tpg.ContainerAwsCustomEndpointEntryKey).(string) config.ContainerAzureBasePath = d.Get(transport_tpg.ContainerAzureCustomEndpointEntryKey).(string) + config.ApikeysBasePath = d.Get(transport_tpg.ApikeysEndpointEntryKey).(string) + config.AssuredWorkloadsBasePath = d.Get(transport_tpg.AssuredWorkloadsEndpointEntryKey).(string) + config.CloudResourceManagerBasePath = d.Get(transport_tpg.CloudResourceManagerEndpointEntryKey).(string) + config.FirebaserulesBasePath = d.Get(transport_tpg.FirebaserulesEndpointEntryKey).(string) + config.RecaptchaEnterpriseBasePath = d.Get(transport_tpg.RecaptchaEnterpriseEndpointEntryKey).(string) stopCtx, ok := schema.StopContext(ctx) if !ok { diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index b45ecfb7bbbe..7d9b54ede5e6 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -234,9 +234,6 @@ func ExpandExternalCredentialsConfig(v interface{}) (*ExternalCredentials, error // Config is the configuration structure used to instantiate the Google // provider. 
type Config struct { -{{- if ne $.Compiler "terraformgoogleconversion-codegen"}} - DCLConfig -{{- end }} AccessToken string Credentials string ExternalCredentials *ExternalCredentials @@ -280,9 +277,14 @@ type Config struct { BigtableAdminBasePath string TagsLocationBasePath string - // dcl + // DCL ContainerAwsBasePath string ContainerAzureBasePath string + ApikeysBasePath string + AssuredWorkloadsBasePath string + CloudResourceManagerBasePath string + FirebaserulesBasePath string + RecaptchaEnterpriseBasePath string RequestBatcherServiceUsage *RequestBatcher RequestBatcherIam *RequestBatcher @@ -318,9 +320,19 @@ var DefaultBasePaths = map[string]string{ IamCredentialsBasePathKey : "https://iamcredentials.googleapis.com/v1/", ResourceManagerV3BasePathKey : "https://cloudresourcemanager.googleapis.com/v3/", BigtableAdminBasePathKey : "https://bigtableadmin.googleapis.com/v2/", - ContainerAwsBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", - ContainerAzureBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", TagsLocationBasePathKey: "https://{{"{{"}}location{{"}}"}}-cloudresourcemanager.googleapis.com/v3/", + // DCL + ContainerAwsBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", + ContainerAzureBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", + ApikeysEndpointEntryKey: "https://apikeys.googleapis.com/v2/", +{{- if eq $.TargetVersionName "ga" }} + AssuredWorkloadsEndpointEntryKey: "https://{{"{{"}}location{{"}}"}}-assuredworkloads.googleapis.com/v1beta1/", +{{- else }} + AssuredWorkloadsEndpointEntryKey: "https://{{"{{"}}location{{"}}"}}-assuredworkloads.googleapis.com/v1/", +{{- end }} + CloudResourceManagerEndpointEntryKey: "https://cloudresourcemanager.googleapis.com/", + FirebaserulesEndpointEntryKey: "https://firebaserules.googleapis.com/v1/", + RecaptchaEnterpriseEndpointEntryKey: "https://recaptchaenterprise.googleapis.com/v1/", } var 
DefaultClientScopes = []string{ @@ -465,6 +477,9 @@ func SetEndpointDefaults(d *schema.ResourceData) error { }, DefaultBasePaths[TagsLocationBasePathKey])) } + // DCL endpoints - these are hardcoded as a workaround for the DCL not providing a way to + // determine base paths at generation time. + if d.Get(ContainerAwsCustomEndpointEntryKey) == "" { d.Set(ContainerAwsCustomEndpointEntryKey, MultiEnvDefault([]string{ "GOOGLE_CONTAINERAWS_CUSTOM_ENDPOINT", @@ -476,6 +491,31 @@ func SetEndpointDefaults(d *schema.ResourceData) error { "GOOGLE_CONTAINERAZURE_CUSTOM_ENDPOINT", }, DefaultBasePaths[ContainerAzureBasePathKey])) } + if d.Get(ApikeysEndpointEntryKey) == "" { + d.Set(ApikeysEndpointEntryKey, MultiEnvDefault([]string{ + "GOOGLE_APIKEYS_CUSTOM_ENDPOINT", + }, DefaultBasePaths[ApikeysEndpointEntryKey])) + } + if d.Get(AssuredWorkloadsEndpointEntryKey) == "" { + d.Set(AssuredWorkloadsEndpointEntryKey, MultiEnvDefault([]string{ + "GOOGLE_ASSURED_WORKLOADS_CUSTOM_ENDPOINT", + }, DefaultBasePaths[AssuredWorkloadsEndpointEntryKey])) + } + if d.Get(CloudResourceManagerEndpointEntryKey) == "" { + d.Set(CloudResourceManagerEndpointEntryKey, MultiEnvDefault([]string{ + "GOOGLE_CLOUD_RESOURCE_MANAGER_CUSTOM_ENDPOINT", + }, DefaultBasePaths[CloudResourceManagerEndpointEntryKey])) + } + if d.Get(FirebaserulesEndpointEntryKey) == "" { + d.Set(FirebaserulesEndpointEntryKey, MultiEnvDefault([]string{ + "GOOGLE_FIREBASERULES_CUSTOM_ENDPOINT", + }, DefaultBasePaths[FirebaserulesEndpointEntryKey])) + } + if d.Get(RecaptchaEnterpriseEndpointEntryKey) == "" { + d.Set(RecaptchaEnterpriseEndpointEntryKey, MultiEnvDefault([]string{ + "GOOGLE_RECAPTCHA_ENTERPRISE_CUSTOM_ENDPOINT", + }, DefaultBasePaths[RecaptchaEnterpriseEndpointEntryKey])) + } return nil } @@ -1453,6 +1493,15 @@ func ConfigureBasePaths(c *Config) { c.BigQueryBasePath = DefaultBasePaths[BigQueryBasePathKey] c.BigtableAdminBasePath = DefaultBasePaths[BigtableAdminBasePathKey] c.TagsLocationBasePath = 
DefaultBasePaths[TagsLocationBasePathKey] + + // DCL + c.ContainerAwsBasePath = DefaultBasePaths[ContainerAwsBasePathKey] + c.ContainerAzureBasePath = DefaultBasePaths[ContainerAzureBasePathKey] + c.ApikeysBasePath = DefaultBasePaths[ApikeysEndpointEntryKey] + c.AssuredWorkloadsBasePath = DefaultBasePaths[AssuredWorkloadsEndpointEntryKey] + c.CloudResourceManagerBasePath = DefaultBasePaths[CloudResourceManagerEndpointEntryKey] + c.FirebaserulesBasePath = DefaultBasePaths[FirebaserulesEndpointEntryKey] + c.RecaptchaEnterpriseBasePath = DefaultBasePaths[RecaptchaEnterpriseEndpointEntryKey] } func GetCurrentUserEmail(config *Config, userAgent string) (string, error) { diff --git a/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl b/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl index f42ba038e969..8ea1083a96a0 100644 --- a/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl +++ b/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl @@ -104,6 +104,14 @@ var PrivatecaCertificateTemplateCustomEndpointEntry = &schema.Schema{ }, DefaultBasePaths[PrivatecaBasePathKey]), } +var TagsLocationCustomEndpointEntryKey = "tags_location_custom_endpoint" +var TagsLocationCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +// DCL var ContainerAwsCustomEndpointEntryKey = "container_aws_custom_endpoint" var ContainerAwsCustomEndpointEntry = &schema.Schema{ Type: schema.TypeString, @@ -117,12 +125,34 @@ var ContainerAzureCustomEndpointEntry = &schema.Schema{ Optional: true, ValidateFunc: ValidateCustomEndpoint, } +var ApikeysEndpointEntryKey = "apikeys_custom_endpoint" +var ApikeysEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, +} -var TagsLocationCustomEndpointEntryKey = "tags_location_custom_endpoint" -var TagsLocationCustomEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - 
Optional: true, - ValidateFunc: ValidateCustomEndpoint, +var AssuredWorkloadsEndpointEntryKey = "assured_workloads_custom_endpoint" +var AssuredWorkloadsEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, +} + +var CloudResourceManagerEndpointEntryKey = "cloud_resource_manager_custom_endpoint" +var CloudResourceManagerEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, +} + +var FirebaserulesEndpointEntryKey = "firebaserules_custom_endpoint" +var FirebaserulesEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, +} + +var RecaptchaEnterpriseEndpointEntryKey = "recaptcha_enterprise_custom_endpoint" +var RecaptchaEnterpriseEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, } func ValidateCustomEndpoint(v interface{}, k string) (ws []string, errors []error) { diff --git a/mmv1/third_party/tgc/dcl.go b/mmv1/third_party/tgc/dcl.go deleted file mode 100644 index e4b8d77ff59a..000000000000 --- a/mmv1/third_party/tgc/dcl.go +++ /dev/null @@ -1,3 +0,0 @@ -package transport - -type DCLConfig struct{} diff --git a/tpgtools/main.go b/tpgtools/main.go index 19ff83fa5e4e..67f2cd81de17 100644 --- a/tpgtools/main.go +++ b/tpgtools/main.go @@ -110,7 +110,6 @@ func main() { } // product specific generation - generateProductsFile("provider_dcl_endpoints", productsForVersion) generateProductsFile("provider_dcl_client_creation", productsForVersion) if oPath == nil || *oPath == "" { diff --git a/tpgtools/overrides/apikeys/beta/tpgtools_product.yaml b/tpgtools/overrides/apikeys/beta/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/apikeys/beta/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/apikeys/tpgtools_product.yaml b/tpgtools/overrides/apikeys/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ 
b/tpgtools/overrides/apikeys/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml b/tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/assuredworkloads/tpgtools_product.yaml b/tpgtools/overrides/assuredworkloads/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/assuredworkloads/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml b/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml index c14db0746cb2..b260c549bce5 100644 --- a/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml +++ b/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml @@ -1,6 +1,7 @@ - type: PRODUCT_BASE_PATH details: - basepathidentifier: cloud_build_worker_pool + skip: true + basepathidentifier: cloud_build - type: PRODUCT_TITLE # used to align with mmv1 product details: title: "cloudbuild" diff --git a/tpgtools/overrides/cloudbuild/tpgtools_product.yaml b/tpgtools/overrides/cloudbuild/tpgtools_product.yaml index c14db0746cb2..b260c549bce5 100644 --- a/tpgtools/overrides/cloudbuild/tpgtools_product.yaml +++ b/tpgtools/overrides/cloudbuild/tpgtools_product.yaml @@ -1,6 +1,7 @@ - type: PRODUCT_BASE_PATH details: - basepathidentifier: cloud_build_worker_pool + skip: true + basepathidentifier: cloud_build - type: PRODUCT_TITLE # used to align with mmv1 product details: title: "cloudbuild" diff --git a/tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml b/tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml new file mode 100644 index 
000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml b/tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml b/tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/firebaserules/tpgtools_product.yaml b/tpgtools/overrides/firebaserules/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/firebaserules/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml b/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml index cbded10b5146..589844700c05 100644 --- a/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml +++ b/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml @@ -3,4 +3,5 @@ ## Skip base path generation... needs to not share a name with GKEHub Membership - type: PRODUCT_BASE_PATH details: - basepathidentifier: gkehub_feature + skip: true + basepathidentifier: gkehub diff --git a/tpgtools/overrides/gkehub/tpgtools_product.yaml b/tpgtools/overrides/gkehub/tpgtools_product.yaml index cbded10b5146..589844700c05 100644 --- a/tpgtools/overrides/gkehub/tpgtools_product.yaml +++ b/tpgtools/overrides/gkehub/tpgtools_product.yaml @@ -3,4 +3,5 @@ ## Skip base path generation... 
needs to not share a name with GKEHub Membership - type: PRODUCT_BASE_PATH details: - basepathidentifier: gkehub_feature + skip: true + basepathidentifier: gkehub diff --git a/tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml b/tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml b/tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml new file mode 100644 index 000000000000..862ff73cb130 --- /dev/null +++ b/tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml @@ -0,0 +1,3 @@ +- type: PRODUCT_BASE_PATH + details: + skip: true diff --git a/tpgtools/templates/provider_dcl_endpoints.go.tmpl b/tpgtools/templates/provider_dcl_endpoints.go.tmpl deleted file mode 100644 index 59bbbc4df714..000000000000 --- a/tpgtools/templates/provider_dcl_endpoints.go.tmpl +++ /dev/null @@ -1,105 +0,0 @@ -{{/* Copyright 2021 Google LLC. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. */}} -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: DCL *** -// -// ---------------------------------------------------------------------------- -// -// This file is managed by Magic Modules (https://github.com/GoogleCloudPlatform/magic-modules) -// and is based on the DCL (https://github.com/GoogleCloudPlatform/declarative-resource-client-library). -// Changes will need to be made to the DCL or Magic Modules instead of here. -// -// We are not currently able to accept contributions to this file. If changes -// are required, please file an issue at https://github.com/hashicorp/terraform-provider-google/issues/new/choose -// -// ---------------------------------------------------------------------------- - -package transport - -import ( - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - framework_schema "github.com/hashicorp/terraform-plugin-framework/provider/schema" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" -) - -// empty string is passed for dcl default since dcl -// [hardcodes the values](https://github.com/GoogleCloudPlatform/declarative-resource-client-library/blob/main/services/google/eventarc/beta/trigger_internal.go#L96-L103) -{{range $index, $pkg := .}} -{{- if $pkg.ShouldWriteProductBasePath }} -var {{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey = "{{$pkg.BasePathIdentifier}}_custom_endpoint" -var {{$pkg.BasePathIdentifier.ToTitle}}EndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, -} -{{- end}} -{{end}} - -type DCLConfig struct { -{{- range $index, $pkg := . }} -{{- if $pkg.ShouldWriteProductBasePath }} - {{$pkg.BasePathIdentifier.ToTitle}}BasePath string -{{- end}} -{{- end}} -} - -func ConfigureDCLProvider(provider *schema.Provider) { -{{- range $index, $pkg := . 
}} -{{- if $pkg.ShouldWriteProductBasePath }} - provider.Schema[{{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey] = {{$pkg.BasePathIdentifier.ToTitle}}EndpointEntry -{{- end}} -{{- end}} -} - -func HandleDCLCustomEndpointDefaults(d *schema.ResourceData) { -{{- range $index, $pkg := . }} -{{- if $pkg.ShouldWriteProductBasePath }} - if d.Get({{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey) == "" { - d.Set({{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey, MultiEnvDefault([]string{ - "GOOGLE_{{$pkg.BasePathIdentifier.ToUpper}}_CUSTOM_ENDPOINT", - }, "")) - } -{{- end}} -{{- end}} -} - -// plugin-framework provider set-up -func ConfigureDCLCustomEndpointAttributesFramework(frameworkSchema *framework_schema.Schema) { -{{- range $index, $pkg := . }} -{{- if $pkg.ShouldWriteProductBasePath }} - frameworkSchema.Attributes["{{$pkg.BasePathIdentifier}}_custom_endpoint"] = framework_schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - CustomEndpointValidator(), - }, - } -{{- end}} -{{- end}} -} - -func ProviderDCLConfigure(d *schema.ResourceData, config *Config) interface{} { - // networkConnectivity uses mmv1 basePath, assuredworkloads has a location variable in the basepath, can't be defined here. 
- config.ApikeysBasePath = "https://apikeys.googleapis.com/v2/" - config.AssuredWorkloadsBasePath = d.Get(AssuredWorkloadsEndpointEntryKey).(string) - config.CloudBuildWorkerPoolBasePath = "https://cloudbuild.googleapis.com/v1/" - config.CloudResourceManagerBasePath = "https://cloudresourcemanager.googleapis.com/" - config.EventarcBasePath = "https://eventarc.googleapis.com/v1/" - config.FirebaserulesBasePath = "https://firebaserules.googleapis.com/v1/" - config.RecaptchaEnterpriseBasePath = "https://recaptchaenterprise.googleapis.com/v1/" - - return config -} From cffa909c8c6a7ae94cb79be3e7e6ea6567ca0aa0 Mon Sep 17 00:00:00 2001 From: zhihaos Date: Fri, 15 Aug 2025 12:40:16 -0400 Subject: [PATCH 782/884] Add new fields in Agent resource for Dialogflow CX service (#14828) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Glen Yu Signed-off-by: James Alseth Co-authored-by: nimish-khurana Co-authored-by: chenir0219 Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: Robert Teller <31879487+r-teller@users.noreply.github.com> Co-authored-by: Rajesh Guptha Co-authored-by: sameer-google Co-authored-by: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Co-authored-by: Lakshman Swaminathan Co-authored-by: Zhenhua Li Co-authored-by: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Co-authored-by: jacek-izykowski Co-authored-by: Thomas Rodgers Co-authored-by: gurusai-voleti Co-authored-by: Ramon Vermeulen Co-authored-by: Eric Pang Co-authored-by: Cameron Thornton Co-authored-by: Rishita Golla Co-authored-by: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Co-authored-by: Arnav Dham Co-authored-by: jialei-chen <147877028+jialei-chen@users.noreply.github.com> Co-authored-by: Jesse DeJong Co-authored-by: Björn <81525627+bestefreund@users.noreply.github.com> Co-authored-by: Dawid212 Co-authored-by: efe Co-authored-by: Francis O'Hara Aidoo Co-authored-by: ishamiGIT 
<202351040+ishamiGIT@users.noreply.github.com> Co-authored-by: NA2047 <12290725+NA2047@users.noreply.github.com> Co-authored-by: Scott Suarez Co-authored-by: Ryan Oaks Co-authored-by: Stephane Charite Co-authored-by: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Co-authored-by: sachin purohit Co-authored-by: Durgesh Ninave Co-authored-by: Yanwei Guo Co-authored-by: zlq Co-authored-by: Nick Elliot Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Co-authored-by: Liyun Huang Co-authored-by: dixuswe <152918466+dixuswe@users.noreply.github.com> Co-authored-by: Sarah French Co-authored-by: Glen Yu Co-authored-by: Richard Belleville Co-authored-by: Mohit Swain Co-authored-by: Nathaniel Ford Co-authored-by: Nathaniel Ford Co-authored-by: Sam Levenick Co-authored-by: Chun Wang Co-authored-by: aditikumarii-google Co-authored-by: sanmahapatra <168236987+sanmahapatra@users.noreply.github.com> Co-authored-by: Laurenz K. <45950275+laurenz-k@users.noreply.github.com> Co-authored-by: Salome Papiashvili Co-authored-by: Harshal Neelkamal <17376513+HarshalNeelkamal@users.noreply.github.com> Co-authored-by: James Alseth Co-authored-by: Yuuki Takahashi <20282867+yktakaha4@users.noreply.github.com> Co-authored-by: Darshan Mehta <8850770+darshanmehta17@users.noreply.github.com> Co-authored-by: Luca Prete Co-authored-by: Luca Prete Co-authored-by: kgala2 Co-authored-by: Yu Liao Co-authored-by: Phil Sung Co-authored-by: Andrew Ferg Co-authored-by: chethangowda89 <110084536+chethangowda89@users.noreply.github.com> Co-authored-by: Chethan Gowda Co-authored-by: Taneli Leppä Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: maayanbeltzer Co-authored-by: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Co-authored-by: Jaylon McShan Co-authored-by: Riley Karson Co-authored-by: zoeyai-google --- mmv1/products/dialogflowcx/Agent.yaml | 73 +++++++++++++++++++ .../examples/dialogflowcx_agent_full.tf.tmpl | 36 ++++++++- 2 files changed, 108 
insertions(+), 1 deletion(-) diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml index ec0297c70b81..70b8bbd73300 100644 --- a/mmv1/products/dialogflowcx/Agent.yaml +++ b/mmv1/products/dialogflowcx/Agent.yaml @@ -309,3 +309,76 @@ properties: description: | The full name of the Gen App Builder engine related to this agent if there is one. Format: projects/{Project ID}/locations/{Location ID}/collections/{Collection ID}/engines/{Engine ID} + - name: 'startPlaybook' + type: String + ignore_read: true + description: | + Name of the start playbook in this agent. A start playbook will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: **projects//locations//agents//playbooks/**. Currently only the default playbook with id "00000000-0000-0000-0000-000000000000" is allowed. + conflicts: + - startFlow + - name: 'enableMultiLanguageTraining' + type: Boolean + description: | + Enable training multi-lingual models for this agent. These models will be trained on all the languages supported by the agent. + - name: 'locked' + type: Boolean + description: | + Indicates whether the agent is locked for changes. If the agent is locked, modifications to the agent will be rejected except for [agents.restore][]. + - name: 'satisfiesPzs' + type: Boolean + output: true + description: | + A read only boolean field reflecting Zone Separation status of the agent. + - name: 'satisfiesPzi' + type: Boolean + output: true + description: | + A read only boolean field reflecting Zone Isolation status of the agent. + - name: 'answerFeedbackSettings' + type: NestedObject + ignore_read: true + description: | + Answer feedback collection settings. 
+ properties: + - name: 'enableAnswerFeedback' + type: Boolean + description: | + If enabled, end users will be able to provide [answer feedback](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.sessions/submitAnswerFeedback#body.AnswerFeedback) + to Dialogflow responses. Feature works only if interaction logging is enabled in the Dialogflow agent. + - name: 'personalizationSettings' + type: NestedObject + description: | + Settings for end user personalization. + properties: + - name: 'defaultEndUserMetadata' + type: String + description: | + Default end user metadata, used when processing DetectIntent requests. Recommended to be filled as a template instead of hard-coded value, for example { "age": "$session.params.age" }. + The data will be merged with the [QueryParameters.end_user_metadata](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/QueryParameters#FIELDS.end_user_metadata) + in [DetectIntentRequest.query_params](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.sessions/detectIntent#body.request_body.FIELDS.query_params) during query processing. + + This field uses JSON data as a string. The value provided must be a valid JSON representation documented in [Struct](https://protobuf.dev/reference/protobuf/google.protobuf/#struct). + state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' + custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' + custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' + validation: + function: 'validation.StringIsJSON' + - name: 'clientCertificateSettings' + type: NestedObject + description: | + Settings for custom client certificates. + properties: + - name: 'sslCertificate' + type: String + required: true + description: | + The ssl certificate encoded in PEM format. This string must include the begin header and end footer lines. 
+ - name: 'privateKey' + type: String + required: true + description: | + The name of the SecretManager secret version resource storing the private key encoded in PEM format. Format: **projects/{project}/secrets/{secret}/versions/{version}** + - name: 'passphrase' + type: String + description: | + The name of the SecretManager secret version resource storing the passphrase. 'passphrase' should be left unset if the private key is not encrypted. Format: **projects/{project}/secrets/{secret}/versions/{version}** diff --git a/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl index a70263220f7f..ebd9f90743b1 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl @@ -68,4 +68,38 @@ resource "google_dialogflow_cx_agent" "{{$.PrimaryResourceId}}" { gen_app_builder_settings { engine = "projects/-/locations/-/collections/-/engines/-" } -} \ No newline at end of file + start_playbook = "projects/-/locations/-/agents/-/playbooks/00000000-0000-0000-0000-000000000000" + enable_multi_language_training = false + locked = false + answer_feedback_settings { + enable_answer_feedback = false + } + client_certificate_settings { + passphrase = "projects/example-proj/secrets/example-secret/versions/example-version" + private_key = "projects/example-proj/secrets/example-secret/versions/example-version" + ssl_certificate = < Date: Fri, 15 Aug 2025 11:42:13 -0500 Subject: [PATCH 783/884] Add datasource for google_certificate_manager_dns_authorization (#14865) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...e_certificate_manager_dns_authorization.go | 46 ++++++++++++++++++ ...tificate_manager_dns_authorization_test.go | 48 +++++++++++++++++++ ...te_manager_dns_authorization.html.markdown | 42 ++++++++++++++++ 4 files changed, 137 insertions(+) create mode 100644 
mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization.go create mode 100644 mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index df261f00d296..d9c46a255d9b 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -57,6 +57,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_bigquery_default_service_account": bigquery.DataSourceGoogleBigqueryDefaultServiceAccount(), "google_certificate_manager_certificates": certificatemanager.DataSourceGoogleCertificateManagerCertificates(), "google_certificate_manager_certificate_map": certificatemanager.DataSourceGoogleCertificateManagerCertificateMap(), + "google_certificate_manager_dns_authorization": certificatemanager.DataSourceGoogleCertificateManagerDnsAuthorization(), "google_cloudbuild_trigger": cloudbuild.DataSourceGoogleCloudBuildTrigger(), "google_cloudfunctions_function": cloudfunctions.DataSourceGoogleCloudFunctionsFunction(), "google_cloudfunctions2_function": cloudfunctions2.DataSourceGoogleCloudFunctions2Function(), diff --git a/mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization.go b/mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization.go new file mode 100644 index 000000000000..f5c6519fab53 --- /dev/null +++ b/mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization.go @@ -0,0 +1,46 @@ +package certificatemanager + +import ( + "fmt" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleCertificateManagerDnsAuthorization() *schema.Resource { + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceCertificateManagerDnsAuthorization().Schema) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "domain") + // Set 'Optional' schema elements + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project", "location") + + return &schema.Resource{ + Read: dataSourceGoogleCertificateManagerDnsAuthorizationRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleCertificateManagerDnsAuthorizationRead(d *schema.ResourceData, meta interface{}) error { + id, err := tpgresource.ReplaceVars(d, meta.(*transport_tpg.Config), "projects/{{project}}/locations/{{location}}/dnsAuthorizations/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + err = resourceCertificateManagerDnsAuthorizationRead(d, meta) + if err != nil { + return err + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} diff --git a/mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization_test.go b/mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization_test.go new file mode 100644 index 000000000000..56f2c3293b14 --- /dev/null +++ b/mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization_test.go @@ -0,0 +1,48 @@ +package certificatemanager_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + 
"github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccCertificateManagerDnsAuthorizationDatasource(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccCertificateManagerDnsAuthorizationDatasourceConfig(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_certificate_manager_dns_authorization.default", "google_certificate_manager_dns_authorization.default"), + ), + }, + }, + }) +} + +func testAccCertificateManagerDnsAuthorizationDatasourceConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_certificate_manager_dns_authorization" "default" { + name = "tf-test-dns-auth-%{random_suffix}" + location = "global" + description = "The default dns" + domain = "%{random_suffix}.hashicorptest.com" + +} + +data "google_certificate_manager_dns_authorization" "default" { + name = google_certificate_manager_dns_authorization.default.name + domain = "%{random_suffix}.hashicorptest.com" + location = "global" + +} +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown b/mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown new file mode 100644 index 000000000000..a0d2b8ecfe2e --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown @@ -0,0 +1,42 @@ +--- +subcategory: "Certificate Manager" +description: |- + Fetches the details of a Certificate Manager DNS Authorization. +--- + +# google_certificate_manager_dns_authorization + +Use this data source to get information about a Certificate Manager DNS Authorization. 
For more details, see the [API documentation](https://cloud.google.com/certificate-manager/docs/reference/certificate-manager/rest/v1/projects.locations.dnsAuthorizations). + +## Example Usage + +```hcl +data "google_certificate_manager_dns_authorization" "default" { + name = "my-dns-auth" + location = "global" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - + (Required) + The name of the DNS Authorization. + +* `domain` - + (Required) + The name of the DNS Authorization. + +* `location` - + (Optional) + The Certificate Manager location. If not specified, "global" is used. + +* `project` - + (Optional) + The ID of the project in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +See [google_certificate_manager_dns_authorization](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/certificate_manager_dns_authorization) resource for details of all the available attributes. From 1fc2464088dc3a0cbc3d9dc84e3b52149da0973f Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Fri, 15 Aug 2025 17:52:58 -0400 Subject: [PATCH 784/884] changed teamcity-diff-test message in cli (#14815) --- mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go index 344b8905f608..327fca741b2a 100644 --- a/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go +++ b/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go @@ -14,7 +14,7 @@ import ( func usage() string { return `Usage: - teamcity-go-test -test [-parallelism n] [-timeout t] + teamcity-diff-test -test [-parallelism n] [-timeout t] Test names must be listed one per line on stdin. 
` From 511b656064fe2ed5ebf4f6755714a43eccb8cecd Mon Sep 17 00:00:00 2001 From: Chris Gonterman Date: Mon, 18 Aug 2025 08:56:00 -0700 Subject: [PATCH 785/884] Delete vpc test (#14868) --- .../compute/NetworkPeeringRoutesConfig.yaml | 13 ----- .../network_peering_routes_config_gke.tf.tmpl | 55 ------------------- 2 files changed, 68 deletions(-) delete mode 100644 mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl diff --git a/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml b/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml index 515a3b57195c..761096b0c7f3 100644 --- a/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml +++ b/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml @@ -63,19 +63,6 @@ examples: peering_secondary_name: 'secondary-peering' network_primary_name: 'primary-network' network_secondary_name: 'secondary-network' - - name: 'network_peering_routes_config_gke' - primary_resource_id: 'peering_gke_routes' - vars: - network_name: 'container-network' - subnetwork_name: 'container-subnetwork' - gke_cluster_name: 'private-cluster' - deletion_protection: 'true' - test_vars_overrides: - 'deletion_protection': 'false' - oics_vars_overrides: - 'deletion_protection': 'false' - # currently failing - skip_vcr: true parameters: - name: 'network' type: ResourceRef diff --git a/mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl b/mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl deleted file mode 100644 index 787b79ac44f6..000000000000 --- a/mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl +++ /dev/null @@ -1,55 +0,0 @@ -resource "google_compute_network_peering_routes_config" "{{$.PrimaryResourceId}}" { - peering = google_container_cluster.private_cluster.private_cluster_config[0].peering_name - network = google_compute_network.container_network.name - - import_custom_routes = true - export_custom_routes = true - import_subnet_routes_with_public_ip = 
true - export_subnet_routes_with_public_ip = true -} - -resource "google_compute_network" "container_network" { - name = "{{index $.Vars "network_name"}}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "container_subnetwork" { - name = "{{index $.Vars "subnetwork_name"}}" - region = "us-central1" - network = google_compute_network.container_network.name - ip_cidr_range = "10.0.36.0/24" - private_ip_google_access = true - - secondary_ip_range { - range_name = "pod" - ip_cidr_range = "10.0.0.0/19" - } - - secondary_ip_range { - range_name = "svc" - ip_cidr_range = "10.0.32.0/22" - } -} - -resource "google_container_cluster" "private_cluster" { - name = "{{index $.Vars "gke_cluster_name"}}" - location = "us-central1-a" - initial_node_count = 1 - - network = google_compute_network.container_network.name - subnetwork = google_compute_subnetwork.container_subnetwork.name - - private_cluster_config { - enable_private_endpoint = true - enable_private_nodes = true - master_ipv4_cidr_block = "10.42.0.0/28" - } - - master_authorized_networks_config {} - - ip_allocation_policy { - cluster_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[0].range_name - services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name - } - deletion_protection = {{index $.Vars "deletion_protection"}} -} From 204fc9a11bd6cf606eb9ae4f5121a4a6bb93426a Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Mon, 18 Aug 2025 12:21:59 -0400 Subject: [PATCH 786/884] prelim teamcity config changes (#14764) --- .../builds/build_configuration_per_package.kt | 14 +++-- .../components/builds/build_steps.kt | 52 +++++++++++++++++++ 2 files changed, 61 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt index 
f652b2dabcc9..8346d819818a 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt @@ -29,7 +29,7 @@ fun BuildConfigurationsForPackages(packages: Map>, p val displayName: String = info.getValue("displayName").toString() val pkg = PackageDetails(packageName, displayName, providerName, parentProjectName, releaseDiffTest) - val buildConfig = pkg.buildConfiguration(path, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix) + val buildConfig = pkg.buildConfiguration(path, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix, releaseDiffTest = releaseDiffTest) list.add(buildConfig) } @@ -39,15 +39,15 @@ fun BuildConfigurationsForPackages(packages: Map>, p // BuildConfigurationForSinglePackage accepts details of a single package in a provider and returns a build configuration for it // Intended to be used in short-lived projects where we're testing specific packages, e.g. 
feature branch testing fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): BuildType{ - val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName, releaseDiffTest) - return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix) + val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName, releaseDiffTest = releaseDiffTest) + return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix, releaseDiffTest = releaseDiffTest) } class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String, private val releaseDiffTest: String) { // buildConfiguration returns a BuildType for a service package // For BuildType docs, see https://teamcity.jetbrains.com/app/dsl-documentation/root/build-type/index.html - fun buildConfiguration(path: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, buildTimeout: Int = DefaultBuildTimeoutDuration, testPrefix: String): BuildType { + fun buildConfiguration(path: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, buildTimeout: Int = DefaultBuildTimeoutDuration, testPrefix: String, releaseDiffTest: String): BuildType { val testPrefix = "TestAcc" val testTimeout = "12" @@ -72,7 +72,11 @@ class PackageDetails(private val packageName: String, private val displayName: S tagBuildToIndicateTriggerMethod() configureGoEnv() downloadTerraformBinary() - runAcceptanceTests() + if (releaseDiffTest.toBoolean()) { + runDiffTests() + } else { + 
runAcceptanceTests() + } saveArtifactsToGCS() archiveArtifactsIfOverLimit() // Must be after push to GCS step, as this step impacts debug log files } diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt index c615d74128fa..9216b59ab694 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt @@ -226,3 +226,55 @@ fun BuildSteps.archiveArtifactsIfOverLimit() { // https://youtrack.jetbrains.com/issue/KT-2425/Provide-a-way-for-escaping-the-dollar-sign-symbol-in-multiline-strings-and-string-templates }) } + +fun BuildSteps.runDiffTests() { + if (UseTeamCityGoTest) { + step(ScriptBuildStep { + name = "Run Diff Tests" + scriptContent = "go test -v \"%PACKAGE_PATH%\" -timeout=\"%TIMEOUT%h\" -test.parallel=\"%PARALLELISM%\" -run=\"%TEST_PREFIX%\" -json" + }) + } else { + step(ScriptBuildStep { + name = "Compile Test Binary" + workingDir = "%PACKAGE_PATH%" + scriptContent = """ + #!/bin/bash + export TEST_FILE_COUNT=$(ls ./*_test.go | wc -l) + if test ${'$'}TEST_FILE_COUNT -gt "0"; then + echo "Compiling test binary" + go test -c -o test-binary + else + echo "Skipping compilation of test binary; no Go test files found" + fi + """.trimIndent() + }) + + step(ScriptBuildStep { + name = "Run via scripts/teamcitytestscripts/teamcity-diff-test" + workingDir = "%PACKAGE_PATH%" + scriptContent = """ + #!/bin/bash + if ! test -f "./test-binary"; then + echo "Skipping test execution; file ./test-binary does not exist." + exit 0 + fi + + echo "Compiling teamcity-diff-test..." + pushd ../../../scripts/teamcitytestscripts > /dev/null + go build -o ../../teamcity-diff-test . 
+ popd > /dev/null + + + export TEST_COUNT=${'$'}(./test-binary -test.list="%TEST_PREFIX%" | wc -l) + echo "Found ${'$'}{TEST_COUNT} tests that match the given test prefix %TEST_PREFIX%" + if test ${'$'}TEST_COUNT -le "0"; then + echo "Skipping test execution; no tests to run" + exit 0 + fi + + echo "Starting tests" + ./test-binary -test.list="%TEST_PREFIX%" | ../../../teamcity-diff-test -test ./test-binary -parallelism "%PARALLELISM%" -timeout "%TIMEOUT%h" + """.trimIndent() + }) + } +} \ No newline at end of file From dd477acd6220d0907626e6ea98a6385d5219da4c Mon Sep 17 00:00:00 2001 From: Naitian Liu <83430653+naitianliu-google@users.noreply.github.com> Date: Mon, 18 Aug 2025 10:08:40 -0700 Subject: [PATCH 787/884] Set default_from_api for image field in VmwareNodePool (#14842) --- mmv1/products/gkeonprem/VmwareNodePool.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/products/gkeonprem/VmwareNodePool.yaml b/mmv1/products/gkeonprem/VmwareNodePool.yaml index 05f84052babd..5936de1cca32 100644 --- a/mmv1/products/gkeonprem/VmwareNodePool.yaml +++ b/mmv1/products/gkeonprem/VmwareNodePool.yaml @@ -127,6 +127,7 @@ properties: - name: 'image' type: String description: The OS image name in vCenter, only valid when using Windows. + default_from_api: true - name: 'bootDiskSizeGb' type: Integer description: VMware disk size to be used during creation. 
From d8184ae454df3ee19d196f4c098fa0ee3f85b512 Mon Sep 17 00:00:00 2001 From: olagacek Date: Mon, 18 Aug 2025 19:58:14 +0200 Subject: [PATCH 788/884] Add support for default compute class on GKE (#14810) --- .../resource_container_cluster.go.tmpl | 15 ++++ .../resource_container_cluster_test.go.tmpl | 70 +++++++++++++++++++ .../docs/r/container_cluster.html.markdown | 2 + 3 files changed, 87 insertions(+) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 7abd84c1a4d8..4c053deb9a46 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -920,6 +920,11 @@ func ResourceContainerCluster() *schema.Resource { ValidateFunc: validation.StringInSlice([]string{"BALANCED", "OPTIMIZE_UTILIZATION"}, false), Description: `Configuration options for the Autoscaling profile feature, which lets you choose whether the cluster autoscaler should optimize for resource utilization or resource availability when deciding to remove nodes from a cluster. Can be BALANCED or OPTIMIZE_UTILIZATION. Defaults to BALANCED.`, }, + "default_compute_class_enabled": { + Type: schema.TypeBool, + Optional: true, + Description: `Specifies whether default compute class behaviour is enabled. 
If enabled, cluster autoscaler will use Compute Class with name default for all the workloads, if not overriden.`, + }, }, }, }, @@ -5699,9 +5704,16 @@ func expandClusterAutoscaling(configured interface{}, d *schema.ResourceData) *c } } } + var defaultCCConfig *container.DefaultComputeClassConfig + if defaultCCEnabled, ok := config["default_compute_class_enabled"]; ok { + defaultCCConfig = &container.DefaultComputeClassConfig{ + Enabled: defaultCCEnabled.(bool), + } + } return &container.ClusterAutoscaling{ EnableNodeAutoprovisioning: config["enabled"].(bool), ResourceLimits: resourceLimits, + DefaultComputeClassConfig: defaultCCConfig, AutoscalingProfile: config["autoscaling_profile"].(string), AutoprovisioningNodePoolDefaults: expandAutoProvisioningDefaults(config["auto_provisioning_defaults"], d), AutoprovisioningLocations: tpgresource.ConvertStringArr(config["auto_provisioning_locations"].([]interface{})), @@ -7325,6 +7337,9 @@ func flattenClusterAutoscaling(a *container.ClusterAutoscaling) []map[string]int r["enabled"] = false } r["autoscaling_profile"] = a.AutoscalingProfile + if a.DefaultComputeClassConfig != nil { + r["default_compute_class_enabled"] = a.DefaultComputeClassConfig.Enabled + } return []map[string]interface{}{r} } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index c02fbad06340..77e8246a06e8 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -3772,6 +3772,76 @@ func TestAccContainerCluster_nodeAutoprovisioningNetworkTags(t *testing.T) { }) } +func TestAccContainerCluster_withDefaultComputeClassEnabled(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, 
"gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withDefaultComputeClassEnabled(clusterName, networkName, subnetworkName, true), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "cluster_autoscaling.0.default_compute_class_enabled", "true"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_withDefaultComputeClassEnabled(clusterName, networkName, subnetworkName, false), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "cluster_autoscaling.0.default_compute_class_enabled", "false"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerCluster_withDefaultComputeClassEnabled(clusterName, networkName, subnetworkName string, enabled bool) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + location = "us-central1-a" + initial_node_count = 1 + network = "%s" + subnetwork = "%s" + deletion_protection = false + + cluster_autoscaling { + enabled = true + default_compute_class_enabled = %t + resource_limits { + resource_type = "cpu" + minimum = 1 + maximum = 10 + } + resource_limits { + resource_type = "memory" + minimum = 10 + maximum = 100 + } + } +} +`, clusterName, networkName, subnetworkName, enabled) +} + + + func TestAccContainerCluster_withShieldedNodes(t 
*testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index c45437ea5f02..4bd90c16e424 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -603,6 +603,8 @@ options for the [Autoscaling profile](https://cloud.google.com/kubernetes-engine feature, which lets you choose whether the cluster autoscaler should optimize for resource utilization or resource availability when deciding to remove nodes from a cluster. Can be `BALANCED` or `OPTIMIZE_UTILIZATION`. Defaults to `BALANCED`. +* `default_compute_class_enabled` - (Optional) Specifies whether default compute class behaviour is enabled. If enabled, cluster autoscaler will use Compute Class with name default for all the workloads, if not overriden. + The `resource_limits` block supports: * `resource_type` - (Required) The type of the resource. For example, `cpu` and From 4cdc5e1eeb0c336124b06229fce09b372d4b4ef4 Mon Sep 17 00:00:00 2001 From: Yan Date: Mon, 18 Aug 2025 12:05:43 -0700 Subject: [PATCH 789/884] Add support for "connection_properties" for bigquery job resource (#14797) --- mmv1/products/bigquery/Job.yaml | 24 +++++++++++++++++++ .../bigquery_job_query_continuous.tf.tmpl | 7 +++++- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/mmv1/products/bigquery/Job.yaml b/mmv1/products/bigquery/Job.yaml index 7f6cb7b330f1..3894e8155bfc 100644 --- a/mmv1/products/bigquery/Job.yaml +++ b/mmv1/products/bigquery/Job.yaml @@ -406,6 +406,30 @@ properties: description: | Whether to run the query as continuous or a regular query. min_version: beta + - name: 'connectionProperties' + type: Array + description: | + Connection properties to customize query behavior. Under JDBC, these correspond + directly to connection properties passed to the DriverManager. 
Under ODBC, these + correspond to properties in the connection string. + item_type: + type: NestedObject + properties: + - name: 'key' + type: String + description: | + The key of the property to set. Currently supported connection properties: + * `dataset_project_id`: represents the default project for datasets that are used in the query + * `time_zone`: represents the default timezone used to run the query + * `session_id`: associates the query with a given session + * `query_label`: associates the query with a given job label + * `service_account`: indicates the service account to use to run a continuous query + required: true + - name: 'value' + type: String + description: | + The value of the property to set. + required: true - name: 'load' type: NestedObject description: 'Configures a load job.' diff --git a/mmv1/templates/terraform/examples/bigquery_job_query_continuous.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_job_query_continuous.tf.tmpl index 643bd49b9361..12783b62a96d 100644 --- a/mmv1/templates/terraform/examples/bigquery_job_query_continuous.tf.tmpl +++ b/mmv1/templates/terraform/examples/bigquery_job_query_continuous.tf.tmpl @@ -6,5 +6,10 @@ resource "google_bigquery_job" "{{$.PrimaryResourceId}}" { query { query = "SELECT state FROM [lookerdata:cdc.project_tycho_reports]" continuous = true + + connection_properties { + key = "service_account" + value = "bq-runner@project-query-continuous.iam.gserviceaccount.com" + } } -} \ No newline at end of file +} From eb5829d4ac9a662385c039d053442415a595a038 Mon Sep 17 00:00:00 2001 From: g-dreva Date: Mon, 18 Aug 2025 21:47:26 +0000 Subject: [PATCH 790/884] Add SaaS Runtime product and Saas resource (#14671) --- mmv1/products/saasservicemgmt/Saas.yaml | 109 ++++++++++++++++++ mmv1/products/saasservicemgmt/product.yaml | 23 ++++ .../examples/saas_runtime_saas_basic.tf.tmpl | 12 ++ .../components/inputs/services_beta.kt | 5 + .../components/inputs/services_ga.kt | 5 + 
.../resource_saas_runtime_saas_test.go.tmpl | 99 ++++++++++++++++ 6 files changed, 253 insertions(+) create mode 100644 mmv1/products/saasservicemgmt/Saas.yaml create mode 100644 mmv1/products/saasservicemgmt/product.yaml create mode 100644 mmv1/templates/terraform/examples/saas_runtime_saas_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl diff --git a/mmv1/products/saasservicemgmt/Saas.yaml b/mmv1/products/saasservicemgmt/Saas.yaml new file mode 100644 index 000000000000..41ec4ec19b9e --- /dev/null +++ b/mmv1/products/saasservicemgmt/Saas.yaml @@ -0,0 +1,109 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Saas +description: A Saas resource is the top-level representation of a SaaS service managed by a producer. It contains a list of locations where the service is available, which is used by the Rollout system to generate a rollout plan. 
+base_url: projects/{{project}}/locations/{{location}}/saas +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/saas/{{saas_id}} +create_url: projects/{{project}}/locations/{{location}}/saas?saasId={{saas_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/saas/{{saas_id}} +import_format: + - projects/{{project}}/locations/{{location}}/saas/{{saas_id}} +min_version: beta +examples: + - name: saas_runtime_saas_basic + primary_resource_id: "example" + min_version: 'beta' + vars: + saas_name: test-saas + bootstrap_iam: + - member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com" + role: "roles/saasservicemgmt.serviceAgent" +autogen_async: false +autogen_status: U2Fhcw== +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: saasId + type: String + description: The ID value for the new saas. + immutable: true + url_param_only: true + required: true +properties: + - name: annotations + type: KeyValueAnnotations + description: |- + Annotations is an unstructured key-value map stored with a resource that + may be set by external tools to store and retrieve arbitrary metadata. + They are not queryable and should be preserved when modifying objects. + + More info: https://kubernetes.io/docs/user-guide/annotations + - name: createTime + type: String + description: The timestamp when the resource was created. + output: true + - name: etag + type: String + description: |- + An opaque value that uniquely identifies a version or + generation of a resource. It can be used to confirm that the client + and server agree on the ordering of a resource being written. 
+ output: true + - name: labels + type: KeyValueLabels + description: |- + The labels on the resource, which can be used for categorization. + similar to Kubernetes resource labels. + - name: locations + type: Array + description: |- + List of locations that the service is available in. Rollout refers to the + list to generate a rollout plan. + item_type: + type: NestedObject + properties: + - name: name + type: String + description: Name of location. + - name: name + type: String + description: |- + Identifier. The resource name (full URI of the resource) following the standard naming + scheme: + + "projects/{project}/locations/{location}/saas/{saas}" + output: true + - name: uid + type: String + description: |- + The unique identifier of the resource. UID is unique in the time + and space for this resource within the scope of the service. It is + typically generated by the server on successful creation of a resource + and must not be changed. UID is used to uniquely identify resources + with resource name reuses. This should be a UUID4. + output: true + - name: updateTime + type: String + description: |- + The timestamp when the resource was last updated. Any + change to the resource made by users must refresh this value. + Changes to a resource made by the service should refresh this value. + output: true diff --git a/mmv1/products/saasservicemgmt/product.yaml b/mmv1/products/saasservicemgmt/product.yaml new file mode 100644 index 000000000000..dfd3e75f78f2 --- /dev/null +++ b/mmv1/products/saasservicemgmt/product.yaml @@ -0,0 +1,23 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: SaasRuntime +display_name: SaaS Runtime +scopes: + - https://www.googleapis.com/auth/cloud-platform +versions: + - name: "beta" + base_url: https://saasservicemgmt.googleapis.com/v1beta1/ +caibaseurl: "" +resourceswithcaiassettype: {} diff --git a/mmv1/templates/terraform/examples/saas_runtime_saas_basic.tf.tmpl b/mmv1/templates/terraform/examples/saas_runtime_saas_basic.tf.tmpl new file mode 100644 index 000000000000..8993684b993e --- /dev/null +++ b/mmv1/templates/terraform/examples/saas_runtime_saas_basic.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_saas_runtime_saas" "{{$.PrimaryResourceId}}" { + provider = google-beta + saas_id = "{{index $.Vars "saas_name"}}" + location = "global" + + locations { + name = "us-central1" + } + locations { + name = "europe-west1" + } +} diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index fac5c46661cf..5c2b40ad838b 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -696,6 +696,11 @@ var ServicesListBeta = mapOf( "displayName" to "Runtimeconfig", "path" to "./google-beta/services/runtimeconfig" ), + "saasruntime" to mapOf( + "name" to "saasruntime", + "displayName" to "SaaS Runtime", + "path" to "./google-beta/services/saasruntime" + ), "secretmanager" to mapOf( "name" to "secretmanager", "displayName" to "Secretmanager", diff --git 
a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 175f9e8a5d15..9b3f94e065da 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -691,6 +691,11 @@ var ServicesListGa = mapOf( "displayName" to "Runtimeconfig", "path" to "./google/services/runtimeconfig" ), + "saasruntime" to mapOf( + "name" to "saasruntime", + "displayName" to "SaaS Runtime", + "path" to "./google/services/saasruntime" + ), "secretmanager" to mapOf( "name" to "secretmanager", "displayName" to "Secretmanager", diff --git a/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl new file mode 100644 index 000000000000..1d9c30460336 --- /dev/null +++ b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl @@ -0,0 +1,99 @@ +package saasruntime_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccSaasRuntimeSaas_update(t *testing.T) { + t.Parallel() + acctest.BootstrapIamMembers(t, []acctest.IamMember{ + { + Member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com", + Role: "roles/saasservicemgmt.serviceAgent", + }, + }) + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSaasRuntimeSaas_basic(context), + }, + { + ResourceName: 
"google_saas_runtime_saas.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "saas_id", "terraform_labels"}, + }, + { + Config: testAccSaasRuntimeSaas_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_saas_runtime_saas.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_saas_runtime_saas.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "saas_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccSaasRuntimeSaas_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_saas_runtime_saas" "example" { + provider = google-beta + saas_id = "tf-test-test-saas%{random_suffix}" + location = "global" + + locations { + name = "us-central1" + } + locations { + name = "europe-west1" + } +} +`, context) +} + +func testAccSaasRuntimeSaas_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_saas_runtime_saas" "example" { + provider = google-beta + saas_id = "tf-test-test-saas%{random_suffix}" + location = "global" + locations { + name = "us-central1" + } + locations { + name = "europe-west1" + } + locations { + name = "us-east1" + } + labels = { + "label-one": "foo" + } + annotations = { + "annotation-one": "bar" + } +} +`, context) +} +{{- end }} From 68867f0585b636fc1b5492c35d40ae6c02c71025 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Mon, 18 Aug 2025 16:15:18 -0700 Subject: [PATCH 791/884] Made issue labeler resilient to header level changes (#14881) --- tools/issue-labeler/labeler/labels.go | 2 +- tools/issue-labeler/labeler/labels_test.go | 34 +++++++++++++++++----- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/tools/issue-labeler/labeler/labels.go 
b/tools/issue-labeler/labeler/labels.go index 3ac523afdb99..e10f9f3775b7 100644 --- a/tools/issue-labeler/labeler/labels.go +++ b/tools/issue-labeler/labeler/labels.go @@ -14,7 +14,7 @@ import ( "gopkg.in/yaml.v2" ) -var sectionRegexp = regexp.MustCompile(`### (New or )?Affected Resource\(s\)[^#]+`) +var sectionRegexp = regexp.MustCompile(`#+ (New or )?Affected Resource\(s\)[^#]+`) var commentRegexp = regexp.MustCompile(``) var resourceRegexp = regexp.MustCompile(`google_[\w*.]+`) diff --git a/tools/issue-labeler/labeler/labels_test.go b/tools/issue-labeler/labeler/labels_test.go index 278f1cacab5b..612193dbdba9 100644 --- a/tools/issue-labeler/labeler/labels_test.go +++ b/tools/issue-labeler/labeler/labels_test.go @@ -10,31 +10,51 @@ import ( ) func TestExtractAffectedResources(t *testing.T) { - cases := map[string]struct { + cases := []struct { + name string body string expectedResources []string }{ - "2023 bug": { + { + name: "2023 bug", body: "\r\n\r\n### Community Note\r\n\r\n* Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request.\r\n* Please do not leave _+1_ or _me too_ comments, they generate extra noise for issue followers and do not help prioritize the request.\r\n* If you are interested in working on this issue or have submitted a pull request, please leave a comment.\r\n* If an issue is assigned to the `modular-magician` user, it is either in the process of being autogenerated, or is planned to be autogenerated soon. If an issue is assigned to a user, that user is claiming responsibility for the issue. 
If an issue is assigned to `hashibot`, a community member has claimed the issue already.\r\n\r\n\r\n\r\n### Terraform Version\r\n\r\n\r\nTerraform v1.3.7\r\non linux_amd64\r\nprovider registry.terraform.io/hashicorp/google v4.48.0\r\n\r\n### Affected Resource(s)\r\n\r\n\r\n\r\n* google_container_node_pool\r\n* google_container_cluster\r\n\r\n### Terraform Configuration Files\r\n\r\n\r\n\r\n```tf\r\nnode_config {\r\n tags = null\r\n}\r\n```\r\n### Expected Behavior\r\n\r\nIn above code, if there already exists a list of tags defined in the `node_config` block then I would expect TF to ignore this tags field and leave them as they are\r\n\r\n### Actual Behavior\r\n\r\nTF sets the tags to an empty list, [], thus removing existing tags\r\n\r\n### Steps to Reproduce\r\n\r\n1. Create google nodepool TF code with node_config block and set `tags` within the block to a list of strings, i.e. `tags=[ \"one\", \"two\" ]`\r\n2. Terraform apply to create the nodepool with this node config\r\n3. Now update code to say `tags=null`\r\n4. Terraform apply and see the tags removed rather than ignored\r\n", expectedResources: []string{"google_container_node_pool", "google_container_cluster"}, }, - "2023 enhancement": { + { + name: "2023 enhancement", body: "\r\n\r\n### Community Note\r\n\r\n* Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request\r\n* Please do not leave \"+1\" or \"me too\" comments, they generate extra noise for issue followers and do not help prioritize the request\r\n* If you are interested in working on this issue or have submitted a pull request, please leave a comment. If the issue is assigned to the \"modular-magician\" user, it is either in the process of being autogenerated, or is planned to be autogenerated soon. 
If the issue is assigned to a user, that user is claiming responsibility for the issue. If the issue is assigned to \"hashibot\", a community member has claimed the issue already.\r\n\r\n\r\n\r\n### Description\r\n\r\n\r\n\r\nSupport for creating mute configs in SCC:\r\nhttps://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.muteConfigs/create\r\n\r\n### New or Affected Resource(s)\r\n\r\n\r\n\r\n* google_scc_mute_config\r\n\r\n### Potential Terraform Configuration\r\n\r\n\r\n\r\n```tf\r\nresource \"google_scc_mute_config\" \"my_config\" {\r\n config_id = \"my-config\"\r\n organisation = \"12345678\"\r\n description = \"My Awesome Mute Config\"\r\n filter = \"severity=LOW\"\r\n}\r\n```\r\n\r\nCurious as to why the current notification config is only supported at the org level? Even though the parent config can exist at folder or project level? (Same applies here)\r\n\r\n### References\r\n\r\n\r\n\r\n* #0000\r\n\r\n\r\n", expectedResources: []string{"google_scc_mute_config"}, }, - "google_* comment ignored": { + { + name: "google_* comment ignored", body: "\r\n\r\n### Community Note\r\n\r\n* Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request\r\n* Please do not leave \"+1\" or \"me too\" comments, they generate extra noise for issue followers and do not help prioritize the request\r\n* If you are interested in working on this issue or have submitted a pull request, please leave a comment. If the issue is assigned to the \"modular-magician\" user, it is either in the process of being autogenerated, or is planned to be autogenerated soon. If the issue is assigned to a user, that user is claiming responsibility for the issue. 
If the issue is assigned to \"hashibot\", a community member has claimed the issue already.\r\n\r\n\r\n\r\n### Description\r\n\r\n\r\n\r\nSupport for creating mute configs in SCC:\r\nhttps://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.muteConfigs/create\r\n\r\n### New or Affected Resource(s)\r\n\r\n\r\n\r\n* google_scc_mute_config\r\n\r\n### Potential Terraform Configuration\r\n\r\n\r\n\r\n```tf\r\nresource \"google_scc_mute_config\" \"my_config\" {\r\n config_id = \"my-config\"\r\n organisation = \"12345678\"\r\n description = \"My Awesome Mute Config\"\r\n filter = \"severity=LOW\"\r\n}\r\n```\r\n\r\nCurious as to why the current notification config is only supported at the org level? Even though the parent config can exist at folder or project level? (Same applies here)\r\n\r\n### References\r\n\r\n\r\n\r\n* #0000\r\n\r\n\r\n", expectedResources: []string{"google_scc_mute_config"}, }, - "no resources returns empty slice": { + { + name: "no resources returns empty slice", body: "### New or Affected Resource(s):\r\n#", expectedResources: []string{}, }, + { + name: "h1", + body: "\n# New or Affected Resource(s):\r\ngoogle_scc_mute_config", + expectedResources: []string{"google_scc_mute_config"}, + }, + { + name: "h2", + body: "\n## New or Affected Resource(s):\r\ngoogle_scc_mute_config", + expectedResources: []string{"google_scc_mute_config"}, + }, + { + name: "h4", + body: "\n#### New or Affected Resource(s):\r\ngoogle_scc_mute_config", + expectedResources: []string{"google_scc_mute_config"}, + }, } - for tn, tc := range cases { + for _, tc := range cases { tc := tc - t.Run(tn, func(t *testing.T) { + t.Run(tc.name, func(t *testing.T) { t.Parallel() resources := ExtractAffectedResources(tc.body) if !slices.Equal(resources, tc.expectedResources) { From e47b168162940606faee58ea99b896b04d817e9c Mon Sep 17 00:00:00 2001 From: Weston Haught Date: Mon, 18 Aug 2025 17:06:28 -0700 Subject: [PATCH 792/884] Add max instance count field to 
Cloud Run Service (#14724) --- mmv1/products/cloudrunv2/Service.yaml | 4 ++ .../examples/cloudrunv2_service_basic.tf.tmpl | 4 ++ .../examples/cloudrunv2_service_gpu.tf.tmpl | 7 ++-- .../examples/cloudrunv2_service_sql.tf.tmpl | 10 ++--- ...resource_cloud_run_v2_service_test.go.tmpl | 37 +++++++++---------- 5 files changed, 35 insertions(+), 27 deletions(-) diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 237903dc5544..1806b23b986b 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -340,6 +340,10 @@ properties: type: Integer description: | Minimum number of instances for the service, to be divided among all revisions receiving traffic. + - name: 'maxInstanceCount' + type: Integer + description: | + Combined maximum number of instances for all revisions receiving traffic. - name: 'scalingMode' type: Enum description: | diff --git a/mmv1/templates/terraform/examples/cloudrunv2_service_basic.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_service_basic.tf.tmpl index 2949efc82151..52e59fb72481 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_service_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_service_basic.tf.tmpl @@ -3,6 +3,10 @@ resource "google_cloud_run_v2_service" "{{$.PrimaryResourceId}}" { location = "us-central1" deletion_protection = false ingress = "INGRESS_TRAFFIC_ALL" + + scaling { + max_instance_count = 100 + } template { containers { diff --git a/mmv1/templates/terraform/examples/cloudrunv2_service_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_service_gpu.tf.tmpl index 512fbfaef947..871fdf872b96 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_service_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_service_gpu.tf.tmpl @@ -4,6 +4,10 @@ resource "google_cloud_run_v2_service" "{{$.PrimaryResourceId}}" { deletion_protection = false ingress = "INGRESS_TRAFFIC_ALL" + scaling { + 
max_instance_count = 1 + } + template { containers { image = "us-docker.pkg.dev/cloudrun/container/hello" @@ -20,8 +24,5 @@ resource "google_cloud_run_v2_service" "{{$.PrimaryResourceId}}" { accelerator = "nvidia-l4" } gpu_zonal_redundancy_disabled = true - scaling { - max_instance_count = 1 - } } } diff --git a/mmv1/templates/terraform/examples/cloudrunv2_service_sql.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_service_sql.tf.tmpl index b22f049b9298..77b34479795d 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_service_sql.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_service_sql.tf.tmpl @@ -4,11 +4,11 @@ resource "google_cloud_run_v2_service" "{{$.PrimaryResourceId}}" { deletion_protection = false ingress = "INGRESS_TRAFFIC_ALL" - template { - scaling { - max_instance_count = 2 - } - + scaling { + max_instance_count = 2 + } + + template { volumes { name = "cloudsql" cloud_sql_instance { diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index 637a63830ff6..1336a242d070 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -63,6 +63,10 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" + scaling { + min_instance_count = 1 + max_instance_count = 3 + } template { labels = { label-1 = "value-1" @@ -70,10 +74,6 @@ resource "google_cloud_run_v2_service" "default" { timeout = "300s" service_account = google_service_account.service_account.email execution_environment = "EXECUTION_ENVIRONMENT_GEN2" - scaling { - max_instance_count = 3 - min_instance_count = 1 - } annotations = { generated-by = "magic-modules" } @@ -131,7 +131,10 @@ resource "google_cloud_run_v2_service" "default" { } client = 
"client-update" client_version = "client-version-update" - + scaling { + min_instance_count = 1 + max_instance_count = 2 + } template { labels = { label-1 = "value-update" @@ -139,10 +142,6 @@ resource "google_cloud_run_v2_service" "default" { timeout = "500s" service_account = google_service_account.service_account.email execution_environment = "EXECUTION_ENVIRONMENT_GEN1" - scaling { - max_instance_count = 2 - min_instance_count = 1 - } annotations = { generated-by = "magic-modules" } @@ -253,6 +252,10 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" + scaling { + min_instance_count = 1 + max_instance_count = 3 + } template { labels = { label-1 = "value-1" @@ -260,10 +263,6 @@ resource "google_cloud_run_v2_service" "default" { timeout = "300s" service_account = google_service_account.service_account.email execution_environment = "EXECUTION_ENVIRONMENT_GEN2" - scaling { - max_instance_count = 3 - min_instance_count = 1 - } annotations = { generated-by = "magic-modules" } @@ -1327,6 +1326,9 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" + scaling { + max_instance_count = 1 + } template { containers { image = "us-docker.pkg.dev/cloudrun/container/hello" @@ -1338,9 +1340,6 @@ resource "google_cloud_run_v2_service" "default" { startup_cpu_boost = true } } - scaling { - max_instance_count = 1 - } } } `, context) @@ -1362,6 +1361,9 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" + scaling { + max_instance_count = 1 + } template { containers { image = "us-docker.pkg.dev/cloudrun/container/hello" @@ -1378,9 +1380,6 @@ resource "google_cloud_run_v2_service" "default" { accelerator = "nvidia-l4" } gpu_zonal_redundancy_disabled = true - scaling { - max_instance_count = 1 - } } } `, context) From 32267c1ed0366abe3ee8825edcf4d0ae441cedae Mon Sep 17 00:00:00 2001 From: karolgorc Date: Wed, 
20 Aug 2025 18:33:00 +0200 Subject: [PATCH 793/884] Remove hardcoded values that are handled by the API (#11423) Co-authored-by: Cameron Thornton Co-authored-by: Nick Elliot --- .../resource_compute_instance_template.go.tmpl | 7 ------- ...source_compute_instance_template_internal_test.go | 11 ++++++----- ...te_region_instance_template_internal_test.go.tmpl | 9 +++++---- .../docs/guides/version_7_upgrade.html.markdown | 12 ++++++++++++ 4 files changed, 23 insertions(+), 16 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index 88d85a142384..ba87f811432b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -1376,9 +1376,6 @@ func buildDisks(d *schema.ResourceData, config *transport_tpg.Config) ([]*comput // Build the disk var disk compute.AttachedDisk - disk.Type = "PERSISTENT" - disk.Mode = "READ_WRITE" - disk.Interface = "SCSI" disk.Boot = i == 0 disk.AutoDelete = d.Get(prefix + ".auto_delete").(bool) @@ -1833,10 +1830,6 @@ func reorderDisks(configDisks []interface{}, apiDisks []map[string]interface{}) disksByDeviceName[v.(string)] = i } else if v := disk["type"]; v.(string) == "SCRATCH" { iface := disk["interface"].(string) - if iface == "" { - // apply-time default - iface = "SCSI" - } scratchDisksByInterface[iface] = append(scratchDisksByInterface[iface], i) } else if v := disk["source"]; v.(string) != "" { attachedDisksBySource[v.(string)] = i diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go index 1ff79ef03e2a..f917b6b601e9 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go @@ -19,8 +19,9 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { cDeviceName := map[string]interface{}{ "device_name": "disk-1", } - cScratch := map[string]interface{}{ - "type": "SCRATCH", + cScratchScsi := map[string]interface{}{ + "type": "SCRATCH", + "interface": "SCSI", } cSource := map[string]interface{}{ "source": "disk-source", @@ -78,7 +79,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, @@ -89,7 +90,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, @@ -100,7 +101,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cFallThrough, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl index a643e4719836..bb235b036588 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl @@ -22,8 +22,9 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { cDeviceName := map[string]interface{}{ "device_name": "disk-1", } - cScratch := map[string]interface{}{ + cScratchScsi := map[string]interface{}{ "type": "SCRATCH", + "interface": "SCSI", } cSource := map[string]interface{}{ "source": "disk-source", @@ -81,7 +82,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, @@ -92,7 +93,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, @@ -103,7 +104,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, 
cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cFallThrough, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 2143b16cd1a7..f183e3525b15 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -173,6 +173,18 @@ Use `google_beyondcorp_security_gateway_application` instead. `enable_flow_logs` has been removed in favor of `log_config`. +## Resource: `google_compute_instance_template` + +### The resource will no longer use hardcoded values + +`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. + +## Resource: `google_compute_region_instance_template` + +### The resource will no longer use hardcoded values + +`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. + ## Resource: `google_notebooks_location` is now removed This resource is not functional. 
From d3cb383c385df73526b8a3dfb61ea62ebc791159 Mon Sep 17 00:00:00 2001 From: wj-chen Date: Wed, 20 Aug 2025 14:51:03 -0700 Subject: [PATCH 794/884] Remove the default value of view.use_legacy_sql in google_bigquery_table (#14751) --- .../bigquery/resource_bigquery_table.go.tmpl | 24 +++++++++++-------- .../guides/version_7_upgrade.html.markdown | 6 +++++ .../docs/r/bigquery_table.html.markdown | 5 +++- 3 files changed, 24 insertions(+), 11 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index bebe08d56791..e6abc6db1021 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -14,6 +14,7 @@ import ( "golang.org/x/exp/slices" + "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" @@ -1051,13 +1052,13 @@ func ResourceBigQueryTable() *schema.Resource { }, // UseLegacySQL: [Optional] Specifies whether to use BigQuery's - // legacy SQL for this view. The default value is true. If set to - // false, the view will use BigQuery's standard SQL: + // legacy SQL for this view. If set to false, the view will use + // BigQuery's standard SQL: "use_legacy_sql": { Type: schema.TypeBool, Optional: true, - Default: true, - Description: `Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL`, + Computed: true, + Description: `Specifies whether to use BigQuery's legacy SQL for this view. 
If set to false, the view will use BigQuery's standard SQL`, }, }, }, @@ -1668,8 +1669,8 @@ func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, e }, } - if v, ok := d.GetOk("view"); ok { - table.View = expandView(v) + if _, ok := d.GetOk("view"); ok { + table.View = expandView(d) } if v, ok := d.GetOk("materialized_view"); ok { @@ -3051,12 +3052,15 @@ func flattenRangePartitioning(rp *bigquery.RangePartitioning) []map[string]inter return []map[string]interface{}{result} } -func expandView(configured interface{}) *bigquery.ViewDefinition { - raw := configured.([]interface{})[0].(map[string]interface{}) +func expandView(d *schema.ResourceData) *bigquery.ViewDefinition { + v, _ := d.GetOk("view") + raw := v.([]interface{})[0].(map[string]interface{}) vd := &bigquery.ViewDefinition{Query: raw["query"].(string)} - if v, ok := raw["use_legacy_sql"]; ok { - vd.UseLegacySql = v.(bool) + configValue := d.GetRawConfig().GetAttr("view").Index(cty.NumberIntVal(0)).AsValueMap() + useLegacySQLValue := configValue["use_legacy_sql"] + if !useLegacySQLValue.IsNull() { + vd.UseLegacySql = useLegacySQLValue.RawEquals(cty.True) vd.ForceSendFields = append(vd.ForceSendFields, "UseLegacySql") } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index f183e3525b15..43652031839f 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -140,6 +140,12 @@ Use `google_beyondcorp_security_gateway_application` instead. `public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now. 
+## Resource: `google_bigquery_table` + +### `view.use_legacy_sql` no longer has a default value of `True` + +The `view.use_legacy_sql` field no longer has a default value. Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For a new view, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details. + ## Resource: `google_bigtable_table_iam_binding` ### `instance` is now removed diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index b92ced3e90e2..56d2430cc7d0 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -425,7 +425,10 @@ The following arguments are supported: * `query` - (Required) A query that BigQuery executes when the view is referenced. * `use_legacy_sql` - (Optional) Specifies whether to use BigQuery's legacy SQL for this view. - The default value is true. If set to false, the view will use BigQuery's standard SQL. + If set to `false`, the view will use BigQuery's standard SQL. If set to + `true`, the view will use BigQuery's legacy SQL. If unset, the API will + interpret it as a `true` and assumes the legacy SQL dialect for its query + according to the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition). -> **Note**: Starting in provider version `7.0.0`, no default value is provided for this field unless explicitly set in the configuration. 
From f64ec15e1c6736e7aa407be75ce328bbbdec41b8 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Wed, 20 Aug 2025 17:31:58 -0500 Subject: [PATCH 795/884] upgrade dcl to 1.82.0 gkehub binauthz removal (#14916) --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 2 ++ tpgtools/go.mod | 2 +- tpgtools/go.sum | 2 ++ 4 files changed, 6 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 9282c683f316..5d14fa4cdddb 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/auth v0.16.3 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 2ed259c385dc..a56b9444c47a 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -26,6 +26,8 @@ github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:Zp github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.2 
h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= diff --git a/tpgtools/go.mod b/tpgtools/go.mod index 7b2747548e1e..d7a556fbec12 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index e727756f3b61..4c05f97caca1 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -10,6 +10,8 @@ github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:Zp github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= From 240d274e3bc799e0b0299e2b2fa9f8973977189f Mon Sep 17 00:00:00 2001 From: haiyanmeng Date: Thu, 21 Aug 2025 15:05:21 -0400 
Subject: [PATCH 796/884] Remove `configmanagement.binauthz` field in google_gke_hub_feature_membership (#14531) Co-authored-by: Cameron Thornton --- ...ource_gke_hub_feature_membership_meta.yaml.tmpl | 1 - .../docs/guides/version_7_upgrade.html.markdown | 6 ++++++ .../r/gke_hub_feature_membership.html.markdown | 14 +------------- 3 files changed, 7 insertions(+), 14 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl index 778b7227674b..bb160974eefd 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl @@ -8,7 +8,6 @@ api_version: 'v1' {{- end }} api_resource_type_kind: 'Feature' fields: - - field: 'configmanagement.binauthz.enabled' - field: 'configmanagement.config_sync.enabled' - field: 'configmanagement.config_sync.git.gcp_service_account_email' - field: 'configmanagement.config_sync.git.https_proxy' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 43652031839f..40c96c9ee338 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -205,6 +205,12 @@ Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/e To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. +## Resource: `google_gke_hub_feature_membership` + +### `configmanagement.binauthz` is now removed + +Remove `configmanagement.binauthz` from your configuration after upgrade. 
+ ## Resource: `google_gke_hub_membership` ### `description` is now removed diff --git a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown index b053b083c696..5bbe489b5019 100644 --- a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown @@ -4,7 +4,7 @@ description: |- Contains information about a GKEHub Feature Memberships. --- -# google_gkehub_feature_membership +# google_gke_hub_feature_membership Contains information about a GKEHub Feature Memberships. Feature Memberships configure GKEHub Features that apply to specific memberships rather than the project as a whole. The google_gke_hub is the Fleet API. @@ -426,11 +426,6 @@ The following arguments are supported: (Optional) Version of Config Sync installed. -* `binauthz` - - (Optional, Deprecated) - Binauthz configuration for the cluster. Structure is [documented below](#nested_binauthz). - This field will be ignored and should not be set. - * `hierarchy_controller` - (Optional) Hierarchy Controller configuration for the cluster. Structure is [documented below](#nested_hierarchy_controller). @@ -444,13 +439,6 @@ The following arguments are supported: Policy Controller configuration for the cluster. Structure is [documented below](#nested_policy_controller). Configuring Policy Controller through the configmanagement feature is no longer recommended. Use the policycontroller feature instead. - - -The `binauthz` block supports: - -* `enabled` - - (Optional) - Whether binauthz is enabled in this cluster. 
The `config_sync` block supports: From d12d762fb42454a291ef969ef7eb75ec059d03e3 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 21 Aug 2025 13:57:34 -0700 Subject: [PATCH 797/884] remove default_from_api from credit_types and subaccounts in google_billing_budget (#14938) --- mmv1/products/billingbudget/Budget.yaml | 6 - .../billing/resource_billing_budget_test.go | 132 +++++++++--------- .../guides/version_7_upgrade.html.markdown | 9 ++ 3 files changed, 74 insertions(+), 73 deletions(-) diff --git a/mmv1/products/billingbudget/Budget.yaml b/mmv1/products/billingbudget/Budget.yaml index 13b0d5d889fd..8fa30a93ce35 100644 --- a/mmv1/products/billingbudget/Budget.yaml +++ b/mmv1/products/billingbudget/Budget.yaml @@ -213,9 +213,6 @@ properties: Optional. If creditTypesTreatment is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types to be subtracted from gross cost to determine the spend for threshold calculations. See a list of acceptable credit type values. If creditTypesTreatment is not INCLUDE_SPECIFIED_CREDITS, this field must be empty. - - **Note:** If the field has a value in the config and needs to be removed, the field has to be an empty array in the config. - default_from_api: true at_least_one_of: - 'budget_filter.0.projects' - 'budget_filter.0.resource_ancestors' @@ -236,9 +233,6 @@ properties: the parent account, usage from the parent account will be included. If the field is omitted, the report will include usage from the parent account and all subaccounts, if they exist. - - **Note:** If the field has a value in the config and needs to be removed, the field has to be an empty array in the config. 
- default_from_api: true at_least_one_of: - 'budget_filter.0.projects' - 'budget_filter.0.resource_ancestors' diff --git a/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go b/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go index ffc59734ae29..9d2f4659f818 100644 --- a/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go +++ b/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go @@ -221,7 +221,6 @@ resource "google_billing_budget" "budget" { labels = { label = "bar" } - subaccounts = [] } amount { @@ -412,7 +411,7 @@ resource "google_billing_budget" "budget" { labels = { label1 = "bar2" } - calendar_period = "YEAR" + calendar_period = "YEAR" } amount { @@ -460,19 +459,18 @@ resource "google_billing_budget" "budget" { labels = { label1 = "bar2" } - custom_period { - start_date { - year = 2022 - month = 1 - day = 1 - } - end_date { - year = 2023 - month = 12 - day = 31 - } - } - credit_types = [] + custom_period { + start_date { + year = 2022 + month = 1 + day = 1 + } + end_date { + year = 2023 + month = 12 + day = 31 + } + } } amount { @@ -584,43 +582,43 @@ func testAccBillingBudget_budgetFilterProjectsOrdering1(context map[string]inter return acctest.Nprintf(` data "google_billing_account" "account" { - billing_account = "%{billing_acct}" + billing_account = "%{billing_acct}" } resource "google_project" "project1" { - project_id = "tf-test-%{random_suffix_1}" - name = "tf-test-%{random_suffix_1}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_1}" + name = "tf-test-%{random_suffix_1}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_project" "project2" { - project_id = "tf-test-%{random_suffix_2}" - name = "tf-test-%{random_suffix_2}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = 
"DELETE" + project_id = "tf-test-%{random_suffix_2}" + name = "tf-test-%{random_suffix_2}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_billing_budget" "budget" { - billing_account = data.google_billing_account.account.id - display_name = "Example Billing Budget" - - budget_filter { - projects = [ - "projects/${google_project.project1.number}", - "projects/${google_project.project2.number}", - ] - } + billing_account = data.google_billing_account.account.id + display_name = "Example Billing Budget" - amount { - last_period_amount = true - } + budget_filter { + projects = [ + "projects/${google_project.project1.number}", + "projects/${google_project.project2.number}", + ] + } - threshold_rules { - threshold_percent = 10.0 - } + amount { + last_period_amount = true + } + + threshold_rules { + threshold_percent = 10.0 + } } `, context) @@ -630,43 +628,43 @@ func testAccBillingBudget_budgetFilterProjectsOrdering2(context map[string]inter return acctest.Nprintf(` data "google_billing_account" "account" { - billing_account = "%{billing_acct}" + billing_account = "%{billing_acct}" } resource "google_project" "project1" { - project_id = "tf-test-%{random_suffix_1}" - name = "tf-test-%{random_suffix_1}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_1}" + name = "tf-test-%{random_suffix_1}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_project" "project2" { - project_id = "tf-test-%{random_suffix_2}" - name = "tf-test-%{random_suffix_2}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_2}" + name = "tf-test-%{random_suffix_2}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_billing_budget" "budget" { - 
billing_account = data.google_billing_account.account.id - display_name = "Example Billing Budget" - - budget_filter { - projects = [ - "projects/${google_project.project2.number}", - "projects/${google_project.project1.number}", - ] - } + billing_account = data.google_billing_account.account.id + display_name = "Example Billing Budget" - amount { - last_period_amount = true - } + budget_filter { + projects = [ + "projects/${google_project.project2.number}", + "projects/${google_project.project1.number}", + ] + } - threshold_rules { - threshold_percent = 10.0 - } + amount { + last_period_amount = true + } + + threshold_rules { + threshold_percent = 10.0 + } } `, context) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 40c96c9ee338..b7cbfcb567a6 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -165,6 +165,15 @@ The `view.use_legacy_sql` field no longer has a default value. Configurations th `instance` has been removed in favor of `instance_name`. <<<<<<< HEAD +<<<<<<< HEAD +======= +## Resource: `google_billing_budget` + +### `budget_filter.credit types` and `budget_filter.subaccounts` are no longer optional+computed, only optional + +`budget_filter.credit types` and `budget_filter.subaccounts` are no longer O+C. These fields already did not export any API-default values, so no change to your configuration should be necessary. 
+ +>>>>>>> e2e1d5150 (remove default_from_api from credit_types and subaccounts in google_billing_budget (#14938)) ## Resource: `google_compute_packet_mirroring` ### `subnetworks` and `instances` fields have been converted to sets From 95ef5838fa1ca0b6e0c8a2c5fb50e99ed270f6f8 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Thu, 21 Aug 2025 14:05:14 -0700 Subject: [PATCH 798/884] Breaking Change: Remove allow_fewer_zones_deployment from Memorystore and Redis Cluster (#14889) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/memorystore/Instance.yaml | 8 -------- mmv1/products/redis/Cluster.yaml | 8 -------- .../examples/memorystore_instance_full.tf.tmpl | 1 - .../terraform/examples/redis_cluster_aof.tf.tmpl | 1 - .../docs/guides/version_7_upgrade.html.markdown | 11 +++++++++++ 5 files changed, 11 insertions(+), 18 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 4689f7d3e36f..ec40528f2814 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -517,14 +517,6 @@ properties: enum_values: - 'MULTI_ZONE' - 'SINGLE_ZONE' - - name: 'allowFewerZonesDeployment' - type: Boolean - description: | - Allows customers to specify if they are okay with deploying a multi-zone - instance in less than 3 zones. Once set, if there is a zonal outage during - the instance creation, the instance will only be deployed in 2 zones, and - stay within the 2 zones for its lifecycle. - immutable: true - name: 'deletionProtectionEnabled' type: Boolean description: "Optional. If set to true deletion of the instance will fail. " diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 39ea126efb16..5b26b4639464 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -378,14 +378,6 @@ properties: type: String description: | Immutable. 
The zone for single zone Memorystore Redis cluster. - - name: 'allowFewerZonesDeployment' - type: Boolean - immutable: true - description: | - Allows customers to specify if they are okay with deploying a multi-zone - cluster in less than 3 zones. Once set, if there is a zonal outage during - the cluster creation, the cluster will only be deployed in 2 zones, and - stay within the 2 zones for its lifecycle. - name: 'pscConfigs' type: Array description: | diff --git a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl index 30d2cafe722b..742450575aaf 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl @@ -14,7 +14,6 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { engine_configs = { maxmemory-policy = "volatile-ttl" } - allow_fewer_zones_deployment = true zone_distribution_config { mode = "SINGLE_ZONE" zone = "us-central1-b" diff --git a/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl b/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl index 789bf86028d6..e5abf31f38d4 100644 --- a/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl @@ -13,7 +13,6 @@ resource "google_redis_cluster" "{{$.PrimaryResourceId}}" { maxmemory-policy = "volatile-ttl" } deletion_protection_enabled = {{index $.Vars "deletion_protection_enabled"}} - allow_fewer_zones_deployment = true zone_distribution_config { mode = "MULTI_ZONE" } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b7cbfcb567a6..23e891f9bf48 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -287,6 +287,17 @@ The default value for `disable_on_destroy` has been changed to `false`. The prev <<<<<<< HEAD Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. +<<<<<<< HEAD ======= Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. >>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) +======= + +## Resource: `google_memorystore_instance` + + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. + +## Resource: `google_redis_cluster` + + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. 
+>>>>>>> eed48c10c (Breaking Change: Remove allow_fewer_zones_deployment from Memorystore and Redis Cluster (#14889)) From 8eb0fbdc08e6148ade3ed4aafd16b9103097311b Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 21 Aug 2025 14:29:27 -0700 Subject: [PATCH 799/884] =?UTF-8?q?Revert=20"=20provider:=20eliminated=20t?= =?UTF-8?q?he=20need=20to=20manually=20add=20`*=5Fwo`=20and=20`*=5F?= =?UTF-8?q?=E2=80=A6=20(#14942)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Scott Suarez --- docs/content/reference/field.md | 9 +- mmv1/api/resource.go | 86 +---- mmv1/api/resource_test.go | 330 ------------------ mmv1/api/type.go | 49 +-- mmv1/api/type_test.go | 2 +- mmv1/main.go | 4 +- .../products/bigquerydatatransfer/Config.yaml | 22 +- .../monitoring/UptimeCheckConfig.yaml | 21 +- .../products/secretmanager/SecretVersion.yaml | 16 + .../terraform/flatten_property_method.go.tmpl | 2 +- 10 files changed, 67 insertions(+), 474 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index c07098e02f0c..1f24ec8abc06 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -108,11 +108,10 @@ sensitive: true ``` ### `write_only` -Set to true to enable write-only functionality for this field. -If true, the write-only fields will be automatically generated by the code generator (`_wo` and `_wo_version`). -When the write-only variant of a field is used, it means that its value will be obscured in Terraform output as well as not be stored in state. -This field is meant to replace `sensitive` as it doesn't store the value in state. -See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) for more information. +If true, the field is considered "write-only", which means that its value will +be obscured in Terraform output as well as not be stored in state. 
This field is meant to replace `sensitive` as it doesn't store the value in state. +See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) +for more information. Write-only fields are only supported in Terraform v1.11+. Because the provider supports earlier Terraform versions, write only fields must be paired with (mutually exclusive) `sensitive` fields covering the same functionality for compatibility with those older versions. This field cannot be used in conjuction with `immutable` or `sensitive`. diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 95c10c201f4f..afa8b929d722 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -737,94 +737,14 @@ func (r Resource) GetIdentity() []*Type { }) } -func buildWriteOnlyField(name string, versionFieldName string, originalField *Type, originalFieldLineage string) *Type { - description := fmt.Sprintf("%s Note: This property is write-only and will not be read from the API. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", originalField.Description) - fieldPathOriginalField := originalFieldLineage - fieldPathCurrentField := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(name)) - requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(versionFieldName)) - - apiName := originalField.ApiName - if apiName == "" { - apiName = originalField.Name - } - - options := []func(*Type){ - propertyWithType("String"), - propertyWithRequired(false), - propertyWithDescription(description), - propertyWithWriteOnly(true), - propertyWithApiName(apiName), - propertyWithIgnoreRead(true), - propertyWithRequiredWith([]string{requiredWith}), - } - - if originalField.Required { - exactlyOneOf := append(originalField.ExactlyOneOf, fieldPathOriginalField, fieldPathCurrentField) - options = append(options, propertyWithExactlyOneOf(exactlyOneOf)) - } else { - conflicts := append(originalField.Conflicts, fieldPathOriginalField) - options = append(options, propertyWithConflicts(conflicts)) - } - - if len(originalField.AtLeastOneOf) > 0 { - atLeastOneOf := append(originalField.AtLeastOneOf, fieldPathCurrentField) - options = append(options, propertyWithAtLeastOneOf(atLeastOneOf)) - } - - return NewProperty(name, originalField.ApiName, options) -} - -func buildWriteOnlyVersionField(name string, originalField *Type, writeOnlyField *Type, originalFieldLineage string) *Type { - description := fmt.Sprintf("Triggers update of %s write-only. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", google.Underscore(writeOnlyField.Name)) - requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(writeOnlyField.Name)) - - options := []func(*Type){ - propertyWithType("String"), - propertyWithImmutable(originalField.Immutable), - propertyWithDescription(description), - propertyWithRequiredWith([]string{requiredWith}), - propertyWithClientSide(true), - } - - return NewProperty(name, name, options) -} - -func (r *Resource) addWriteOnlyFields(props []*Type, propWithWoConfigured *Type, propWithWoConfiguredLineagePath string) []*Type { - if len(propWithWoConfigured.RequiredWith) > 0 { - log.Fatalf("WriteOnly property '%s' in resource '%s' cannot have RequiredWith set. This combination is not supported.", propWithWoConfigured.Name, r.Name) - } - woFieldName := fmt.Sprintf("%sWo", propWithWoConfigured.Name) - woVersionFieldName := fmt.Sprintf("%sVersion", woFieldName) - writeOnlyField := buildWriteOnlyField(woFieldName, woVersionFieldName, propWithWoConfigured, propWithWoConfiguredLineagePath) - writeOnlyVersionField := buildWriteOnlyVersionField(woVersionFieldName, propWithWoConfigured, writeOnlyField, propWithWoConfiguredLineagePath) - props = append(props, writeOnlyField, writeOnlyVersionField) - return props -} - -func (r *Resource) buildCurrentPropLineage(p *Type, lineage string) string { - underscoreName := google.Underscore(p.Name) - if lineage == "" { - return underscoreName - } - return fmt.Sprintf("%s.0.%s", lineage, underscoreName) -} - -// AddExtraFields processes properties and adds supplementary fields based on property types. -// It handles write-only properties, labels, and annotations. 
-func (r *Resource) AddExtraFields(props []*Type, parent *Type, lineage string) []*Type { +func (r *Resource) AddLabelsRelatedFields(props []*Type, parent *Type) []*Type { for _, p := range props { - currentPropLineage := r.buildCurrentPropLineage(p, lineage) - if p.WriteOnly && !strings.HasSuffix(p.Name, "Wo") { - props = r.addWriteOnlyFields(props, p, currentPropLineage) - p.WriteOnly = false - p.Required = false - } if p.IsA("KeyValueLabels") { props = r.addLabelsFields(props, parent, p) } else if p.IsA("KeyValueAnnotations") { props = r.addAnnotationsFields(props, parent, p) } else if p.IsA("NestedObject") && len(p.AllProperties()) > 0 { - p.Properties = r.AddExtraFields(p.AllProperties(), p, currentPropLineage) + p.Properties = r.AddLabelsRelatedFields(p.AllProperties(), p) } } return props @@ -843,7 +763,6 @@ func (r *Resource) addLabelsFields(props []*Type, parent *Type, labels *Type) [] terraformLabelsField := buildTerraformLabelsField("labels", parent, labels) effectiveLabelsField := buildEffectiveLabelsField("labels", labels) - props = append(props, terraformLabelsField, effectiveLabelsField) // The effective_labels field is used to write to API, instead of the labels field. 
@@ -880,7 +799,6 @@ func (r *Resource) addAnnotationsFields(props []*Type, parent *Type, annotations } effectiveAnnotationsField := buildEffectiveLabelsField("annotations", annotations) - props = append(props, effectiveAnnotationsField) return props } diff --git a/mmv1/api/resource_test.go b/mmv1/api/resource_test.go index 46a34b8b8036..ad7dd327b288 100644 --- a/mmv1/api/resource_test.go +++ b/mmv1/api/resource_test.go @@ -4,7 +4,6 @@ import ( "os" "path/filepath" "reflect" - "slices" "strings" "testing" @@ -504,332 +503,3 @@ func TestHasPostCreateComputedFields(t *testing.T) { }) } } - -func TestResourceAddExtraFields(t *testing.T) { - t.Parallel() - - createTestResource := func(name string) *Resource { - return &Resource{ - Name: name, - ProductMetadata: &Product{ - Name: "testproduct", - }, - } - } - - createTestType := func(name, typeStr string, options ...func(*Type)) *Type { - t := &Type{ - Name: name, - Type: typeStr, - } - for _, option := range options { - option(t) - } - return t - } - - withWriteOnly := func(writeOnly bool) func(*Type) { - return func(t *Type) { t.WriteOnly = writeOnly } - } - withRequired := func(required bool) func(*Type) { - return func(t *Type) { t.Required = required } - } - withDescription := func(desc string) func(*Type) { - return func(t *Type) { t.Description = desc } - } - withProperties := func(props []*Type) func(*Type) { - return func(t *Type) { t.Properties = props } - } - - t.Run("WriteOnly property adds companion fields", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - writeOnlyProp := createTestType("password", "String", - withWriteOnly(true), - withRequired(true), - withDescription("A password field"), - ) - - props := []*Type{writeOnlyProp} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 3 { - t.Errorf("Expected 3 properties after adding WriteOnly fields, got %d", len(result)) - } - - if writeOnlyProp.WriteOnly { - t.Error("Original WriteOnly property 
should have WriteOnly set to false after processing") - } - if writeOnlyProp.Required { - t.Error("Original WriteOnly property should have Required set to false after processing") - } - - var foundWoField, foundVersionField bool - for _, prop := range result { - if prop.Name == "passwordWo" { - foundWoField = true - if !prop.WriteOnly { - t.Error("passwordWo field should have WriteOnly=true") - } - } - if prop.Name == "passwordWoVersion" { - foundVersionField = true - if !prop.ClientSide { - t.Error("passwordWoVersion field should have ClientSide=true") - } - } - } - - if !foundWoField { - t.Error("Expected to find passwordWo field") - } - if !foundVersionField { - t.Error("Expected to find passwordWoVersion field") - } - }) - - t.Run("KeyValueLabels property adds terraform and effective labels", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - Description: "Resource labels", - } - - props := []*Type{labelsType} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 3 { - t.Errorf("Expected 3 properties after adding labels fields, got %d", len(result)) - } - - if !labelsType.IgnoreWrite { - t.Error("Original labels field should have IgnoreWrite=true after processing") - } - if !strings.Contains(labelsType.Description, "**Note**") { - t.Error("Original labels field description should contain note after processing") - } - - var foundTerraformLabels, foundEffectiveLabels bool - for _, prop := range result { - if prop.Name == "terraformLabels" { - foundTerraformLabels = true - if prop.Type != "KeyValueTerraformLabels" { - t.Errorf("terraformLabels should have type KeyValueTerraformLabels, got %s", prop.Type) - } - } - if prop.Name == "effectiveLabels" { - foundEffectiveLabels = true - if prop.Type != "KeyValueEffectiveLabels" { - t.Errorf("effectiveLabels should have type KeyValueEffectiveLabels, got %s", prop.Type) - } - } - } - - if 
!foundTerraformLabels { - t.Error("Expected to find terraformLabels field") - } - if !foundEffectiveLabels { - t.Error("Expected to find effectiveLabels field") - } - - expectedDiff := "tpgresource.SetLabelsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueLabels with ExcludeAttributionLabel adds different CustomDiff", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - resource.ExcludeAttributionLabel = true - - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - } - - props := []*Type{labelsType} - resource.AddExtraFields(props, nil, "") - - expectedDiff := "tpgresource.SetLabelsDiffWithoutAttributionLabel" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueLabels with metadata parent adds metadata CustomDiff", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - parent := &Type{Name: "metadata"} - - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - } - - props := []*Type{labelsType} - resource.AddExtraFields(props, parent, "") - - expectedDiff := "tpgresource.SetMetadataLabelsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueAnnotations property adds effective annotations", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - annotationsType := &Type{ - Name: "annotations", - Type: "KeyValueAnnotations", - Description: "Resource annotations", - } - - props := []*Type{annotationsType} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 2 { - t.Errorf("Expected 2 properties after adding annotations fields, got %d", len(result)) - } - - if !annotationsType.IgnoreWrite { - t.Error("Original annotations 
field should have IgnoreWrite=true after processing") - } - - var foundEffectiveAnnotations bool - for _, prop := range result { - if prop.Name == "effectiveAnnotations" { - foundEffectiveAnnotations = true - if prop.Type != "KeyValueEffectiveLabels" { - t.Errorf("effectiveAnnotations should have type KeyValueEffectiveLabels, got %s", prop.Type) - } - } - } - - if !foundEffectiveAnnotations { - t.Error("Expected to find effectiveAnnotations field") - } - - expectedDiff := "tpgresource.SetAnnotationsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("NestedObject with properties processes recursively", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - - nestedWriteOnly := createTestType("nestedPassword", "String", withWriteOnly(true)) - nestedObject := createTestType("config", "NestedObject", withProperties([]*Type{nestedWriteOnly})) - - props := []*Type{nestedObject} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 top-level property, got %d", len(result)) - } - - if len(nestedObject.Properties) != 3 { - t.Errorf("Expected 3 nested properties after recursive processing, got %d", len(nestedObject.Properties)) - } - - if nestedWriteOnly.WriteOnly { - t.Error("Nested WriteOnly property should have WriteOnly=false after processing") - } - }) - - t.Run("Empty NestedObject properties are not processed", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - emptyNestedObject := createTestType("config", "NestedObject", withProperties([]*Type{})) - - props := []*Type{emptyNestedObject} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property, got %d", len(result)) - } - if len(emptyNestedObject.Properties) != 0 { - t.Errorf("Expected 0 nested properties, got %d", len(emptyNestedObject.Properties)) - } - 
}) - - t.Run("WriteOnly property already ending with Wo is skipped", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - woProperty := createTestType("passwordWo", "String", withWriteOnly(true)) - - props := []*Type{woProperty} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property for Wo-suffixed field, got %d", len(result)) - } - - if !woProperty.WriteOnly { - t.Error("Wo-suffixed property should remain WriteOnly=true") - } - }) - - t.Run("Regular properties are passed through unchanged", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - regularProp := createTestType("name", "String", withRequired(true)) - - props := []*Type{regularProp} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property for regular field, got %d", len(result)) - } - - if result[0] != regularProp { - t.Error("Regular property should be passed through unchanged") - } - if !regularProp.Required { - t.Error("Regular property Required should be unchanged") - } - }) - - t.Run("Multiple property types processed correctly", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - - regularProp := createTestType("name", "String") - writeOnlyProp := createTestType("password", "String", withWriteOnly(true)) - labelsType := &Type{Name: "labels", Type: "KeyValueLabels"} - - props := []*Type{regularProp, writeOnlyProp, labelsType} - result := resource.AddExtraFields(props, nil, "") - - // Should have: name + password + passwordWo + passwordWoVersion + labels + terraformLabels + effectiveLabels = 7 - if len(result) != 7 { - t.Errorf("Expected 7 properties total, got %d", len(result)) - } - - names := make(map[string]bool) - for _, prop := range result { - names[prop.Name] = true - } - - expectedNames := []string{"name", "password", "passwordWo", "passwordWoVersion", "labels", 
"terraformLabels", "effectiveLabels"} - for _, expected := range expectedNames { - if !names[expected] { - t.Errorf("Expected to find property named %s", expected) - } - } - }) -} diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 078775772585..690687db966d 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -624,6 +624,7 @@ func (t Type) ExactlyOneOfList() []string { if t.ResourceMetadata == nil { return []string{} } + return t.ExactlyOneOf } @@ -1013,54 +1014,6 @@ func propertyWithIgnoreWrite(ignoreWrite bool) func(*Type) { } } -func propertyWithRequired(required bool) func(*Type) { - return func(p *Type) { - p.Required = required - } -} - -func propertyWithWriteOnly(writeOnly bool) func(*Type) { - return func(p *Type) { - p.WriteOnly = writeOnly - } -} - -func propertyWithIgnoreRead(ignoreRead bool) func(*Type) { - return func(p *Type) { - p.IgnoreRead = ignoreRead - } -} - -func propertyWithConflicts(conflicts []string) func(*Type) { - return func(p *Type) { - p.Conflicts = conflicts - } -} - -func propertyWithRequiredWith(requiredWith []string) func(*Type) { - return func(p *Type) { - p.RequiredWith = requiredWith - } -} - -func propertyWithExactlyOneOf(exactlyOneOf []string) func(*Type) { - return func(p *Type) { - p.ExactlyOneOf = exactlyOneOf - } -} - -func propertyWithAtLeastOneOf(atLeastOneOf []string) func(*Type) { - return func(p *Type) { - p.AtLeastOneOf = atLeastOneOf - } -} - -func propertyWithApiName(apiName string) func(*Type) { - return func(p *Type) { - p.ApiName = apiName - } -} - func (t *Type) validateLabelsField() { productName := t.ResourceMetadata.ProductMetadata.Name resourceName := t.ResourceMetadata.Name diff --git a/mmv1/api/type_test.go b/mmv1/api/type_test.go index f6a738d248f2..3d46d120a31f 100644 --- a/mmv1/api/type_test.go +++ b/mmv1/api/type_test.go @@ -361,7 +361,7 @@ func TestProviderOnly(t *testing.T) { }, }, } - labeled.Properties = labeled.AddExtraFields(labeled.PropertiesWithExcluded(), nil, "") + labeled.Properties 
= labeled.AddLabelsRelatedFields(labeled.PropertiesWithExcluded(), nil) labeled.SetDefault(nil) cases := []struct { diff --git a/mmv1/main.go b/mmv1/main.go index 9e7c0fc88612..ef62e7ba5f88 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -235,7 +235,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod resource.SourceYamlFile = resourceYamlPath resource.TargetVersionName = version - resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") + resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) @@ -268,7 +268,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod } resource.TargetVersionName = version - resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") + resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index b9bb9cf4b137..df88222fc7b7 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,10 +222,30 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: + - name: 'secretAccessKeyWoVersion' + type: Integer + url_param_only: true + required_with: + - 'sensitive_params.0.secretAccessKeyWo' + description: | + The version of the sensitive params - used to trigger updates of the write-only params. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | The Secret Access Key of the AWS account transferring data from. sensitive: true + at_least_one_of: + - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secretAccessKeyWo' + conflicts: + - 'sensitive_params.0.secretAccessKeyWo' + - name: 'secretAccessKeyWo' # Wo is convention for write-only properties + type: String + description: | + The Secret Access Key of the AWS account transferring data from. write_only: true - required: true + at_least_one_of: + - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secretAccessKey' + conflicts: + - 'sensitive_params.0.secretAccessKey' diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 9affd60788a1..67f7fdd0e791 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -246,10 +246,27 @@ properties: - name: 'password' type: String description: The password to authenticate. - required: true - write_only: true + exactly_one_of: + - 'password' + - 'password_wo' sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' + - name: 'passwordWo' + type: String + description: The password to authenticate. + exactly_one_of: + - 'passwordWo' + - 'password' + required_with: + - 'http_check.0.auth_info.0.password_wo_version' + write_only: true + - name: 'passwordWoVersion' + type: String + immutable: true + ignore_read: true + description: The password write-only version. + required_with: + - 'http_check.0.auth_info.0.password_wo' - name: 'username' type: String description: The username to authenticate. 
diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index ac840f29e772..d3e0335ee2bd 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -160,6 +160,22 @@ properties: type: String description: The secret data. Must be no larger than 64KiB. api_name: data + conflicts: + - 'secretDataWo' immutable: true sensitive: true + - name: 'secretDataWo' + type: String + description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + api_name: data + required_with: + - 'SecretDataWoVersion' + conflicts: + - 'payload.0.secretData' write_only: true + - name: 'SecretDataWoVersion' + type: Integer + default_value: 0 + url_param_only: true + description: Triggers update of secret data write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + immutable: true diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index 92387f432fff..cb0fbb7a76a8 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -18,7 +18,7 @@ {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - {{- if or (and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler)) $.ClientSide }} + {{- if and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler) }} return d.Get("{{ $.TerraformLineage }}") {{- else if $.IsA "NestedObject" }} if v == nil { From 2cc4cd1f45d68e1f10a510c58ea8d3a208f4a028 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 21 Aug 2025 
15:54:55 -0700 Subject: [PATCH 800/884] Standardized required_with behavior for write-only fields (#14941) --- .../products/bigquerydatatransfer/Config.yaml | 28 ++++++++++--------- .../monitoring/UptimeCheckConfig.yaml | 8 +++--- .../products/secretmanager/SecretVersion.yaml | 10 ++++--- .../services/sql/resource_sql_user.go | 1 + .../services/sql/resource_sql_user_test.go | 3 +- .../guides/version_7_upgrade.html.markdown | 21 ++++++++++++++ 6 files changed, 49 insertions(+), 22 deletions(-) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index df88222fc7b7..138d8b327e66 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,30 +222,32 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: - - name: 'secretAccessKeyWoVersion' - type: Integer - url_param_only: true - required_with: - - 'sensitive_params.0.secretAccessKeyWo' - description: | - The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | The Secret Access Key of the AWS account transferring data from. sensitive: true at_least_one_of: - - 'sensitive_params.0.secretAccessKey' - - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secret_access_key' + - 'sensitive_params.0.secret_access_key_wo' conflicts: - - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secret_access_key_wo' - name: 'secretAccessKeyWo' # Wo is convention for write-only properties type: String description: | The Secret Access Key of the AWS account transferring data from. 
write_only: true at_least_one_of: - - 'sensitive_params.0.secretAccessKeyWo' - - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secret_access_key_wo' + - 'sensitive_params.0.secret_access_key' conflicts: - - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secret_access_key' + required_with: + - 'sensitive_params.0.secret_access_key_wo_version' + - name: 'secretAccessKeyWoVersion' + type: Integer + url_param_only: true + required_with: + - 'sensitive_params.0.secret_access_key_wo' + description: | + The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 67f7fdd0e791..7f6bf290d24e 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -247,16 +247,16 @@ properties: type: String description: The password to authenticate. exactly_one_of: - - 'password' - - 'password_wo' + - 'http_check.0.auth_info.0.password_wo' + - 'http_check.0.auth_info.0.password' sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' - name: 'passwordWo' type: String description: The password to authenticate. exactly_one_of: - - 'passwordWo' - - 'password' + - 'http_check.0.auth_info.0.password_wo' + - 'http_check.0.auth_info.0.password' required_with: - 'http_check.0.auth_info.0.password_wo_version' write_only: true diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index d3e0335ee2bd..ab26d83a4bda 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -161,7 +161,7 @@ properties: description: The secret data. Must be no larger than 64KiB. 
api_name: data conflicts: - - 'secretDataWo' + - 'payload.0.secret_data_wo' immutable: true sensitive: true - name: 'secretDataWo' @@ -169,13 +169,15 @@ properties: description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) api_name: data required_with: - - 'SecretDataWoVersion' + - 'payload.0.secret_data_wo_version' conflicts: - - 'payload.0.secretData' + - 'payload.0.secret_data' write_only: true - - name: 'SecretDataWoVersion' + - name: 'secretDataWoVersion' type: Integer default_value: 0 url_param_only: true description: Triggers update of secret data write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) immutable: true + required_with: + - 'payload.0.secret_data_wo' diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user.go b/mmv1/third_party/terraform/services/sql/resource_sql_user.go index 5fec5c13ceb2..7273955d0a40 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user.go @@ -103,6 +103,7 @@ func ResourceSqlUser() *schema.Resource { Optional: true, WriteOnly: true, ConflictsWith: []string{"password"}, + RequiredWith: []string{"password_wo_version"}, Description: `The password for the user. Can be updated. 
For Postgres instances this is a Required field, unless type is set to either CLOUD_IAM_USER or CLOUD_IAM_SERVICE_ACCOUNT.`, }, diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go b/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go index b392d8ffd6b7..c4e5cc4404b7 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go @@ -439,6 +439,7 @@ resource "google_sql_user" "user1" { instance = google_sql_database_instance.instance.name host = "gmail.com" password_wo = "%s" + password_wo_version = 1 } `, instance, password) } @@ -460,7 +461,7 @@ resource "google_sql_user" "user1" { instance = google_sql_database_instance.instance.name host = "gmail.com" password_wo = "%s" - password_wo_version = 1 + password_wo_version = 2 } `, instance, password) } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 23e891f9bf48..b66bec6cc4d8 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -233,8 +233,17 @@ Remove `description` from your configuration after upgrade. Remove `post_startup_script_config` from your configuration after upgrade. +<<<<<<< HEAD ======= >>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) +======= +## Resource: `google_monitoring_uptime_check_config` + +### Exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` must be set + +At least one must be set, and setting both would make it unclear which was being used. 
+ +>>>>>>> 7b15bdcb5 (Standardized required_with behavior for write-only fields (#14941)) ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required @@ -267,6 +276,18 @@ Remove `service_config.service` from your configuration after upgrade. Remove `template.containers.depends_on` from your configuration after upgrade. +## Resource: `google_secret_manager_secret_version` + +### `secret_data_wo` and `secret_data_wo_version` must be set together + +This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. + +## Resource: `google_sql_user` + +### `password_wo_version` is now required when `password_wo` is set + +This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. + ## Resource: `google_vertex_ai_endpoint` ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. From 5d962fd47d675417369b49404b791fe9a6228305 Mon Sep 17 00:00:00 2001 From: duaaeissa Date: Tue, 19 Aug 2025 18:44:00 +0100 Subject: [PATCH 801/884] SSM: make `ca_pool` argument optional for private instances that use Google-managed trusted certificates. 
(#14900) --- mmv1/products/securesourcemanager/Instance.yaml | 1 - ...e_source_manager_instance_private_trusted_cert.tf.tmpl | 8 ++++++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/secure_source_manager_instance_private_trusted_cert.tf.tmpl diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index b03875eee254..8a1531efa591 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -268,7 +268,6 @@ properties: type: String description: | CA pool resource, resource must in the format of `projects/{project}/locations/{location}/caPools/{ca_pool}`. - required: true immutable: true - name: 'httpServiceAttachment' type: String diff --git a/mmv1/templates/terraform/examples/secure_source_manager_instance_private_trusted_cert.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_instance_private_trusted_cert.tf.tmpl new file mode 100644 index 000000000000..fa579d557a4f --- /dev/null +++ b/mmv1/templates/terraform/examples/secure_source_manager_instance_private_trusted_cert.tf.tmpl @@ -0,0 +1,8 @@ +resource "google_secure_source_manager_instance" "{{$.PrimaryResourceId}}" { + instance_id = "{{index $.Vars "instance_id"}}" + location = "us-central1" + + private_config { + is_private = true + } +} From 16f146edb86e773da0e270420b674431a54c7418 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 19 Aug 2025 14:02:55 -0400 Subject: [PATCH 802/884] fix noop for optional and required fields (#14879) --- mmv1/templates/terraform/datasource.go.tmpl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mmv1/templates/terraform/datasource.go.tmpl b/mmv1/templates/terraform/datasource.go.tmpl index 471ba248fd7f..f7bc979c589c 100644 --- a/mmv1/templates/terraform/datasource.go.tmpl +++ b/mmv1/templates/terraform/datasource.go.tmpl @@ -58,11 +58,13 @@ func DataSource{{ 
.ResourceName -}}() *schema.Resource { dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(rs) - // Set 'Required' schema elements + {{if .DatasourceRequiredFields}} tpgresource.AddRequiredFieldsToSchema(dsSchema, {{range $index, $field := .DatasourceRequiredFields}}{{if gt $index 0}}, {{end}}{{printf "%q" $field}}{{end}}) - - // Set 'Optional' schema elements + {{end}} + + {{if .DatasourceOptionalFields}} tpgresource.AddOptionalFieldsToSchema(dsSchema, {{range $index, $field := .DatasourceOptionalFields}}{{if gt $index 0}}, {{end}}{{printf "%q" $field}}{{end}}) + {{end}} return &schema.Resource{ Read: dataSource{{ $.ResourceName -}}Read, From b437cf57bf3b734e28c795e5d3f42ab73be64fa5 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Tue, 19 Aug 2025 14:37:14 -0400 Subject: [PATCH 803/884] Fix VCR comment for EAP to be more readable (#14903) --- .../cmd/templates/vcr/post_replay_eap.tmpl | 46 ++++ .ci/magician/cmd/test_eap_vcr.go | 11 +- .ci/magician/cmd/test_eap_vcr_test.go | 252 ++++++++++++++++++ 3 files changed, 308 insertions(+), 1 deletion(-) create mode 100644 .ci/magician/cmd/templates/vcr/post_replay_eap.tmpl create mode 100644 .ci/magician/cmd/test_eap_vcr_test.go diff --git a/.ci/magician/cmd/templates/vcr/post_replay_eap.tmpl b/.ci/magician/cmd/templates/vcr/post_replay_eap.tmpl new file mode 100644 index 000000000000..5b5e5475527e --- /dev/null +++ b/.ci/magician/cmd/templates/vcr/post_replay_eap.tmpl @@ -0,0 +1,46 @@ +{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0)}} +#### Non-exercised tests + +{{if gt (len .NotRunBetaTests) 0 -}} +{{color "red" "Tests were added that are skipped in VCR:"}} +{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} +{{end}} + +{{if gt (len .NotRunGATests) 0 -}} +{{color "red" "Tests were added that are GA-only additions and require manual runs:"}} +{{range .NotRunGATests}}{{. 
| printf "- %s\n"}}{{end}} +{{end}} +{{end}} +#### Tests analytics +Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} +Passed tests: {{len .ReplayingResult.PassedTests}} +Skipped tests: {{len .ReplayingResult.SkippedTests}} +Affected tests: {{len .ReplayingResult.FailedTests}} + +Affected service packages: +{{if .RunFullVCR}} +All service packages are affected +{{else if gt (len .AffectedServices) 0}} +{{range .AffectedServices}} +`{{.}}` {{/* remove trailing whitespace */ -}} +{{end}} +{{else}} +None +{{end}} +{{ if gt (len .ReplayingResult.FailedTests) 0 -}} +#### Action taken +Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Affected tests: +{{range .ReplayingResult.FailedTests}} +`{{.}}` {{/* remove trailing whitespace */ -}} +{{end}} + +[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/) +{{ else -}} +{{- if .ReplayingErr -}} +{{color "red" "Errors occurred during REPLAYING mode. 
Please fix them to complete your PR."}} +{{- else -}} +{{color "green" "All tests passed!"}} +{{- end}} + +View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) +{{- end}} \ No newline at end of file diff --git a/.ci/magician/cmd/test_eap_vcr.go b/.ci/magician/cmd/test_eap_vcr.go index 43a2834ab1de..7f288d0d9f8a 100644 --- a/.ci/magician/cmd/test_eap_vcr.go +++ b/.ci/magician/cmd/test_eap_vcr.go @@ -15,6 +15,11 @@ import ( "github.com/spf13/cobra" ) +var ( + //go:embed templates/vcr/post_replay_eap.tmpl + postReplayEAPTmplText string +) + var tevRequiredEnvironmentVariables = [...]string{ "GEN_PATH", "GOCACHE", @@ -189,7 +194,7 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRu Version: provider.Private.String(), Head: head, } - comment, err := formatPostReplay(postReplayData) + comment, err := formatPostReplayEAP(postReplayData) if err != nil { return fmt.Errorf("error formatting post replay comment: %w", err) } @@ -290,3 +295,7 @@ View the [build log](https://storage.cloud.google.com/ci-vcr-logs/%s/refs/heads/ func init() { rootCmd.AddCommand(testEAPVCRCmd) } + +func formatPostReplayEAP(data postReplay) (string, error) { + return formatComment("post_replay_eap.tmpl", postReplayEAPTmplText, data) +} diff --git a/.ci/magician/cmd/test_eap_vcr_test.go b/.ci/magician/cmd/test_eap_vcr_test.go new file mode 100644 index 000000000000..63bbe584e923 --- /dev/null +++ b/.ci/magician/cmd/test_eap_vcr_test.go @@ -0,0 +1,252 @@ +package cmd + +import ( + "fmt" + "strings" + "testing" + + "magician/provider" + "magician/vcr" +) + +func TestAnalyticsCommentEAP(t *testing.T) { + tests := []struct { + name string + data postReplay + wantContains []string + }{ + { + name: "run full vcr is false and no affected services", + data: postReplay{ + ReplayingResult: vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + SkippedTests: []string{"d", "e"}, + 
FailedTests: []string{"f"}, + }, + RunFullVCR: false, + AffectedServices: []string{}, + }, + wantContains: []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "Affected service packages", + "None", + }, + }, + { + name: "run full vcr is false and has affected services", + data: postReplay{ + ReplayingResult: vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + SkippedTests: []string{"d", "e"}, + FailedTests: []string{"f"}, + }, + RunFullVCR: false, + AffectedServices: []string{"svc-a", "svc-b"}, + }, + wantContains: []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "Affected service packages", + "`svc-a`", + "`svc-b`", + }, + }, + { + name: "run full vcr is true", + data: postReplay{ + ReplayingResult: vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + SkippedTests: []string{"d", "e"}, + FailedTests: []string{"f"}, + }, + RunFullVCR: true, + AffectedServices: []string{}, + }, + wantContains: []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "Affected service packages", + "All service packages are affected", + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatPostReplayEAP(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) + } + } + }) + } +} + +func TestNonExercisedTestsCommentEAP(t *testing.T) { + tests := []struct { + name string + data postReplay + wantContains []string + }{ + { + name: "with not run beta tests", + data: postReplay{ + NotRunBetaTests: []string{"beta-1", "beta-2"}, + }, + wantContains: []string{ + "#### Non-exercised tests", + "", + color("red", "Tests were added that are skipped in VCR:"), + "- 
beta-1", + "- beta-2", + }, + }, + { + name: "with not run ga tests", + data: postReplay{ + NotRunGATests: []string{"ga-1", "ga-2"}, + }, + wantContains: []string{ + "#### Non-exercised tests", + "", + "", + "", + color("red", "Tests were added that are GA-only additions and require manual runs:"), + "- ga-1", + "- ga-2", + }, + }, + { + name: "with not run ga tests and not run beta tests", + data: postReplay{ + NotRunGATests: []string{"ga-1", "ga-2"}, + NotRunBetaTests: []string{"beta-1", "beta-2"}, + }, + wantContains: []string{ + "#### Non-exercised tests", + "", + color("red", "Tests were added that are skipped in VCR:"), + "- beta-1", + "- beta-2", + "", + "", + "", + color("red", "Tests were added that are GA-only additions and require manual runs:"), + "- ga-1", + "- ga-2", + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatPostReplayEAP(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) + } + } + }) + } +} + +func TestWithReplayFailedTestsEAP(t *testing.T) { + tests := []struct { + name string + data postReplay + wantContains []string + }{ + { + name: "with failed tests", + data: postReplay{ + ReplayingResult: vcr.Result{ + FailedTests: []string{"a", "b"}, + }, + }, + wantContains: []string{ + "#### Action taken", + "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. 
Affected tests", + "`a`", + "`b`", + "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/)", + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatPostReplayEAP(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) + } + } + }) + } +} + +func TestWithoutReplayFailedTestsEAP(t *testing.T) { + tests := []struct { + name string + data postReplay + wantContains []string + }{ + { + name: "with replay error", + data: postReplay{ + ReplayingErr: fmt.Errorf("some error"), + BuildID: "build-123", + Head: "auto-pr-123", + LogBucket: "ci-vcr-logs", + Version: provider.Beta.String(), + }, + wantContains: []string{ + color("red", "Errors occurred during REPLAYING mode. Please fix them to complete your PR."), + "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-123/artifacts/build-123/build-log/replaying_test.log)", + }, + }, + { + name: "without replay error", + data: postReplay{ + BuildID: "build-123", + Head: "auto-pr-123", + LogBucket: "ci-vcr-logs", + Version: provider.Beta.String(), + }, + wantContains: []string{ + color("green", "All tests passed!"), + "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-123/artifacts/build-123/build-log/replaying_test.log)", + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatPostReplayEAP(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + for _, wc := range tc.wantContains { + if !strings.Contains(got, wc) { + t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) + } + } + }) + } +} From e57a44b21f7477ee977ddca6b1ec8477c1be38b5 Mon Sep 17 00:00:00 2001 From: Deepraj K Pednekar 
Date: Tue, 19 Aug 2025 21:13:35 +0000 Subject: [PATCH 804/884] IAP princiapls for regional forwarding rule (#14877) --- .../iap/ForwardingRuleRegionalService.yaml | 43 ++++++++++++++++ ...rwarding_rule_region_service_basic.tf.tmpl | 49 +++++++++++++++++++ 2 files changed, 92 insertions(+) create mode 100644 mmv1/products/iap/ForwardingRuleRegionalService.yaml create mode 100644 mmv1/templates/terraform/examples/forwarding_rule_region_service_basic.tf.tmpl diff --git a/mmv1/products/iap/ForwardingRuleRegionalService.yaml b/mmv1/products/iap/ForwardingRuleRegionalService.yaml new file mode 100644 index 000000000000..6c33585b4841 --- /dev/null +++ b/mmv1/products/iap/ForwardingRuleRegionalService.yaml @@ -0,0 +1,43 @@ +name: 'WebRegionForwardingRuleService' +description: | + Only used to generate IAM resources +# This resource is only used to generate IAM resources. They do not correspond to real +# GCP resources, and should not be used to generate anything other than IAM support. +exclude_resource: true +docs: +id_format: 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' +base_url: 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' +self_link: 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' +import_format: + - 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +iam_policy: + method_name_separator: ':' + parent_resource_type: 'google_compute_forwarding_rule' + fetch_iam_policy_verb: 'POST' + allowed_iam_role: 'roles/iap.httpsResourceAccessor' + parent_resource_attribute: 'forwarding_rule_region_service_name' + iam_conditions_request_type: 'REQUEST_BODY' + example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' +exclude_tgc: true +examples: + - name: 'forwarding_rule_region_service_basic' + primary_resource_id: 'default' + primary_resource_name: 
'fmt.Sprintf("tf-test-forwarding-rule-region-service%s", context["random_suffix"])' + vars: + forwarding_rule_region_service_name: 'forwarding-rule-region-service' + regional_health_check_name: 'tf-test-region-health-check' + regional_backend_service_name: 'regional-bs' + regional_url_map_name: 'regional-url-map' + regional_target_http_proxy_name: 'regional-target-http-proxy' + compute_network_name: 'tf-test-network-name' + compute_subnetwork_name: 'tf-test-subnetwork-name' +parameters: +properties: + - name: 'name' + type: String + description: Name or self link of a regional forwarding rule service. + required: true diff --git a/mmv1/templates/terraform/examples/forwarding_rule_region_service_basic.tf.tmpl b/mmv1/templates/terraform/examples/forwarding_rule_region_service_basic.tf.tmpl new file mode 100644 index 000000000000..37d48e6e9b8e --- /dev/null +++ b/mmv1/templates/terraform/examples/forwarding_rule_region_service_basic.tf.tmpl @@ -0,0 +1,49 @@ +resource "google_compute_region_health_check" "default" { + name = "{{index $.Vars "regional_health_check_name"}}" + http_health_check { + port = 80 + request_path = "/" + } +} + +resource "google_compute_region_backend_service" "default" { + name = "{{index $.Vars "regional_backend_service_name"}}" + protocol = "HTTP" + port_name = "http" + timeout_sec = 10 + health_checks = [google_compute_region_health_check.default.id] + load_balancing_scheme = "EXTERNAL_MANAGED" +} + +resource "google_compute_region_url_map" "default" { + name = "{{index $.Vars "regional_url_map_name"}}" + default_service = google_compute_region_backend_service.default.id +} + + +resource "google_compute_region_target_http_proxy" "default" { + name = "{{index $.Vars "regional_target_http_proxy_name"}}" + url_map = google_compute_region_url_map.default.id +} + +resource "google_compute_network" "my_vpc" { + name = "{{index $.Vars "compute_network_name"}}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" 
"proxy_only_subnet" { + name = "{{index $.Vars "compute_subnetwork_name"}}" + ip_cidr_range = "10.129.0.0/23" + network = google_compute_network.my_vpc.id + purpose = "REGIONAL_MANAGED_PROXY" + role = "ACTIVE" +} + +resource "google_compute_forwarding_rule" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "forwarding_rule_region_service_name"}}" + target = google_compute_region_target_http_proxy.default.id + port_range = "80" + load_balancing_scheme = "EXTERNAL_MANAGED" + network = google_compute_network.my_vpc.id + depends_on = [google_compute_subnetwork.proxy_only_subnet] +} From 179ed58ec49af6f9a719f9cc023689d4622e4ef2 Mon Sep 17 00:00:00 2001 From: Matt Laddy <92891258+MattLaddy@users.noreply.github.com> Date: Tue, 19 Aug 2025 17:33:36 -0400 Subject: [PATCH 805/884] add Developer Connect Triggers to Cloudbuild (#14530) --- mmv1/products/cloudbuild/Trigger.yaml | 82 ++++++++++++++++++- ...ild_trigger_developer_connect_pull.tf.tmpl | 13 +++ ...ild_trigger_developer_connect_push.tf.tmpl | 12 +++ ...gger_developer_connect_push_branch.tf.tmpl | 11 +++ 4 files changed, 117 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_pull.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push_branch.tf.tmpl diff --git a/mmv1/products/cloudbuild/Trigger.yaml b/mmv1/products/cloudbuild/Trigger.yaml index f4367b6eab7d..b85d87dd3436 100644 --- a/mmv1/products/cloudbuild/Trigger.yaml +++ b/mmv1/products/cloudbuild/Trigger.yaml @@ -30,7 +30,7 @@ id_format: 'projects/{{project}}/locations/{{location}}/triggers/{{trigger_id}}' base_url: 'projects/{{project}}/locations/{{location}}/triggers' self_link: 'projects/{{project}}/locations/{{location}}/triggers/{{trigger_id}}' update_verb: 'PATCH' - # import by default only works with old-style self links ending in a name 
+# import by default only works with old-style self links ending in a name import_format: - 'projects/{{project}}/triggers/{{trigger_id}}' - 'projects/{{project}}/locations/{{location}}/triggers/{{trigger_id}}' @@ -134,6 +134,12 @@ examples: 'installation_id': '31300675' 'pat_secret': '"projects/gcb-terraform-creds/secrets/github-pat/versions/latest"' 'repo_uri': '"https://github.com/gcb-repos-robot/tf-demo.git"' + - name: 'cloudbuild_trigger_developer_connect_pull' + primary_resource_id: 'developer-connect-trigger-pull' + - name: 'cloudbuild_trigger_developer_connect_push' + primary_resource_id: 'developer-connect-trigger-push' + - name: 'cloudbuild_trigger_developer_connect_push_branch' + primary_resource_id: 'dc-trigger-regular-push-branch' parameters: - name: 'location' type: String @@ -274,6 +280,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 'developer_connect_event_config' properties: - name: 'repository' type: String @@ -357,6 +364,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 'developer_connect_event_config' properties: - name: 'uri' type: String @@ -441,6 +449,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 'developer_connect_event_config' properties: - name: 'projectId' type: String @@ -506,6 +515,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 'developer_connect_event_config' properties: - name: 'owner' type: String @@ -585,6 +595,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 'developer_connect_event_config' properties: - name: 'repoSlug' type: String @@ -669,6 +680,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 'developer_connect_event_config' properties: - name: 'subscription' type: String @@ -705,6 +717,7 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' + - 
'developer_connect_event_config' properties: - name: 'secret' type: String @@ -1335,3 +1348,70 @@ properties: Paths must be absolute and cannot conflict with other volume paths on the same build step or with certain reserved volume paths. + - name: 'developerConnectEventConfig' + type: NestedObject + description: | + Configuration for triggers that respond to Developer Connect events. + exactly_one_of: + - 'pullRequest' + - 'push' + properties: + - name: 'gitRepositoryLink' + type: String + description: | + The Developer Connect Git repository link, formatted as `projects/*/locations/*/connections/*/gitRepositoryLink/*`. + required: true + - name: 'gitRepositoryLinkType' + type: Enum + description: | + The type of DeveloperConnect GitRepositoryLink. + output: true + enum_values: + - 'GIT_REPOSITORY_LINK_TYPE_UNSPECIFIED' + - 'GITHUB' + - 'GITHUB_ENTERPRISE' + - 'GITLAB' + - 'GITLAB_ENTERPRISE' + - 'BITBUCKET_DATA_CENTER' + - 'BITBUCKET_CLOUD' + - name: 'pullRequest' + type: NestedObject + description: | + Filter to match changes in pull requests. + properties: + - name: 'branch' + type: String + description: | + Regex of branches to match. + - name: 'commentControl' + type: Enum + description: | + Configure builds to run whether a repository owner or collaborator need to comment `/gcbrun`. + enum_values: + - 'COMMENTS_DISABLED' + - 'COMMENTS_ENABLED' + - 'COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY' + - name: 'invertRegex' + type: Boolean + description: | + If true, branches that do NOT match the git_ref will trigger a build. + - name: 'push' + type: NestedObject + description: | + Filter to match changes in refs like branches and tags. + exactly_one_of: + - 'branch' + - 'tag' + properties: + - name: 'branch' + type: String + description: | + Regex of branches to match. + - name: 'tag' + type: String + description: | + Regex of tags to match. 
+ - name: 'invertRegex' + type: Boolean + description: | + If true, only trigger a build if the revision regex does NOT match the git_ref regex. diff --git a/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_pull.tf.tmpl b/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_pull.tf.tmpl new file mode 100644 index 000000000000..d7db752bb872 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_pull.tf.tmpl @@ -0,0 +1,13 @@ +resource "google_cloudbuild_trigger" "{{$.PrimaryResourceId}}" { + location = "us-central1" + + developer_connect_event_config { + git_repository_link = "projects/cryptic-tower-286020/locations/us-central1/connections/prod-bbs-push/gitRepositoryLinks/cbprob-prod-us-central1-push1" + pull_request { + branch = "^master$" + invert_regex = false + comment_control = "COMMENTS_ENABLED" + } + } + filename = "cloudbuild.yaml" +} diff --git a/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push.tf.tmpl b/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push.tf.tmpl new file mode 100644 index 000000000000..2718678048cb --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push.tf.tmpl @@ -0,0 +1,12 @@ +resource "google_cloudbuild_trigger" "{{$.PrimaryResourceId}}" { + location = "us-central1" + + developer_connect_event_config { + git_repository_link = "projects/cryptic-tower-286020/locations/us-central1/connections/prod-bbs-push/gitRepositoryLinks/cbprob-prod-us-central1-push1" + push { + tag = "^0.1.*" + invert_regex = true + } + } + filename = "cloudbuild.yaml" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push_branch.tf.tmpl b/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push_branch.tf.tmpl new file mode 100644 index 000000000000..5236bc926421 --- /dev/null +++ 
b/mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push_branch.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_cloudbuild_trigger" "{{$.PrimaryResourceId}}" { + location = "us-central1" + + developer_connect_event_config { + git_repository_link = "projects/cryptic-tower-286020/locations/us-central1/connections/prod-bbs-push/gitRepositoryLinks/cbprob-prod-us-central1-push1" + push { + branch = "main" + } + } + filename = "cloudbuild.yaml" +} From 229bd6b09a28ac1a86292dd2c27d71190a6349d2 Mon Sep 17 00:00:00 2001 From: nimish-khurana Date: Wed, 20 Aug 2025 03:05:37 +0530 Subject: [PATCH 806/884] feat: enable default_from_api flag for ODB Network related fields in Oracledatabase CloudVmCluster (#14896) --- mmv1/products/oracledatabase/CloudVmCluster.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/products/oracledatabase/CloudVmCluster.yaml b/mmv1/products/oracledatabase/CloudVmCluster.yaml index ced187994a37..a83c31dfc8ed 100644 --- a/mmv1/products/oracledatabase/CloudVmCluster.yaml +++ b/mmv1/products/oracledatabase/CloudVmCluster.yaml @@ -331,15 +331,18 @@ properties: projects/{project}/locations/{location}/odbNetworks/{odb_network} It is optional but if specified, this should match the parent ODBNetwork of the odb_subnet and backup_odb_subnet. + default_from_api: true - name: odbSubnet type: String description: |- The name of the OdbSubnet associated with the VM Cluster for IP allocation. Format: projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} + default_from_api: true - name: backupOdbSubnet type: String description: |- The name of the backup OdbSubnet associated with the VM Cluster. 
Format: projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} + default_from_api: true From c49dc134419d098a3803b0662b4127f90827b90f Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 19 Aug 2025 16:30:38 -0700 Subject: [PATCH 807/884] tgc: move caiasset folder to the management of magic-modules (#14862) --- mmv1/provider/terraform_tgc.go | 1 + mmv1/third_party/tgc/caiasset/asset.go | 132 +++++++++++++++++++++++++ 2 files changed, 133 insertions(+) create mode 100644 mmv1/third_party/tgc/caiasset/asset.go diff --git a/mmv1/provider/terraform_tgc.go b/mmv1/provider/terraform_tgc.go index 3f0802052af0..2a0bf7a9d7e2 100644 --- a/mmv1/provider/terraform_tgc.go +++ b/mmv1/provider/terraform_tgc.go @@ -354,6 +354,7 @@ func (tgc TerraformGoogleConversion) CopyCommonFiles(outputFolder string, genera tgc.CopyFileList(outputFolder, retrieveTestSourceCodeWithLocation(".go")) resourceConverters := map[string]string{ + "../caiasset/asset.go": "third_party/tgc/caiasset/asset.go", "converters/google/resources/cai/constants.go": "third_party/tgc/cai/constants.go", "converters/google/resources/constants.go": "third_party/tgc/constants.go", "converters/google/resources/cai.go": "third_party/tgc/cai.go", diff --git a/mmv1/third_party/tgc/caiasset/asset.go b/mmv1/third_party/tgc/caiasset/asset.go new file mode 100644 index 000000000000..ec1f61b72572 --- /dev/null +++ b/mmv1/third_party/tgc/caiasset/asset.go @@ -0,0 +1,132 @@ +package caiasset + +import ( + "fmt" + "strings" + "time" +) + +// Asset is the CAI representation of a resource. +type Asset struct { + // The name, in a peculiar format: `\\.googleapis.com/` + Name string `json:"name"` + // The type name in `google..` format. 
+ Type string `json:"asset_type"` + Resource *AssetResource `json:"resource,omitempty"` + IAMPolicy *IAMPolicy `json:"iam_policy,omitempty"` + OrgPolicy []*OrgPolicy `json:"org_policy,omitempty"` + V2OrgPolicies []*V2OrgPolicies `json:"v2_org_policies,omitempty"` + Ancestors []string `json:"ancestors"` +} + +// IAMPolicy is the representation of a Cloud IAM policy set on a cloud resource. +type IAMPolicy struct { + Bindings []IAMBinding `json:"bindings"` +} + +// IAMBinding binds a role to a set of members. +type IAMBinding struct { + Role string `json:"role"` + Members []string `json:"members"` +} + +// AssetResource is nested within the Asset type. +type AssetResource struct { + Version string `json:"version"` + DiscoveryDocumentURI string `json:"discovery_document_uri"` + DiscoveryName string `json:"discovery_name"` + Parent string `json:"parent"` + Data map[string]interface{} `json:"data"` + Location string `json:"location,omitempty"` +} + +// OrgPolicy is for managing organization policies. 
+type OrgPolicy struct { + Constraint string `json:"constraint,omitempty"` + ListPolicy *ListPolicy `json:"list_policy,omitempty"` + BooleanPolicy *BooleanPolicy `json:"boolean_policy,omitempty"` + RestoreDefault *RestoreDefault `json:"restore_default,omitempty"` + UpdateTime *Timestamp `json:"update_time,omitempty"` +} + +// V2OrgPolicies is the represtation of V2OrgPolicies +type V2OrgPolicies struct { + Name string `json:"name"` + PolicySpec *PolicySpec `json:"spec,omitempty"` +} + +// Spec is the representation of Spec for Custom Org Policy +type PolicySpec struct { + Etag string `json:"etag,omitempty"` + UpdateTime *Timestamp `json:"update_time,omitempty"` + PolicyRules []*PolicyRule `json:"rules,omitempty"` + InheritFromParent bool `json:"inherit_from_parent,omitempty"` + Reset bool `json:"reset,omitempty"` +} + +type PolicyRule struct { + Values *StringValues `json:"values,omitempty"` + AllowAll bool `json:"allow_all,omitempty"` + DenyAll bool `json:"deny_all,omitempty"` + Enforce bool `json:"enforce,omitempty"` + Condition *Expr `json:"condition,omitempty"` +} + +type StringValues struct { + AllowedValues []string `json:"allowed_values,omitempty"` + DeniedValues []string `json:"denied_values,omitempty"` +} + +type Expr struct { + Expression string `json:"expression,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Location string `json:"location,omitempty"` +} + +type Timestamp struct { + Seconds int64 `json:"seconds,omitempty"` + Nanos int64 `json:"nanos,omitempty"` +} + +func (t Timestamp) MarshalJSON() ([]byte, error) { + return []byte(`"` + time.Unix(0, t.Nanos).UTC().Format(time.RFC3339Nano) + `"`), nil +} + +func (t *Timestamp) UnmarshalJSON(b []byte) error { + p, err := time.Parse(time.RFC3339Nano, strings.Trim(string(b), `"`)) + if err != nil { + return fmt.Errorf("bad Timestamp: %v", err) + } + t.Seconds = p.Unix() + t.Nanos = p.UnixNano() + return nil +} + +// ListPolicyAllValues is used to 
set `Policies` that apply to all possible +// configuration values rather than specific values in `allowed_values` or +// `denied_values`. +type ListPolicyAllValues int32 + +// ListPolicy can define specific values and subtrees of Cloud Resource +// Manager resource hierarchy (`Organizations`, `Folders`, `Projects`) that +// are allowed or denied by setting the `allowed_values` and `denied_values` +// fields. +type ListPolicy struct { + AllowedValues []string `json:"allowed_values,omitempty"` + DeniedValues []string `json:"denied_values,omitempty"` + AllValues ListPolicyAllValues `json:"all_values,omitempty"` + SuggestedValue string `json:"suggested_value,omitempty"` + InheritFromParent bool `json:"inherit_from_parent,omitempty"` +} + +// BooleanPolicy If `true`, then the `Policy` is enforced. If `false`, +// then any configuration is acceptable. +type BooleanPolicy struct { + Enforced bool `json:"enforced,omitempty"` +} + +// RestoreDefault determines if the default values of the `Constraints` are active for the +// resources. 
+type RestoreDefault struct { +} From f5c915c0864f84cafc63ab80dfc9a520ec3f76d7 Mon Sep 17 00:00:00 2001 From: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Date: Wed, 20 Aug 2025 20:19:22 +0530 Subject: [PATCH 808/884] Update go.mod and go.sum (#14894) Co-authored-by: Shrishty Chandra --- mmv1/third_party/terraform/go.mod | 22 +++++++------- mmv1/third_party/terraform/go.sum | 50 +++++++++++++++---------------- 2 files changed, 35 insertions(+), 37 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 5d14fa4cdddb..d6d72af47b3d 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,7 +3,7 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.3 + cloud.google.com/go/auth v0.16.4 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 @@ -32,12 +32,12 @@ require ( github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 - golang.org/x/net v0.42.0 + golang.org/x/net v0.43.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.245.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 + google.golang.org/api v0.247.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b google.golang.org/grpc v1.74.2 - google.golang.org/protobuf v1.36.6 + google.golang.org/protobuf v1.36.7 gopkg.in/yaml.v2 v2.4.0 ) @@ -45,7 +45,7 @@ require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect cel.dev/expr v0.24.0 // indirect cloud.google.com/go v0.121.0 // indirect - cloud.google.com/go/compute/metadata v0.7.0 // indirect + cloud.google.com/go/compute/metadata v0.8.0 // indirect cloud.google.com/go/iam v1.5.2 // indirect cloud.google.com/go/longrunning v0.6.7 // indirect 
cloud.google.com/go/monitoring v1.24.2 // indirect @@ -108,13 +108,13 @@ require ( go.opentelemetry.io/otel/sdk v1.36.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/crypto v0.40.0 // indirect - golang.org/x/mod v0.25.0 // indirect + golang.org/x/crypto v0.41.0 // indirect + golang.org/x/mod v0.26.0 // indirect golang.org/x/sync v0.16.0 // indirect - golang.org/x/sys v0.34.0 // indirect - golang.org/x/text v0.27.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect golang.org/x/time v0.12.0 // indirect - golang.org/x/tools v0.34.0 // indirect + golang.org/x/tools v0.35.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index a56b9444c47a..3889c6c81471 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -5,14 +5,14 @@ cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= -cloud.google.com/go/auth v0.16.3 h1:kabzoQ9/bobUmnseYnBO6qQG7q4a/CffFRlJSxv2wCc= -cloud.google.com/go/auth v0.16.3/go.mod h1:NucRGjaXfzP1ltpcQ7On/VTZ0H4kWB5Jy+Y9Dnm76fA= +cloud.google.com/go/auth v0.16.4 h1:fXOAIQmkApVvcIn7Pc2+5J8QTMVbUGLscnSVNl11su8= +cloud.google.com/go/auth v0.16.4/go.mod h1:j10ncYwjX/g3cdX7GpEzsdM+d+ZNsXAbb6qXA7p1Y5M= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod 
h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= -cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= -cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA= +cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw= cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= @@ -22,8 +22,6 @@ cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+ dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= @@ -302,8 +300,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= -golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= @@ -313,8 +311,8 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= -golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -325,8 +323,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod 
h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= -golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= @@ -355,18 +353,18 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= -golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.33.0 h1:NuFncQrRcaRvVmgRkvM3j/F00gWIAlcmlB8ACEKmGIg= -golang.org/x/term v0.33.0/go.mod 
h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0= +golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4= +golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= -golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -378,14 +376,14 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= -golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= +golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= +golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.245.0 h1:YliGvz1rjXB+sTLNIST6Ffeji9WlRdLQ+LPl9ruSa5Y= -google.golang.org/api v0.245.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8= +google.golang.org/api v0.247.0 h1:tSd/e0QrUlLsrwMKmkbQhYVa109qIintOls2Wh6bngc= +google.golang.org/api v0.247.0/go.mod h1:r1qZOPmxXffXg6xS5uhx16Fa/UFY8QU/K4bfKrnvovM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -397,8 +395,8 @@ google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuO google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b h1:zPKJod4w6F1+nRGDI9ubnXYhU9NSWoFAijkHkUXeTK8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod 
h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -408,8 +406,8 @@ google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= -google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= +google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= From 30c336a27761fa30491779e2edd09e7ae50c5de7 Mon Sep 17 00:00:00 2001 From: Daniel Liao Date: Thu, 21 Aug 2025 01:47:11 +1000 Subject: [PATCH 809/884] Add support to pause google_cloud_tasks_queue resources (#14278) Signed-off-by: Daniel Liao --- mmv1/products/cloudtasks/Queue.yaml | 23 +++++++ .../cloud_tasks_queue_state.go.tmpl | 16 +++++ .../cloud_tasks_queue_state.go.tmpl | 35 ++++++++++ .../resource_cloud_tasks_queue_test.go | 68 ++++++++++++++++++- 4 files changed, 141 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl create mode 100644 mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl diff --git a/mmv1/products/cloudtasks/Queue.yaml b/mmv1/products/cloudtasks/Queue.yaml index ed7b41ca2ec8..d1f4cff5abab 100644 --- 
a/mmv1/products/cloudtasks/Queue.yaml +++ b/mmv1/products/cloudtasks/Queue.yaml @@ -34,6 +34,8 @@ iam_policy: - '{{name}}' custom_code: constants: 'templates/terraform/constants/cloud_tasks_retry_config_custom_diff.go.tmpl' + post_create: 'templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl' + post_update: 'templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl' examples: - name: 'queue_basic' primary_resource_id: 'default' @@ -56,6 +58,18 @@ examples: primary_resource_id: 'http_target_oauth' vars: name: 'cloud-tasks-queue-http-target-oauth' +virtual_fields: + - name: 'desired_state' + type: Enum + description: | + The desired state of the queue. Use this to pause and resume the queue. + + * RUNNING: The queue is running. Tasks can be dispatched. + * PAUSED: The queue is paused. Tasks are not dispatched but can be added to the queue. + default_value: 'RUNNING' + enum_values: + - 'RUNNING' + - 'PAUSED' parameters: - name: 'location' type: String @@ -207,6 +221,15 @@ properties: This field may contain any value between 0.0 and 1.0, inclusive. 0.0 is the default and means that no operations are logged. required: true + - name: 'state' + type: Enum + description: | + The current state of the queue. + output: true + enum_values: + - 'RUNNING' + - 'PAUSED' + - 'DISABLED' - name: 'httpTarget' type: NestedObject description: Modifies HTTP target for HTTP tasks. 
diff --git a/mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl b/mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl new file mode 100644 index 000000000000..7b21cc1c06f1 --- /dev/null +++ b/mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl @@ -0,0 +1,16 @@ +// Handle desired state after queue creation +if v, ok := d.GetOk("desired_state"); ok && v.(string) == "PAUSED" { + pauseUrl := fmt.Sprintf("%s%s:pause", config.CloudTasksBasePath, id) + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: pauseUrl, + UserAgent: userAgent, + }) + + if err != nil { + return fmt.Errorf("Error pausing queue %q: %s", d.Id(), err) + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl b/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl new file mode 100644 index 000000000000..59e65255432c --- /dev/null +++ b/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl @@ -0,0 +1,35 @@ +// Handle desired state changes +if d.HasChange("desired_state") { + old, new := d.GetChange("desired_state") + + if old.(string) != new.(string) { + var action string + + actionUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}CloudTasksBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/queues/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + if new.(string) == "PAUSED" { + actionUrl = fmt.Sprintf("%s:pause", actionUrl) + action = "pausing" + } else if new.(string) == "RUNNING" { + actionUrl = fmt.Sprintf("%s:resume", actionUrl) + action = "resuming" + } + + if actionUrl != "" { + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: actionUrl, + UserAgent: userAgent, + }) + + if err != nil { + return fmt.Errorf("Error %s queue %q: 
%s", action, d.Id(), err) + } + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go index 707509e9033e..eb7d61fe65ec 100644 --- a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go +++ b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go @@ -2,9 +2,10 @@ package cloudtasks_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -173,6 +174,37 @@ func TestAccCloudTasksQueue_HttpTargetOAuth_update(t *testing.T) { }) } +func TestAccCloudTasksQueue_paused(t *testing.T) { + t.Parallel() + + name := "cloudtasksqueuetest-" + acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudTasksQueue_full(name), + }, + { + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_engine_routing_override.0.service", "app_engine_routing_override.0.version", "app_engine_routing_override.0.instance"}, + }, + { + Config: testAccCloudTasksQueue_paused(name), + }, + { + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_engine_routing_override.0.service", "app_engine_routing_override.0.version", "app_engine_routing_override.0.instance", "desired_state"}, + }, + }, + }) +} + func testAccCloudTasksQueue_basic(name string) string { return fmt.Sprintf(` resource "google_cloud_tasks_queue" "default" { @@ -192,6 +224,7 @@ func 
testAccCloudTasksQueue_full(name string) string { resource "google_cloud_tasks_queue" "default" { name = "%s" location = "us-central1" + desired_state = "RUNNING" app_engine_routing_override { service = "worker" @@ -378,3 +411,36 @@ resource "google_service_account" "test" { `, name, serviceAccountID) } + +func testAccCloudTasksQueue_paused(name string) string { + return fmt.Sprintf(` +resource "google_cloud_tasks_queue" "default" { + name = "%s" + location = "us-central1" + desired_state = "PAUSED" + + app_engine_routing_override { + service = "main" + version = "2.0" + instance = "beta" + } + + rate_limits { + max_concurrent_dispatches = 4 + max_dispatches_per_second = 3 + } + + retry_config { + max_attempts = 6 + max_retry_duration = "5s" + max_backoff = "4s" + min_backoff = "3s" + max_doublings = 2 + } + + stackdriver_logging_config { + sampling_ratio = 0.1 + } +} +`, name) +} From 9b4bc892c598a42beec68b6360521be775d9343d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 20 Aug 2025 09:25:49 -0700 Subject: [PATCH 810/884] tgc-revival: add google_backup_dr_backup_vault and google_backup_dr_backup_plan (#14907) --- mmv1/products/backupdr/BackupPlan.yaml | 3 +++ mmv1/products/backupdr/BackupVault.yaml | 4 ++++ .../decoders/backup_dr_backup_plan.go.tmpl | 20 +++++++++++++++++++ 3 files changed, 27 insertions(+) create mode 100644 mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index c3bc7c401f27..c60029f8850a 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -29,6 +29,9 @@ timeouts: insert_minutes: 60 update_minutes: 60 delete_minutes: 60 +custom_code: + tgc_decoder: 'templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'backup_dr_backup_plan_simple' primary_resource_id: 'my-backup-plan-1' diff --git a/mmv1/products/backupdr/BackupVault.yaml 
b/mmv1/products/backupdr/BackupVault.yaml index e9c4cc5efa4b..0caeb7dc6f73 100644 --- a/mmv1/products/backupdr/BackupVault.yaml +++ b/mmv1/products/backupdr/BackupVault.yaml @@ -39,6 +39,7 @@ async: resource_inside_response: true custom_code: pre_delete: 'templates/terraform/pre_delete/backup_dr_backup_vault.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'backup_dr_backup_vault_full' primary_resource_id: 'backup-vault-test' @@ -113,6 +114,7 @@ properties: - name: 'labels' type: KeyValueLabels description: "Optional. Resource labels to represent user provided metadata. " + is_missing_in_cai: true - name: 'createTime' type: String description: 'Output only. The time when the instance was created. ' @@ -167,6 +169,7 @@ properties: type: KeyValueAnnotations description: "Optional. User annotations. See https://google.aip.dev/128#annotations\nStores small amounts of arbitrary data. " + is_missing_in_cai: true - name: 'accessRestriction' type: Enum description: | @@ -182,6 +185,7 @@ properties: - name: 'backupRetentionInheritance' type: Enum ignore_read: true + is_missing_in_cai: true description: | How a backup's enforced retention end time is inherited. Default value is `INHERIT_VAULT_RETENTION` if not provided during creation. 
enum_values: diff --git a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl new file mode 100644 index 000000000000..996b7f1a1cbd --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl @@ -0,0 +1,20 @@ +// startHourOfDay is missing in CAI, but has default value 0 in API object +if rules, ok := res["backupRules"].([]interface{}); ok { + for _, raw := range rules { + if rule, ok := raw.(map[string]interface{}); ok { + if raw, ok := rule["standardSchedule"]; ok { + if ss, ok := raw.(map[string]interface{}); ok { + if raw, ok := ss["backupWindow"]; ok { + if bw, ok := raw.(map[string]interface{}); ok { + if _, ok := ss["startHourOfDay"]; !ok { + bw["startHourOfDay"] = 0 + } + } + } + } + } + } + } +} + +return res, nil \ No newline at end of file From 38ff0d0bafc65e72e6702130d2d050ee0e56e899 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 20 Aug 2025 18:29:34 +0200 Subject: [PATCH 811/884] write-only: use `CamelizeProperty` for the expander property variable name instead of `ApiName` (#14906) --- mmv1/api/type.go | 4 +++ .../interconnect_attachment.go.tmpl | 2 +- .../terraform/post_create/labels.tmpl | 6 ++-- mmv1/templates/terraform/resource.go.tmpl | 32 +++++++++---------- mmv1/templates/tgc/resource_converter.go.tmpl | 10 +++--- .../tfplan2cai/resource_converter.go.tmpl | 10 +++--- 6 files changed, 34 insertions(+), 30 deletions(-) diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 690687db966d..ba180ceafd5f 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -508,6 +508,10 @@ func (t Type) TitlelizeProperty() string { return google.Camelize(t.Name, "upper") } +func (t Type) CamelizeProperty() string { + return google.Camelize(t.Name, "lower") +} + // If the Prefix field is already set, returns the value. // Otherwise, set the Prefix field and returns the value. 
func (t *Type) GetPrefix() string { diff --git a/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl b/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl index 6b3b0459d63c..34811f75dd02 100644 --- a/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl +++ b/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl @@ -1,6 +1,6 @@ {{- if $.HasLabelsField }} -if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) { +if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, effectiveLabelsProp)) { labels := d.Get("labels") terraformLables := d.Get("terraform_labels") diff --git a/mmv1/templates/terraform/post_create/labels.tmpl b/mmv1/templates/terraform/post_create/labels.tmpl index 427d911cee87..907acab5277f 100644 --- a/mmv1/templates/terraform/post_create/labels.tmpl +++ b/mmv1/templates/terraform/post_create/labels.tmpl @@ -1,6 +1,6 @@ {{- if $.HasLabelsField }} -if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) { - labels := d.Get("labels") +if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, effectiveLabelsProp)) { + userLabels := d.Get("labels") terraformLables := d.Get("terraform_labels") // Labels cannot be set in a create. We'll have to set them here. @@ -50,7 +50,7 @@ if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect } // Set back the labels field, as it is needed to decide the value of "labels" in the state in the read function. 
- if err := d.Set("labels", labels); err != nil { + if err := d.Set("labels", userLabels); err != nil { return fmt.Errorf("Error setting back labels: %s", err) } diff --git a/mmv1/templates/terraform/resource.go.tmpl b/mmv1/templates/terraform/resource.go.tmpl index f209f8c9cbfe..6bed9ba5b917 100644 --- a/mmv1/templates/terraform/resource.go.tmpl +++ b/mmv1/templates/terraform/resource.go.tmpl @@ -186,19 +186,19 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} - {{ $prop.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"){{ end }}, d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"){{ end }}, d, config) if err != nil { return err -{{- if $prop.SendEmptyValue -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { -{{- else if $prop.FlattenObject -}} - } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) { -{{- else -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { -{{- end}} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + {{- if $prop.SendEmptyValue -}} + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { + {{- else if $prop.FlattenObject -}} + } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) { + {{- else -}} + } else if v, ok := d.GetOkExists("{{ 
underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { + {{- end}} + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } -{{- end}} + {{- end}} {{if $.CustomCode.Encoder -}} obj, err = resource{{ $.ResourceName -}}Encoder(d, meta, obj) @@ -723,17 +723,17 @@ func resource{{ $.ResourceName -}}Update(d *schema.ResourceData, meta interface{ obj := make(map[string]interface{}) {{- range $prop := $.UpdateBodyProperties }} {{/* flattened $s won't have something stored in state so instead nil is passed to the next expander. */}} - {{- $prop.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{else}}d.Get("{{underscore $prop.Name}}"){{ end }}, d, config) + {{- $prop.CamelizeProperty -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{else}}d.Get("{{underscore $prop.Name}}"){{ end }}, d, config) if err != nil { return err {{- if $prop.SendEmptyValue -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { {{- else if $prop.FlattenObject -}} - } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) { + } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) { {{- else -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, {{ 
$prop.CamelizeProperty -}}Prop)) { {{- end}} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } {{- end}} @@ -1276,4 +1276,4 @@ func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.Resource {{- end }}{{/* range */}} return nil } -{{- end }} \ No newline at end of file +{{- end }} diff --git a/mmv1/templates/tgc/resource_converter.go.tmpl b/mmv1/templates/tgc/resource_converter.go.tmpl index 09087598c84d..a5fd4ec23e0b 100644 --- a/mmv1/templates/tgc/resource_converter.go.tmpl +++ b/mmv1/templates/tgc/resource_converter.go.tmpl @@ -81,18 +81,18 @@ func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, conf obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} {{- if $prop.FlattenObject }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) {{- else }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) {{- end}} if err != nil { return nil, err {{- if not $prop.SendEmptyValue }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { {{- else }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { + } else if v, ok := 
d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { {{- end }} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } {{- end}} diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index be1a1e4e3eef..58b3ee417f09 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -84,18 +84,18 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} {{- if $prop.FlattenObject }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) {{- else }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) {{- end}} if err != nil { return nil, err {{- if and (not $prop.SendEmptyValue) (not $prop.TGCSendEmptyValue) }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { {{- else }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { + } else if v, ok := d.GetOkExists("{{underscore 
$prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { {{- end }} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } {{- end}} From 245ba9f2798e9968a1a1705b2a43e57cf69fa863 Mon Sep 17 00:00:00 2001 From: Stephane Charite Date: Wed, 20 Aug 2025 10:20:47 -0700 Subject: [PATCH 812/884] Fix Lustre timeout failures (#14878) --- mmv1/products/lustre/Instance.yaml | 2 +- .../services/lustre/data_source_lustre_instance_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/lustre/Instance.yaml b/mmv1/products/lustre/Instance.yaml index 69e865e18e68..5d12dbee9ad7 100644 --- a/mmv1/products/lustre/Instance.yaml +++ b/mmv1/products/lustre/Instance.yaml @@ -44,7 +44,7 @@ autogen_async: true async: operation: timeouts: - insert_minutes: 20 + insert_minutes: 120 update_minutes: 20 delete_minutes: 20 base_url: '{{op_id}}' diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go index fd4618b72873..e369beca67db 100644 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go @@ -44,7 +44,7 @@ resource "google_lustre_instance" "instance" { capacity_gib = 18000 network = data.google_compute_network.lustre-network.id gke_support_enabled = false - per_unit_storage_throughput = 1000 + per_unit_storage_throughput = 1000 } // This example assumes this network already exists. 
From 8c1f68c68aa9f41bebce49ac2b04cca15782c3b8 Mon Sep 17 00:00:00 2001 From: William Yardley Date: Wed, 20 Aug 2025 10:29:28 -0700 Subject: [PATCH 813/884] container: Removed instance type from resourceManagerTags test (#14834) --- .../container/resource_container_node_pool_test.go.tmpl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index d03b5db01290..bcebd1f3dc62 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -5559,7 +5559,6 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { - machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 resource_manager_tags = { @@ -5606,12 +5605,11 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { - machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 resource_manager_tags = { "%{pid}/%{tagKey1}" = "%{tagValue1}" - "%{pid}/%{tagKey2}" = "%{tagValue2}" + "%{pid}/%{tagKey2}" = "%{tagValue2}" } } } @@ -5654,7 +5652,6 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { - machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 } } From d0bfd6442c51013fe9443ecca665a4e8abebce4c Mon Sep 17 00:00:00 2001 From: Max Portocarrero CI&T <105444618+maxi-cit@users.noreply.github.com> Date: Wed, 20 Aug 2025 14:53:35 -0500 Subject: [PATCH 814/884] updated organization security policies (#14914) --- .../compute/OrganizationSecurityPolicy.yaml | 17 +++++++--- ..._organization_security_policy_test.go.tmpl | 34 +++++++++++++++++++ 2 files changed, 46 insertions(+), 5 deletions(-) diff --git 
a/mmv1/products/compute/OrganizationSecurityPolicy.yaml b/mmv1/products/compute/OrganizationSecurityPolicy.yaml index 966188adf897..f3b0a7039a4e 100644 --- a/mmv1/products/compute/OrganizationSecurityPolicy.yaml +++ b/mmv1/products/compute/OrganizationSecurityPolicy.yaml @@ -58,15 +58,20 @@ properties: - name: 'displayName' type: String description: | - A textual name of the security policy. + User-provided name of the organization security policy. The name should be unique in the organization in which the security policy is created. This should only be used when SecurityPolicyType is FIREWALL. min_version: 'beta' - required: true immutable: true - name: 'description' type: String description: | A textual description for the organization security policy. min_version: 'beta' + - name: 'shortName' + type: String + description: | + User-provided name of the organization security policy. The name should be unique in the organization in which the security policy is created. This should only be used when SecurityPolicyType is CLOUD_ARMOR. + min_version: 'beta' + immutable: true - name: 'fingerprint' type: Fingerprint description: | @@ -84,11 +89,13 @@ properties: - name: 'type' type: Enum description: | - The type indicates the intended use of the security policy. - For organization security policies, the only supported type - is "FIREWALL". + The type indicates the intended use of the security policy. This field can be set only at resource creation time. 
min_version: 'beta' immutable: true default_value: "FIREWALL" enum_values: - 'FIREWALL' + - 'CLOUD_ARMOR' + - 'CLOUD_ARMOR_EDGE' + - 'CLOUD_ARMOR_INTERNAL_SERVICE' + - 'CLOUD_ARMOR_NETWORK' diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl index cdfe1e9894a7..56bbcb25e466 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl @@ -42,6 +42,29 @@ func TestAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyUpdateEx }) } +func TestAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(t *testing.T) { + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeOrganizationSecurityPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(context), + }, + { + ResourceName: "google_compute_organization_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyPreUpdateExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_organization_security_policy" "policy" { @@ -60,4 +83,15 @@ resource "google_compute_organization_security_policy" "policy" { } `, context) } + +func testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(context map[string]interface{}) string { + return acctest.Nprintf(` 
+resource "google_compute_organization_security_policy" "policy" { + short_name = "tf-test%{random_suffix}" + parent = "organizations/%{org_id}" + description = "org security policy description" + type = "CLOUD_ARMOR" +} +`, context) +} {{- end }} From f2661713d36f2dc7906afb79273bb852dd8a42ce Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 20 Aug 2025 13:34:54 -0700 Subject: [PATCH 815/884] Revert " provider: eliminated the need to manually add `*_wo` and `*_wo_version` for write-only properties" (#14917) --- mmv1/products/bigquerydatatransfer/Config.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index 138d8b327e66..4bafa3b99dc5 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,6 +222,13 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: + - name: 'secretAccessKeyWoVersion' + type: Integer + url_param_only: true + required_with: + - 'sensitive_params.0.secretAccessKeyWo' + description: | + The version of the sensitive params - used to trigger updates of the write-only params. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | From 0f5578f796a997b72ceef410a2265bff2212a0f6 Mon Sep 17 00:00:00 2001 From: Axel Kossek Date: Wed, 20 Aug 2025 23:21:12 +0200 Subject: [PATCH 816/884] Add resource_manager_tags support to Backend Service api (#14902) --- mmv1/products/compute/BackendService.yaml | 16 ++++++ ...ource_compute_backend_service_test.go.tmpl | 51 +++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 7ddc4ee360b4..351b20feb43d 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -1688,3 +1688,19 @@ properties: description: | A boolean flag enabling IP:PORT based dynamic forwarding. immutable: true + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the backend service. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. 
+ api_name: resourceManagerTags + ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl index c0e3c2dc9aac..5bb447cf1b70 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeBackendService_basic(t *testing.T) { @@ -1163,6 +1164,35 @@ func TestAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(t *testi } {{- end }} +func TestAccComputeBackendService_resourceManagerTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-bs-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-bs-tagvalue", sharedTagkey, org) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendService_withTags(serviceName, checkName, tagKeyResult["name"], tagValueResult["name"]), + }, + { + ResourceName: "google_compute_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} + func testAccComputeBackendService_trafficDirectorBasic(serviceName, checkName string) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { @@ -2972,3 +3002,24 @@ resource "google_compute_health_check" "default" { `, namePrefix, spillover, ratio, namePrefix, namePrefix, namePrefix, namePrefix) } {{- end }} + +func testAccComputeBackendService_withTags(serviceName, checkName string, tagKey string, tagValue string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "%s" + health_checks = [google_compute_http_health_check.zero.self_link] + params { + resource_manager_tags = { + "%s" = "%s" + } + } +} + +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} +`, serviceName, tagKey, tagValue, checkName) +} From 5c6cd1c76d1507919f6c664e90645b633dae7a0b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 20 Aug 2025 14:34:33 -0700 Subject: [PATCH 817/884] tgc-revival: add TGCIgnoreTerraformEncoder and TGCIgnoreTerraformCustomFlatten (#14788) Co-authored-by: Thomas Rodgers --- mmv1/api/resource.go | 3 +++ mmv1/api/type.go | 7 ++++++ mmv1/products/alloydb/Cluster.yaml | 1 - mmv1/products/pubsub/Subscription.yaml | 2 +- mmv1/products/pubsub/Topic.yaml | 2 +- .../terraform/flatten_property_method.go.tmpl | 2 +- ...alloydb_cluster_input_user_flatten.go.tmpl | 24 ------------------- .../tgc_next/encoders/pubsub_add_name.go.tmpl | 10 -------- .../tfplan2cai/resource_converter.go.tmpl | 4 ++-- 9 files changed, 15 insertions(+), 40 deletions(-) delete mode 100644 mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl delete mode 100644 mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index afa8b929d722..194ae8ede6ad 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go 
@@ -379,6 +379,9 @@ type TGCResource struct { // and compute.googleapis.com/GlobalAddress has GlobalAddress for CaiResourceKind. // But they have the same api resource type: address CaiResourceKind string `yaml:"cai_resource_kind,omitempty"` + + // If true, the Terraform custom encoder is not applied during tfplan2cai + TGCIgnoreTerraformEncoder bool `yaml:"tgc_ignore_terraform_encoder,omitempty"` } func (r *Resource) UnmarshalYAML(unmarshal func(any) error) error { diff --git a/mmv1/api/type.go b/mmv1/api/type.go index ba180ceafd5f..2ab006fcda8f 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -322,6 +322,9 @@ type Type struct { // If a property is missing in CAI asset, use `is_missing_in_cai: true` // and `exclude_false_in_cai: true` is not needed ExcludeFalseInCai bool `yaml:"exclude_false_in_cai,omitempty"` + + // If true, the custom flatten function is not applied during cai2hcl + TGCIgnoreTerraformCustomFlatten bool `yaml:"tgc_ignore_terraform_custom_flatten,omitempty"` } const MAX_NAME = 20 @@ -1311,3 +1314,7 @@ func (t Type) TGCSendEmptyValue() bool { return false } + +func (t Type) ShouldIgnoreCustomFlatten() bool { + return t.ResourceMetadata.IsTgcCompiler() && (t.IgnoreRead || t.TGCIgnoreTerraformCustomFlatten) +} diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index d1754852121c..4f2586a0b4e1 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -324,7 +324,6 @@ properties: Initial user to setup during cluster creation. 
ignore_read: true custom_flatten: 'templates/terraform/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl' - custom_tgc_flatten: 'templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl' properties: - name: 'user' type: String diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index a770ea6048b5..1624f57df84e 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -45,7 +45,7 @@ custom_code: constants: 'templates/terraform/constants/subscription.go.tmpl' encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_subscription.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/pubsub_add_name.go.tmpl' +tgc_ignore_terraform_encoder: true examples: - name: 'pubsub_subscription_push' primary_resource_id: 'example' diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index 9043692921cb..a47c9f935ae5 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -52,7 +52,7 @@ iam_policy: custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_topic.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/pubsub_add_name.go.tmpl' +tgc_ignore_terraform_encoder: true error_retry_predicates: - 'transport_tpg.PubsubTopicProjectNotReady' include_in_tgc_next_DO_NOT_USE: true diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index cb0fbb7a76a8..7c24debc4096 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -14,7 +14,7 @@ limitations under the License. 
*/ -}} {{- define "flattenPropertyMethod" }} {{- if $.WriteOnly }} -{{- else if $.CustomFlatten }} +{{- else if and $.CustomFlatten (not $.ShouldIgnoreCustomFlatten) }} {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl deleted file mode 100644 index fd462707e678..000000000000 --- a/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil || len(v.([]interface{})) == 0 { - return nil - } - - return []interface{}{ - map[string]interface{}{ - "user": d.Get("initial_user.0.user"), - "password": d.Get("initial_user.0.password"), - }, - } -} diff --git a/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl b/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl deleted file mode 100644 index 68fc32a4e971..000000000000 --- a/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -config := meta.(*transport_tpg.Config) - -nameProp, err := expandPubsubTopicName(d.Get("name"), d, config) -if err != nil { - return nil, err -} else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { - obj["name"] = nameProp -} - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 58b3ee417f09..ad2153a20dca 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -99,7 +99,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf } {{- end}} -{{ if $.CustomCode.Encoder -}} +{{ if and $.CustomCode.Encoder (not $.TGCIgnoreTerraformEncoder) -}} obj, err = resource{{ $.ResourceName -}}Encoder(d, config, obj) if err != nil { return nil, err @@ -113,7 +113,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf {{- end}} } -{{if $.CustomCode.Encoder -}} +{{if and $.CustomCode.Encoder (not $.TGCIgnoreTerraformEncoder) -}} func resource{{ $.ResourceName -}}Encoder(d tpgresource.TerraformResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { {{ 
$.CustomTemplate $.CustomCode.Encoder false -}} } From ff34dba08d5f6cb9c920d3900669c1ec2bb8f3ef Mon Sep 17 00:00:00 2001 From: William Yardley Date: Wed, 20 Aug 2025 14:49:40 -0700 Subject: [PATCH 818/884] container: Used zonal cluster for network config test (#14847) --- .../resource_container_node_pool_test.go.tmpl | 83 ++++++++++--------- 1 file changed, 42 insertions(+), 41 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index bcebd1f3dc62..571f99164a7a 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -4100,7 +4100,8 @@ resource "google_compute_subnetwork" "container_subnetwork" { resource "google_container_cluster" "cluster" { name = "%s" - location = "us-central1" + # Zonal rather than regional to reduce setup time and node count per zone. 
+ location = "us-central1-c" initial_node_count = 1 network = google_compute_network.container_network.name @@ -4110,82 +4111,82 @@ resource "google_container_cluster" "cluster" { services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name } release_channel { - channel = "RAPID" + channel = "RAPID" } deletion_protection = false } resource "google_container_node_pool" "with_manual_pod_cidr" { - name = "%s-manual" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-manual" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 network_config { create_pod_range = false - pod_range = google_compute_subnetwork.container_subnetwork.secondary_ip_range[2].range_name + pod_range = google_compute_subnetwork.container_subnetwork.secondary_ip_range[2].range_name } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_auto_pod_cidr" { - name = "%s-auto" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-auto" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 network_config { - create_pod_range = true - pod_range = "auto-pod-range" - pod_ipv4_cidr_block = "10.2.0.0/20" + create_pod_range = true + pod_range = "auto-pod-range" + pod_ipv4_cidr_block = "10.2.0.0/20" } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_pco_disabled" { - name = "%s-pco" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-pco" + location = google_container_cluster.cluster.location + cluster = 
google_container_cluster.cluster.name node_count = 1 network_config { - pod_cidr_overprovision_config { - disabled = true - } + pod_cidr_overprovision_config { + disabled = true + } } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_tier1_net" { - name = "%s-tier1" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-tier1" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 node_locations = [ - "us-central1-a", + "us-central1-c", ] network_config { - network_performance_config { - total_egress_bandwidth_tier = "%s" - } + network_performance_config { + total_egress_bandwidth_tier = "%s" + } } node_config { - machine_type = "n2-standard-32" - gvnic { - enabled = true - } - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + machine_type = "n2-standard-32" + gvnic { + enabled = true + } + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } From 9f0fdb6cc6b96f79a4da3a9fd4a7be1ca9e569b8 Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Thu, 21 Aug 2025 03:21:35 +0530 Subject: [PATCH 819/884] Adding restoreBackup support for sql db instance using backupdr_backup (#14653) --- .../acctest/bootstrap_test_utils.go.tmpl | 88 ++++++ .../resource_sql_database_instance.go.tmpl | 35 ++- .../resource_sql_database_instance_meta.yaml | 1 + ...esource_sql_database_instance_test.go.tmpl | 284 +++++++++++++++++- .../terraform/transport/config.go.tmpl | 15 + .../r/sql_database_instance.html.markdown | 5 + 6 files changed, 421 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index 6b8fbb575510..2c158386e146 100644 --- 
a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -34,6 +34,7 @@ import ( "google.golang.org/api/servicenetworking/v1" "google.golang.org/api/serviceusage/v1" sqladmin "google.golang.org/api/sqladmin/v1beta4" + backupdr "google.golang.org/api/backupdr/v1" ) var SharedKeyRing = "tftest-shared-keyring-1" @@ -1159,6 +1160,93 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { return bootstrapInstance.Name } +// waitForBackupdrOperation polls the operation until it is done or times out. +func waitForBackupdrOperation(ctx context.Context, t *testing.T, backupdrService *backupdr.Service, op *backupdr.Operation) (*backupdr.Operation, error) { + t.Helper() + opService := backupdr.NewProjectsLocationsOperationsService(backupdrService) + ticker := time.NewTicker(5 * time.Second) // Poll every 5 seconds + defer ticker.Stop() + + const timeout = 5 * time.Minute // Maximum time to wait + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + for { + select { + case <-ctx.Done(): + return nil, fmt.Errorf("timed out waiting for operation %s to complete", op.Name) + case <-ticker.C: + latestOp, err := opService.Get(op.Name).Context(ctx).Do() + if err != nil { + // Retry on transient errors if necessary, fail on others. + return nil, fmt.Errorf("error getting operation %s: %w", op.Name, err) + } + op = latestOp + t.Logf("Operation %s status: Done=%v", op.Name, op.Done) + + if op.Done { + if op.Error != nil { + return op, fmt.Errorf("operation %s failed: %v (code %d)", op.Name, op.Error.Message, op.Error.Code) + } + t.Logf("Operation %s completed successfully.", op.Name) + return op, nil + } + } + } +} + +// BootstrapBackupDRVault creates or gets a BackupDR backup vault for testing. 
+func BootstrapBackupDRVault(t *testing.T, vaultID, location string) string { + ctx := context.Background() + project := envvar.GetTestProjectFromEnv() + config := BootstrapConfig(t) + if config == nil { + t.Fatal("Could not bootstrap config.") + } + + // Create a backupdr client and check if the vault exists, if not create a vault + // backupdrClient := config.NewBackupDRClient(config.UserAgent) + vaultName := fmt.Sprintf("projects/%s/locations/%s/backupVaults/%s", project, location, vaultID) + projectAndLocation := fmt.Sprintf("projects/%s/locations/%s", project, location) + + log.Printf("[DEBUG] Getting BackupDR vault %q", vaultName) + backupdrService := config.NewBackupDRClient(config.UserAgent) + _, err := backupdrService.Projects.Locations.BackupVaults.Get(vaultName).Do() + if err != nil && transport_tpg.IsGoogleApiErrorWithCode(err, 404) { + log.Printf("[DEBUG] BackupDR vault %q not found, bootstrapping", vaultName) + // Prepare the request body for BackupVault creation + enforcedRetentionDays := 1 + effectiveDays := 1 + + retentionDuration := time.Duration(enforcedRetentionDays) * 24 * time.Hour + effectiveTime := time.Now().Add(time.Duration(effectiveDays) * 24 * time.Hour) + + backupVault := &backupdr.BackupVault{ + BackupMinimumEnforcedRetentionDuration: fmt.Sprintf("%ds", int(retentionDuration.Seconds())), + EffectiveTime: effectiveTime.Format(time.RFC3339), + Description: "Created by BootstrapBackupDRVault function", + } + {{/* _, err = config.NewBackupDRClient(config.UserAgent).Projects.Locations.BackupVaults.Create(projectAndLocation, backupVault).Do() */}} + createCall := backupdrService.Projects.Locations.BackupVaults.Create(projectAndLocation, backupVault) + createCall.BackupVaultId(vaultID) // *** This is REQUIRED for the query parameter *** + // createCall.ValidateOnly(false) // Optional: explicit validate only flag + op, err := createCall.Do() + if err != nil { + t.Fatalf("Error calling Create BackupDR vault %q: %s", vaultName, err) + } + 
fmt.Printf("Successfully initiated creation of BackupDR vault %q (Operation: %s)\n", vaultName, op.Name) + + // *** WAIT FOR COMPLETION *** + if _, err := waitForBackupdrOperation(ctx, t, backupdrService, op); err != nil { + t.Fatalf("Create operation for %s failed: %v", vaultName, err) + } + fmt.Printf("Successfully created BackupDR vault %q\n", vaultName) + + } + + return vaultName +} + func BootstrapSharedCaPoolInLocation(t *testing.T, location string) string { project := envvar.GetTestProjectFromEnv() poolName := "static-ca-pool" diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 6ed39b1db93c..bdae1754c38c 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -1131,6 +1131,11 @@ API (for read pools, effective_availability_type may differ from availability_ty }, }, }, + "backupdr_backup": { + Type: schema.TypeString, + Optional: true, + Description: `The name of the BackupDR backup to restore from.`, + }, "clone": { Type: schema.TypeList, Optional: true, @@ -1437,7 +1442,14 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) // Perform a backup restore if the backup context exists if r, ok := d.GetOk("restore_backup_context"); ok { - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, r) + log.Printf("[DEBUG] Restoring instance %s from backup context: %v", name, r) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, r, "") + if err != nil { + return err + } + } else if b, ok := d.GetOk("backupdr_backup"); ok && b.(string) != "" { + log.Printf("[DEBUG] Restoring instance %s from BackupDR backup: %s", name, b.(string)) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, nil, b) if err != nil { 
return err } @@ -2260,7 +2272,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) // Perform a backup restore if the backup context exists and has changed if r, ok := d.GetOk("restore_backup_context"); ok { if d.HasChange("restore_backup_context") { - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), r) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), r, "") + if err != nil { + return err + } + } + } else if b, ok := d.GetOk("backupdr_backup"); ok && b.(string) != "" { + if d.HasChange("backupdr_backup") { + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), nil, b) if err != nil { return err } @@ -2905,12 +2924,16 @@ func expandRestoreBackupContext(configured []interface{}) *sqladmin.RestoreBacku } } -func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transport_tpg.Config, userAgent, project, instanceId string, r interface{}) error { +func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transport_tpg.Config, userAgent, project, instanceId string, r interface{}, backupdrBackup interface{}) error { log.Printf("[DEBUG] Initiating SQL database instance backup restore") - restoreContext := r.([]interface{}) - backupRequest := &sqladmin.InstancesRestoreBackupRequest{ - RestoreBackupContext: expandRestoreBackupContext(restoreContext), + backupRequest := &sqladmin.InstancesRestoreBackupRequest{} + + if r != nil { + restoreContext := r.([]interface{}) + backupRequest.RestoreBackupContext = expandRestoreBackupContext(restoreContext) + } else if backupdrBackup != nil && backupdrBackup.(string) != "" { + backupRequest.BackupdrBackup = backupdrBackup.(string) } var op *sqladmin.Operation diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml index 39cca9f8f62b..df9d0644270c 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml @@ -5,6 +5,7 @@ api_version: 'v1beta4' api_resource_type_kind: 'DatabaseInstance' fields: - field: 'available_maintenance_versions' + - field: 'backupdr_backup' - field: 'clone.allocated_ip_range' - field: 'clone.database_names' - field: 'clone.point_in_time' diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 2a86ad8c74b8..87a8ab904fa4 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -1400,6 +1400,43 @@ func TestAccSqlDatabaseInstance_createFromBackup(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_createFromBackupDR(t *testing.T) { + t.Parallel() + + // Bootstrap the BackupDR vault + backupVaultID := "bv-test" + location := "us-central1" + project := envvar.GetTestProjectFromEnv() + backupvault := acctest.BootstrapBackupDRVault(t, backupVaultID, location) + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": project, + "backup_vault_id": backupVaultID, + "backup_vault": backupvault, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_createFromBackupDR(context), + }, + { + ResourceName: "google_sql_database_instance.instance", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "backupdr_backup"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { // Sqladmin client acctest.SkipIfVcr(t) @@ -1407,6 +1444,7 @@ func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), + "db_version": "POSTGRES_11", "original_db_name": acctest.BootstrapSharedSQLInstanceBackupRun(t), } @@ -1437,6 +1475,53 @@ func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_BackupDRUpdate(t *testing.T) { + t.Parallel() + + // Bootstrap the BackupDR vault + backupVaultID := "bv-test" + location := "us-central1" + project := envvar.GetTestProjectFromEnv() + backupvault := acctest.BootstrapBackupDRVault(t, backupVaultID, location) + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": project, + "backup_vault_id": backupVaultID, + "backup_vault": backupvault, + "db_version": "MYSQL_8_0_41", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_beforeBackup(context), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccSqlDatabaseInstance_updateFromBackupDR(context), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "backupdr_backup"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_basicClone(t 
*testing.T) { // Sqladmin client acctest.SkipIfVcr(t) @@ -6351,7 +6436,7 @@ func testAccSqlDatabaseInstance_beforeBackup(context map[string]interface{}) str return acctest.Nprintf(` resource "google_sql_database_instance" "instance" { name = "tf-test-%{random_suffix}" - database_version = "POSTGRES_11" + database_version = "%{db_version}" region = "us-central1" settings { @@ -6400,6 +6485,203 @@ data "google_sql_backup_run" "backup" { `, context) } +func testAccSqlDatabaseInstance_createFromBackupDR(context map[string]interface{}) string { + return acctest.Nprintf(` +// Create service account +resource "google_service_account" "bkdr_sa" { + account_id = "tf-test-bkdr-sa-%{random_suffix}" + display_name = "Backup DR Service Account" +} + +// Create a backup plan +resource "google_backup_dr_backup_plan" "plan" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = "%{backup_vault}" + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 7 + + standard_schedule { + recurrence_type = "DAILY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 23 + } + } + } +} + +// Create source SQL instance to backup +resource "google_sql_database_instance" "source" { + name = "tf-test-source-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + project = "%{project}" + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + } + } + lifecycle { + ignore_changes = [ + settings[0].backup_configuration[0].enabled, + ] + } + deletion_protection = false +} + +// Associate backup plan with SQL instance +resource "google_backup_dr_backup_plan_association" "association" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = 
"projects/${google_sql_database_instance.source.project}/instances/${google_sql_database_instance.source.name}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.plan.name +} + +// Wait for the first backup to be created +resource "time_sleep" "wait_10_mins" { + depends_on = [google_backup_dr_backup_plan_association.association] + + create_duration = "600s" +} + +data "google_backup_dr_backup" "sql_backups" { + project = "%{project}" + location = "us-central1" + backup_vault_id = "%{backup_vault_id}" + data_source_id = element(split("/", google_backup_dr_backup_plan_association.association.data_source), length(split("/", google_backup_dr_backup_plan_association.association.data_source)) - 1) + + depends_on = [time_sleep.wait_10_mins] +} + +resource "google_sql_database_instance" "instance" { + name = "tf-test-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + + settings { + tier = "db-g1-small" + backup_configuration { + enabled = true + } + } + + backupdr_backup = data.google_backup_dr_backup.sql_backups.backups[0].name + + deletion_protection = false +} +`, context) +} + +func testAccSqlDatabaseInstance_updateFromBackupDR(context map[string]interface{}) string { + return acctest.Nprintf(` +// Create service account +resource "google_service_account" "bkdr_sa" { + account_id = "tf-test-bkdr-sa-%{random_suffix}" + display_name = "Backup DR Service Account" +} + +// Create a backup plan +resource "google_backup_dr_backup_plan" "plan" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = "%{backup_vault}" + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 7 + + standard_schedule { + recurrence_type = "DAILY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 23 + } + } + } +} + +// Create source SQL instance to backup 
+resource "google_sql_database_instance" "source" { + name = "tf-test-source-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + project = "%{project}" + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + } + } + lifecycle { + ignore_changes = [ + settings[0].backup_configuration[0].enabled, + ] + } + deletion_protection = false +} + +// Associate backup plan with SQL instance +resource "google_backup_dr_backup_plan_association" "association" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = "projects/${google_sql_database_instance.source.project}/instances/${google_sql_database_instance.source.name}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.plan.name +} + +// Wait for the first backup to be created +resource "time_sleep" "wait_10_mins" { + depends_on = [google_backup_dr_backup_plan_association.association] + + create_duration = "600s" +} + +data "google_backup_dr_backup" "sql_backups" { + project = "%{project}" + location = "us-central1" + backup_vault_id = "%{backup_vault_id}" + data_source_id = element(split("/", google_backup_dr_backup_plan_association.association.data_source), length(split("/", google_backup_dr_backup_plan_association.association.data_source)) - 1) + + depends_on = [time_sleep.wait_10_mins] +} + +resource "google_sql_database_instance" "instance" { + name = "tf-test-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + + settings { + tier = "db-g1-small" + backup_configuration { + enabled = true + } + } + lifecycle { + ignore_changes = [ + settings[0].backup_configuration[0].enabled, + ] + } + + backupdr_backup = data.google_backup_dr_backup.sql_backups.backups[0].name + + deletion_protection = false +} +`, context) +} + func testAccSqlDatabaseInstance_basicClone(context map[string]interface{}) string { return acctest.Nprintf(` resource 
"google_sql_database_instance" "instance" { diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index 7d9b54ede5e6..15deef0d53fb 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -87,6 +87,7 @@ import ( "google.golang.org/api/sourcerepo/v1" "google.golang.org/api/spanner/v1" sqladmin "google.golang.org/api/sqladmin/v1beta4" + backupdr "google.golang.org/api/backupdr/v1" "google.golang.org/api/storage/v1" "google.golang.org/api/storagetransfer/v1" "google.golang.org/api/transport" @@ -903,6 +904,20 @@ func (c *Config) NewSqlAdminClient(userAgent string) *sqladmin.Service { return clientSqlAdmin } +func (c *Config) NewBackupDRClient(userAgent string) *backupdr.Service { + backupdrClientBasePath := RemoveBasePathVersion(RemoveBasePathVersion(c.BackupDRBasePath)) + log.Printf("[INFO] Instantiating Google SqlAdmin client for path %s", backupdrClientBasePath) + clientBackupdrAdmin, err := backupdr.NewService(c.Context, option.WithHTTPClient(c.Client)) + if err != nil { + log.Printf("[WARN] Error creating client storage: %s", err) + return nil + } + clientBackupdrAdmin.UserAgent = userAgent + clientBackupdrAdmin.BasePath = backupdrClientBasePath + + return clientBackupdrAdmin +} + func (c *Config) NewPubsubClient(userAgent string) *pubsub.Service { pubsubClientBasePath := RemoveBasePathVersion(c.PubsubBasePath) log.Printf("[INFO] Instantiating Google Pubsub client for path %s", pubsubClientBasePath) diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 5d3abe5dc5ed..b0675085eaf1 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -334,6 +334,11 @@ includes an up-to-date reference of 
supported versions. **NOTE:** Restoring from a backup is an imperative action and not recommended via Terraform. Adding or modifying this block during resource creation/update will trigger the restore action after the resource is created/updated. +* `backupdr_backup` - (optional) The backupdr_backup needed to restore the database to a backup run. This field will + cause Terraform to trigger the database to restore from the backup run indicated. The configuration is detailed below. + **NOTE:** Restoring from a backup is an imperative action and not recommended via Terraform. Adding or modifying this + block during resource creation/update will trigger the restore action after the resource is created/updated. + * `clone` - (Optional) The context needed to create this instance as a clone of another instance. When this field is set during resource creation, Terraform will attempt to clone another instance as indicated in the context. The configuration is detailed below. From 777bc11276fd67edfb40f302f176d709fe3b1013 Mon Sep 17 00:00:00 2001 From: Axel Kossek Date: Thu, 21 Aug 2025 02:44:46 +0200 Subject: [PATCH 820/884] Add resource_manager_tags support to Backend Bucket api (#14901) --- mmv1/products/compute/BackendBucket.yaml | 16 ++++++ .../resource_compute_backend_bucket_test.go | 49 +++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index a92ae63a5127..21ad6c671212 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -298,3 +298,19 @@ properties: enum_values: - 'INTERNAL_MANAGED' send_empty_value: true + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the 
backend bucket. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. + api_name: resourceManagerTags + ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go index f515811428fd..dd2ff2fea9f3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeBackendBucket_basicModified(t *testing.T) { @@ -205,6 +206,35 @@ func TestAccComputeBackendBucket_withCdnCacheMode_update(t *testing.T) { }) } +func TestAccComputeBackendBucket_withTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + backendName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + storageName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-bb-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey, _ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-bb-tagvalue", sharedTagkey, org) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendBucket_withTags(backendName, storageName, tagKeyResult["name"], tagValueResult["name"]), + }, + { + ResourceName: 
"google_compute_backend_bucket.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} + func testAccComputeBackendBucket_basic(backendName, storageName string) string { return fmt.Sprintf(` resource "google_compute_backend_bucket" "foobar" { @@ -414,3 +444,22 @@ resource "google_storage_bucket" "bucket" { } `, backendName, default_ttl, storageName) } + +func testAccComputeBackendBucket_withTags(backendName, storageName string, tagKey string, tagValue string) string { + return fmt.Sprintf(` +resource "google_compute_backend_bucket" "foobar" { + name = "%s" + bucket_name = google_storage_bucket.bucket_one.name + params { + resource_manager_tags = { + "%s" = "%s" + } + } +} + +resource "google_storage_bucket" "bucket_one" { + name = "%s" + location = "EU" +} +`, backendName, tagKey, tagValue, storageName) +} From 948719672cf4e7f238cf1ad52d7778d9000963d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Thu, 21 Aug 2025 18:10:16 +0200 Subject: [PATCH 821/884] Add singular data source for retrieving an NPM package from an Artifact Registry repository (#14804) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...ta_source_artifact_registry_npm_package.go | 295 ++++++++++++++++++ ...urce_artifact_registry_npm_package_test.go | 67 ++++ ...rtifact_registry_npm_package.html.markdown | 65 ++++ 4 files changed, 428 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index d9c46a255d9b..85012670afa3 
100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -29,6 +29,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), + "google_artifact_registry_npm_package": artifactregistry.DataSourceArtifactRegistryNpmPackage(), "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go new file mode 100644 index 000000000000..ca0355700f8c --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go @@ -0,0 +1,295 @@ +package artifactregistry + +import ( + "fmt" + "net/url" + "sort" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type NpmPackage struct { + name string + packageName string + version string + tags []string + createTime time.Time + updateTime time.Time +} + +func DataSourceArtifactRegistryNpmPackage() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryNpmPackageRead, + + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + 
Optional: true, + Description: "Project ID of the project.", + }, + "location": { + Type: schema.TypeString, + Required: true, + Description: "The region of the Artifact Registry repository.", + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + Description: "The repository ID containing the Npm package.", + }, + "package_name": { + Type: schema.TypeString, + Required: true, + Description: "The name of the Npm package.", + }, + "version": { + Type: schema.TypeString, + Computed: true, + Description: "The version of the Npm package.", + }, + "tags": { + Type: schema.TypeList, + Computed: true, + Description: "The tags associated with the Npm package.", + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: "The fully qualified name of the Npm package.", + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "The time the package was created.", + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: "The time the package was last updated.", + }, + }, + } +} + +func DataSourceArtifactRegistryNpmPackageRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + var res NpmPackage + + packageName, version := parseNpmPackage(d.Get("package_name").(string)) + + if version != "" { + // fetch package by version + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.npmPackages/get + packageUrlSafe := url.QueryEscape(packageName) + urlRequest, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/%s:%s", packageUrlSafe, version)) + 
if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + resGet, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return err + } + + res = convertNpmPackageResponseToStruct(resGet) + } else { + // fetch the list of packages, ordered by update time + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.npmPackages/list + urlRequest, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages") + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + // to reduce the number of pages we need to fetch, we set the pageSize to 1000(max) + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageSize": "1000"}) + if err != nil { + return err + } + + res, err = retrieveAndFilterNpmPackages(d, config, urlRequest, userAgent, packageName, version) + if err != nil { + return err + } + } + + // Set Terraform schema fields + if err := d.Set("project", project); err != nil { + return err + } + if err := d.Set("package_name", packageName); err != nil { + return err + } + if err := d.Set("name", res.name); err != nil { + return err + } + if err := d.Set("version", res.version); err != nil { + return err + } + if err := d.Set("tags", res.tags); err != nil { + return err + } + if err := d.Set("create_time", res.createTime.Format(time.RFC3339Nano)); err != nil { + return err + } + if err := d.Set("update_time", res.updateTime.Format(time.RFC3339Nano)); err != nil { + return err + } + + d.SetId(res.name) + + return nil +} + +func parseNpmPackage(pkg string) (packageName string, version string) { + splitByColon := strings.Split(pkg, ":") + + if len(splitByColon) == 2 { + packageName = splitByColon[0] + version = splitByColon[1] + } else { + packageName = pkg + } 
+ + return packageName, version +} + +func retrieveAndFilterNpmPackages(d *schema.ResourceData, config *transport_tpg.Config, urlRequest string, userAgent string, packageName string, version string) (NpmPackage, error) { + // Paging through the list method until either: + // if a version was provided, the matching package name and version pair + // otherwise, return the first matching package name + + var allPackages []NpmPackage + + for { + resListNpmPackages, token, err := retrieveListOfNpmPackages(config, urlRequest, userAgent) + if err != nil { + return NpmPackage{}, err + } + + for _, pkg := range resListNpmPackages { + if strings.Contains(pkg.name, "/"+url.QueryEscape(packageName)+":") { + allPackages = append(allPackages, pkg) + } + } + + if token == "" { + break + } + + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageToken": token}) + if err != nil { + return NpmPackage{}, err + } + } + + if len(allPackages) == 0 { + return NpmPackage{}, fmt.Errorf("Requested Npm package was not found.") + } + + // Client-side sort by updateTime descending and createTime descending + sort.Slice(allPackages, func(i, j int) bool { + if !allPackages[i].updateTime.Equal(allPackages[j].updateTime) { + return allPackages[i].updateTime.After(allPackages[j].updateTime) + } + return allPackages[i].createTime.After(allPackages[j].createTime) + }) + + if version != "" { + for _, pkg := range allPackages { + if pkg.version == version { + return pkg, nil + } + } + return NpmPackage{}, fmt.Errorf("Requested version was not found.") + } + + // Return the latest package if no version specified + return allPackages[0], nil +} + +func retrieveListOfNpmPackages(config *transport_tpg.Config, urlRequest string, userAgent string) ([]NpmPackage, string, error) { + resList, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return 
make([]NpmPackage, 0), "", err + } + + if nextPageToken, ok := resList["nextPageToken"].(string); ok { + return flattenNpmPackageDataSourceListResponse(resList), nextPageToken, nil + } else { + return flattenNpmPackageDataSourceListResponse(resList), "", nil + } +} + +func flattenNpmPackageDataSourceListResponse(res map[string]interface{}) []NpmPackage { + var npmPackages []NpmPackage + + resNpmPackages, _ := res["npmPackages"].([]interface{}) + + for _, resPackage := range resNpmPackages { + pkg, _ := resPackage.(map[string]interface{}) + npmPackages = append(npmPackages, convertNpmPackageResponseToStruct(pkg)) + } + + return npmPackages +} + +func convertNpmPackageResponseToStruct(res map[string]interface{}) NpmPackage { + var npmPackage NpmPackage + + if name, ok := res["name"].(string); ok { + npmPackage.name = name + } + + if packageName, ok := res["packageName"].(string); ok { + npmPackage.packageName = packageName + } + + if version, ok := res["version"].(string); ok { + npmPackage.version = version + } + + var tags []string + if rawTags, ok := res["tags"].([]interface{}); ok { + for _, tag := range rawTags { + if tagStr, ok := tag.(string); ok { + tags = append(tags, tagStr) + } + } + } + npmPackage.tags = tags + + if createTimeStr, ok := res["createTime"].(string); ok { + npmPackage.createTime, _ = time.Parse(time.RFC3339, createTimeStr) + } + + if updateTimeStr, ok := res["updateTime"].(string); ok { + npmPackage.updateTime, _ = time.Parse(time.RFC3339, updateTimeStr) + } + + return npmPackage +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go new file mode 100644 index 000000000000..ae9b112b192e --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go @@ -0,0 +1,67 @@ +package artifactregistry_test + +import ( + "fmt" + 
"testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryNpmPackage_basic(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + // At the moment there are no public Npm packages available in Artifact Registry. + // This test is skipped to avoid unnecessary failures. + // As soon as there are public packages available, this test can be enabled by removing the skip and adjusting the configuration accordingly. + t.Skip("No public Npm packages available in Artifact Registry") + + resourceName := "data.google_artifact_registry_npm_package.test" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryNpmPackageConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "project"), + resource.TestCheckResourceAttrSet(resourceName, "location"), + resource.TestCheckResourceAttrSet(resourceName, "repository_id"), + resource.TestCheckResourceAttrSet(resourceName, "package_name"), + resource.TestCheckResourceAttrSet(resourceName, "name"), + resource.TestCheckResourceAttrSet(resourceName, "version"), + validateNpmPackageTimestamps(resourceName), + ), + }, + }, + }) +} + +const testAccDataSourceArtifactRegistryNpmPackageConfig = ` +data "google_artifact_registry_npm_package" "test" { + project = "example-project" + location = "us" + repository_id = "example-repo" + package_name = "example-package" +} +` + +func validateNpmPackageTimestamps(dataSourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + res, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("can't find %s in state", dataSourceName) + } + + for _, attr := 
range []string{"create_time", "update_time"} { + if ts, ok := res.Primary.Attributes[attr]; !ok || !isRFC3339(ts) { + return fmt.Errorf("%s is not RFC3339: %s", attr, ts) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown new file mode 100644 index 000000000000..b6e23d7d2829 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about an NPM package within a Google Artifact Registry Repository. +--- + +# google_artifact_registry_npm_package + +This data source fetches information from a provided Artifact Registry repository, based on a the latest version of the package and optional version. + +## Example Usage + +```hcl +resource "google_artifact_registry_repository" "npm_repo" { + location = "us-central1" + repository_id = "my-npm-repo" + format = "NPM" +} + +data "google_artifact_registry_npm_package" "latest" { + location = google_artifact_registry_repository.npm_repo.location + repository_id = google_artifact_registry_repository.npm_repo.repository_id + package_name = "example-pkg" +} + +data "google_artifact_registry_npm_package" "with_version" { + location = google_artifact_registry_repository.npm_repo.location + repository_id = google_artifact_registry_repository.npm_repo.repository_id + package_name = "example-pkg:1.0.0" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` – (Required) The location of the Artifact Registry repository. + +* `repository_id` – (Required) The ID of the repository containing the NPM package. + +* `package_name` – (Required) The name of the package to fetch. Can optionally include a specific version (e.g., `my_pkg:1.2.3`). If no version is provided, the latest version is used. 
+ +* `project` – (Optional) The ID of the project that owns the repository. If not provided, the provider-level project is used. + +## Attributes Reference + +The following computed attributes are exported: + +* `id` – The fully qualified name of the fetched package. Format: + ``` + projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/{{package}}:{{version}} + ``` + +* `name` – The fully qualified name of the fetched package. Format: + ``` + projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/{{package}}:{{version}} + ``` + +* `version` – The version of the NPM package. + +* `tags` - A list of all Tags attached to this package. + +* `create_time` – The time the package was created. + +* `update_time` – The time the package was last updated. From 71df16f145a8f85092f87f50dd88027ce55b32e8 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 21 Aug 2025 09:25:02 -0700 Subject: [PATCH 822/884] tgc-revival: add beyondcorp resources (#14919) --- mmv1/products/beyondcorp/AppConnection.yaml | 1 + mmv1/products/beyondcorp/AppConnector.yaml | 1 + mmv1/products/beyondcorp/AppGateway.yaml | 1 + mmv1/products/binaryauthorization/Attestor.yaml | 1 + mmv1/third_party/tgc_next/test/hcl.go | 2 +- 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/mmv1/products/beyondcorp/AppConnection.yaml b/mmv1/products/beyondcorp/AppConnection.yaml index b18a67eae6ac..bc495bc95b0d 100644 --- a/mmv1/products/beyondcorp/AppConnection.yaml +++ b/mmv1/products/beyondcorp/AppConnection.yaml @@ -45,6 +45,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'beyondcorp_app_connection_basic' primary_resource_id: 'app_connection' diff --git a/mmv1/products/beyondcorp/AppConnector.yaml b/mmv1/products/beyondcorp/AppConnector.yaml index e6d813017782..d8fd86d53cc8 100644 --- a/mmv1/products/beyondcorp/AppConnector.yaml +++ 
b/mmv1/products/beyondcorp/AppConnector.yaml @@ -42,6 +42,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'beyondcorp_app_connector_basic' primary_resource_id: 'app_connector' diff --git a/mmv1/products/beyondcorp/AppGateway.yaml b/mmv1/products/beyondcorp/AppGateway.yaml index 1efdaf1504d3..808be9e4f2a6 100644 --- a/mmv1/products/beyondcorp/AppGateway.yaml +++ b/mmv1/products/beyondcorp/AppGateway.yaml @@ -45,6 +45,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true schema_version: 1 state_upgraders: true examples: diff --git a/mmv1/products/binaryauthorization/Attestor.yaml b/mmv1/products/binaryauthorization/Attestor.yaml index d0862a41eb03..ab271659cc74 100644 --- a/mmv1/products/binaryauthorization/Attestor.yaml +++ b/mmv1/products/binaryauthorization/Attestor.yaml @@ -34,6 +34,7 @@ iam_policy: example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' custom_code: constants: 'templates/terraform/constants/binaryauthorization_attestor.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'binary_authorization_attestor_basic' primary_resource_id: 'attestor' diff --git a/mmv1/third_party/tgc_next/test/hcl.go b/mmv1/third_party/tgc_next/test/hcl.go index 6702e53cfec7..8748e33ce1fd 100644 --- a/mmv1/third_party/tgc_next/test/hcl.go +++ b/mmv1/third_party/tgc_next/test/hcl.go @@ -86,7 +86,7 @@ func parseHCLBody(body hcl.Body) ( func insert(data any, key string, parent map[string]any) { if existing, ok := parent[key]; ok { if existingSlice, ok := existing.([]any); ok { - existingSlice = append(existingSlice, data) + parent[key] = append(existingSlice, data) } else { // Until we see a second instance of a repeated block or attribute, it will look non-repeated. 
parent[key] = []any{existing, data} From b621816bc3e3d55f3300604e7665caeadd06d475 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 21 Aug 2025 13:25:59 -0700 Subject: [PATCH 823/884] Update membership_data.go (#14940) --- .ci/magician/github/membership_data.go | 1 - 1 file changed, 1 deletion(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 390d48b868f4..488dd5c70d74 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -155,7 +155,6 @@ var ( // This is for new team members who are onboarding trustedContributors = map[string]struct{}{ "bbasata": struct{}{}, - "jaylonmcshan03": struct{}{}, "malhotrasagar2212": struct{}{}, } ) From 8b339d15264727047d43b32f72aada065c03769b Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 21 Aug 2025 13:51:08 -0700 Subject: [PATCH 824/884] fixed issue where a failed creation on container_node_pool would result in an unrecoverable tainted state (#14937) --- .../container/resource_container_node_pool.go.tmpl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index 2531ce078380..f324f5dc45a8 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -702,6 +702,7 @@ func resourceContainerNodePoolCreate(d *schema.ResourceData, meta interface{}) e return nil }) if err != nil { + d.SetId("") return fmt.Errorf("error creating NodePool: %s", err) } timeout -= time.Since(startTime) @@ -788,13 +789,19 @@ func resourceContainerNodePoolRead(d *schema.ResourceData, meta interface{}) err clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } nodePool, err := 
clusterNodePoolsGetCall.Do() + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + } + {{- else }} npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) nodePool, err := npCache.get(nodePoolInfo.fullyQualifiedName(name)) -{{- end }} if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + d.SetId("") + return nil } +{{- end }} npMap, err := flattenNodePool(d, config, nodePool, "") if err != nil { From 0f58a6725742e1da8e671f981acc578300d9a364 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Thu, 21 Aug 2025 14:19:27 -0700 Subject: [PATCH 825/884] workbench: Make install-monitoring-agent settable but unmodifiable (#14918) --- mmv1/templates/terraform/constants/workbench_instance.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index 09e63d4fc722..817474e89de7 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -24,6 +24,7 @@ func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceD var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ + "install-monitoring-agent", "serial-port-logging-enable", } @@ -67,7 +68,6 @@ var WorkbenchInstanceProvidedMetadata = []string{ "generate-diagnostics-options", "google-logging-enabled", "image-url", - "install-monitoring-agent", "install-nvidia-driver", "installed-extensions", "instance-region", From 329d8216c42e4277ea178c42f112bd040ad4d66f Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 21 Aug 2025 
15:32:38 -0700 Subject: [PATCH 826/884] tgc-revival: add google_apigee_instance (#14924) Co-authored-by: Thomas Rodgers --- mmv1/products/apigee/Instance.yaml | 1 + .../ancestrymanager/ancestrymanager.go | 8 ++++- .../tgc_next/test/assert_test_files.go | 34 +++++++++++++------ 3 files changed, 31 insertions(+), 12 deletions(-) diff --git a/mmv1/products/apigee/Instance.yaml b/mmv1/products/apigee/Instance.yaml index b51712e9f528..219d6223d8f9 100644 --- a/mmv1/products/apigee/Instance.yaml +++ b/mmv1/products/apigee/Instance.yaml @@ -47,6 +47,7 @@ custom_code: error_retry_predicates: - 'transport_tpg.IsApigeeRetryableError' exclude_sweeper: true +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apigee_instance_basic' vars: diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go index 1d70d9619bff..1c408a7ab36f 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go @@ -196,7 +196,13 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return []string{unknownOrg}, nil } key = projectKey - + case "apigee.googleapis.com/Instance": + // Project is used to find the ancestors. + // org_id in resource `google_apigee_instance` is the apigee org id under a project. 
+ if projectKey == "" { + return []string{unknownOrg}, nil + } + key = projectKey default: switch { case orgOK: diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index fd33ed62dc1b..9c278dc867b4 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -188,8 +188,8 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData // Compare roundtrip_config with export_config to ensure they are identical. // Convert the export config to roundtrip assets and then convert the roundtrip assets back to roundtrip config - ancestryCache := getAncestryCache(assets) - roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger, ignoredAssetFields) + ancestryCache, defaultProject := getAncestryCache(assets) + roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, defaultProject, logger, ignoredAssetFields) if err != nil { return fmt.Errorf("error when converting the round-trip config: %#v", err) } @@ -249,9 +249,10 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData return nil } -// Gets the ancestry cache for tfplan2cai conversion -func getAncestryCache(assets []caiasset.Asset) map[string]string { +// Gets the ancestry cache for tfplan2cai conversion and the default project +func getAncestryCache(assets []caiasset.Asset) (map[string]string, string) { ancestryCache := make(map[string]string, 0) + defaultProject := "" for _, asset := range assets { ancestors := asset.Ancestors @@ -268,18 +269,29 @@ func getAncestryCache(assets []caiasset.Asset) map[string]string { if _, ok := ancestryCache[ancestors[0]]; !ok { ancestryCache[ancestors[0]] = path + if defaultProject == "" { + if s, hasPrefix := strings.CutPrefix(ancestors[0], "projects/"); hasPrefix { + defaultProject = s + } + } } project := 
utils.ParseFieldValue(asset.Name, "projects") - projectKey := fmt.Sprintf("projects/%s", project) - if strings.HasPrefix(ancestors[0], "projects") && ancestors[0] != projectKey { - if _, ok := ancestryCache[projectKey]; !ok { - ancestryCache[projectKey] = path + if project != "" { + projectKey := fmt.Sprintf("projects/%s", project) + if strings.HasPrefix(ancestors[0], "projects") && ancestors[0] != projectKey { + if _, ok := ancestryCache[projectKey]; !ok { + ancestryCache[projectKey] = path + } + } + + if defaultProject == "" { + defaultProject = project } } } } - return ancestryCache + return ancestryCache, defaultProject } // Compares HCL and finds all of the keys in map1 that are not in map2 @@ -331,7 +343,7 @@ func isIgnored(key string, ignoredFields map[string]struct{}) bool { } // Converts a tfplan to CAI asset, and then converts the CAI asset into HCL -func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger, ignoredAssetFields []string) ([]caiasset.Asset, []byte, error) { +func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, defaultProject string, logger *zap.Logger, ignoredAssetFields []string) ([]caiasset.Asset, []byte, error) { fileName := fmt.Sprintf("%s_export", testName) // Run terraform init and terraform apply to generate tfplan.json files @@ -348,7 +360,7 @@ func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac roundtripAssets, err := tfplan2cai.Convert(ctx, jsonPlan, &tfplan2cai.Options{ ErrorLogger: logger, Offline: true, - DefaultProject: "ci-test-project-nightly-beta", + DefaultProject: defaultProject, DefaultRegion: "", DefaultZone: "", UserAgent: "", From a78c531019b1cabe2860ba2429b15915ea0b414d Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 21 Aug 2025 15:57:43 -0700 Subject: [PATCH 827/884] fix other scenario for node_pool not exist (#14943) --- 
.../container/resource_container_node_pool.go.tmpl | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index f324f5dc45a8..1356a211a42e 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -968,11 +968,6 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } _, err = clusterNodePoolsGetCall.Do() -{{- else }} - npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) - _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) -{{- end }} - if err != nil { if err = transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Container NodePool %s", name)); err == nil { return false, nil @@ -980,6 +975,15 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( // There was some other error in reading the resource return true, err } +{{- else }} + npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) + _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) + if err != nil { + log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + d.SetId("") + return false, nil + } +{{- end }} return true, nil } From f6b9666c5b00eb894ce5e946acfb5b79df391fd6 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 21 Aug 2025 16:04:20 -0700 Subject: [PATCH 828/884] Improve test data ingestion (#14944) --- .../cmd/collect_nightly_test_status.go | 47 ++++++++++++------- .../cmd/create_test_failure_ticket.go | 6 +-- 2 files changed, 34 insertions(+), 19 deletions(-) diff --git 
a/.ci/magician/cmd/collect_nightly_test_status.go b/.ci/magician/cmd/collect_nightly_test_status.go index 31de8b6373f1..ad20ddf56213 100644 --- a/.ci/magician/cmd/collect_nightly_test_status.go +++ b/.ci/magician/cmd/collect_nightly_test_status.go @@ -30,7 +30,8 @@ import ( ) const ( - NightlyDataBucket = "nightly-test-data" + nightlyDataBucket = "nightly-test-data" + tcTimeFormat = "20060102T150405Z0700" ) var cntsRequiredEnvironmentVariables = [...]string{ @@ -38,16 +39,16 @@ var cntsRequiredEnvironmentVariables = [...]string{ } type TestInfo struct { - Name string `json:"name"` - Status string `json:"status"` - Service string `json:"service"` - ErrorMessage string `json:"error_message"` - LogLink string `json"log_link` - ProviderVersion string `json:"provider_version"` - QueuedDate string `json:"queuedDate"` - StartDate string `json:"startDate"` - FinishDate string `json:"finishDate"` - Duration int `json:"duration"` + Name string `json:"name"` + Status string `json:"status"` + Service string `json:"service"` + ErrorMessage string `json:"error_message"` + LogLink string `json:"log_link"` + ProviderVersion string `json:"provider_version"` + QueuedDate time.Time `json:"queued_date"` + StartDate time.Time `json:"start_date"` + FinishDate time.Time `json:"finish_date"` + Duration int `json:"duration"` } // collectNightlyTestStatusCmd represents the collectNightlyTestStatus command @@ -168,17 +169,31 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto if testResult.Status == "FAILURE" || testResult.Status == "UNKNOWN" { errorMessage = convertErrorMessage(testResult.ErrorMessage) } + + queuedTime, err := time.Parse(tcTimeFormat, build.QueuedDate) + if err != nil { + return fmt.Errorf("failed to parse QueuedDate: %v", err) + } + startTime, err := time.Parse(tcTimeFormat, build.StartDate) + if err != nil { + return fmt.Errorf("failed to parse StartDate: %v", err) + } + finishTime, err := time.Parse(tcTimeFormat, build.FinishDate) + if err 
!= nil { + return fmt.Errorf("failed to parse FinishDate: %v", err) + } + testInfoList = append(testInfoList, TestInfo{ Name: testResult.Name, Status: testResult.Status, Service: serviceName, ErrorMessage: errorMessage, LogLink: logLink, - ProviderVersion: pVersion.String(), + ProviderVersion: strings.ToUpper(pVersion.String()), Duration: testResult.Duration, - QueuedDate: build.QueuedDate, - StartDate: build.StartDate, - FinishDate: build.FinishDate, + QueuedDate: queuedTime, + StartDate: startTime, + FinishDate: finishTime, }) } } @@ -193,7 +208,7 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto // Upload test status data file to gcs bucket objectName := fmt.Sprintf("test-metadata/%s/%s", pVersion.String(), testStatusFileName) - err = gcs.WriteToGCSBucket(NightlyDataBucket, objectName, testStatusFileName) + err = gcs.WriteToGCSBucket(nightlyDataBucket, objectName, testStatusFileName) if err != nil { return err } diff --git a/.ci/magician/cmd/create_test_failure_ticket.go b/.ci/magician/cmd/create_test_failure_ticket.go index 41b4716794cb..ab60d1aa7ed0 100644 --- a/.ci/magician/cmd/create_test_failure_ticket.go +++ b/.ci/magician/cmd/create_test_failure_ticket.go @@ -268,7 +268,7 @@ func getTestInfoList(pVersion provider.Version, date time.Time, gcs Cloudstorage objectName := fmt.Sprintf("test-metadata/%s/%s", pVersion.String(), testStatusFileName) var testInfoList []TestInfo - err := gcs.DownloadFile(NightlyDataBucket, objectName, testStatusFileName) + err := gcs.DownloadFile(nightlyDataBucket, objectName, testStatusFileName) if err != nil { return testInfoList, err } @@ -506,13 +506,13 @@ func storeErrorMessage(pVersion provider.Version, gcs CloudstorageClient, errorM // upload file to GCS objectName := fmt.Sprintf("test-errors/%s/%s/%s", pVersion.String(), date, fileName) - err = gcs.WriteToGCSBucket(NightlyDataBucket, objectName, fileName) + err = gcs.WriteToGCSBucket(nightlyDataBucket, objectName, fileName) if err != nil { 
return "", fmt.Errorf("failed to upload error message file %s to GCS bucket: %w", objectName, err) } // compute object view path - link := fmt.Sprintf("https://storage.cloud.google.com/%s/%s", NightlyDataBucket, objectName) + link := fmt.Sprintf("https://storage.cloud.google.com/%s/%s", nightlyDataBucket, objectName) return link, nil } From 72e2407067bf5d7c2fa7e78f05c37c7178572aee Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Fri, 22 Aug 2025 07:04:00 -0700 Subject: [PATCH 829/884] allows difftest tests to be skipped if they fail, not pass (#14915) --- .../terraform/scripts/teamcitytestscripts/teamcity.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go index 4e169e4ceffd..93675c2e5384 100644 --- a/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go +++ b/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go @@ -22,7 +22,8 @@ const ( ) var ( - end = regexp.MustCompile(`--- (PASS|SKIP|FAIL):\s+([a-zA-Z_]\S*) \(([\.\d]+)\)`) + // Looks for the final status line, accommodating both simple and full summaries. 
+ end = regexp.MustCompile(`\n(PASS|SKIP|FAIL)(?:[\t\s]+(.*)\s+([0-9\.]+[a-z]+))?\s*$`) diff = regexp.MustCompile(`\[Diff\] (.*)`) paniced = regexp.MustCompile(`panic:\s+(.*)\s+\[recovered\]\n`) //suite = regexp.MustCompile("^(ok|FAIL)\\s+([^\\s]+)\\s+([\\.\\d]+)s") @@ -86,7 +87,8 @@ func (test *TeamCityTest) FormatTestOutput() string { } if test.Fail { - output.WriteString(fmt.Sprintf(TeamCityTestFailed, now, test.Name)) + // skip failures for diff tests + output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) return output.String() } @@ -107,8 +109,8 @@ func (test *TeamCityTest) FormatTestOutput() string { return output.String() } - // test passes if no diff, even if failure (failure artifacts will be in regular_failure_file.log) - output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + // instead of failing when something unexpected happens, we skip the test now + output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) return output.String() } From 3ef9e24548e80a02395a4283afc065999616a6fd Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Mon, 25 Aug 2025 15:45:56 -0500 Subject: [PATCH 830/884] Plugin Framework feature branch merge (#14977) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: drfaust92 Signed-off-by: Cezary Sobczak Signed-off-by: James Alseth Signed-off-by: Misha Efimov Co-authored-by: Nick Elliot Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Co-authored-by: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Co-authored-by: William Yardley Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: shantstepanian <17996546+shantstepanian@users.noreply.github.com> Co-authored-by: Zhenhua Li Co-authored-by: Yanwei Guo Co-authored-by: Scott Suarez Co-authored-by: skysarthak Co-authored-by: Sarthak Tandon Co-authored-by: Ramon Vermeulen 
Co-authored-by: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: govardhanitallam Co-authored-by: xuebaoZ Co-authored-by: Michael Turgeman Co-authored-by: gurusai-voleti Co-authored-by: jkrish-c <31221535+jkrish-c@users.noreply.github.com> Co-authored-by: Wiktor Niesiobędzki Co-authored-by: dorianverna Co-authored-by: Andras Kerekes Co-authored-by: Raj Anand <88097156+raazanand@users.noreply.github.com> Co-authored-by: ML Co-authored-by: Marek Lipert Co-authored-by: James Cherry Co-authored-by: Hengfeng Li Co-authored-by: Jeremie Stordeur Co-authored-by: abhilashsamgoogle Co-authored-by: ArtoriaRen Co-authored-by: Nandini Agrawal Co-authored-by: Aiden Grossman Co-authored-by: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Co-authored-by: ma-g-22 <123424520+ma-g-22@users.noreply.github.com> Co-authored-by: Rajesh Guptha Co-authored-by: Parker DeWilde Co-authored-by: Thomas Rodgers Co-authored-by: sachin purohit Co-authored-by: karolgorc Co-authored-by: Rachel Thornton Co-authored-by: Alex Morozov Co-authored-by: Alex Morozov Co-authored-by: Lingkai Shen Co-authored-by: Ilia Lazebnik Co-authored-by: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Co-authored-by: Riley Karson Co-authored-by: Ron Gal <125445217+ron-gal@users.noreply.github.com> Co-authored-by: Akshat Jindal <67505646+akshat-jindal-nit@users.noreply.github.com> Co-authored-by: Swamita Gupta <55314843+swamitagupta@users.noreply.github.com> Co-authored-by: Bob "Wombat" Hogg Co-authored-by: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Co-authored-by: NA2047 <12290725+NA2047@users.noreply.github.com> Co-authored-by: sahil-mahajan-google Co-authored-by: Arpit Gupta Co-authored-by: ahmed-laiq Co-authored-by: stevenyang72 Co-authored-by: Sam Levenick Co-authored-by: Haoting.C <34197666+plus-1s@users.noreply.github.com> Co-authored-by: Daniel Dubnikov Co-authored-by: Pawel Jasinski Co-authored-by: 
Sachin_R Co-authored-by: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Co-authored-by: wj-chen Co-authored-by: Kian Jones <11655409+kianjones9@users.noreply.github.com> Co-authored-by: uaditya70 Co-authored-by: pujawadare Co-authored-by: Sachin_R Co-authored-by: Jaylon McShan Co-authored-by: tulika-aakriti Co-authored-by: anthonyrtong Co-authored-by: Chris Hawk Co-authored-by: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Co-authored-by: Shrishty Chandra Co-authored-by: Sharan Teja M Co-authored-by: James Alseth Co-authored-by: stevenyang72 Co-authored-by: oferhandel-google Co-authored-by: Jatin Miglani Co-authored-by: translucens Co-authored-by: Sing Co-authored-by: Ronson Xaviour <50081163+ronsonx@users.noreply.github.com> Co-authored-by: Ronson Xaviour Co-authored-by: Iris Chen <10179943+iyabchen@users.noreply.github.com> Co-authored-by: Or Sela Co-authored-by: Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Co-authored-by: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Co-authored-by: kigesui Co-authored-by: Meng Yang Co-authored-by: Ashwin G Co-authored-by: Allison Fisher Co-authored-by: mihhalj Co-authored-by: Guy Bidkar <5646214+gbidkar@users.noreply.github.com> Co-authored-by: Dawid212 Co-authored-by: Michael Lopez Co-authored-by: kautikdk <144651627+kautikdk@users.noreply.github.com> Co-authored-by: harshithpatte-g Co-authored-by: ML Co-authored-by: James Alseth Co-authored-by: Madhura Phadnis Co-authored-by: YashTayal04 <47032845+YashTayal04@users.noreply.github.com> Co-authored-by: Misha Efimov Co-authored-by: Aiden Grossman Co-authored-by: MatthewVu-dev Co-authored-by: Madhu Suraj Co-authored-by: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Co-authored-by: Jun Luo Co-authored-by: Tommy Reddad --- .ci/magician/cmd/test_terraform_vcr.go | 1 - mmv1/api/resource.go | 3 + mmv1/api/type.go | 48 ++ mmv1/products/datafusion/Instance.yaml | 1 + 
mmv1/provider/template_data.go | 9 + mmv1/provider/terraform.go | 9 +- .../fw_datafusion_instance_update.go.tmpl | 35 + mmv1/templates/terraform/resource_fw.go.tmpl | 764 ++++++++++++++++++ .../terraform/schema_property_fw.go.tmpl | 52 ++ .../terraform/update_mask_fw.go.tmpl | 27 + .../terraform/acctest/vcr_utils.go | 12 + .../fwprovider/framework_provider.go.tmpl | 8 +- .../terraform/fwresource/field_helpers.go | 23 +- .../terraform/fwresource/framework_import.go | 192 +++++ .../fwresource/framework_import_test.go | 183 +++++ .../terraform/fwtransport/framework_utils.go | 321 +++++++- .../fwvalidators/framework_validators.go | 80 ++ .../fwvalidators/framework_validators_test.go | 138 ++++ mmv1/third_party/terraform/go.mod | 1 + mmv1/third_party/terraform/go.sum | 8 + .../provider/provider_mmv1_resources.go.tmpl | 2 - ..._apigee_keystores_aliases_key_cert_file.go | 533 ++++++++++++ ..._apigee_keystores_aliases_key_cert_file.go | 692 ---------------- ...data_source_google_compute_network.go.tmpl | 187 +++++ ...data_source_google_compute_network_test.go | 86 ++ .../terraform/services/compute/image.go | 1 - ...ce_dataflow_flex_template_job_test.go.tmpl | 2 +- .../resource_dns_managed_zone_test.go.tmpl | 2 +- ...google_firebase_android_app_config.go.tmpl | 2 +- ...e_google_firebase_apple_app_config.go.tmpl | 2 +- ...rce_google_firebase_web_app_config.go.tmpl | 2 +- .../fw_resource_pubsub_lite_reservation.go | 383 +++++++++ ...w_resource_pubsub_lite_reservation_test.go | 56 ++ .../services/sql/fw_resource_sql_user.go | 507 ++++++++++++ .../services/sql/fw_resource_sql_user_test.go | 90 +++ .../fw_resource_storage_notification.go | 325 ++++++++ ...fw_storage_notification_state_upgraders.go | 100 +++ .../storage/resource_storage_notification.go | 196 ----- .../resource_storage_notification_test.go | 16 +- ...stores_aliases_key_cert_file.html.markdown | 2 +- 40 files changed, 4190 insertions(+), 911 deletions(-) create mode 100644 
mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl create mode 100644 mmv1/templates/terraform/resource_fw.go.tmpl create mode 100644 mmv1/templates/terraform/schema_property_fw.go.tmpl create mode 100644 mmv1/templates/terraform/update_mask_fw.go.tmpl create mode 100644 mmv1/third_party/terraform/fwresource/framework_import.go create mode 100644 mmv1/third_party/terraform/fwresource/framework_import_test.go create mode 100644 mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go delete mode 100644 mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go create mode 100644 mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go create mode 100644 mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go create mode 100644 mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go create mode 100644 mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go create mode 100644 mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go create mode 100644 mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go create mode 100644 mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go delete mode 100644 mmv1/third_party/terraform/services/storage/resource_storage_notification.go diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 74f7673744d7..1cca9513652e 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -237,7 +237,6 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, } notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) - 
postReplayData := postReplay{ RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 194ae8ede6ad..386487ac696f 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -308,6 +308,9 @@ type Resource struct { // control if a resource is continuously generated from public OpenAPI docs AutogenStatus string `yaml:"autogen_status"` + // If true, this resource generates with the new plugin framework resource template + FrameworkResource bool `yaml:"plugin_framework,omitempty"` + // The three groups of []*Type fields are expected to be strictly ordered within a yaml file // in the sequence of Virtual Fields -> Parameters -> Properties diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 2ab006fcda8f..ca664c774a8c 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -551,6 +551,15 @@ func (t Type) ResourceType() string { return path[len(path)-1] } +func (t Type) FWResourceType() string { + r := t.ResourceRef() + if r == nil { + return "" + } + path := strings.Split(r.BaseUrl, "/") + return path[len(path)-1] +} + // TODO rewrite: validation // func (t *Type) check_default_value_property() { // return if @default_value.nil? 
@@ -821,6 +830,45 @@ func (t Type) TFType(s string) string { return "schema.TypeString" } +func (t Type) GetFWType() string { + switch t.Type { + case "Boolean": + return "Bool" + case "Double": + return "Float64" + case "Integer": + return "Int64" + case "String": + return "String" + case "Time": + return "String" + case "Enum": + return "String" + case "ResourceRef": + return "String" + case "NestedObject": + return "Nested" + case "Array": + return "List" + case "KeyValuePairs": + return "Map" + case "KeyValueLabels": + return "Map" + case "KeyValueTerraformLabels": + return "Map" + case "KeyValueEffectiveLabels": + return "Map" + case "KeyValueAnnotations": + return "Map" + case "Map": + return "Map" + case "Fingerprint": + return "String" + } + + return "String" +} + // TODO rewrite: validation // // Represents an enum, and store is valid values // class Enum < Primitive diff --git a/mmv1/products/datafusion/Instance.yaml b/mmv1/products/datafusion/Instance.yaml index 665c7e1f7a31..2809e987d1ed 100644 --- a/mmv1/products/datafusion/Instance.yaml +++ b/mmv1/products/datafusion/Instance.yaml @@ -13,6 +13,7 @@ --- name: 'Instance' +# plugin_framework: true description: | Represents a Data Fusion instance. references: diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 645da0d1463f..6964d5de5f9b 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -85,6 +85,15 @@ func (td *TemplateData) GenerateResourceFile(filePath string, resource api.Resou td.GenerateFile(filePath, templatePath, resource, true, templates...) } +func (td *TemplateData) GenerateFWResourceFile(filePath string, resource api.Resource) { + templatePath := "templates/terraform/resource_fw.go.tmpl" + templates := []string{ + templatePath, + "templates/terraform/schema_property_fw.go.tmpl", + } + td.GenerateFile(filePath, templatePath, resource, true, templates...) 
+} + func (td *TemplateData) GenerateMetadataFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/metadata.yaml.tmpl" templates := []string{ diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 17abcb4d4def..3e96ce7f58ea 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -128,8 +128,13 @@ func (t *Terraform) GenerateResource(object api.Resource, templateData TemplateD if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } - targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.ResourceGoFilename(object))) - templateData.GenerateResourceFile(targetFilePath, object) + if object.FrameworkResource { + targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_fw_%s.go", t.ResourceGoFilename(object))) + templateData.GenerateFWResourceFile(targetFilePath, object) + } else { + targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.ResourceGoFilename(object))) + templateData.GenerateResourceFile(targetFilePath, object) + } } if generateDocs { diff --git a/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl b/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl new file mode 100644 index 000000000000..8b3b701a19b7 --- /dev/null +++ b/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl @@ -0,0 +1,35 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +updateMask := []string{} + +if !plan.EnableStackdriverLogging.Equal(state.EnableStackdriverLogging) { + updateMask = append(updateMask, "enableStackdriverLogging") +} + +if !plan.EnableStackdriverMonitoring.Equal(state.EnableStackdriverMonitoring) { + updateMask = append(updateMask, "enableStackdriverMonitoring") +} + +if !plan.EnableRbac.Equal(state.EnableRbac) { + updateMask = append(updateMask, "enableRbac") +} + + +// updateMask is a URL parameter but not present in the schema, so ReplaceVars +// won't set it + +url, err := transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) +if err != nil { + resp.Diagnostics.AddError("Error, failure building update mask query parameters in {{ $.Name -}}", err.Error()) + return +} \ No newline at end of file diff --git a/mmv1/templates/terraform/resource_fw.go.tmpl b/mmv1/templates/terraform/resource_fw.go.tmpl new file mode 100644 index 000000000000..c73454a0e4b3 --- /dev/null +++ b/mmv1/templates/terraform/resource_fw.go.tmpl @@ -0,0 +1,764 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */ -}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +{{/*{{$.CodeHeader TemplatePath}}*/}} + +package {{ lower $.ProductMetadata.Name }} + +import ( + + "fmt" + "log" + "net/http" + "reflect" +{{- if $.SupportsIndirectUserProjectOverride }} + "regexp" +{{- end }} +{{- if or (and (not $.Immutable) ($.UpdateMask)) $.LegacyLongFormProject }} + "strings" +{{- end }} + "time" + +{{/* # We list all the v2 imports here, because we run 'goimports' to guess the correct */}} +{{/* # set of imports, which will never guess the major version correctly. */}} +{{/* + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" + */}} + "github.com/hashicorp/go-cty/cty" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" 
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "{{ $.ImportPath }}/fwmodels" + "{{ $.ImportPath }}/fwresource" + "{{ $.ImportPath }}/fwtransport" + + "{{ $.ImportPath }}/tpgresource" + transport_tpg "{{ $.ImportPath }}/transport" + "{{ $.ImportPath }}/verify" + +{{ if $.FlattenedProperties }} + "google.golang.org/api/googleapi" +{{- end}} +) + +{{if $.CustomCode.Constants -}} + {{- $.CustomTemplate $.CustomCode.Constants true -}} +{{- end}} + +var ( + _ resource.Resource = &{{$.ResourceName}}FWResource{} + _ resource.ResourceWithConfigure = &{{$.ResourceName}}FWResource{} +) + +func New{{$.ResourceName}}FWResource() resource.Resource { + return &{{$.ResourceName}}FWResource{} +} + +type {{$.ResourceName}}FWResource struct { + {{/*client *sqladmin.Service*/}} + providerConfig *transport_tpg.Config +} + +type {{$.ResourceName}}FWModel struct { + {{- range $prop := $.OrderProperties $.AllUserProperties }} + {{camelize $prop.Name "upper"}} types.{{$prop.GetFWType}} `tfsdk:"{{underscore $prop.Name}}"` + {{- end }} + {{ if $.HasProject -}} + Project types.String `tfsdk:"project"` + {{- end }} + + Id types.String `tfsdk:"id"`{{/* TODO should this be gated behind a greenfield/brownfield flag? */}} + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +// Metadata returns the resource type name. +func (d *{{$.ResourceName}}FWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fw_{{ underscore $.ResourceName}}" +} + +func (r *{{$.ResourceName}}FWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + {{/* TODO non-client equivalent? */}} + {{/* + r.client = p.NewSqlAdminClient(p.UserAgent) + if resp.Diagnostics.HasError() { + return + }*/}} + r.providerConfig = p +} + +func (d *{{$.ResourceName}}FWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "A resource to represent a SQL User object.", + + Attributes: map[string]schema.Attribute{ +{{- range $prop := $.OrderProperties $.AllUserProperties }} + {{template "SchemaFieldsFW" $prop -}} +{{- end }} +{{- range $prop := $.VirtualFields }} + {{template "SchemaFieldsFW" $prop -}} +{{- end }} +{{- if $.CustomCode.ExtraSchemaEntry }} + {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} +{{- end}} +{{ if $.HasProject -}} + "project": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, +{{- end}} +{{- if $.HasSelfLink }} + "self_link": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, +{{- end}} + // This is included for backwards compatibility with the original, SDK-implemented resource. 
+ "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + }, + } +} + +func (r *{{$.ResourceName}}FWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel +{{ if $.CustomCode.CustomCreate -}} + {{ $.CustomTemplate $.CustomCode.CustomCreate false -}} +{{ else -}} + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var project, billingProject types.String +{{ if $.HasProject -}} + project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{ if $.LegacyLongFormProject -}} + billingProject = strings.TrimPrefix(project, "projects/") +{{ else -}} + billingProject = project +{{- end }} +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := 
fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + +{{- range $prop := $.OrderProperties $.AllUserProperties }} + {{$prop.ApiName}}Prop, diags := data.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop +{{- end }} + + + {{/* TODO default timeouts */}} + createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.CreateUri}}") + if resp.Diagnostics.HasError() { + return + } + + log.Printf("[DEBUG] Creating new {{ $.Name -}}: %#v", obj) + + {{/* Nested Query block */}} + + headers := make(http.Header) +{{- if $.CustomCode.PreCreate }} + {{ $.CustomTemplate $.CustomCode.PreCreate false -}} +{{- end}} + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.CreateVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: createTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { +{{- if and ($.CustomCode.PostCreateFailure) (not $.GetAsync) -}} + resource{{ $.ResourceName -}}PostCreateFailure(d, meta) +{{- end}} + return + } + + tflog.Trace(ctx, "created {{$.Name}} resource") + + data.Id = types.StringValue("{{ $.IdFormat -}}") +{{ if 
$.HasProject -}} + data.Project = project +{{- end }} +{{ if $.HasRegion -}} + data.Region = region +{{- end }} +{{ if $.HasZone -}} + data.Zone = zone +{{- end }} + +{{if and $.GetAsync ($.GetAsync.Allow "Create") -}} +{{ if ($.GetAsync.IsA "OpAsync") -}} +{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasPostCreateComputedFields -}} + // Use the resource in the operation response to populate + // identity fields and d.Id() before read + var opRes map[string]interface{} + err = {{ $.ClientNamePascal -}}OperationWaitTimeWithResponse( + r.providerConfig, res, &opRes, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Creating {{ $.Name -}}", userAgent, + createTimeout) + if err != nil { + {{/* Postcreate Failure */}} +{{- if not $.TaintResourceOnFailedCreate -}} + // The resource didn't actually create + resp.State.RemoveResource(ctx){{/* TODO verify this works */}} +{{ end -}} + resp.Diagnostics.AddError("Error, failure waiting to create {{ $.Name -}}", err.Error()) + return + } + + {{/* CustomCode.Decoder */}} + {{/* NestedQuery */}} + {{/* if $.HasPostCreateComputedFields */}} + {{/* This may have caused the ID to update - update it if so. 
*/}} +{{ else -}}{{/* $.GetAsync.Result.ResourceInsideResponse */}} + err := {{ $.ClientNamePascal -}}OperationWaitTime( + r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Creating {{ $.Name -}}", userAgent, + createTimeout) + + if err != nil { + + {{/* Postcreate Failure */}} +{{- if not $.TaintResourceOnFailedCreate -}} + // The resource didn't actually create + resp.State.RemoveResource(ctx){{/* TODO verify this works */}} +{{ end -}} + resp.Diagnostics.AddError("Error, failure waiting to create {{ $.Name -}}", err.Error()) + return + } + +{{ end -}}{{/* $.GetAsync.Result.ResourceInsideResponse */}} +{{ end -}}{{/*if ($.GetAsync.IsA "OpAsync")*/}} +{{end -}}{{/*if and $.GetAsync ($.GetAsync.Allow "Create")*/}} +{{if $.CustomCode.PostCreate -}} + {{- $.CustomTemplate $.CustomCode.PostCreate false -}} +{{- end}} + + + // read back {{$.Name}} + r.{{$.ResourceName}}FWRefresh(ctx, &data, &resp.State, req, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + log.Printf("[DEBUG] Finished creating {{ $.Name }} %q: %#v", data.Id.ValueString(), res) +{{ end }} {{/* if CustomCreate */}} +} + + +func (r *{{$.ResourceName}}FWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "read {{$.Name}} resource") + + // read back {{$.Name}} + r.{{$.ResourceName}}FWRefresh(ctx, &data, &resp.State, req, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + + +func (r *{{$.ResourceName}}FWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var state, plan {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + var project, billingProject types.String +{{ if $.HasProject -}} + project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{ if $.LegacyLongFormProject -}} + billingProject = strings.TrimPrefix(project, "projects/") +{{ else -}} + billingProject = project +{{- end }} +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(plan.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(plan.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // 
Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + +{{- range $prop := $.OrderProperties $.UpdateBodyProperties }} + if !plan.{{camelize $prop.Name "upper"}}.Equal(state.{{camelize $prop.Name "upper"}}) { + {{$prop.ApiName}}Prop, diags := plan.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + } +{{- end }} + + {{/* TODO default timeouts */}} + updateTimeout, diags := data.Timeouts.Update(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.CreateUri}}") + if resp.Diagnostics.HasError() { + return + } + + log.Printf("[DEBUG] Updating {{ $.Name -}}: %#v", obj) + + headers := make(http.Header) + +{{- if $.UpdateMask }} +{{ $.CustomTemplate "templates/terraform/update_mask_fw.go.tmpl" false -}} +{{ end}} + +{{- if $.CustomCode.PreUpdate }} + {{ $.CustomTemplate $.CustomCode.PreUpdate false -}} +{{- end}} + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.UpdateVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: updateTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + +{{if and ($.GetAsync) 
($.GetAsync.Allow "update") -}} +{{ if $.GetAsync.IsA "OpAsync" -}} + err = {{ $.ClientNamePascal -}}OperationWaitTime( + r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Updating {{ $.Name -}}", userAgent, + updateTimeout) + if err != nil { + resp.Diagnostics.AddError("Error, failure waiting to update {{ $.Name -}}", err.Error()) + return + } +{{- else if $.GetAsync.IsA "PollAsync" -}} + err = transport_tpg.PollingWaitTime(resource{{ $.ResourceName -}}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncExistence -}}, "Updating {{ $.Name -}}", d.Timeout(schema.TimeoutUpdate), {{ $.GetAsync.TargetOccurrences -}}) + if err != nil { +{{- if $.GetAsync.SuppressError -}} + log.Printf("[ERROR] Unable to confirm eventually consistent {{ $.Name -}} %q finished updating: %q", data.Id.ValueString(), err) +{{- else -}} + resp.Diagnostics.AddError("Error, failure polling for update in {{ $.Name -}}", err.Error()) + return +{{- end}} + } +{{- end}}{{/* if $.GetAsync.IsA "OpAsync" */}} +{{- end}}{{/* if and ($.GetAsync) ($.GetAsync.Allow "update") */}} + + tflog.Trace(ctx, "updated {{$.Name}} resource") + + // read back {{$.Name}} + r.{{$.ResourceName}}FWRefresh(ctx, &plan, &resp.State, req, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + + +func (r *{{$.ResourceName}}FWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
+ if resp.Diagnostics.HasError() { + return + } +{{- if $.ExcludeDelete }} + log.Printf("[WARNING] {{ $.ProductMetadata.Name }}{{" "}}{{ $.Name }} resources" + + " cannot be deleted from Google Cloud. The resource %s will be removed from Terraform" + + " state, but will still be present on Google Cloud.", data.Id.ValueString()) + r.SetId("") + + return nil +{{- else }} + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var project, billingProject types.String +{{ if $.HasProject -}} + project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{ if $.LegacyLongFormProject -}} + billingProject = strings.TrimPrefix(project, "projects/") +{{ else -}} + billingProject = project +{{- end }} +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.DeleteUri}}") + if resp.Diagnostics.HasError() { + return + } + +{{ if $.CustomCode.CustomDelete }} +{{ $.CustomTemplate $.CustomCode.CustomDelete false -}} +{{- else }} + headers := make(http.Header) + {{- if $.CustomCode.PreDelete }} + {{ $.CustomTemplate $.CustomCode.PreDelete false -}} + {{- end }} + + log.Printf("[DEBUG] Deleting {{ $.Name }} %q", data.Id.ValueString()) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.DeleteVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: deleteTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + diags.AddError(fmt.Sprintf("Error deleting {{ $.Name -}}: %s", data.Id.ValueString()), err.Error()) + return + } +{{if and $.GetAsync ($.GetAsync.Allow "Delete") -}} +{{ if $.GetAsync.IsA "PollAsync" }} + err := transport_tpg.PollingWaitTime(resource{{ $.ResourceName }}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncAbsence }}, "Deleting {{ $.Name }}", d.Timeout(schema.TimeoutCreate), {{ $.Async.TargetOccurrences }}) + if err != nil { +{{- if $.Async.SuppressError }} + log.Printf("[ERROR] Unable to confirm eventually consistent {{ $.Name -}} %q finished updating: %q", data.Id.ValueString(), err) +{{- else }} + resp.Diagnostics.AddError("Error, failure polling for delete in {{ $.Name -}}", err.Error()) + return +{{- end }} + } +{{- else }} + err := 
{{ $.ClientNamePascal }}OperationWaitTime( + r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Deleting {{ $.Name -}}", userAgent, + deleteTimeout) + + if err != nil { + resp.Diagnostics.AddError("Error, failure waiting to delete {{ $.Name -}}", err.Error()) + return + } +{{- end }}{{/* if $.GetAsync.IsA "PollAsync" */}} +{{- end }}{{/* if and $.GetAsync ($.GetAsync.Allow "Delete") */}} + +{{- if $.CustomCode.PostDelete }} + {{ $.CustomTemplate $.CustomCode.PostDelete false -}} +{{- end }} + + log.Printf("[DEBUG] Finished deleting {{ $.Name }} %q: %#v", data.Id.ValueString(), res) + + +{{- end }}{{/* if CustomCode.CustomDelete */}} +{{- end }}{{/* if ExcludeDelete */}} +} + +func (r *{{$.ResourceName}}FWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +func (r *{{$.ResourceName}}FWResource) {{$.ResourceName}}FWRefresh(ctx context.Context, data *{{$.ResourceName}}FWModel, state *tfsdk.State, req interface{}, diag *diag.Diagnostics) { + var metaData *fwmodels.ProviderMetaModel + //load default values +{{ if $.HasProject -}} + project := fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + 
schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.SelfLinkUri}}{{$.ReadQueryParams}}") + if resp.Diagnostics.HasError() { + return + } + + log.Printf("[DEBUG] Refreshing {{ $.Name -}} data: %s", data.Id.ValueString()) + + headers := make(http.Header) +{{- if $.CustomCode.PreRead }} + {{ $.CustomTemplate $.CustomCode.PreRead false -}} +{{- end}} + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.ReadVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Timeout: createTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("{{ $.ResourceName }} %s", data.Id.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + +{{ range $prop := $.OrderProperties $.AllUserProperties }} + data.{{camelize $prop.Name "upper"}} = res["{{ $prop.ApiName -}}"] + {{$prop.ApiName}}Prop, diags := data.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } +{{ end }} + + tflog.Trace(ctx, "refreshed {{$.Name}} resource data") + + +} \ No newline at end of file diff --git a/mmv1/templates/terraform/schema_property_fw.go.tmpl b/mmv1/templates/terraform/schema_property_fw.go.tmpl new file mode 100644 index 000000000000..03bcbb82e896 --- /dev/null +++ b/mmv1/templates/terraform/schema_property_fw.go.tmpl @@ -0,0 +1,52 @@ +{{/*# The license inside this block applies to this file. + # Copyright 2024 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+*/}} +{{- define "SchemaFieldsFW"}} +{{- if .FlattenObject -}} + {{- range $prop := .ResourceMetadata.OrderProperties .UserProperties -}} + {{ template "SchemaFieldsFW" $prop }} + {{ end -}} +{{- else -}} +"{{underscore .Name -}}": schema.{{.GetFWType}}Attribute{ +{{ if .DefaultFromApi -}} + Optional: true, + Computed: true, +{{ else if .Required -}} + Required: true, +{{ else if .Output -}} + Computed: true, +{{ else -}} + Optional: true, +{{ end -}} +{{ if .DeprecationMessage -}} + DeprecationMessage: "{{ .DeprecationMessage }}", +{{ end -}} +{{ if .Sensitive -}} + Sensitive: true, +{{ end -}} +{{ if or .IsForceNew .DefaultFromApi -}} + PlanModifiers: []planmodifier.{{.GetFWType}}{ + + {{ if .IsForceNew -}} + {{lower .GetFWType}}planmodifier.RequiresReplace(), + {{ end -}} + + {{ if .DefaultFromApi -}} + {{lower .GetFWType}}planmodifier.UseStateForUnknown(), + {{ end -}} + }, +{{ end -}} +}, +{{- end -}} +{{- end -}} \ No newline at end of file diff --git a/mmv1/templates/terraform/update_mask_fw.go.tmpl b/mmv1/templates/terraform/update_mask_fw.go.tmpl new file mode 100644 index 000000000000..8ad689215da2 --- /dev/null +++ b/mmv1/templates/terraform/update_mask_fw.go.tmpl @@ -0,0 +1,27 @@ +{{- /* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} +updateMask := []string{} +{{- $maskGroups := $.GetPropertyUpdateMasksGroups $.UpdateBodyProperties "" }} +{{- range $key := $.GetPropertyUpdateMasksGroupKeys $.UpdateBodyProperties }} + +if !plan.{{camelize $key "upper"}}.Equal(state.{{camelize $key "upper"}}) { + updateMask = append(updateMask, "{{ join (index $maskGroups $key) "\",\n\""}}") +} +{{- end }} +// updateMask is a URL parameter but not present in the schema, so ReplaceVars +// won't set it +url, err := transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) +if err != nil { + resp.Diagnostics.AddError("Error, failure building update mask query parameters in {{ $.Name -}}", err.Error()) + return +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index a726982a3a5d..117aca3b4169 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -22,6 +22,9 @@ import ( "github.com/hashicorp/terraform-provider-google/google/fwprovider" tpgprovider "github.com/hashicorp/terraform-provider-google/google/provider" + "github.com/hashicorp/terraform-provider-google/google/services/compute" + "github.com/hashicorp/terraform-provider-google/google/services/pubsublite" + "github.com/hashicorp/terraform-provider-google/google/services/sql" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -29,6 +32,8 @@ import ( "github.com/dnaeon/go-vcr/recorder" "github.com/hashicorp/terraform-plugin-framework/datasource" + fwResource "github.com/hashicorp/terraform-plugin-framework/resource" + fwDiags "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/provider" "github.com/hashicorp/terraform-plugin-log/tflog" @@ -339,9 +344,16 @@ func (p *frameworkTestProvider) Configure(ctx 
context.Context, req provider.Conf func (p *frameworkTestProvider) DataSources(ctx context.Context) []func() datasource.DataSource { ds := p.FrameworkProvider.DataSources(ctx) ds = append(ds, fwprovider.NewGoogleProviderConfigPluginFrameworkDataSource) // google_provider_config_plugin_framework + ds = append(ds, compute.NewComputeNetworkFWDataSource) // google_fw_compute_network return ds } +func (p *frameworkTestProvider) Resources(ctx context.Context) []func() fwResource.Resource { + r := p.FrameworkProvider.Resources(ctx) + r = append(r, pubsublite.NewGooglePubsubLiteReservationFWResource, sql.NewSQLUserFWResource) // google_fwprovider_pubsub_lite_reservation + return r +} + // GetSDKProvider gets the SDK provider for use in acceptance tests // If VCR is in use, the configure function is overwritten. // See usage in MuxedProviders diff --git a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl index f72d12124f68..3ceb0f566e7d 100644 --- a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl +++ b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl @@ -21,10 +21,13 @@ import ( "github.com/hashicorp/terraform-provider-google/google/functions" "github.com/hashicorp/terraform-provider-google/google/fwmodels" "github.com/hashicorp/terraform-provider-google/google/services/resourcemanager" + "github.com/hashicorp/terraform-provider-google/google/services/apigee" + "github.com/hashicorp/terraform-provider-google/version" {{- if ne $.TargetVersionName "ga" }} "github.com/hashicorp/terraform-provider-google/google/services/firebase" {{- end }} + "github.com/hashicorp/terraform-provider-google/google/services/storage" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -356,7 +359,10 @@ func (p *FrameworkProvider) DataSources(_ context.Context) []func() datasource.D // Resources defines the resources implemented in the provider. 
func (p *FrameworkProvider) Resources(_ context.Context) []func() resource.Resource { - return nil + return []func() resource.Resource{ + apigee.NewApigeeKeystoresAliasesKeyCertFileResource, + storage.NewStorageNotificationResource, + } } // Functions defines the provider functions implemented in the provider. diff --git a/mmv1/third_party/terraform/fwresource/field_helpers.go b/mmv1/third_party/terraform/fwresource/field_helpers.go index 54788d8346e7..40c170a85935 100644 --- a/mmv1/third_party/terraform/fwresource/field_helpers.go +++ b/mmv1/third_party/terraform/fwresource/field_helpers.go @@ -17,10 +17,18 @@ import ( // back to the provider's value if not given. If the provider's value is not // given, an error is returned. func GetProjectFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { - return getProjectFromFrameworkSchema("project", rVal, pVal, diags) + return getProviderDefaultFromFrameworkSchema("project", rVal, pVal, diags) } -func getProjectFromFrameworkSchema(projectSchemaField string, rVal, pVal types.String, diags *diag.Diagnostics) types.String { +func GetRegionFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { + return getProviderDefaultFromFrameworkSchema("region", rVal, pVal, diags) +} + +func GetZoneFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { + return getProviderDefaultFromFrameworkSchema("zone", rVal, pVal, diags) +} + +func getProviderDefaultFromFrameworkSchema(schemaField string, rVal, pVal types.String, diags *diag.Diagnostics) types.String { if !rVal.IsNull() && rVal.ValueString() != "" { return rVal } @@ -29,7 +37,7 @@ func getProjectFromFrameworkSchema(projectSchemaField string, rVal, pVal types.S return pVal } - diags.AddError("required field is not set", fmt.Sprintf("%s is not set", projectSchemaField)) + diags.AddError("required field is not set", fmt.Sprintf("%s is not set", schemaField)) return types.String{} } @@ -54,7 +62,7 @@ func 
ParseProjectFieldValueFramework(resourceType, fieldValue, projectSchemaFiel } } - project := getProjectFromFrameworkSchema(projectSchemaField, rVal, pVal, diags) + project := getProviderDefaultFromFrameworkSchema(projectSchemaField, rVal, pVal, diags) if diags.HasError() { return nil } @@ -111,3 +119,10 @@ func ReplaceVarsForFrameworkTest(prov *transport_tpg.Config, rs *terraform.Resou return re.ReplaceAllStringFunc(linkTmpl, replaceFunc), nil } + +func FlattenStringEmptyToNull(configuredValue types.String, apiValue string) types.String { + if configuredValue.IsNull() && apiValue == "" { + return types.StringNull() + } + return types.StringValue(apiValue) +} diff --git a/mmv1/third_party/terraform/fwresource/framework_import.go b/mmv1/third_party/terraform/fwresource/framework_import.go new file mode 100644 index 000000000000..3e344332c0ff --- /dev/null +++ b/mmv1/third_party/terraform/fwresource/framework_import.go @@ -0,0 +1,192 @@ +package fwresource + +import ( + "context" + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// ParseImportId uses a list of regular expressions to parse a resource's import ID. +// It extracts named capture groups from the regex and converts them to their +// corresponding type-safe attribute values based on the provided resource schema. +// It also handles setting default values (project, region, etc) if they are not +// present in the import ID. 
+func ParseImportId( + ctx context.Context, + req resource.ImportStateRequest, + resourceSchema schema.Schema, + providerConfig *transport_tpg.Config, + idRegexes []string, +) (map[string]attr.Value, diag.Diagnostics) { + var diags diag.Diagnostics + parsedAttributes := make(map[string]attr.Value) + + var matchFound bool + for _, idFormat := range idRegexes { + re, err := regexp.Compile(idFormat) + if err != nil { + diags.AddError( + "Invalid Import Regex", + fmt.Sprintf("Provider developer error: could not compile regex %q. Please report this issue. Error: %s", idFormat, err), + ) + // This is a developer error, so we stop immediately. + return nil, diags + } + + if match := re.FindStringSubmatch(req.ID); match != nil { + matchFound = true + subexpNames := re.SubexpNames() + for i, valueStr := range match { + // Index 0 is the full match, so we skip it. + if i == 0 { + continue + } + + fieldName := subexpNames[i] + if fieldName == "" { + continue + } + + // Look up the attribute in the resource's schema. + attribute, ok := resourceSchema.Attributes[fieldName] + if !ok { + diags.AddWarning( + "Unknown Import Field", + fmt.Sprintf("Parsed field %q from import ID but it is not defined in the resource schema.", fieldName), + ) + continue + } + + // Convert the parsed string value to the correct attr.Value type. + attrVal, conversionDiags := convertToAttrValue(valueStr, attribute) + diags.Append(conversionDiags...) + if conversionDiags.HasError() { + continue + } + parsedAttributes[fieldName] = attrVal + } + // Once a match is found, we stop. The most specific regex should be first. + break + } + } + + if !matchFound { + diags.AddError( + "Invalid Import ID", + fmt.Sprintf("Import ID %q doesn't match any of the accepted formats: %v", req.ID, idRegexes), + ) + return nil, diags + } + + // Handle default values like project, region, and zone. 
+ defaultDiags := addDefaultValues(ctx, parsedAttributes, providerConfig, resourceSchema, idRegexes[0]) + diags.Append(defaultDiags...) + + return parsedAttributes, diags +} + +// convertToAttrValue converts a string to the appropriate attr.Value based on the schema attribute type. +func convertToAttrValue(valueStr string, attr schema.Attribute) (attr.Value, diag.Diagnostics) { + var diags diag.Diagnostics + + switch attr.(type) { + case schema.StringAttribute: + return types.StringValue(valueStr), nil + case schema.Int64Attribute: + intVal, err := strconv.ParseInt(valueStr, 10, 64) + if err != nil { + diags.AddError( + "Import Value Conversion Error", + fmt.Sprintf("Failed to parse %q as an integer: %s", valueStr, err), + ) + return nil, diags + } + return types.Int64Value(intVal), nil + case schema.BoolAttribute: + boolVal, err := strconv.ParseBool(valueStr) + if err != nil { + diags.AddError( + "Import Value Conversion Error", + fmt.Sprintf("Failed to parse %q as a boolean: %s", valueStr, err), + ) + return nil, diags + } + return types.BoolValue(boolVal), nil + case schema.Float64Attribute: + floatVal, err := strconv.ParseFloat(valueStr, 64) + if err != nil { + diags.AddError( + "Import Value Conversion Error", + fmt.Sprintf("Failed to parse %q as a float: %s", valueStr, err), + ) + return nil, diags + } + return types.Float64Value(floatVal), nil + default: + // For complex types like List, Object, etc., a simple string conversion is not feasible. + // The assumption is that import IDs will only contain primitive types. + diags.AddError( + "Unsupported Import Attribute Type", + fmt.Sprintf("Importing attributes of type %T is not supported. This is a provider developer issue.", attr), + ) + return nil, diags + } +} + +// addDefaultValues checks for common provider-level defaults (project, region, zone) +// and adds them to the parsed attributes map if they were not already set from the import ID. 
+func addDefaultValues( + ctx context.Context, + parsedAttributes map[string]attr.Value, + config *transport_tpg.Config, + resourceSchema schema.Schema, + primaryRegex string, +) diag.Diagnostics { + var diags diag.Diagnostics + + defaults := map[string]func(*transport_tpg.Config) (string, error){ + "project": func(c *transport_tpg.Config) (string, error) { return c.Project, nil }, + "region": func(c *transport_tpg.Config) (string, error) { return c.Region, nil }, + "zone": func(c *transport_tpg.Config) (string, error) { return c.Zone, nil }, + } + + for field, getDefault := range defaults { + // Check if the primary regex expects this field. + if !strings.Contains(primaryRegex, fmt.Sprintf("(?P<%s>", field)) { + continue + } + // Check if the resource schema actually has this attribute. + if _, ok := resourceSchema.Attributes[field]; !ok { + continue + } + // Check if the value was already parsed from the import ID. + if _, ok := parsedAttributes[field]; ok { + continue + } + + // Get the default value from the provider configuration. 
+ value, err := getDefault(config) + if err != nil { + diags.AddError( + fmt.Sprintf("Failed to get default value for %s", field), + err.Error(), + ) + continue + } + + if value != "" { + parsedAttributes[field] = types.StringValue(value) + } + } + + return diags +} diff --git a/mmv1/third_party/terraform/fwresource/framework_import_test.go b/mmv1/third_party/terraform/fwresource/framework_import_test.go new file mode 100644 index 000000000000..278b55dd9a19 --- /dev/null +++ b/mmv1/third_party/terraform/fwresource/framework_import_test.go @@ -0,0 +1,183 @@ +package fwresource + +import ( + "context" + "reflect" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestParseImportId(t *testing.T) { + testSchema := schema.Schema{ + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "name": schema.StringAttribute{ + Required: true, + }, + "zone": schema.StringAttribute{ + Required: true, + }, + "instance_id": schema.Int64Attribute{ + Required: true, + PlanModifiers: []planmodifier.Int64{ + int64planmodifier.RequiresReplace(), + }, + }, + }, + } + + cases := map[string]struct { + importId string + idRegexes []string + resourceSchema schema.Schema + providerConfig *transport_tpg.Config + expectedAttributes map[string]attr.Value + expectError bool + errorContains 
string + }{ + "successfully parses full resource ID format": { + importId: "projects/my-project/zones/us-central1-a/instances/12345", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{}, + expectedAttributes: map[string]attr.Value{ + "project": types.StringValue("my-project"), + "zone": types.StringValue("us-central1-a"), + "instance_id": types.Int64Value(12345), + }, + }, + "successfully parses shorter ID format": { + importId: "my-project/us-central1-a/12345", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{}, + expectedAttributes: map[string]attr.Value{ + "project": types.StringValue("my-project"), + "zone": types.StringValue("us-central1-a"), + "instance_id": types.Int64Value(12345), + }, + }, + "successfully uses provider default for project": { + importId: "us-central1-a/my-instance/12345", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)/(?P[^/]+)", // Most specific + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{ + Project: "default-provider-project", + }, + expectedAttributes: map[string]attr.Value{ + "project": types.StringValue("default-provider-project"), + "zone": types.StringValue("us-central1-a"), + "name": types.StringValue("my-instance"), + "instance_id": types.Int64Value(12345), + }, + }, + "returns error for non-matching ID": { + importId: "invalid-id-format", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{}, + expectError: true, + errorContains: "doesn't match any of the accepted formats", + }, + "returns error for value that cannot be converted to type": { + 
importId: "projects/my-project/zones/us-central1-a/instances/not-an-integer", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{}, + expectError: true, + errorContains: "Failed to parse \"not-an-integer\" as an integer", + }, + "returns error for invalid regex pattern": { + importId: "any/id", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/+", // Invalid regex with unclosed bracket + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{}, + expectError: true, + errorContains: "could not compile regex", + }, + "warns about field in regex not present in schema": { + importId: "projects/my-project/zones/us-central1-a/instances/12345/extra/field", + idRegexes: []string{ + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)/extra/(?P[^/]+)", + }, + resourceSchema: testSchema, + providerConfig: &transport_tpg.Config{}, + // We expect success, but with a warning diagnostic. The valid fields should still be parsed. + expectedAttributes: map[string]attr.Value{ + "project": types.StringValue("my-project"), + "zone": types.StringValue("us-central1-a"), + "instance_id": types.Int64Value(12345), + }, + }, + } + + for name, tc := range cases { + t.Run(name, func(t *testing.T) { + ctx := context.Background() + req := resource.ImportStateRequest{ + ID: tc.importId, + } + + parsedAttributes, diags := ParseImportId(ctx, req, tc.resourceSchema, tc.providerConfig, tc.idRegexes) + + if diags.HasError() { + if tc.expectError { + // Check if the error message contains the expected substring. + if tc.errorContains != "" { + found := false + for _, d := range diags.Errors() { + if strings.Contains(d.Detail(), tc.errorContains) { + found = true + break + } + } + if !found { + t.Fatalf("expected error to contain %q, but it did not. Got: %v", tc.errorContains, diags.Errors()) + } + } + // Correctly handled an expected error. 
+ return + } + t.Fatalf("unexpected error: %v", diags) + } + + if tc.expectError { + t.Fatal("expected an error, but got none") + } + + if !reflect.DeepEqual(tc.expectedAttributes, parsedAttributes) { + t.Fatalf("incorrect attributes parsed.\n- got: %v\n- want: %v", parsedAttributes, tc.expectedAttributes) + } + }) + } +} diff --git a/mmv1/third_party/terraform/fwtransport/framework_utils.go b/mmv1/third_party/terraform/fwtransport/framework_utils.go index b297b475cb25..238670b1b581 100644 --- a/mmv1/third_party/terraform/fwtransport/framework_utils.go +++ b/mmv1/third_party/terraform/fwtransport/framework_utils.go @@ -1,17 +1,29 @@ package fwtransport import ( + "bytes" "context" + "encoding/json" "fmt" + "net/http" "os" + "reflect" + "regexp" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/googleapi" ) const uaEnvVar = "TF_APPEND_USER_AGENT" @@ -38,7 +50,7 @@ func GenerateFrameworkUserAgentString(metaData *fwmodels.ProviderMetaModel, curr return currUserAgent } -func HandleDatasourceNotFoundError(ctx context.Context, err error, state *tfsdk.State, resource string, diags *diag.Diagnostics) { +func HandleNotFoundError(ctx context.Context, err error, state *tfsdk.State, resource string, diags *diag.Diagnostics) { if transport_tpg.IsGoogleApiErrorWithCode(err, 404) { tflog.Warn(ctx, fmt.Sprintf("Removing %s because it's gone", resource)) // The resource doesn't 
exist anymore @@ -47,3 +59,310 @@ func HandleDatasourceNotFoundError(ctx context.Context, err error, state *tfsdk. diags.AddError(fmt.Sprintf("Error when reading or editing %s", resource), err.Error()) } + +var DefaultRequestTimeout = 5 * time.Minute + +type SendRequestOptions struct { + Config *transport_tpg.Config + Method string + Project string + RawURL string + UserAgent string + Body map[string]any + Timeout time.Duration + Headers http.Header + ErrorRetryPredicates []transport_tpg.RetryErrorPredicateFunc + ErrorAbortPredicates []transport_tpg.RetryErrorPredicateFunc +} + +func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) map[string]interface{} { + reqHeaders := opt.Headers + if reqHeaders == nil { + reqHeaders = make(http.Header) + } + reqHeaders.Set("User-Agent", opt.UserAgent) + reqHeaders.Set("Content-Type", "application/json") + + if opt.Config.UserProjectOverride && opt.Project != "" { + // When opt.Project is "NO_BILLING_PROJECT_OVERRIDE" in the function GetCurrentUserEmail, + // set the header X-Goog-User-Project to be empty string. + if opt.Project == "NO_BILLING_PROJECT_OVERRIDE" { + reqHeaders.Set("X-Goog-User-Project", "") + } else { + // Pass the project into this fn instead of parsing it from the URL because + // both project names and URLs can have colons in them. 
+ reqHeaders.Set("X-Goog-User-Project", opt.Project) + } + } + + if opt.Timeout == 0 { + opt.Timeout = DefaultRequestTimeout + } + + var res *http.Response + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var buf bytes.Buffer + if opt.Body != nil { + err := json.NewEncoder(&buf).Encode(opt.Body) + if err != nil { + return err + } + } + + u, err := transport_tpg.AddQueryParams(opt.RawURL, map[string]string{"alt": "json"}) + if err != nil { + return err + } + req, err := http.NewRequest(opt.Method, u, &buf) + if err != nil { + return err + } + + req.Header = reqHeaders + res, err = opt.Config.Client.Do(req) + if err != nil { + return err + } + + if err := googleapi.CheckResponse(res); err != nil { + googleapi.CloseBody(res) + return err + } + + return nil + }, + Timeout: opt.Timeout, + ErrorRetryPredicates: opt.ErrorRetryPredicates, + ErrorAbortPredicates: opt.ErrorAbortPredicates, + }) + if err != nil { + diags.AddError("Error when sending HTTP request: ", err.Error()) + return nil + } + + if res == nil { + diags.AddError("Unable to parse server response. This is most likely a terraform problem, please file a bug at https://github.com/hashicorp/terraform-provider-google/issues.", "") + return nil + } + + // The defer call must be made outside of the retryFunc otherwise it's closed too soon. + defer googleapi.CloseBody(res) + + // 204 responses will have no body, so we're going to error with "EOF" if we + // try to parse it. Instead, we can just return nil. 
+ if res.StatusCode == 204 { + return nil + } + result := make(map[string]interface{}) + if err := json.NewDecoder(res.Body).Decode(&result); err != nil { + diags.AddError("Error when sending HTTP request: ", err.Error()) + return nil + } + + return result +} + +type DefaultVars struct { + BillingProject types.String + Project types.String + Region types.String + Zone types.String +} + +func ReplaceVars(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string) string { + return ReplaceVarsRecursive(ctx, req, diags, data, config, linkTmpl, false, 0) +} + +// relaceVarsForId shortens variables by running them through GetResourceNameFromSelfLink +// this allows us to use long forms of variables from configs without needing +// custom id formats. For instance: +// accessPolicies/{{access_policy}}/accessLevels/{{access_level}} +// with values: +// access_policy: accessPolicies/foo +// access_level: accessPolicies/foo/accessLevels/bar +// becomes accessPolicies/foo/accessLevels/bar +func ReplaceVarsForId(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string) string { + return ReplaceVarsRecursive(ctx, req, diags, data, config, linkTmpl, true, 0) +} + +// ReplaceVars must be done recursively because there are baseUrls that can contain references to regions +// (eg cloudrun service) there aren't any cases known for 2+ recursion but we will track a run away +// substitution as 10+ calls to allow for future use cases. 
+func ReplaceVarsRecursive(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string, shorten bool, depth int) string { + if depth > 10 { + diags.AddError("url building error", "Recursive substitution detected.") + } + + // https://github.com/google/re2/wiki/Syntax + re := regexp.MustCompile("{{([%[:word:]]+)}}") + f := BuildReplacementFunc(ctx, re, req, diags, data, config, linkTmpl, shorten) + if diags.HasError() { + return "" + } + final := re.ReplaceAllStringFunc(linkTmpl, f) + + if re.Match([]byte(final)) { + return ReplaceVarsRecursive(ctx, req, diags, data, config, final, shorten, depth+1) + } + + return final +} + +// This function replaces references to Terraform properties (in the form of {{var}}) with their value in Terraform +// It also replaces {{project}}, {{project_id_or_project}}, {{region}}, and {{zone}} with their appropriate values +// This function supports URL-encoding the result by prepending '%' to the field name e.g. 
{{%var}} +func BuildReplacementFunc(ctx context.Context, re *regexp.Regexp, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string, shorten bool) func(string) string { + var project, region, zone string + var projectID types.String + + if strings.Contains(linkTmpl, "{{project}}") { + project = fwresource.GetProjectFramework(data.Project, types.StringValue(config.Project), diags).ValueString() + if diags.HasError() { + return nil + } + if shorten { + project = strings.TrimPrefix(project, "projects/") + } + } + + if strings.Contains(linkTmpl, "{{project_id_or_project}}") { + var diagInfo diag.Diagnostics + switch req.(type) { + case resource.CreateRequest: + pReq := req.(resource.CreateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root("project_id"), &projectID) + case resource.UpdateRequest: + pReq := req.(resource.UpdateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root("project_id"), &projectID) + case resource.ReadRequest: + sReq := req.(resource.ReadRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root("project_id"), &projectID) + case resource.DeleteRequest: + sReq := req.(resource.DeleteRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root("project_id"), &projectID) + } + diags.Append(diagInfo...) 
+ if diags.HasError() { + return nil + } + if projectID.ValueString() != "" { + project = fwresource.GetProjectFramework(data.Project, types.StringValue(config.Project), diags).ValueString() + if diags.HasError() { + return nil + } + } + if shorten { + project = strings.TrimPrefix(project, "projects/") + projectID = types.StringValue(strings.TrimPrefix(projectID.ValueString(), "projects/")) + } + } + + if strings.Contains(linkTmpl, "{{region}}") { + region = fwresource.GetRegionFramework(data.Region, types.StringValue(config.Region), diags).ValueString() + if diags.HasError() { + return nil + } + if shorten { + region = strings.TrimPrefix(region, "regions/") + } + } + + if strings.Contains(linkTmpl, "{{zone}}") { + zone = fwresource.GetRegionFramework(data.Zone, types.StringValue(config.Zone), diags).ValueString() + if diags.HasError() { + return nil + } + if shorten { + zone = strings.TrimPrefix(region, "zones/") + } + } + + f := func(s string) string { + + m := re.FindStringSubmatch(s)[1] + if m == "project" { + return project + } + if m == "project_id_or_project" { + if projectID.ValueString() != "" { + return projectID.ValueString() + } + return project + } + if m == "region" { + return region + } + if m == "zone" { + return zone + } + if string(m[0]) == "%" { + var v types.String + var diagInfo diag.Diagnostics + switch req.(type) { + case resource.CreateRequest: + pReq := req.(resource.CreateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m[1:]), &v) + case resource.UpdateRequest: + pReq := req.(resource.UpdateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m[1:]), &v) + case resource.ReadRequest: + sReq := req.(resource.ReadRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root(m[1:]), &v) + case resource.DeleteRequest: + sReq := req.(resource.DeleteRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root(m[1:]), &v) + } + //an error here means the attribute was not found, we want to do nothing in that case + if 
!diagInfo.HasError() { + diags.Append(diagInfo...) + if v.ValueString() != "" { + if shorten { + return tpgresource.GetResourceNameFromSelfLink(fmt.Sprintf("%v", v.ValueString())) + } else { + return fmt.Sprintf("%v", v.ValueString()) + } + } + } + } else { + var v types.String + var diagInfo diag.Diagnostics + switch req.(type) { + case resource.CreateRequest: + pReq := req.(resource.CreateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m), &v) + case resource.UpdateRequest: + pReq := req.(resource.UpdateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m), &v) + case resource.ReadRequest: + sReq := req.(resource.ReadRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root(m), &v) + case resource.DeleteRequest: + sReq := req.(resource.DeleteRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root(m), &v) + } + //an error here means the attribute was not found, we want to do nothing in that case + if !diagInfo.HasError() { + diags.Append(diagInfo...) + if v.ValueString() != "" { + if shorten { + return tpgresource.GetResourceNameFromSelfLink(fmt.Sprintf("%v", v.ValueString())) + } else { + return fmt.Sprintf("%v", v.ValueString()) + } + } + } + } + + // terraform-google-conversion doesn't provide a provider config in tests. + if config != nil { + // Attempt to draw values from the provider config if it's present. 
+ if f := reflect.Indirect(reflect.ValueOf(config)).FieldByName(m); f.IsValid() { + return f.String() + } + } + return "" + } + + return f +} diff --git a/mmv1/third_party/terraform/fwvalidators/framework_validators.go b/mmv1/third_party/terraform/fwvalidators/framework_validators.go index b0da8417591e..eed3a32e98f9 100644 --- a/mmv1/third_party/terraform/fwvalidators/framework_validators.go +++ b/mmv1/third_party/terraform/fwvalidators/framework_validators.go @@ -9,6 +9,8 @@ import ( "strings" "time" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" googleoauth "golang.org/x/oauth2/google" @@ -262,3 +264,81 @@ func (v jwtValidator) ValidateString(ctx context.Context, request validator.Stri func JWTValidator() validator.String { return jwtValidator{} } + +// stringValuesInSetValidator validates that all string elements in a set +// are present in the configured list of valid strings. +type stringValuesInSetValidator struct { + ValidStrings []string +} + +func (v stringValuesInSetValidator) Description(_ context.Context) string { + return fmt.Sprintf("all elements must be one of: %q", v.ValidStrings) +} + +func (v stringValuesInSetValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v stringValuesInSetValidator) ValidateSet(ctx context.Context, req validator.SetRequest, resp *validator.SetResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + valid := make(map[string]struct{}, len(v.ValidStrings)) + for _, s := range v.ValidStrings { + valid[s] = struct{}{} + } + + var elements []types.String + resp.Diagnostics.Append(req.ConfigValue.ElementsAs(ctx, &elements, false)...) 
+ if resp.Diagnostics.HasError() { + return + } + + for _, el := range elements { + if _, ok := valid[el.ValueString()]; !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Set Element", + fmt.Sprintf("Element %q is not a valid value. %s.", el.ValueString(), v.Description(ctx)), + ) + } + } +} + +func StringValuesInSet(validStrings ...string) validator.Set { + return stringValuesInSetValidator{ + ValidStrings: validStrings, + } +} + +type TopicPrefixValidator struct{} + +func (v TopicPrefixValidator) Description(ctx context.Context) string { + return "ensures the topic does not start with '//pubsub.googleapis.com/'" +} + +func (v TopicPrefixValidator) MarkdownDescription(ctx context.Context) string { + return "Ensures the topic does not start with `//pubsub.googleapis.com/`." +} + +func (v TopicPrefixValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + value := req.ConfigValue.ValueString() + forbiddenPrefix := "//pubsub.googleapis.com/" + + if strings.HasPrefix(value, forbiddenPrefix) { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Topic Format", + fmt.Sprintf("The topic must not start with '%s', please use the format projects/{project}/topics/{topic} instead.", forbiddenPrefix), + ) + } +} + +func NewTopicPrefixValidator() validator.String { + return TopicPrefixValidator{} +} diff --git a/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go b/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go index 07e2378e8bf3..8d8a285584fd 100644 --- a/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go +++ b/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go @@ -307,3 +307,141 @@ func TestBoundedDuration(t *testing.T) { }) } } + +func TestStringValuesInSetValidator(t *testing.T) { + t.Parallel() + + // Define the set of valid strings for the 
validator + validStrings := []string{"APPLE", "BANANA", "CHERRY"} + + stringSet := func(elems []string) types.Set { + if elems == nil { + return types.SetNull(types.StringType) + } + val, diags := types.SetValueFrom(context.Background(), types.StringType, elems) + if diags.HasError() { + t.Fatalf("Failed to create test set: %v", diags) + } + return val + } + + cases := map[string]struct { + ConfigValue types.Set + ExpectedErrorCount int + }{ + "valid set with one element": { + ConfigValue: stringSet([]string{"APPLE"}), + ExpectedErrorCount: 0, + }, + "valid set with multiple elements": { + ConfigValue: stringSet([]string{"BANANA", "CHERRY"}), + ExpectedErrorCount: 0, + }, + "valid empty set": { + ConfigValue: stringSet([]string{}), + ExpectedErrorCount: 0, + }, + "null set is valid": { + ConfigValue: stringSet(nil), + ExpectedErrorCount: 0, + }, + "unknown set is valid": { + ConfigValue: types.SetUnknown(types.StringType), + ExpectedErrorCount: 0, + }, + "invalid set with one element": { + ConfigValue: stringSet([]string{"DURIAN"}), + ExpectedErrorCount: 1, + }, + "invalid set with multiple elements": { + ConfigValue: stringSet([]string{"DURIAN", "ELDERBERRY"}), + ExpectedErrorCount: 2, + }, + "set with mixed valid and invalid elements": { + ConfigValue: stringSet([]string{"APPLE", "DURIAN", "CHERRY"}), + ExpectedErrorCount: 1, + }, + } + + for tn, tc := range cases { + tn, tc := tn, tc + t.Run(tn, func(t *testing.T) { + t.Parallel() + + req := validator.SetRequest{ + Path: path.Root("test_attribute"), + ConfigValue: tc.ConfigValue, + } + resp := &validator.SetResponse{ + Diagnostics: diag.Diagnostics{}, + } + v := fwvalidators.StringValuesInSet(validStrings...) + + v.ValidateSet(context.Background(), req, resp) + + if resp.Diagnostics.ErrorsCount() != tc.ExpectedErrorCount { + t.Errorf("Expected %d errors, but got %d. 
Errors: %v", tc.ExpectedErrorCount, resp.Diagnostics.ErrorsCount(), resp.Diagnostics.Errors()) + } + }) + } +} + +func TestTopicPrefixValidator(t *testing.T) { + t.Parallel() + + type testCase struct { + value types.String + expectError bool + errorContains string + } + + tests := map[string]testCase{ + "valid topic format": { + value: types.StringValue("projects/my-project/topics/my-topic"), + expectError: false, + }, + "invalid topic format - starts with pubsub prefix": { + value: types.StringValue("//pubsub.googleapis.com/projects/my-project/topics/my-topic"), + expectError: true, + errorContains: "The topic must not start with '//pubsub.googleapis.com/', please use the format projects/{project}/topics/{topic} instead.", + }, + } + + for name, test := range tests { + name, test := name, test + t.Run(name, func(t *testing.T) { + t.Parallel() + + request := validator.StringRequest{ + Path: path.Root("test_topic"), + PathExpression: path.MatchRoot("test_topic"), + ConfigValue: test.value, + } + response := validator.StringResponse{} + v := fwvalidators.NewTopicPrefixValidator() + + v.ValidateString(context.Background(), request, &response) + + if test.expectError && !response.Diagnostics.HasError() { + t.Errorf("expected error, got none for value: %q", test.value.ValueString()) + } + + if !test.expectError && response.Diagnostics.HasError() { + t.Errorf("got unexpected error for value: %q: %s", test.value.ValueString(), response.Diagnostics.Errors()) + } + + if test.errorContains != "" { + foundError := false + for _, err := range response.Diagnostics.Errors() { + if err.Detail() == test.errorContains { + foundError = true + break + } + } + if !foundError { + t.Errorf("expected error with detail %q, got none", test.errorContains) + } + } + }) + } +} diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index d6d72af47b3d..026850fd2bd3 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -20,6 +20,7 
@@ require ( github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/terraform-json v0.25.0 github.com/hashicorp/terraform-plugin-framework v1.15.0 + github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 github.com/hashicorp/terraform-plugin-go v0.28.0 github.com/hashicorp/terraform-plugin-log v0.9.0 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 3889c6c81471..ed7608cc7167 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -162,12 +162,20 @@ github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3q github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64= +github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ= github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I= github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= +github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= +github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ= github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= +github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= +github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= github.com/hashicorp/terraform-plugin-framework v1.15.0 
h1:LQ2rsOfmDLxcn5EeIwdXFtr03FVsNktbbBci8cOKdb4= github.com/hashicorp/terraform-plugin-framework v1.15.0/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= +github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 h1:I/N0g/eLZ1ZkLZXUQ0oRSXa8YG/EF0CEuQP1wXdrzKw= +github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0/go.mod h1:t339KhmxnaF4SzdpxmqW8HnQBHVGYazwtfxU0qCs4eE= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 h1:LYz4bXh3t7bTEydXOmPDPupRRnA480B/9+jV8yZvxBA= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0/go.mod h1:+BVERsnfdlhYR2YkXMBtPnmn9UsL19U3qUtSZ+Y/5MY= github.com/hashicorp/terraform-plugin-go v0.28.0 h1:zJmu2UDwhVN0J+J20RE5huiF3XXlTYVIleaevHZgKPA= diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 85012670afa3..b0e968f31749 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -356,7 +356,6 @@ var handwrittenResources = map[string]*schema.Resource{ "google_apigee_sharedflow_deployment": apigee.ResourceApigeeSharedFlowDeployment(), "google_apigee_flowhook": apigee.ResourceApigeeFlowhook(), "google_apigee_keystores_aliases_pkcs12": apigee.ResourceApigeeKeystoresAliasesPkcs12(), - "google_apigee_keystores_aliases_key_cert_file": apigee.ResourceApigeeKeystoresAliasesKeyCertFile(), "google_bigquery_table": bigquery.ResourceBigQueryTable(), "google_bigtable_gc_policy": bigtable.ResourceBigtableGCPolicy(), "google_bigtable_instance": bigtable.ResourceBigtableInstance(), @@ -447,7 +446,6 @@ var handwrittenResources = map[string]*schema.Resource{ "google_storage_bucket_object": storage.ResourceStorageBucketObject(), "google_storage_object_acl": storage.ResourceStorageObjectAcl(), "google_storage_default_object_acl": storage.ResourceStorageDefaultObjectAcl(), - "google_storage_notification": 
storage.ResourceStorageNotification(), "google_storage_transfer_job": storagetransfer.ResourceStorageTransferJob(), "google_tags_location_tag_binding": tags.ResourceTagsLocationTagBinding(), // ####### END handwritten resources ########### diff --git a/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go new file mode 100644 index 000000000000..787f84425f65 --- /dev/null +++ b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go @@ -0,0 +1,533 @@ +package apigee + +import ( + "bytes" + "context" + "fmt" + "mime/multipart" + "time" + + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var ( + _ resource.Resource = &ApigeeKeystoresAliasesKeyCertFileResource{} + _ resource.ResourceWithConfigure = &ApigeeKeystoresAliasesKeyCertFileResource{} + _ resource.ResourceWithImportState = &ApigeeKeystoresAliasesKeyCertFileResource{} +) + +func 
NewApigeeKeystoresAliasesKeyCertFileResource() resource.Resource { + return &ApigeeKeystoresAliasesKeyCertFileResource{} +} + +type ApigeeKeystoresAliasesKeyCertFileResource struct { + providerConfig *transport_tpg.Config +} + +type ApigeeKeystoresAliasesKeyCertFileResourceModel struct { + Id types.String `tfsdk:"id"` + OrgId types.String `tfsdk:"org_id"` + Environment types.String `tfsdk:"environment"` + Keystore types.String `tfsdk:"keystore"` + Alias types.String `tfsdk:"alias"` + Cert types.String `tfsdk:"cert"` + Key types.String `tfsdk:"key"` + Password types.String `tfsdk:"password"` + Type types.String `tfsdk:"type"` + CertsInfo types.List `tfsdk:"certs_info"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +type CertInfoDetailModel struct { + BasicConstraints types.String `tfsdk:"basic_constraints"` + ExpiryDate types.String `tfsdk:"expiry_date"` + IsValid types.String `tfsdk:"is_valid"` + Issuer types.String `tfsdk:"issuer"` + PublicKey types.String `tfsdk:"public_key"` + SerialNumber types.String `tfsdk:"serial_number"` + SigAlgName types.String `tfsdk:"sig_alg_name"` + Subject types.String `tfsdk:"subject"` + SubjectAlternativeNames types.List `tfsdk:"subject_alternative_names"` + ValidFrom types.String `tfsdk:"valid_from"` + Version types.Int64 `tfsdk:"version"` +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_apigee_keystores_aliases_key_cert_file" +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + if req.ProviderData == nil { + return + } + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + r.providerConfig = p +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "An alias from a key/cert file.", + Attributes: map[string]schema.Attribute{ + "org_id": schema.StringAttribute{ + Description: "Organization ID associated with the alias.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "environment": schema.StringAttribute{ + Description: "Environment associated with the alias.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "keystore": schema.StringAttribute{ + Description: "Keystore Name.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "alias": schema.StringAttribute{ + Description: "Alias Name.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "cert": schema.StringAttribute{ + Description: "Cert content.", + Required: true, + }, + "key": schema.StringAttribute{ + Description: "Private Key content, omit if uploading to truststore.", + Optional: true, + Sensitive: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "password": schema.StringAttribute{ + Description: "Password for the Private Key if it's encrypted.", + Optional: true, + Sensitive: true, + }, + "type": schema.StringAttribute{ + Description: "Optional. 
Type of Alias.", + Computed: true, + }, + "id": schema.StringAttribute{ + Description: "Project identifier", + Computed: true, + }, + "certs_info": schema.ListAttribute{ + Description: "Chain of certificates under this alias.", + Computed: true, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "basic_constraints": types.StringType, + "expiry_date": types.StringType, + "is_valid": types.StringType, + "issuer": types.StringType, + "public_key": types.StringType, + "serial_number": types.StringType, + "sig_alg_name": types.StringType, + "subject": types.StringType, + "subject_alternative_names": types.ListType{ElemType: types.StringType}, + "valid_from": types.StringType, + "version": types.Int64Type, + }, + }, + }, + }, + Blocks: map[string]schema.Block{ + "timeouts": timeouts.Block(ctx, timeouts.Opts{ + Create: true, + Read: true, + Update: true, + Delete: true, + }), + }, + } +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan ApigeeKeystoresAliasesKeyCertFileResourceModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + createTimeout, diags := plan.Timeouts.Create(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + buf := new(bytes.Buffer) + bw := multipart.NewWriter(buf) + if !plan.Key.IsNull() && !plan.Key.IsUnknown() { + keyFilePartWriter, _ := bw.CreateFormField("keyFile") + keyFilePartWriter.Write([]byte(plan.Key.ValueString())) + } + if !plan.Password.IsNull() && !plan.Password.IsUnknown() { + keyFilePartWriter, _ := bw.CreateFormField("password") + keyFilePartWriter.Write([]byte(plan.Password.ValueString())) + } + certFilePartWriter, _ := bw.CreateFormField("certFile") + certFilePartWriter.Write([]byte(plan.Cert.ValueString())) + bw.Close() + + billingProject := types.StringValue(r.providerConfig.BillingProject) + + var schemaDefaultVals fwtransport.DefaultVars + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases?format=keycertfile&alias={{alias}}&ignoreExpiryValidation=true") + if resp.Diagnostics.HasError() { + return + } + + res, err := sendRequestRawBodyWithTimeout(r.providerConfig, "POST", billingProject.ValueString(), url, userAgent, buf, bw.FormDataContentType(), createTimeout) + if err != nil { + resp.Diagnostics.AddError("Error, failure to create key cert file", err.Error()) + return + } + + tflog.Trace(ctx, "Successfully created Apigee Keystore Alias", map[string]interface{}{"response": res}) + + id := fmt.Sprintf("organizations/%s/environments/%s/keystores/%s/aliases/%s", + plan.OrgId.ValueString(), + plan.Environment.ValueString(), + plan.Keystore.ValueString(), + plan.Alias.ValueString(), + ) + plan.Id = types.StringValue(id) + + r.refresh(ctx, req, &plan, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) 
+} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state ApigeeKeystoresAliasesKeyCertFileResourceModel + + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + r.refresh(ctx, req, &state, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var plan ApigeeKeystoresAliasesKeyCertFileResourceModel + var state ApigeeKeystoresAliasesKeyCertFileResourceModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + updateTimeout, diags := plan.Timeouts.Update(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + buf := new(bytes.Buffer) + bw := multipart.NewWriter(buf) + certFilePartWriter, err := bw.CreateFormField("certFile") + if err != nil { + resp.Diagnostics.AddError("Unable to create form field for certificate", err.Error()) + return + } + certFilePartWriter.Write([]byte(plan.Cert.ValueString())) + bw.Close() + + billingProject := types.StringValue(r.providerConfig.BillingProject) + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + var schemaDefaultVals fwtransport.DefaultVars + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}?ignoreExpiryValidation=true") + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "Updating Apigee Keystore Alias", map[string]interface{}{"url": url}) + res, err := sendRequestRawBodyWithTimeout(r.providerConfig, "PUT", billingProject.ValueString(), url, userAgent, buf, bw.FormDataContentType(), updateTimeout) + + if err != nil { + resp.Diagnostics.AddError("Error, failure to update key cert file", err.Error()) + return + } + + tflog.Trace(ctx, "Successfully sent update request for Apigee Keystore Alias", map[string]interface{}{"response": res}) + + r.refresh(ctx, req, &plan, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data ApigeeKeystoresAliasesKeyCertFileResourceModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
+ if resp.Diagnostics.HasError() { + return + } + + deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + var schemaDefaultVals fwtransport.DefaultVars + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "Deleting Apigee Keystore Alias", map[string]interface{}{"url": url}) + + _ = fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "DELETE", + Project: data.OrgId.ValueString(), + RawURL: url, + UserAgent: userAgent, + Timeout: deleteTimeout, + }, &resp.Diagnostics) + + tflog.Trace(ctx, "Successfully deleted Apigee Keystore Alias.") +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) refresh(ctx context.Context, req interface{}, data *ApigeeKeystoresAliasesKeyCertFileResourceModel, state *tfsdk.State, diags *diag.Diagnostics) { + var metaData *fwmodels.ProviderMetaModel + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + var schemaDefaultVals fwtransport.DefaultVars + url := fwtransport.ReplaceVars(ctx, req, diags, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if diags.HasError() { + return + } + + readTimeout, timeoutDiags := data.Timeouts.Read(ctx, 20*time.Minute) + diags.Append(timeoutDiags...) 
+ if diags.HasError() { + return + } + + tflog.Trace(ctx, "Refreshing Apigee Keystore Alias", map[string]interface{}{"url": url}) + + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "GET", + Project: data.OrgId.ValueString(), + RawURL: url, + UserAgent: userAgent, + Timeout: readTimeout, + }, diags) + + if diags.HasError() { + return + } + + tflog.Trace(ctx, "Successfully refreshed Apigee Keystore Alias", map[string]interface{}{"response": res}) + + id := fmt.Sprintf("organizations/%s/environments/%s/keystores/%s/aliases/%s", + data.OrgId.ValueString(), + data.Environment.ValueString(), + data.Keystore.ValueString(), + data.Alias.ValueString(), + ) + data.Id = types.StringValue(id) + + data.Type = types.StringValue(res["type"].(string)) + + flattenedCertsInfo, certDiags := flattenCertsInfo(res["certsInfo"]) + diags.Append(certDiags...) + if diags.HasError() { + return + } + data.CertsInfo = flattenedCertsInfo +} + +var certInfoObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "basic_constraints": types.StringType, + "expiry_date": types.StringType, + "is_valid": types.StringType, + "issuer": types.StringType, + "public_key": types.StringType, + "serial_number": types.StringType, + "sig_alg_name": types.StringType, + "subject": types.StringType, + "subject_alternative_names": types.ListType{ElemType: types.StringType}, + "valid_from": types.StringType, + "version": types.Int64Type, + }, +} + +func flattenCertsInfo(v interface{}) (types.List, diag.Diagnostics) { + if v == nil { + return types.ListNull(certInfoObjectType), nil + } + + var diags diag.Diagnostics + + certsInfoMap, ok := v.(map[string]interface{}) + if !ok { + diags.AddError("Invalid Type", "Cannot flatten certs_info: input is not a map.") + return types.ListNull(certInfoObjectType), diags + } + if len(certsInfoMap) == 0 { + return types.ListNull(certInfoObjectType), nil + } + + certInfoListRaw, ok := 
certsInfoMap["certInfo"].([]interface{}) + if !ok || len(certInfoListRaw) == 0 { + return types.ListNull(certInfoObjectType), nil + } + + var certInfoDetails []CertInfoDetailModel + for _, rawCertInfo := range certInfoListRaw { + certInfo, ok := rawCertInfo.(map[string]interface{}) + if !ok || len(certInfo) == 0 { + continue + } + getStringValue := func(key string) types.String { + if val, ok := certInfo[key].(string); ok { + return types.StringValue(val) + } + return types.StringNull() + } + var sansValue types.List + if sansRaw, ok := certInfo["subjectAlternativeNames"].([]interface{}); ok { + sans := make([]string, 0, len(sansRaw)) + for _, san := range sansRaw { + if s, ok := san.(string); ok { + sans = append(sans, s) + } + } + var listDiags diag.Diagnostics + sansValue, listDiags = types.ListValueFrom(context.Background(), types.StringType, sans) + diags.Append(listDiags...) + } else { + sansValue = types.ListNull(types.StringType) + } + var versionValue types.Int64 + if versionRaw, ok := certInfo["version"]; ok { + switch v := versionRaw.(type) { + case float64: + versionValue = types.Int64Value(int64(v)) + case string: + versionValue = types.Int64Null() + default: + versionValue = types.Int64Null() + } + } else { + versionValue = types.Int64Null() + } + detail := CertInfoDetailModel{ + BasicConstraints: getStringValue("basicConstraints"), + ExpiryDate: getStringValue("expiryDate"), + IsValid: getStringValue("isValid"), + Issuer: getStringValue("issuer"), + PublicKey: getStringValue("publicKey"), + SerialNumber: getStringValue("serialNumber"), + SigAlgName: getStringValue("sigAlgName"), + Subject: getStringValue("subject"), + ValidFrom: getStringValue("validFrom"), + SubjectAlternativeNames: sansValue, + Version: versionValue, + } + certInfoDetails = append(certInfoDetails, detail) + } + + if diags.HasError() { + return types.ListNull(certInfoObjectType), diags + } + + flattenedList, listDiags := types.ListValueFrom(context.Background(), certInfoObjectType, 
certInfoDetails) + diags.Append(listDiags...) + + return flattenedList, diags +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + idRegexes := []string{ + "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + } + + var resourceSchemaResp resource.SchemaResponse + r.Schema(ctx, resource.SchemaRequest{}, &resourceSchemaResp) + if resourceSchemaResp.Diagnostics.HasError() { + resp.Diagnostics.Append(resourceSchemaResp.Diagnostics...) + return + } + + parsedAttributes, diags := fwresource.ParseImportId(ctx, req, resourceSchemaResp.Schema, r.providerConfig, idRegexes) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + for name, value := range parsedAttributes { + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root(name), value)...) + } +} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go deleted file mode 100644 index 03d842bfc305..000000000000 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go +++ /dev/null @@ -1,692 +0,0 @@ -package apigee - -import ( - "bytes" - "context" - "fmt" - "log" - "mime/multipart" - "reflect" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func ResourceApigeeKeystoresAliasesKeyCertFile() *schema.Resource { - return &schema.Resource{ - Create: resourceApigeeKeystoresAliasesKeyCertFileCreate, - Read: resourceApigeeKeystoresAliasesKeyCertFileRead, - Update: 
resourceApigeeKeystoresAliasesKeyCertFileUpdate, - Delete: resourceApigeeKeystoresAliasesKeyCertFileDelete, - - Importer: &schema.ResourceImporter{ - State: resourceApigeeKeystoresAliasesKeyCertFileImport, - }, - - CustomizeDiff: customdiff.All( - /* - If cert is changed then an update is expected, so we tell Terraform core to expect update on certs_info - */ - - customdiff.ComputedIf("certs_info", func(_ context.Context, diff *schema.ResourceDiff, v interface{}) bool { - return diff.HasChange("cert") - }), - ), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(20 * time.Minute), - Read: schema.DefaultTimeout(20 * time.Minute), - Update: schema.DefaultTimeout(20 * time.Minute), - Delete: schema.DefaultTimeout(20 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "alias": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Alias Name`, - }, - "cert": { - Type: schema.TypeString, - Required: true, - Description: `Cert content`, - }, - "environment": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Environment associated with the alias`, - }, - "keystore": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Keystore Name`, - }, - "org_id": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Organization ID associated with the alias`, - }, - "certs_info": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `Chain of certificates under this alias.`, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "cert_info": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `List of all properties in the object.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "basic_constraints": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 basic constraints extension.`, - }, - "expiry_date": { - Type: 
schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 notAfter validity period in milliseconds since epoch.`, - }, - "is_valid": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Flag that specifies whether the certificate is valid. -Flag is set to Yes if the certificate is valid, No if expired, or Not yet if not yet valid.`, - }, - "issuer": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 issuer.`, - }, - "public_key": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Public key component of the X.509 subject public key info.`, - }, - "serial_number": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 serial number.`, - }, - "sig_alg_name": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 signatureAlgorithm.`, - }, - "subject": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 subject.`, - }, - "subject_alternative_names": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `X.509 subject alternative names (SANs) extension.`, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "valid_from": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 notBefore validity period in milliseconds since epoch.`, - }, - "version": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - Description: `X.509 version.`, - }, - }, - }, - }, - }, - }, - }, - "key": { - Type: schema.TypeString, - ForceNew: true, - Optional: true, - Sensitive: true, - Description: `Private Key content, omit if uploading to truststore`, - }, - "password": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: `Password for the Private Key if it's encrypted`, - }, - "type": { - Type: schema.TypeString, - Computed: true, - Description: `Optional.Type of Alias`, - }, - }, - 
UseJSONNumber: true, - } -} - -func resourceApigeeKeystoresAliasesKeyCertFileCreate(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - buf := new(bytes.Buffer) - bw := multipart.NewWriter(buf) - if key, ok := d.GetOkExists("key"); ok { - keyFilePartWriter, _ := bw.CreateFormField("keyFile") - keyFilePartWriter.Write([]byte(key.(string))) - } - if password, ok := d.GetOkExists("password"); ok { - keyFilePartWriter, _ := bw.CreateFormField("password") - keyFilePartWriter.Write([]byte(password.(string))) - } - certFilePartWriter, _ := bw.CreateFormField("certFile") - certFilePartWriter.Write([]byte(d.Get("cert").(string))) - bw.Close() - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases?format=keycertfile&alias={{alias}}&ignoreExpiryValidation=true") - if err != nil { - return err - } - - log.Printf("[DEBUG] Creating new KeystoresAliasesKeyCertFile") - billingProject := "" - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := sendRequestRawBodyWithTimeout(config, "POST", billingProject, url, userAgent, buf, "multipart/form-data; boundary="+bw.Boundary(), d.Timeout(schema.TimeoutCreate)) - if err != nil { - return fmt.Errorf("Error creating KeystoresAliasesKeyCertFile: %s", err) - } - - // Store the ID now - id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - log.Printf("[DEBUG] Finished creating KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) - - return resourceApigeeKeystoresAliasesKeyCertFileRead(d, 
meta) -} - -func resourceApigeeKeystoresAliasesKeyCertFileRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return err - } - - billingProject := "" - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("ApigeeKeystoresAliasesKeyCertFile %q", d.Id())) - } - - if err := d.Set("alias", flattenApigeeKeystoresAliasesKeyCertFileAlias(res["alias"], d, config)); err != nil { - return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) - } - - if err := d.Set("certs_info", flattenApigeeKeystoresAliasesKeyCertFileCertsInfo(res["certsInfo"], d, config)); err != nil { - return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) - } - if err := d.Set("type", flattenApigeeKeystoresAliasesKeyCertFileType(res["type"], d, config)); err != nil { - return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) - } - - return nil -} - -func resourceApigeeKeystoresAliasesKeyCertFileUpdate(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - billingProject := "" - - url, err := tpgresource.ReplaceVars(d, config, 
"{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}?ignoreExpiryValidation=true") - if err != nil { - return err - } - - log.Printf("[DEBUG] Updating KeystoresAliasesKeyCertFile %q", d.Id()) - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - buf := new(bytes.Buffer) - bw := multipart.NewWriter(buf) - certFilePartWriter, _ := bw.CreateFormField("certFile") - certFilePartWriter.Write([]byte(d.Get("cert").(string))) - bw.Close() - - res, err := sendRequestRawBodyWithTimeout(config, "PUT", billingProject, url, userAgent, buf, "multipart/form-data; boundary="+bw.Boundary(), d.Timeout(schema.TimeoutCreate)) - - if err != nil { - return fmt.Errorf("Error updating KeystoresAliasesKeyCertFile %q: %s", d.Id(), err) - } else { - log.Printf("[DEBUG] Finished updating KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) - } - - return resourceApigeeKeystoresAliasesKeyCertFileRead(d, meta) -} - -func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - billingProject := "" - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return err - } - - var obj map[string]interface{} - log.Printf("[DEBUG] Deleting KeystoresAliasesKeyCertFile %q", d.Id()) - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: billingProject, - RawURL: url, - UserAgent: 
userAgent, - Body: obj, - Timeout: d.Timeout(schema.TimeoutDelete), - }) - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, "KeystoresAliasesKeyCertFile") - } - - log.Printf("[DEBUG] Finished deleting KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) - return nil -} - -func resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - config := meta.(*transport_tpg.Config) - if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - }, d, config); err != nil { - return nil, err - } - - // Replace import id for the resource id - id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - return []*schema.ResourceData{d}, nil -} - -func flattenApigeeKeystoresAliasesKeyCertFileOrgId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileEnvironment(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileKeystore(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileAlias(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileKey(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFilePassword(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCert(v 
interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return nil - } - original := v.(map[string]interface{}) - if len(original) == 0 { - return nil - } - transformed := make(map[string]interface{}) - transformed["cert_info"] = - flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(original["certInfo"], d, config) - return []interface{}{transformed} -} -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - transformed := make([]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - transformed = append(transformed, map[string]interface{}{ - "version": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(original["version"], d, config), - "subject": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(original["subject"], d, config), - "issuer": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(original["issuer"], d, config), - "expiry_date": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(original["expiryDate"], d, config), - "valid_from": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(original["validFrom"], d, config), - "is_valid": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(original["isValid"], d, config), - "subject_alternative_names": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(original["subjectAlternativeNames"], d, config), - "sig_alg_name": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(original["sigAlgName"], d, 
config), - "public_key": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(original["publicKey"], d, config), - "basic_constraints": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(original["basicConstraints"], d, config), - "serial_number": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(original["serialNumber"], d, config), - }) - } - return transformed -} -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // Handles the string fixed64 format - if strVal, ok := v.(string); ok { - if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { - return intVal - } - } - - // number values are represented as float64 - if floatVal, ok := v.(float64); ok { - intVal := int(floatVal) - return intVal - } - - return v // let terraform core handle it otherwise -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func 
flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func expandApigeeKeystoresAliasesKeyCertFileOrgId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileEnvironment(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileKeystore(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileAlias(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileKey(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFilePassword(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCert(v interface{}, d 
tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfo(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - l := v.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil, nil - } - raw := l[0] - original := raw.(map[string]interface{}) - transformed := make(map[string]interface{}) - - transformedCertInfo, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(original["cert_info"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedCertInfo); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["certInfo"] = transformedCertInfo - } - - return transformed, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - l := v.([]interface{}) - req := make([]interface{}, 0, len(l)) - for _, raw := range l { - if raw == nil { - continue - } - original := raw.(map[string]interface{}) - transformed := make(map[string]interface{}) - - transformedVersion, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(original["version"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedVersion); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["version"] = transformedVersion - } - - transformedSubject, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(original["subject"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSubject); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["subject"] = transformedSubject - } - - transformedIssuer, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(original["issuer"], d, config) - if err != nil { - return nil, err - } else if val 
:= reflect.ValueOf(transformedIssuer); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["issuer"] = transformedIssuer - } - - transformedExpiryDate, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(original["expiry_date"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedExpiryDate); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["expiryDate"] = transformedExpiryDate - } - - transformedValidFrom, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(original["valid_from"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedValidFrom); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["validFrom"] = transformedValidFrom - } - - transformedIsValid, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(original["is_valid"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedIsValid); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["isValid"] = transformedIsValid - } - - transformedSubjectAlternativeNames, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(original["subject_alternative_names"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSubjectAlternativeNames); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["subjectAlternativeNames"] = transformedSubjectAlternativeNames - } - - transformedSigAlgName, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(original["sig_alg_name"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSigAlgName); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["sigAlgName"] = transformedSigAlgName - } - - transformedPublicKey, err := 
expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(original["public_key"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedPublicKey); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["publicKey"] = transformedPublicKey - } - - transformedBasicConstraints, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(original["basic_constraints"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedBasicConstraints); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["basicConstraints"] = transformedBasicConstraints - } - - transformedSerialNumber, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(original["serial_number"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSerialNumber); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["serialNumber"] = transformedSerialNumber - } - - req = append(req, transformed) - } - return req, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(v interface{}, d tpgresource.TerraformResourceData, 
config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} diff --git a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl new file mode 100644 index 000000000000..b08deb9e3c65 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl @@ -0,0 +1,187 @@ +package compute + +import ( + "context" + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + {{ if eq $.TargetVersionName 
`ga` }} + "google.golang.org/api/compute/v1" + {{- else }} + compute "google.golang.org/api/compute/v0.beta" + {{- end }} + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &ComputeNetworkFWDataSource{} + _ datasource.DataSourceWithConfigure = &ComputeNetworkFWDataSource{} +) + +// NewComputeNetworkFWDataSource is a helper function to simplify the provider implementation. +func NewComputeNetworkFWDataSource() datasource.DataSource { + return &ComputeNetworkFWDataSource{} +} + +// ComputeNetworkFWDataSource is the data source implementation. +type ComputeNetworkFWDataSource struct { + client *compute.Service + providerConfig *transport_tpg.Config +} + +type ComputeNetworkModel struct { + Id types.String `tfsdk:"id"` + Project types.String `tfsdk:"project"` + Name types.String `tfsdk:"name"` + Description types.String `tfsdk:"description"` + NetworkId types.Int64 `tfsdk:"network_id"` + NumericId types.String `tfsdk:"numeric_id"` + GatewayIpv4 types.String `tfsdk:"gateway_ipv4"` + InternalIpv6Range types.String `tfsdk:"internal_ipv6_range"` + SelfLink types.String `tfsdk:"self_link"` + // NetworkProfile types.String `tfsdk:"network_profile"` + // SubnetworksSelfLinks types.List `tfsdk:"subnetworks_self_links"` +} + +// Metadata returns the data source type name. 
+func (d *ComputeNetworkFWDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fw_compute_network" +} + +func (d *ComputeNetworkFWDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = p.NewComputeClient(p.UserAgent) + if resp.Diagnostics.HasError() { + return + } + d.providerConfig = p +} + +// Schema defines the schema for the data source. +func (d *ComputeNetworkFWDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "A data source to get network details.", + + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Description: `The project name.`, + MarkdownDescription: `The project name.`, + Optional: true, + }, + "name": schema.StringAttribute{ + Description: `The name of the Compute network.`, + MarkdownDescription: `The name of the Compute network.`, + Required: true, + }, + "description": schema.StringAttribute{ + Description: `The description of the network.`, + MarkdownDescription: `The description of the network.`, + Computed: true, + }, + "network_id": schema.Int64Attribute{ + Description: `The network ID.`, + MarkdownDescription: `The network ID.`, + Computed: true, + }, + "numeric_id": schema.StringAttribute{ + Description: `The numeric ID of the network. Deprecated in favor of network_id.`, + MarkdownDescription: `The numeric ID of the network. 
Deprecated in favor of network_id.`, + Computed: true, + DeprecationMessage: "`numeric_id` is deprecated and will be removed in a future major release. Use `network_id` instead.", + }, + "gateway_ipv4": schema.StringAttribute{ + Description: `The gateway address for default routing out of the network.`, + MarkdownDescription: `The gateway address for default routing out of the network.`, + Computed: true, + }, + "internal_ipv6_range": schema.StringAttribute{ + Description: `The internal ipv6 address range of the network.`, + MarkdownDescription: `The internal ipv6 address range of the network.`, + Computed: true, + }, + "self_link": schema.StringAttribute{ + Description: `The network self link.`, + MarkdownDescription: `The network self link.`, + Computed: true, + }, + // This is included for backwards compatibility with the original, SDK-implemented data source. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + }, + }, + } +} + +// Read refreshes the Terraform state with the latest data. +func (d *ComputeNetworkFWDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data ComputeNetworkModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + + project := fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // GET Request + clientResp, err := d.client.Networks.Get(project.ValueString(), data.Name.ValueString()).Do() + if err != nil { + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceComputeNetwork %q", data.Name.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + + tflog.Trace(ctx, "read compute network data source") + + // Put data in model + id := fmt.Sprintf("projects/%s/global/networks/%s", project.ValueString(), clientResp.Name) + data.Id = types.StringValue(id) + data.Description = types.StringValue(clientResp.Description) + data.NetworkId = types.Int64Value(int64(clientResp.Id)) + data.NumericId = types.StringValue(strconv.Itoa(int(clientResp.Id))) + data.GatewayIpv4 = types.StringValue(clientResp.GatewayIPv4) + data.InternalIpv6Range = types.StringValue(clientResp.InternalIpv6Range) + data.SelfLink = types.StringValue(clientResp.SelfLink) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} diff --git a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go new file mode 100644 index 000000000000..ae82326c793d --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go @@ -0,0 +1,86 @@ +package compute_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestAccDataSourceGoogleFWNetwork(t *testing.T) { + t.Parallel() + + networkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleNetworkFWConfig(networkName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleFWNetworkCheck("data.google_fw_compute_network.my_network", "google_compute_network.foobar"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFWNetworkCheck(data_source_name string, resource_name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[data_source_name] + if !ok { + return fmt.Errorf("root module has no resource called %s", data_source_name) + } + + rs, ok := s.RootModule().Resources[resource_name] + if !ok { + return fmt.Errorf("can't find %s in state", resource_name) + } + + ds_attr := ds.Primary.Attributes + rs_attr := rs.Primary.Attributes + network_attrs_to_test := []string{ + "id", + "name", + "network_id", + "numeric_id", + "description", + "internal_ipv6_range", + } + + for _, attr_to_check := range network_attrs_to_test { + if 
ds_attr[attr_to_check] != rs_attr[attr_to_check] { + return fmt.Errorf( + "%s is %s; want %s", + attr_to_check, + ds_attr[attr_to_check], + rs_attr[attr_to_check], + ) + } + } + + if !tpgresource.CompareSelfLinkOrResourceName("", ds_attr["self_link"], rs_attr["self_link"], nil) && ds_attr["self_link"] != rs_attr["self_link"] { + return fmt.Errorf("self link does not match: %s vs %s", ds_attr["self_link"], rs_attr["self_link"]) + } + + return nil + } +} + +func testAccDataSourceGoogleNetworkFWConfig(name string) string { + return fmt.Sprintf(` +resource "google_compute_network" "foobar" { + name = "%s" + description = "my-description" + enable_ula_internal_ipv6 = true + auto_create_subnetworks = false +} + +data "google_fw_compute_network" "my_network" { + name = google_compute_network.foobar.name +} +`, name) +} diff --git a/mmv1/third_party/terraform/services/compute/image.go b/mmv1/third_party/terraform/services/compute/image.go index c51547f83f35..7b020823db98 100644 --- a/mmv1/third_party/terraform/services/compute/image.go +++ b/mmv1/third_party/terraform/services/compute/image.go @@ -108,7 +108,6 @@ func ResolveImage(c *transport_tpg.Config, project, name, userAgent string) (str break } } - switch { case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz return name, nil diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl index a0659cc90aed..8210ff4ae2ec 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl @@ -2157,4 +2157,4 @@ resource "google_dataflow_flex_template_job" "flex_job" { `, context) } -{{- end }} +{{- end }} \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl b/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl index ad473a06dc1b..881f888f3188 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl @@ -828,4 +828,4 @@ resource "google_compute_network" "network" { } `, context) } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl index 0755b2f3740a..6b735ae83e81 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl @@ -139,7 +139,7 @@ func (d *GoogleFirebaseAndroidAppConfigDataSource) Read(ctx context.Context, req appName := fmt.Sprintf("projects/%s/androidApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl index 8dfdf61f9dc2..e64e5949608f 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl +++ 
b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl @@ -137,7 +137,7 @@ func (d *GoogleFirebaseAppleAppConfigDataSource) Read(ctx context.Context, req d appName := fmt.Sprintf("projects/%s/iosApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl index 7626b3d5b902..29891e565551 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl @@ -184,7 +184,7 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Read(ctx context.Context, req dat appName := fmt.Sprintf("projects/%s/webApps/%s/config", data.Project.ValueString(), data.WebAppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go 
b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go new file mode 100644 index 000000000000..685428eca2de --- /dev/null +++ b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go @@ -0,0 +1,383 @@ +package pubsublite + +import ( + "context" + "fmt" + "net/http" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/pubsublite/v1" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ resource.Resource = &GooglePubsubLiteReservationFWResource{} + _ resource.ResourceWithConfigure = &GooglePubsubLiteReservationFWResource{} +) + +// NewGooglePubsubLiteReservationResource is a helper function to simplify the provider implementation. +func NewGooglePubsubLiteReservationFWResource() resource.Resource { + return &GooglePubsubLiteReservationFWResource{} +} + +// GooglePubsubLiteReservationResource is the resource implementation. +type GooglePubsubLiteReservationFWResource struct { + client *pubsublite.Service + providerConfig *transport_tpg.Config +} + +type GooglePubsubLiteReservationModel struct { + Id types.String `tfsdk:"id"` + Project types.String `tfsdk:"project"` + Region types.String `tfsdk:"region"` + Name types.String `tfsdk:"name"` + ThroughputCapacity types.Int64 `tfsdk:"throughput_capacity"` +} + +// Metadata returns the resource type name. 
+func (d *GooglePubsubLiteReservationFWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fwprovider_pubsub_lite_reservation" +} + +func (d *GooglePubsubLiteReservationFWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.providerConfig = p +} + +// Schema defines the schema for the data source. +func (d *GooglePubsubLiteReservationFWResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "Pubsub Lite Reservation resource description", + + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Description: "The project id of the Pubsub Lite Reservation.", + MarkdownDescription: "The project id of the Pubsub Lite Reservation.", + Required: true, + }, + "region": schema.StringAttribute{ + Description: "The region of the Pubsub Lite Reservation.", + MarkdownDescription: "The region of the Pubsub Lite Reservation.", + Required: true, + }, + "name": schema.StringAttribute{ + Description: `The display name of the project.`, + MarkdownDescription: `The display name of the project.`, + Required: true, + }, + "throughput_capacity": schema.Int64Attribute{ + Description: `The reserved throughput capacity. Every unit of throughput capacity is equivalent to 1 MiB/s of published messages or 2 MiB/s of subscribed messages.`, + MarkdownDescription: `The reserved throughput capacity. 
Every unit of throughput capacity is equivalent to 1 MiB/s of published messages or 2 MiB/s of subscribed messages.`, + Required: true, + }, + // This is included for backwards compatibility with the original, SDK-implemented data source. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + }, + }, + } +} + +func (d *GooglePubsubLiteReservationFWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + obj["throughputCapacity"] = data.ThroughputCapacity.ValueInt64() + + data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := data.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = data.Project + schemaDefaultVals.Region = data.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubsubLiteBasePath}}projects/{{project}}/locations/{{region}}/reservations?reservationId={{name}}") + if resp.Diagnostics.HasError() { + return + } + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] 
Creating new Reservation: %#v", obj)) + + headers := make(http.Header) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "POST", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "create fwprovider google_pubsub_lite resource") + + // Put data in model + data.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/reservations/%s", data.Project.ValueString(), data.Region.ValueString(), data.Name.ValueString())) + data.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +// Read refreshes the Terraform state with the latest data. +func (d *GooglePubsubLiteReservationFWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := data.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = data.Project + schemaDefaultVals.Region = data.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") + + if resp.Diagnostics.HasError() { + return + } + + headers := make(http.Header) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "GET", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "read fwprovider google_pubsub_lite resource") + + // Put data in model + data.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/instances/%s", data.Project.ValueString(), data.Region.ValueString(), data.Name.ValueString())) + data.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} + +func (d *GooglePubsubLiteReservationFWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var plan, state GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + obj["throughputCapacity"] = plan.ThroughputCapacity.ValueInt64() + + plan.Project = fwresource.GetProjectFramework(plan.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + plan.Region = fwresource.GetRegionFramework(plan.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := plan.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = plan.Project + schemaDefaultVals.Region = plan.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") + + if resp.Diagnostics.HasError() { + return + } + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Updating Reservation: %#v", obj)) + + headers := make(http.Header) + + updateMask := []string{} + if !plan.ThroughputCapacity.Equal(state.ThroughputCapacity) { + updateMask = append(updateMask, "throughputCapacity") + } + + // updateMask 
is a URL parameter but not present in the schema, so ReplaceVars + // won't set it + var err error + url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + resp.Diagnostics.AddError("Error when sending HTTP request: ", err.Error()) + return + } + + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "PATCH", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "update fwprovider google_pubsub_lite resource") + + // Put data in model + plan.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/instances/%s", plan.Project.ValueString(), plan.Region.ValueString(), plan.Name.ValueString())) + plan.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} +func (d *GooglePubsubLiteReservationFWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := data.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = data.Project + schemaDefaultVals.Region = data.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") + + if resp.Diagnostics.HasError() { + return + } + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleting Reservation: %#v", obj)) + + headers := make(http.Header) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "DELETE", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleted Reservation: %#v", res)) +} diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go new file mode 100644 index 000000000000..e4507dfaec41 --- /dev/null +++ b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go @@ -0,0 +1,56 @@ +package pubsublite_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + 
"github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccResourceFWPubsubLiteReservation_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccResourceFWPubsubLiteReservation_basic(context), + }, + { + Config: testAccResourceFWPubsubLiteReservation_upgrade(context), + }, + }, + }) +} + +func testAccResourceFWPubsubLiteReservation_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_fwprovider_pubsub_lite_reservation" "basic" { + name = "tf-test-example-reservation%{random_suffix}" + region = "us-central1" + project = data.google_project.project.number + throughput_capacity = 2 +} + +data "google_project" "project" { +} +`, context) +} + +func testAccResourceFWPubsubLiteReservation_upgrade(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_fwprovider_pubsub_lite_reservation" "basic" { + name = "tf-test-example-reservation%{random_suffix}" + region = "us-central1" + project = data.google_project.project.number + throughput_capacity = 3 +} + +data "google_project" "project" { +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go new file mode 100644 index 000000000000..5e536900be96 --- /dev/null +++ b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go @@ -0,0 +1,507 @@ +package sql + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + 
"github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + "github.com/hashicorp/terraform-provider-google/google/transport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + sqladmin "google.golang.org/api/sqladmin/v1beta4" +) + +var ( + _ resource.Resource = &SQLUserFWResource{} + _ resource.ResourceWithConfigure = &SQLUserFWResource{} +) + +func NewSQLUserFWResource() resource.Resource { + return &SQLUserFWResource{} +} + +type SQLUserFWResource struct { + client *sqladmin.Service + providerConfig *transport_tpg.Config +} + +type SQLUserModel struct { + Id types.String `tfsdk:"id"` + Project types.String `tfsdk:"project"` + Name types.String `tfsdk:"name"` + Host types.String `tfsdk:"host"` + Instance types.String `tfsdk:"instance"` + Password types.String `tfsdk:"password"` + // PasswordWO types.String `tfsdk:"password_wo"` + // PasswordWOVersion types.String `tfsdk:"password_wo_version"` + Type types.String `tfsdk:"type"` + // SqlServerUserDetails types.List `tfsdk:"sql_server_user_details"` + // PasswordPolicy types.List `tfsdk:"password_policy"` + // DeletionPolicy types.String `tfsdk:"deletion_policy"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +// Metadata returns the resource type name. 
+func (d *SQLUserFWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fw_sql_user" +} + +func (r *SQLUserFWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + r.client = p.NewSqlAdminClient(p.UserAgent) + if resp.Diagnostics.HasError() { + return + } + r.providerConfig = p +} + +func (d *SQLUserFWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "A resource to represent a SQL User object.", + + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, + "host": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, + "instance": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "name": schema.StringAttribute{ + Description: `The name of the user. 
Changing this forces a new resource to be created.`, + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + SQLUserNameIAMPlanModifier(), + }, + }, + "password": schema.StringAttribute{ + Optional: true, + Sensitive: true, + }, + "type": schema.StringAttribute{ + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + // TODO DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("BUILT_IN"), + }, + }, + // This is included for backwards compatibility with the original, SDK-implemented resource. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + }, + Blocks: map[string]schema.Block{ + "timeouts": timeouts.Block(ctx, timeouts.Opts{ + Create: true, + }), + }, + } +} + +func (r *SQLUserFWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data SQLUserModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + project := fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + nameData, diags := data.Name.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + instanceData, diags := data.Instance.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + hostData, diags := data.Host.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + typeData, diags := data.Type.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + passwordData, diags := data.Password.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + user := &sqladmin.User{ + Name: nameData.ValueString(), + Instance: instanceData.ValueString(), + Password: passwordData.ValueString(), + Host: hostData.ValueString(), + Type: typeData.ValueString(), + } + + transport_tpg.MutexStore.Lock(instanceMutexKey(project.ValueString(), instanceData.ValueString())) + defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project.ValueString(), instanceData.ValueString())) + + r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) + + // TODO host check logic + + var op *sqladmin.Operation + var err error + insertFunc := func() error { + op, err = r.client.Users.Insert(project.ValueString(), instanceData.ValueString(), + user).Do() + return err + } + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: insertFunc, + Timeout: createTimeout, + }) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error, failed to insert "+ + "user %s into instance %s", nameData.ValueString(), instanceData.ValueString()), err.Error()) + return + } + + err = SqlAdminOperationWaitTime(r.providerConfig, op, project.ValueString(), "Insert User", r.client.UserAgent, createTimeout) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error, failure waiting to insert "+ + "user %s into instance %s", nameData.ValueString(), instanceData.ValueString()), err.Error()) + return + } + + tflog.Trace(ctx, "created sql user resource") + + // This will include a double-slash (//) for postgres instances, + // for 
which user.Host is an empty string. That's okay. + data.Id = types.StringValue(fmt.Sprintf("%s/%s/%s", user.Name, user.Host, user.Instance)) + data.Project = project + + // read back sql user + r.SQLUserRefresh(ctx, &data, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *SQLUserFWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data SQLUserModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) + + tflog.Trace(ctx, "read sql user resource") + + // read back sql user + r.SQLUserRefresh(ctx, &data, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *SQLUserFWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var old, new SQLUserModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.State.Get(ctx, &old)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.Plan.Get(ctx, &new)...) 
+ if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) + + if !old.Password.Equal(new.Password) { + project := new.Project.ValueString() + instance := new.Instance.ValueString() + name := new.Name.ValueString() + host := new.Host.ValueString() + password := new.Password.ValueString() + + updateTimeout, diags := new.Timeouts.Update(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + user := &sqladmin.User{ + Name: name, + Instance: instance, + Password: password, + } + transport_tpg.MutexStore.Lock(instanceMutexKey(project, instance)) + defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance)) + var op *sqladmin.Operation + var err error + updateFunc := func() error { + op, err = r.client.Users.Update(project, instance, user).Host(host).Name(name).Do() + return err + } + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: updateFunc, + Timeout: updateTimeout, + }) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("failed to update"+ + "user %s in instance %s", name, instance), err.Error()) + return + } + + err = SqlAdminOperationWaitTime(r.providerConfig, op, project, "Update User", r.client.UserAgent, updateTimeout) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("failure waiting for update"+ + "user %s in instance %s", name, instance), err.Error()) + return + } + + // read back sql user + r.SQLUserRefresh(ctx, &new, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &new)...) 
+} + +func (r *SQLUserFWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data SQLUserModel + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + project := data.Project.ValueString() + instance := data.Instance.ValueString() + name := data.Name.ValueString() + host := data.Host.ValueString() + + deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + transport_tpg.MutexStore.Lock(instanceMutexKey(project, instance)) + defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance)) + var op *sqladmin.Operation + var err error + deleteFunc := func() error { + op, err = r.client.Users.Delete(project, instance).Host(host).Name(name).Do() + return err + } + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: deleteFunc, + Timeout: deleteTimeout, + }) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("failed to delete"+ + "user %s in instance %s", name, instance), err.Error()) + return + } + + err = SqlAdminOperationWaitTime(r.providerConfig, op, project, "Delete User", r.client.UserAgent, deleteTimeout) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error, failure waiting to delete "+ + "user %s", name), err.Error()) + return + } +} + +func (r *SQLUserFWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + idParts := strings.Split(req.ID, "/") + + // TODO recreate all import cases + if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" { + resp.Diagnostics.AddError( + "Unexpected Import Identifier", + fmt.Sprintf("Expected import identifier with format: project/instance/host/name. 
Got: %q", req.ID), + ) + return + } + + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project"), idParts[0])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance"), idParts[1])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("host"), idParts[2])...) + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), idParts[3])...) +} + +func (r *SQLUserFWResource) SQLUserRefresh(ctx context.Context, data *SQLUserModel, state *tfsdk.State, diag *diag.Diagnostics) { + userReadResp, err := r.client.Users.Get(data.Project.ValueString(), data.Instance.ValueString(), data.Name.ValueString()).Host(data.Host.ValueString()).Do() + if err != nil { + // Treat HTTP 404 Not Found status as a signal to recreate resource + // and return early + if userReadResp != nil && transport.IsGoogleApiErrorWithCode(err, userReadResp.HTTPStatusCode) { + tflog.Trace(ctx, "sql user resource not found, removing from state") + state.RemoveResource(ctx) + return + } + diag.AddError(fmt.Sprintf("Error, failure waiting to read "+ + "user %s", data.Name.ValueString()), err.Error()) + return + } + + id := fmt.Sprintf("projects/%s/global/networks/%s", userReadResp.Project, userReadResp.Name) + data.Id = types.StringValue(id) + data.Project = types.StringValue(userReadResp.Project) + data.Instance = types.StringValue(userReadResp.Instance) + if userReadResp.Host != "" { + data.Host = types.StringValue(userReadResp.Host) + } + if userReadResp.Type != "" { + data.Type = types.StringValue(userReadResp.Type) + } +} + +// Plan Modifiers +func SQLUserNameIAMPlanModifier() planmodifier.String { + return &sqlUserNameIAMPlanModifier{} +} + +type sqlUserNameIAMPlanModifier struct { +} + +func (d *sqlUserNameIAMPlanModifier) Description(ctx context.Context) string { + return "Suppresses name diffs for IAM user types." 
+} +func (d *sqlUserNameIAMPlanModifier) MarkdownDescription(ctx context.Context) string { + return d.Description(ctx) +} + +// Plan modifier to emulate the SDK diffSuppressIamUserName +func (d *sqlUserNameIAMPlanModifier) PlanModifyString(ctx context.Context, req planmodifier.StringRequest, resp *planmodifier.StringResponse) { + // Retrieve relevant fields + var oldName types.String + diags := req.State.GetAttribute(ctx, path.Root("name"), &oldName) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var newName types.String + diags = req.Plan.GetAttribute(ctx, path.Root("name"), &newName) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var userType types.String + diags = req.Plan.GetAttribute(ctx, path.Root("type"), &userType) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + // Old diff suppress logic + strippedNewName := strings.Split(newName.ValueString(), "@")[0] + + if oldName.ValueString() == strippedNewName && strings.Contains(userType.ValueString(), "IAM") { + // Suppress the diff by setting the planned value to the old value + resp.PlanValue = oldName + } +} diff --git a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go new file mode 100644 index 000000000000..80e78c4a0316 --- /dev/null +++ b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go @@ -0,0 +1,90 @@ +package sql_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccSqlUserFW_mysql(t *testing.T) { + // Multiple fine-grained resources + acctest.SkipIfVcr(t) + t.Parallel() + + instance := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ + 
PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlUserDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlUserFW_mysql(instance, "password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user1"), + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user2"), + ), + }, + { + // Update password + Config: testGoogleSqlUserFW_mysql(instance, "new_password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user1"), + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user2"), + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user3"), + ), + }, + { + ResourceName: "google_fw_sql_user.user2", + ImportStateId: fmt.Sprintf("%s/%s/gmail.com/admin", envvar.GetTestProjectFromEnv(), instance), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + { + ResourceName: "google_fw_sql_user.user3", + ImportStateId: fmt.Sprintf("%s/%s/10.0.0.0/24/admin", envvar.GetTestProjectFromEnv(), instance), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + }, + }) +} + +func testGoogleSqlUserFW_mysql(instance, password string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_5_7" + deletion_protection = false + settings { + tier = "db-f1-micro" + } +} + +resource "google_fw_sql_user" "user1" { + name = "admin" + instance = google_sql_database_instance.instance.name + host = "google.com" + password = "%s" +} + +resource "google_fw_sql_user" "user2" { + name = "admin" + instance = google_sql_database_instance.instance.name + host = "gmail.com" + password = "hunter2" +} + +resource "google_fw_sql_user" "user3" { + name = "admin" + instance = 
google_sql_database_instance.instance.name + host = "10.0.0.0/24" + password = "hunter3" +} +`, instance, password) +} diff --git a/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go b/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go new file mode 100644 index 000000000000..e8f5fe15be0f --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go @@ -0,0 +1,325 @@ +package storage + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/mapplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "google.golang.org/api/googleapi" + "google.golang.org/api/storage/v1" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + "github.com/hashicorp/terraform-provider-google/google/fwvalidators" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var ( + _ resource.Resource = &storageNotificationResource{} + _ resource.ResourceWithConfigure = 
&storageNotificationResource{} + _ resource.ResourceWithImportState = &storageNotificationResource{} + _ resource.ResourceWithUpgradeState = &storageNotificationResource{} +) + +func NewStorageNotificationResource() resource.Resource { + return &storageNotificationResource{} +} + +type storageNotificationResource struct { + config *transport_tpg.Config +} + +type storageNotificationModel struct { + Bucket types.String `tfsdk:"bucket"` + PayloadFormat types.String `tfsdk:"payload_format"` + Topic types.String `tfsdk:"topic"` + CustomAttributes types.Map `tfsdk:"custom_attributes"` + EventTypes types.Set `tfsdk:"event_types"` + ObjectNamePrefix types.String `tfsdk:"object_name_prefix"` + NotificationID types.String `tfsdk:"notification_id"` + SelfLink types.String `tfsdk:"self_link"` + Id types.String `tfsdk:"id"` +} + +func (r *storageNotificationResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_storage_notification" +} + +func (r *storageNotificationResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + if req.ProviderData == nil { + return + } + + config, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + r.config = config +} + +func (r *storageNotificationResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Creates a new notification configuration on a specified bucket, establishing a flow of event notifications from GCS to a Cloud Pub/Sub topic.", + Version: 1, + Attributes: map[string]schema.Attribute{ + "bucket": schema.StringAttribute{ + Required: true, + Description: "The name of the bucket.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "payload_format": schema.StringAttribute{ + Required: true, + Description: `The desired content of the Payload. One of "JSON_API_V1" or "NONE".`, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.OneOf("JSON_API_V1", "NONE"), + }, + }, + "topic": schema.StringAttribute{ + Required: true, + Description: "The Cloud Pub/Sub topic to which this subscription publishes.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + fwvalidators.NewTopicPrefixValidator(), + }, + }, + "custom_attributes": schema.MapAttribute{ + ElementType: types.StringType, + Optional: true, + Description: "A set of key/value attribute pairs to attach to each Cloud Pub/Sub message published for this notification subscription.", + PlanModifiers: []planmodifier.Map{ + mapplanmodifier.RequiresReplace(), + }, + }, + "event_types": schema.SetAttribute{ + ElementType: types.StringType, + Optional: true, + Description: `List of event type filters for this notification config. If not specified, Cloud Storage will send notifications for all event types. 
The valid types are: "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"`, + PlanModifiers: []planmodifier.Set{ + setplanmodifier.RequiresReplace(), + }, + Validators: []validator.Set{ + fwvalidators.StringValuesInSet( + "OBJECT_FINALIZE", + "OBJECT_METADATA_UPDATE", + "OBJECT_DELETE", + "OBJECT_ARCHIVE", + ), + }, + }, + "object_name_prefix": schema.StringAttribute{ + Optional: true, + Description: "Specifies a prefix path filter for this notification config. Cloud Storage will only send notifications for objects in this bucket whose names begin with the specified prefix.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "notification_id": schema.StringAttribute{ + Computed: true, + Description: "The ID of the created notification.", + }, + "self_link": schema.StringAttribute{ + Computed: true, + Description: "The URI of the created resource.", + }, + "id": schema.StringAttribute{ + Computed: true, + }, + }, + } +} + +func (r *storageNotificationResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan storageNotificationModel + var metaData *fwmodels.ProviderMetaModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + computedTopicName := pubsub.GetComputedTopicName("", plan.Topic.ValueString()) + + var customAttrs map[string]string + if !plan.CustomAttributes.IsNull() && !plan.CustomAttributes.IsUnknown() { + resp.Diagnostics.Append(plan.CustomAttributes.ElementsAs(ctx, &customAttrs, false)...) + if resp.Diagnostics.HasError() { + return + } + } + + var eventTypes []string + if !plan.EventTypes.IsNull() && !plan.EventTypes.IsUnknown() { + resp.Diagnostics.Append(plan.EventTypes.ElementsAs(ctx, &eventTypes, false)...) 
+ if resp.Diagnostics.HasError() { + return + } + } + + storageNotification := &storage.Notification{ + CustomAttributes: customAttrs, + EventTypes: eventTypes, + ObjectNamePrefix: plan.ObjectNamePrefix.ValueString(), + PayloadFormat: plan.PayloadFormat.ValueString(), + Topic: computedTopicName, + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) + bucket := plan.Bucket.ValueString() + + res, err := r.config.NewStorageClient(userAgent).Notifications.Insert(bucket, storageNotification).Do() + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error creating notification config for bucket %s", bucket), err.Error()) + return + } + + plan.Id = types.StringValue(fmt.Sprintf("%s/notificationConfigs/%s", bucket, res.Id)) + tflog.Info(ctx, "Created Storage Notification", map[string]interface{}{"id": plan.Id.ValueString()}) + + found := r.refresh(ctx, &plan, metaData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + if !found { + resp.Diagnostics.AddError("Newly created resource not found", "The Storage Notification was not found immediately after creation.") + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *storageNotificationResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state storageNotificationModel + var metaData *fwmodels.ProviderMetaModel + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + found := r.refresh(ctx, &state, metaData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + if !found { + tflog.Warn(ctx, "Storage Notification not found, removing from state.", map[string]interface{}{"id": state.Id.ValueString()}) + resp.State.RemoveResource(ctx) + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) 
+} + +// Update is not supported for this resource. +func (r *storageNotificationResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + // This resource is immutable and all configurable attributes are marked with `RequiresReplace`. + // This function should not get called. +} + +func (r *storageNotificationResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var state storageNotificationModel + var metaData *fwmodels.ProviderMetaModel + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + bucket, notificationID, err := ParseStorageNotificationID(state.Id.ValueString()) + if err != nil { + resp.Diagnostics.AddError("Invalid resource ID", err.Error()) + return + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) + + err = r.config.NewStorageClient(userAgent).Notifications.Delete(bucket, notificationID).Do() + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + // Resource is gone. This is a successful deletion. 
+ return + } + resp.Diagnostics.AddError(fmt.Sprintf("Error deleting notification configuration %s for bucket %s", notificationID, bucket), err.Error()) + return + } +} + +func (r *storageNotificationResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +func (r *storageNotificationResource) refresh(ctx context.Context, model *storageNotificationModel, metaData *fwmodels.ProviderMetaModel, diags *diag.Diagnostics) bool { + bucket, notificationID, err := ParseStorageNotificationID(model.Id.ValueString()) + if err != nil { + diags.AddError("Invalid resource ID", err.Error()) + return false + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) + + res, err := r.config.NewStorageClient(userAgent).Notifications.Get(bucket, notificationID).Do() + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return false + } + diags.AddError("Error reading Storage Notification", err.Error()) + return false + } + + model.Bucket = types.StringValue(bucket) + model.NotificationID = types.StringValue(notificationID) + model.SelfLink = types.StringValue(res.SelfLink) + model.PayloadFormat = types.StringValue(res.PayloadFormat) + + configuredObjectNamePrefix := model.ObjectNamePrefix + apiObjectNamePrefix := res.ObjectNamePrefix + model.ObjectNamePrefix = fwresource.FlattenStringEmptyToNull(configuredObjectNamePrefix, apiObjectNamePrefix) + + // trim the fully qualified prefix + apiValue := res.Topic + model.Topic = types.StringValue(strings.TrimPrefix(apiValue, "//pubsub.googleapis.com/")) + + var eventTypesDiags diag.Diagnostics + model.EventTypes, eventTypesDiags = types.SetValueFrom(ctx, types.StringType, res.EventTypes) + diags.Append(eventTypesDiags...) 
+ + var customAttrsDiags diag.Diagnostics + model.CustomAttributes, customAttrsDiags = types.MapValueFrom(ctx, types.StringType, res.CustomAttributes) + diags.Append(customAttrsDiags...) + + return !diags.HasError() +} + +// ParseStorageNotificationID replicates the logic from the SDKv2 helper. +func ParseStorageNotificationID(id string) (bucket string, notificationID string, err error) { + parts := strings.Split(id, "/") + if len(parts) != 3 || parts[1] != "notificationConfigs" { + return "", "", fmt.Errorf("invalid storage notification ID format, expected '{bucket}/notificationConfigs/{notification_id}', got '%s'", id) + } + return parts[0], parts[2], nil +} diff --git a/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go b/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go new file mode 100644 index 000000000000..d492237f7116 --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go @@ -0,0 +1,100 @@ +package storage + +import ( + "context" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +// Represents the schema of the SDKv2 state +type storageNotificationModelV0 struct { + Bucket types.String `tfsdk:"bucket"` + PayloadFormat types.String `tfsdk:"payload_format"` + Topic types.String `tfsdk:"topic"` + CustomAttributes types.Map `tfsdk:"custom_attributes"` + EventTypes types.Set `tfsdk:"event_types"` + ObjectNamePrefix types.String `tfsdk:"object_name_prefix"` + NotificationID types.String `tfsdk:"notification_id"` + SelfLink types.String `tfsdk:"self_link"` + Id types.String `tfsdk:"id"` +} + +func (r *storageNotificationResource) UpgradeState(ctx context.Context) map[int64]resource.StateUpgrader { + return map[int64]resource.StateUpgrader{ + 0: { + PriorSchema: &schema.Schema{ + 
Attributes: map[string]schema.Attribute{ + "bucket": schema.StringAttribute{ + Required: true, + }, + "payload_format": schema.StringAttribute{ + Required: true, + }, + "topic": schema.StringAttribute{ + Required: true, + }, + "custom_attributes": schema.MapAttribute{ + ElementType: types.StringType, + Optional: true, + }, + "event_types": schema.SetAttribute{ + ElementType: types.StringType, + Optional: true, + }, + "object_name_prefix": schema.StringAttribute{ + Optional: true, + }, + "notification_id": schema.StringAttribute{ + Computed: true, + }, + "self_link": schema.StringAttribute{ + Computed: true, + }, + "id": schema.StringAttribute{ + Computed: true, + }, + }, + }, + StateUpgrader: func(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) { + var priorStateData storageNotificationModelV0 + + resp.Diagnostics.Append(req.State.Get(ctx, &priorStateData)...) + if resp.Diagnostics.HasError() { + return + } + + upgradedStateData := storageNotificationModel{ + Bucket: priorStateData.Bucket, + PayloadFormat: priorStateData.PayloadFormat, + CustomAttributes: priorStateData.CustomAttributes, + EventTypes: priorStateData.EventTypes, + ObjectNamePrefix: priorStateData.ObjectNamePrefix, + NotificationID: priorStateData.NotificationID, + SelfLink: priorStateData.SelfLink, + Id: priorStateData.Id, + } + + // topic - trim the fully qualified prefix + if !priorStateData.Topic.IsNull() && !priorStateData.Topic.IsUnknown() { + apiTopic := priorStateData.Topic.ValueString() + transformedTopic := strings.TrimPrefix(apiTopic, "//pubsub.googleapis.com/") + upgradedStateData.Topic = types.StringValue(transformedTopic) + } else { + upgradedStateData.Topic = priorStateData.Topic + } + + // ObjectNamePrefix - normalize "" to Null + if !priorStateData.ObjectNamePrefix.IsNull() && !priorStateData.ObjectNamePrefix.IsUnknown() && priorStateData.ObjectNamePrefix.ValueString() == "" { + upgradedStateData.ObjectNamePrefix = types.StringNull() + } 
else { + upgradedStateData.ObjectNamePrefix = priorStateData.ObjectNamePrefix + } + + resp.Diagnostics.Append(resp.State.Set(ctx, upgradedStateData)...) + }, + }, + } +} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_notification.go b/mmv1/third_party/terraform/services/storage/resource_storage_notification.go deleted file mode 100644 index 1bd4d46c84e5..000000000000 --- a/mmv1/third_party/terraform/services/storage/resource_storage_notification.go +++ /dev/null @@ -1,196 +0,0 @@ -package storage - -import ( - "fmt" - "strings" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - - "github.com/hashicorp/terraform-provider-google/google/services/pubsub" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "google.golang.org/api/storage/v1" -) - -func ResourceStorageNotification() *schema.Resource { - return &schema.Resource{ - Create: resourceStorageNotificationCreate, - Read: resourceStorageNotificationRead, - Delete: resourceStorageNotificationDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Schema: map[string]*schema.Schema{ - "bucket": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - }, - - "payload_format": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"JSON_API_V1", "NONE"}, false), - Description: `The desired content of the Payload. One of "JSON_API_V1" or "NONE".`, - }, - - "topic": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The Cloud Pub/Sub topic to which this subscription publishes. 
Expects either the topic name, assumed to belong to the default GCP provider project, or the project-level name, i.e. projects/my-gcp-project/topics/my-topic or my-topic. If the project is not set in the provider, you will need to use the project-level name.`, - }, - - "custom_attributes": { - Type: schema.TypeMap, - Optional: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: ` A set of key/value attribute pairs to attach to each Cloud Pub/Sub message published for this notification subscription`, - }, - - "event_types": { - Type: schema.TypeSet, - Optional: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringInSlice([]string{ - "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"}, - false), - }, - Description: `List of event type filters for this notification config. If not specified, Cloud Storage will send notifications for all event types. The valid types are: "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"`, - }, - - "object_name_prefix": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: `Specifies a prefix path filter for this notification config. 
Cloud Storage will only send notifications for objects in this bucket whose names begin with the specified prefix.`, - }, - - "notification_id": { - Type: schema.TypeString, - Computed: true, - Description: `The ID of the created notification.`, - }, - - "self_link": { - Type: schema.TypeString, - Computed: true, - Description: `The URI of the created resource.`, - }, - }, - UseJSONNumber: true, - } -} - -func resourceStorageNotificationCreate(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - bucket := d.Get("bucket").(string) - - topicName := d.Get("topic").(string) - computedTopicName := pubsub.GetComputedTopicName("", topicName) - if computedTopicName != topicName { - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - computedTopicName = pubsub.GetComputedTopicName(project, topicName) - } - - storageNotification := &storage.Notification{ - CustomAttributes: tpgresource.ExpandStringMap(d, "custom_attributes"), - EventTypes: tpgresource.ConvertStringSet(d.Get("event_types").(*schema.Set)), - ObjectNamePrefix: d.Get("object_name_prefix").(string), - PayloadFormat: d.Get("payload_format").(string), - Topic: computedTopicName, - } - - res, err := config.NewStorageClient(userAgent).Notifications.Insert(bucket, storageNotification).Do() - if err != nil { - return fmt.Errorf("Error creating notification config for bucket %s: %v", bucket, err) - } - - d.SetId(fmt.Sprintf("%s/notificationConfigs/%s", bucket, res.Id)) - - return resourceStorageNotificationRead(d, meta) -} - -func resourceStorageNotificationRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - bucket, notificationID := ResourceStorageNotificationParseID(d.Id()) - 
- res, err := config.NewStorageClient(userAgent).Notifications.Get(bucket, notificationID).Do() - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Notification configuration %s for bucket %s", notificationID, bucket)) - } - - if err := d.Set("bucket", bucket); err != nil { - return fmt.Errorf("Error setting bucket: %s", err) - } - if err := d.Set("payload_format", res.PayloadFormat); err != nil { - return fmt.Errorf("Error setting payload_format: %s", err) - } - if err := d.Set("topic", res.Topic); err != nil { - return fmt.Errorf("Error setting topic: %s", err) - } - if err := d.Set("object_name_prefix", res.ObjectNamePrefix); err != nil { - return fmt.Errorf("Error setting object_name_prefix: %s", err) - } - if err := d.Set("event_types", res.EventTypes); err != nil { - return fmt.Errorf("Error setting event_types: %s", err) - } - if err := d.Set("notification_id", notificationID); err != nil { - return fmt.Errorf("Error setting notification_id: %s", err) - } - if err := d.Set("self_link", res.SelfLink); err != nil { - return fmt.Errorf("Error setting self_link: %s", err) - } - if err := d.Set("custom_attributes", res.CustomAttributes); err != nil { - return fmt.Errorf("Error setting custom_attributes: %s", err) - } - - return nil -} - -func resourceStorageNotificationDelete(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - bucket, notificationID := ResourceStorageNotificationParseID(d.Id()) - - err = config.NewStorageClient(userAgent).Notifications.Delete(bucket, notificationID).Do() - if err != nil { - return fmt.Errorf("Error deleting notification configuration %s for bucket %s: %v", notificationID, bucket, err) - } - - return nil -} - -func ResourceStorageNotificationParseID(id string) (string, string) { - //bucket, NotificationID - parts := strings.Split(id, "/") - - return 
parts[0], parts[2] -} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go index a3650a829875..1e8ffccc5b90 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go @@ -27,7 +27,7 @@ func TestAccStorageNotification_basic(t *testing.T) { var notification storage.Notification bucketName := acctest.TestBucketName(t) topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt(t)) - topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + topic := fmt.Sprintf("projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -71,7 +71,7 @@ func TestAccStorageNotification_withEventsAndAttributes(t *testing.T) { var notification storage.Notification bucketName := acctest.TestBucketName(t) topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt(t)) - topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + topic := fmt.Sprintf("projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) eventType1 := "OBJECT_FINALIZE" eventType2 := "OBJECT_ARCHIVE" @@ -115,9 +115,12 @@ func testAccStorageNotificationDestroyProducer(t *testing.T) func(s *terraform.S continue } - bucket, notificationID := tpgstorage.ResourceStorageNotificationParseID(rs.Primary.ID) + bucket, notificationID, err := tpgstorage.ParseStorageNotificationID(rs.Primary.ID) + if err != nil { + return err + } - _, err := config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() + _, err = config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() if err == nil { return fmt.Errorf("Notification 
configuration still exists") } @@ -140,7 +143,10 @@ func testAccCheckStorageNotificationExists(t *testing.T, resource string, notifi config := acctest.GoogleProviderConfig(t) - bucket, notificationID := tpgstorage.ResourceStorageNotificationParseID(rs.Primary.ID) + bucket, notificationID, err := tpgstorage.ParseStorageNotificationID(rs.Primary.ID) + if err != nil { + return err + } found, err := config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() if err != nil { diff --git a/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown b/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown index 2660aaf170d6..1424e2a1b351 100644 --- a/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown @@ -66,7 +66,7 @@ In addition to the arguments listed above, the following computed attributes are Optional.Type of Alias -The `certs_info` block contains: +The `certs_info` list contains: * `cert_info` - (Output) From d3ecbb2c9de3684bef1d206ebd4be6cd91fcc45f Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 25 Aug 2025 14:29:03 -0700 Subject: [PATCH 831/884] Fix 7.0.0 upgrade guide (#14959) --- .../guides/version_7_upgrade.html.markdown | 135 +++++++++--------- 1 file changed, 66 insertions(+), 69 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b66bec6cc4d8..e38de6f3bb47 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -93,22 +93,12 @@ terraform { <<<<<<< HEAD ## Provider -### Provider-level change example header - -Description of the change and how users 
should adjust their configuration (if needed). - ### Resource import formats have improved validation Throughout the provider there were many resources which erroneously gave false positives to poorly formatted import input if a subset of the provided input was valid to their configured import formats. All GCP resource IDs supplied to "terraform import" must match the documentation specified import formats exactly. ## Datasources -## Datasource: `google_product_datasource` - -### Datasource-level change example header - -Description of the change and how users should adjust their configuration (if needed). - ## Datasource: `google_service_account_key` ### `project` is now removed @@ -140,11 +130,16 @@ Use `google_beyondcorp_security_gateway_application` instead. `public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now. +## Resource: `google_beyondcorp_application` is now removed + +`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. +Use `google_beyondcorp_security_gateway_application` instead. + ## Resource: `google_bigquery_table` ### `view.use_legacy_sql` no longer has a default value of `True` -The `view.use_legacy_sql` field no longer has a default value. Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For a new view, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details. 
+The `view.use_legacy_sql` field no longer has a default value. Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For newly created views, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details. ## Resource: `google_bigtable_table_iam_binding` @@ -188,32 +183,6 @@ The `view.use_legacy_sql` field no longer has a default value. Configurations th `enable_flow_logs` has been removed in favor of `log_config`. -## Resource: `google_compute_instance_template` - -### The resource will no longer use hardcoded values - -`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. - -## Resource: `google_compute_region_instance_template` - -### The resource will no longer use hardcoded values - -`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. - -## Resource: `google_notebooks_location` is now removed - -This resource is not functional. - -## Resource: `google_storage_bucket` - -### `retention_period` changed to `string` data type - -`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. 
- -Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. - -To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. - ## Resource: `google_gke_hub_feature_membership` ### `configmanagement.binauthz` is now removed @@ -241,40 +210,30 @@ Remove `post_startup_script_config` from your configuration after upgrade. ### Exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` must be set -At least one must be set, and setting both would make it unclear which was being used. +Setting exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` is now enforced in order to avoid situations where it is unclear which was being used. >>>>>>> 7b15bdcb5 (Standardized required_with behavior for write-only fields (#14941)) ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required -`load_balancing_scheme` is now a required field. - -## Resource: `google_storage_transfer_job` - -### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." - -### `transfer_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." +`load_balancing_scheme` is now a required field. This field was already required for resource functionality so no change to your configuration should be necessary. -### `replication_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." 
- -### `replication_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." - -## Resource: `google_cloudfunctions2_function` +## Resource: `google_notebooks_location` is now removed -### `event_trigger.event_type` is now required +This resource is not functional and can safely be removed from your configuration. -The `event_type` field is now required when `event_trigger` is configured. +## Resource: `google_project_service` -### `service_config.service` is changed from `Argument` to `Attribute` +### `disable_on_destroy` now defaults to `false` -Remove `service_config.service` from your configuration after upgrade. +The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. -## Resource: `google_cloud_run_v2_worker_pool` +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. -### `template.containers.depends_on` is removed as it is not supported. +## Resource: `google_redis_cluster` -Remove `template.containers.depends_on` from your configuration after upgrade. + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. ## Resource: `google_secret_manager_secret_version` @@ -284,27 +243,67 @@ This standardizes the behavior of write-only fields across the provider and make ## Resource: `google_sql_user` -### `password_wo_version` is now required when `password_wo` is set +### `password_wo` and `password_wo_version` must be set together This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. 
-## Resource: `google_vertex_ai_endpoint` +## Resource: `google_secure_source_manager_instance` + +### `deletion_policy` has had its default value changed to `PREVENT` + +`deletion_policy` has had its default value changed to `PREVENT`. This field prevents +Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing resources will have +`deletion_policy` set to `true` during the next refresh unless otherwise set in configuration. -### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. +## Resource: `google_secure_source_manager_repository` -## Resource: `google_vertex_ai_index` +### `deletion_policy` has had its default value changed to `PREVENT` -### `metadata`, and `metadata.config` are now required. Resource creation would fail without these attributes already, so no change is necessary to existing configurations. +`deletion_policy` has had its default value changed to `PREVENT`. This field prevents +Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing resources will have +`deletion_policy` set to `true` during the next refresh unless otherwise set in configuration. + +## Resource: `google_storage_transfer_job` + +### Several `path` fields have improved validation + +`transfer_spec.gcs_data_sink.path`, `transfer_spec.gcs_data_source.path`, `replication_spec.gcs_data_source.path`, and `replication_spec.gcs_data_sink.path` are now required to not start with a '/' character. + +## Resource: `google_storage_bucket` + +### `retention_period` changed to `string` data type + +`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. 
+ +Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. + +To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. + +## Resource: `google_storage_notification` + +### `google_storage_notification` Migrated to the Plugin Framework + +This resource has been migrated from SDKv2 to the more modern [plugin framework resource implementation](https://developer.hashicorp.com/terraform/plugin/framework). One associated breaking change is expected with this migration; please review the details below. + +### `topic` Field Format Change + +The `topic` field for `google_storage_notification` must now be provided in the format `projects/{{project}}/topics/{{topic}}`. + +The previous SDKv2 implementation accepted both `projects/{{project}}/topics/{{topic}}` and the fully qualified Google API format `//pubsub.googleapis.com/projects/{{project}}/topics/{{topic}}` in configuration. However, it consistently stored the latter (fully qualified) format in the Terraform state. + +With this migration, only the `projects/{{project}}/topics/{{topic}}` format is allowed in configuration, aligning with the `id` format of the `google_pubsub_topic` resource. + +A state upgrader will automatically migrate the `topic` field's format in your Terraform state when you upgrade to this provider version. However, you **must ensure your Terraform configuration files are updated** to use the `projects/{{project}}/topics/{{topic}}` format to avoid validation errors. ## Resource: `google_tpu_node` is now removed `google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. 
-## Resource: `google_project_service` +## Resource: `google_vertex_ai_endpoint` -### `disable_on_destroy` now defaults to `false` +### `enable_secure_private_service_connect` is now removed from the GA provider -The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. +`enable_secure_private_service_connect` has been removed from the GA provider it is not available in the GA version of the API. The field is still available when using the beta provider. <<<<<<< HEAD Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. @@ -314,11 +313,9 @@ Now, destroying the resource will only remove it from Terraform's state and leav >>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) ======= -## Resource: `google_memorystore_instance` - - `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. +### `metadata`, and `metadata.config` are now required. -## Resource: `google_redis_cluster` +`metadata`, and `metadata.config` are now required. These fields were already required for resource functionality, so no change is necessary to existing configurations. `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. 
>>>>>>> eed48c10c (Breaking Change: Remove allow_fewer_zones_deployment from Memorystore and Redis Cluster (#14889)) From 7635ba77160d47373887737fdb6d05140666f3d2 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 26 Aug 2025 23:53:25 +0530 Subject: [PATCH 832/884] note: removed deprecated status for field detect_md5hash (#14641) --- .../services/storage/resource_storage_bucket_object.go | 3 +-- .../website/docs/r/storage_bucket_object.html.markdown | 2 ++ 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go index e65c8fc9b2bd..015fba07671a 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go @@ -151,8 +151,7 @@ func ResourceStorageBucketObject() *schema.Resource { // Detect changes to local file or changes made outside of Terraform to the file stored on the server. "detect_md5hash": { - Type: schema.TypeString, - Deprecated: "`detect_md5hash` is deprecated and will be removed in future release. Start using `source_md5hash` instead", + Type: schema.TypeString, // This field is not Computed because it needs to trigger a diff. Optional: true, // Makes the diff message nicer: diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 86e7b39b008f..961983611298 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -81,6 +81,8 @@ One of the following is required: * `detect_md5hash` - (Optional) Detect changes to local file or changes made outside of Terraform to the file stored on the server. 
MD5 hash of the data, encoded using [base64](https://datatracker.ietf.org/doc/html/rfc4648#section-4). This field is not present for [composite objects](https://cloud.google.com/storage/docs/composite-objects). For more information about using the MD5 hash, see [Hashes and ETags: Best Practices](https://cloud.google.com/storage/docs/hashes-etags#json-api). + ~> **Warning:** For dynamically populated files or objects, `detect_md5hash` cannot track or detect changes and will not trigger updates to the objects in the bucket. Please use `source_md5hash` instead. + * `storage_class` - (Optional) The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object. Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class. From a8ef7b0ab665c3d2f5a82d6ec3e066af7c74b6b0 Mon Sep 17 00:00:00 2001 From: chenir0219 Date: Tue, 26 Aug 2025 18:30:28 +0000 Subject: [PATCH 833/884] Add provisioned_throughput to the public documentation for instance template (#14981) --- .../website/docs/r/compute_instance_template.html.markdown | 7 +++---- .../docs/r/compute_region_instance_template.html.markdown | 7 +++---- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index f8862f1c3379..f72b252840f8 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -444,10 +444,9 @@ The following arguments are supported: * `disk_name` - (Optional) Name of the disk. When not provided, this defaults to the name of the instance. 
-* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This - sets the number of I/O operations per second that the disk can handle. - Values must be between 10,000 and 120,000. For more details, see the - [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). +* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. For more details, see the [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk) or the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks) depending on the selected disk_type. + +* `provisioned_throughput` - (Optional) Indicates how much throughput to provision for the disk, in MB/s. This sets the amount of data that can be read or written from the disk per second. Values must greater than or equal to 1. For more details, see the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks). * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index 1e94cd62e829..afd836162e1a 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -409,10 +409,9 @@ The following arguments are supported: * `disk_name` - (Optional) Name of the disk. When not provided, this defaults to the name of the instance. -* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. 
This - sets the number of I/O operations per second that the disk can handle. - Values must be between 10,000 and 120,000. For more details, see the - [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). +* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. For more details, see the [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk) or the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks) depending on the selected disk_type. + +* `provisioned_throughput` - (Optional) Indicates how much throughput to provision for the disk, in MB/s. This sets the amount of data that can be read or written from the disk per second. Values must greater than or equal to 1. For more details, see the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks). * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. From a98095de0e9972c709e4c402d209c7a8f7a3d398 Mon Sep 17 00:00:00 2001 From: Rafael Tello-Cabrales <4848374+rafaeltello@users.noreply.github.com> Date: Tue, 26 Aug 2025 11:41:16 -0700 Subject: [PATCH 834/884] FEAT: Add support for Multi-Region Services in cloudrunv2 (#14592) --- mmv1/products/cloudrunv2/Service.yaml | 16 +++ ...resource_cloud_run_v2_service_test.go.tmpl | 97 +++++++++++++++++++ 2 files changed, 113 insertions(+) diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 1806b23b986b..9482b22054af 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -1221,6 +1221,22 @@ properties: type: String description: |- Service account to be used for building the container. 
The format of this field is `projects/{projectId}/serviceAccounts/{serviceAccountEmail}`. + - name: 'multiRegionSettings' + type: NestedObject + description: |- + Settings for creating a Multi-Region Service. Make sure to use region = 'global' when using them. For more information, visit https://cloud.google.com/run/docs/multiple-regions#deploy + properties: + - name: 'regions' + type: Array + item_type: + type: String + description: |- + The list of regions to deploy the multi-region Service. + - name: 'multiRegionId' + type: String + description: |- + System-generated unique id for the multi-region Service. + output: true - name: 'reconciling' type: Boolean description: | diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index 1336a242d070..fd20578a8d20 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -1345,6 +1345,103 @@ resource "google_cloud_run_v2_service" "default" { `, context) } +func TestAccCloudRunV2Service_cloudrunv2MultiRegionService(t *testing.T) { + t.Parallel() + context := map[string]interface{} { + "random_suffix" : acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase { + PreCheck: func() { acctest.AccTestPreCheck(t)}, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2ServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegion(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + { + Config: 
testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegionUpdate(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegion(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-service%{random_suffix}" + description = "Multi-Region Service" + location = "global" + deletion_protection = false + launch_stage = "GA" + annotations = { + generated-by = "magic-modules" + } + multi_region_settings { + regions = [ + "us-central1", + "us-east1", + "us-west1", + ] + } + ingress = "INGRESS_TRAFFIC_ALL" + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} +`, context) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegionUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-service%{random_suffix}" + description = "Multi-Region Service" + location = "global" + deletion_protection = false + launch_stage = "GA" + annotations = { + generated-by = "magic-modules" + } + multi_region_settings { + regions = [ + "us-central1", + "us-east1", + ] + } + ingress = "INGRESS_TRAFFIC_ALL" + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} +`, context) +} + func testAccCloudRunV2Service_cloudrunv2ServiceWithGpu(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_cloud_run_v2_service" "default" { From 
ffb3799ab2ee054f60c9eb9932b9e816322b04b9 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Tue, 26 Aug 2025 13:45:41 -0500 Subject: [PATCH 835/884] Remove 7.0.0 TC testing (#14991) --- .../FEATURE-BRANCH-major-release-7.0.0.kt | 105 ------------------ .../projects/google_beta_subproject.kt | 2 +- .../projects/google_ga_subproject.kt | 2 +- .../components/projects/root_project.kt | 4 - 4 files changed, 2 insertions(+), 111 deletions(-) delete mode 100644 mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt deleted file mode 100644 index 5c463e28febc..000000000000 --- a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) HashiCorp, Inc. 
- * SPDX-License-Identifier: MPL-2.0 - */ - -// This file is controlled by MMv1, any changes made here will be overwritten - -package projects.feature_branches - -import ProviderNameBeta -import ProviderNameGa -import builds.* -import jetbrains.buildServer.configs.kotlin.Project -import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot -import projects.reused.nightlyTests -import replaceCharsId - -const val branchName = "FEATURE-BRANCH-major-release-7.0.0" - -// VCS Roots specifically for pulling code from the feature branches in the downstream repos - -object HashicorpVCSRootGa_featureBranchMajorRelease700: GitVcsRoot({ - name = "VCS root for the hashicorp/terraform-provider-${ProviderNameGa} repo @ refs/heads/${branchName}" - url = "https://github.com/hashicorp/terraform-provider-${ProviderNameGa}" - branch = "refs/heads/${branchName}" - branchSpec = """ - +:(refs/heads/*) - -:refs/pulls/* - """.trimIndent() -}) - -object HashicorpVCSRootBeta_featureBranchMajorRelease700: GitVcsRoot({ - name = "VCS root for the hashicorp/terraform-provider-${ProviderNameBeta} repo @ refs/heads/${branchName}" - url = "https://github.com/hashicorp/terraform-provider-${ProviderNameBeta}" - branch = "refs/heads/${branchName}" - branchSpec = """ - +:(refs/heads/*) - -:refs/pulls/* - """.trimIndent() -}) - -fun featureBranchMajorRelease700_Project(allConfig: AllContextParameters): Project { - - val projectId = replaceCharsId(branchName) - val gaProjectId = replaceCharsId(projectId + "_GA") - val betaProjectId= replaceCharsId(projectId + "_BETA") - - // Get config for using the GA and Beta identities - val gaConfig = getGaAcceptanceTestConfig(allConfig) - val betaConfig = getBetaAcceptanceTestConfig(allConfig) - - return Project{ - id(projectId) - name = "7.0.0 Major Release Testing" - description = "Subproject for testing feature branch $branchName" - - // Register feature branch-specific VCS roots in the project - vcsRoot(HashicorpVCSRootGa_featureBranchMajorRelease700) - 
vcsRoot(HashicorpVCSRootBeta_featureBranchMajorRelease700) - - // Nested Nightly Test project that uses hashicorp/terraform-provider-google - subProject( - Project{ - id(gaProjectId) - name = "Google" - subProject( - nightlyTests( - gaProjectId, - ProviderNameGa, - HashicorpVCSRootGa_featureBranchMajorRelease700, - gaConfig, - NightlyTriggerConfiguration( - branch = "refs/heads/${branchName}", // Make triggered builds use the feature branch - daysOfWeek = "4" // Wednesday for GA, TeamCity numbers days Sun=1...Sat=7 - ), - ) - ) - } - ) - - // Nested Nightly Test project that uses hashicorp/terraform-provider-google-beta - subProject( - Project { - id(betaProjectId) - name = "Google Beta" - subProject( - nightlyTests( - betaProjectId, - ProviderNameBeta, - HashicorpVCSRootBeta_featureBranchMajorRelease700, - betaConfig, - NightlyTriggerConfiguration( - branch = "refs/heads/${branchName}", // Make triggered builds use the feature branch - daysOfWeek="4" // Wednesday for Beta, TeamCity numbers days Sun=1...Sat=7 - ), - ) - ) - } - ) - - params { - readOnlySettings() - } - } -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index f3a04d9df13f..75eabec8a70c 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -33,7 +33,7 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { description = "Subproject containing builds for testing the Beta version of the Google provider" // Nightly Test project that uses hashicorp/terraform-provider-google-beta - subProject(nightlyTests(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek="1-3,5-7"))) // All nights except Wednesday (4) for Beta; feature branch testing happens on 
Wednesdays and TeamCity numbers days Sun=1...Sat=7 + subProject(nightlyTests(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration())) // MM Upstream project that uses modular-magician/terraform-provider-google-beta subProject(mmUpstream(betaId, ProviderNameBeta, ModularMagicianVCSRootBeta, HashiCorpVCSRootBeta, vcrConfig, NightlyTriggerConfiguration())) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt index fbf3685fbd94..cd45d7d754c8 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt @@ -31,7 +31,7 @@ fun googleSubProjectGa(allConfig: AllContextParameters): Project { description = "Subproject containing builds for testing the GA version of the Google provider" // Nightly Test project that uses hashicorp/terraform-provider-google - subProject(nightlyTests(gaId, ProviderNameGa, HashiCorpVCSRootGa, gaConfig, NightlyTriggerConfiguration(daysOfWeek="1-3,5-7"))) // All nights except Wednesday (4) for GA; feature branch testing happens on Wednesday and TeamCity numbers days Sun=1...Sat=7 + subProject(nightlyTests(gaId, ProviderNameGa, HashiCorpVCSRootGa, gaConfig, NightlyTriggerConfiguration())) // MM Upstream project that uses modular-magician/terraform-provider-google subProject(mmUpstream(gaId, ProviderNameGa, ModularMagicianVCSRootGa, HashiCorpVCSRootGa, vcrConfig, NightlyTriggerConfiguration())) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt index c810b73b9605..3c96dea4f099 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt @@ -19,7 +19,6 @@ import 
generated.ServicesListGa import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.sharedResource import projects.feature_branches.featureBranchResourceIdentitySubProject -import projects.feature_branches.featureBranchMajorRelease700_Project // googleCloudRootProject returns a root project that contains a subprojects for the GA and Beta version of the // Google provider. There are also resources to help manage the test projects used for acceptance tests. @@ -67,9 +66,6 @@ fun googleCloudRootProject(allConfig: AllContextParameters): Project { // Feature branch-testing projects - these will be added and removed as needed - // Feature branch testing - subProject(featureBranchMajorRelease700_Project(allConfig)) // FEATURE-BRANCH-major-release-7.0.0 - params { readOnlySettings() } From c211d330cab48b219155474fb07f69779651696f Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Tue, 26 Aug 2025 15:49:55 -0300 Subject: [PATCH 836/884] SWP: Add support for value "EXPLICIT_ROUTING_MODE" on field "routing_mode" on resource "google_network_services_gateway" (#14767) --- mmv1/products/networkservices/Gateway.yaml | 2 ++ .../resource_network_services_gateway_test.go | 1 + .../terraform/tpgresource/common_diff_suppress.go | 13 +++++++++++++ 3 files changed, 16 insertions(+) diff --git a/mmv1/products/networkservices/Gateway.yaml b/mmv1/products/networkservices/Gateway.yaml index 39d4048e3c73..6404cdf9f2b4 100644 --- a/mmv1/products/networkservices/Gateway.yaml +++ b/mmv1/products/networkservices/Gateway.yaml @@ -227,7 +227,9 @@ properties: - 'DEBUG_HEADERS' - name: 'routingMode' type: Enum + diff_suppress_func: 'tpgresource.SuppressRoutingModeDefault' description: | The routing mode of the Gateway. This field is configurable only for gateways of type SECURE_WEB_GATEWAY. This field is required for gateways of type SECURE_WEB_GATEWAY. 
enum_values: - 'NEXT_HOP_ROUTING_MODE' + - 'EXPLICIT_ROUTING_MODE' diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go index c2e15bffad71..08275ca4125e 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go @@ -943,6 +943,7 @@ resource "google_network_services_gateway" "foobar" { location = "us-central1" addresses = ["10.128.0.99"] type = "SECURE_WEB_GATEWAY" + routing_mode = "EXPLICIT_ROUTING_MODE" ports = [443] description = "my description" gateway_security_policy = google_network_security_gateway_security_policy.default.id diff --git a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go index cbc7e442a25e..a0ba73cd1a4e 100644 --- a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go +++ b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go @@ -101,6 +101,19 @@ func ProjectNumberDiffSuppress(_, old, new string, _ *schema.ResourceData) bool return a2 == b2 } +// Suppresses diffs where `routing_mode` is unset (empty string) vs. explicitly set +// to "EXPLICIT_ROUTING_MODE". Since null/empty is treated as the default +// EXPLICIT_ROUTING_MODE, both values collapse into the same state. This ensures +// Terraform does not show unnecessary differences unless the value is explicitly +// changed to "NEXT_HOP_ROUTING_MODE". 
+func SuppressRoutingModeDefault(_, old, new string, _ *schema.ResourceData) bool { + if old == new { + return true + } + return (old == "" && new == "EXPLICIT_ROUTING_MODE") || + (old == "EXPLICIT_ROUTING_MODE" && new == "") +} + // Suppress diffs when the value read from api // has the project ID instead of the project number func ProjectIDDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { From 39b79023e287a97408e115518c31e2243d9c8564 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Wed, 27 Aug 2025 00:22:18 +0530 Subject: [PATCH 837/884] feat: (storage) added field credentials_secret in storage transfer job (#14819) --- .../resource_storage_transfer_job.go | 20 +++++++++++++++---- .../resource_storage_transfer_job_meta.yaml | 1 + .../docs/r/storage_transfer_job.html.markdown | 2 ++ 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 38fbf29048af..2eff1acfb8d6 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -99,6 +99,7 @@ var ( awsS3AuthKeys = []string{ "transfer_spec.0.aws_s3_data_source.0.aws_access_key", "transfer_spec.0.aws_s3_data_source.0.role_arn", + "transfer_spec.0.aws_s3_data_source.0.credentials_secret", } azureOptionCredentials = []string{ "transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials", @@ -766,6 +767,12 @@ func awsS3DataSchema() *schema.Resource { Optional: true, Description: `The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: https://{id}.cloudfront.net or any valid custom domain. 
Must begin with https://.`, }, + "credentials_secret": { + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: awsS3AuthKeys, + Description: `The Resource name of a secret in Secret Manager. AWS credentials must be stored in Secret Manager in JSON format. If credentials_secret is specified, do not specify role_arn or aws_access_key. Format: projects/{projectNumber}/secrets/{secret_name}.`, + }, }, } } @@ -1378,10 +1385,11 @@ func expandAwsS3Data(awsS3Datas []interface{}) *storagetransfer.AwsS3Data { awsS3Data := awsS3Datas[0].(map[string]interface{}) result := &storagetransfer.AwsS3Data{ - BucketName: awsS3Data["bucket_name"].(string), - AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), - RoleArn: awsS3Data["role_arn"].(string), - Path: awsS3Data["path"].(string), + BucketName: awsS3Data["bucket_name"].(string), + AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), + RoleArn: awsS3Data["role_arn"].(string), + CredentialsSecret: awsS3Data["credentials_secret"].(string), + Path: awsS3Data["path"].(string), } if v, ok := awsS3Data["managed_private_network"]; ok { @@ -1413,6 +1421,10 @@ func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceDa data["cloudfront_domain"] = awsS3Data.CloudfrontDomain } + if awsS3Data.CredentialsSecret != "" { + data["credentials_secret"] = awsS3Data.CredentialsSecret + } + return []map[string]interface{}{data} } diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml index 1992d537368f..8d14a6504710 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml @@ -49,6 +49,7 @@ fields: - field: 'transfer_spec.aws_s3_data_source.managed_private_network' - field: 
'transfer_spec.aws_s3_data_source.path' - field: 'transfer_spec.aws_s3_data_source.role_arn' + - field: 'transfer_spec.aws_s3_data_source.credentials_secret' - field: 'transfer_spec.azure_blob_storage_data_source.azure_credentials.sas_token' - field: 'transfer_spec.azure_blob_storage_data_source.container' - field: 'transfer_spec.azure_blob_storage_data_source.credentials_secret' diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index b83ea6ebe37a..2c27448bd9c7 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -264,6 +264,8 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam * `cloudfront_domain` - (Optional) The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: `https://{id}.cloudfront.net` or any valid custom domain. Must begin with `https://`. +* `credentials_secret` - (Optional) The Resource name of a secret in Secret Manager. AWS credentials must be stored in Secret Manager in JSON format. If credentials_secret is specified, do not specify role_arn or aws_access_key. Format: `projects/{projectNumber}/secrets/{secret_name}`. + The `aws_access_key` block supports: * `access_key_id` - (Required) AWS Key ID. 
From 2292b54ad6c0f1d1219c403ac742ce6dac03fcab Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Tue, 26 Aug 2025 12:23:00 -0700 Subject: [PATCH 838/884] `teamcity`: use ModularMagician repo for testing purposes in `weeklyDiffTest` project (#14989) --- .../.teamcity/components/projects/google_beta_subproject.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index 75eabec8a70c..c67a50303ee5 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -43,7 +43,7 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { subProject(vcrRecording(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, ModularMagicianVCSRootBeta, vcrConfig)) // Beta Diff Test project that uses hashicorp/terraform-provider-google-beta-diff-test - subProject(weeklyDiffTests(betaId + "_DIFF_TEST", ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek = "SAT", nightlyTestsEnabled = false))) + subProject(weeklyDiffTests(betaId + "_DIFF_TEST", ProviderNameBeta, ModularMagicianVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek = "SAT", nightlyTestsEnabled = false))) params { readOnlySettings() From 2c36eb447c715a8a18ac930d5182571085c9e768 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 26 Aug 2025 12:23:35 -0700 Subject: [PATCH 839/884] altered datasource url usage (#14880) --- mmv1/api/resource.go | 4 +- mmv1/products/cloudrun/Service.yaml | 2 + mmv1/templates/terraform/datasource.go.tmpl | 7 ++-- .../provider/provider_mmv1_resources.go.tmpl | 2 +- .../cloudrun/data_source_cloud_run_service.go | 41 ------------------- 5 files changed, 9 insertions(+), 47 
deletions(-) delete mode 100644 mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 386487ac696f..a8b03959f0c9 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -2033,7 +2033,7 @@ func (r Resource) ShouldDatasourceSetAnnotations() bool { // that should be marked as "Required". func (r Resource) DatasourceRequiredFields() []string { requiredFields := []string{} - uriParts := strings.Split(r.SelfLink, "/") + uriParts := strings.Split(r.IdFormat, "/") for _, part := range uriParts { if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { @@ -2050,7 +2050,7 @@ func (r Resource) DatasourceRequiredFields() []string { // that should be marked as "Optional". func (r Resource) DatasourceOptionalFields() []string { optionalFields := []string{} - uriParts := strings.Split(r.SelfLink, "/") + uriParts := strings.Split(r.IdFormat, "/") for _, part := range uriParts { if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index 6f2ef4e9f2b3..294276192445 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -29,6 +29,8 @@ base_url: 'apis/serving.knative.dev/v1/namespaces/{{project}}/services' cai_base_url: 'projects/{{project}}/locations/{{location}}/services' import_format: - 'locations/{{location}}/namespaces/{{project}}/services/{{name}}' +datasource: + generate: true timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/templates/terraform/datasource.go.tmpl b/mmv1/templates/terraform/datasource.go.tmpl index f7bc979c589c..edd29d3c1dd5 100644 --- a/mmv1/templates/terraform/datasource.go.tmpl +++ b/mmv1/templates/terraform/datasource.go.tmpl @@ -75,10 +75,11 @@ func DataSource{{ .ResourceName -}}() *schema.Resource { func dataSource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{}) error { config 
:= meta.(*transport_tpg.Config) - id, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{$.SelfLinkUri}}{{$.ReadQueryParams}}") - if err != nil { + id, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{ $.IdFormat -}}") + if err != nil { return err - } + } + d.SetId(id) err = resource{{ $.ResourceName -}}Read(d, meta) diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index b0e968f31749..06f8767048a9 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -73,7 +73,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_cloud_quotas_quota_info": cloudquotas.DataSourceGoogleCloudQuotasQuotaInfo(), "google_cloud_quotas_quota_infos": cloudquotas.DataSourceGoogleCloudQuotasQuotaInfos(), "google_cloud_run_locations": cloudrun.DataSourceGoogleCloudRunLocations(), - "google_cloud_run_service": cloudrun.DataSourceGoogleCloudRunService(), + "google_cloud_run_service": cloudrun.DataSourceCloudRunService(), "google_cloud_run_v2_job": cloudrunv2.DataSourceGoogleCloudRunV2Job(), "google_cloud_run_v2_service": cloudrunv2.DataSourceGoogleCloudRunV2Service(), "google_cloud_run_v2_worker_pool": cloudrunv2.DataSourceGoogleCloudRunV2WorkerPool(), diff --git a/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go b/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go deleted file mode 100644 index c674b097723d..000000000000 --- a/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go +++ /dev/null @@ -1,41 +0,0 @@ -package cloudrun - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleCloudRunService() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceCloudRunService().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "location") - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: dataSourceGoogleCloudRunServiceRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleCloudRunServiceRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "locations/{{location}}/namespaces/{{project}}/services/{{name}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - err = resourceCloudRunServiceRead(d, meta) - if err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} From 72d36e186f4b40a1baff2af9c2fab7a1fc78ac97 Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Tue, 26 Aug 2025 16:43:47 -0300 Subject: [PATCH 840/884] compute: canonicalize backend.group self-links to avoid spurious diffs between v1/beta and variants (#14939) --- .../compute/RegionBackendService.yaml | 2 +- ...ompute_region_backend_service_test.go.tmpl | 280 ++++++++++++++++++ .../tpgresource/self_link_helpers.go | 34 +++ 3 files changed, 315 insertions(+), 1 deletion(-) diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index ed56d1af4162..312a970d5c9d 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -251,7 +251,7 @@ properties: Group resource using the fully-qualified URL, rather than a partial URL. 
required: true - diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' + diff_suppress_func: 'tpgresource.CompareSelfLinkCanonicalPaths' custom_flatten: 'templates/terraform/custom_flatten/guard_self_link.go.tmpl' - name: 'maxConnections' type: Integer diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index a8a46c54f711..c4e16475dc75 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -433,6 +433,286 @@ func TestAccComputeRegionBackendService_withLogConfig(t *testing.T) { }) } +func TestAccComputeRegionBackendService_zonalILB(t *testing.T) { + t.Parallel() + + serviceName := fmt.Sprintf("tf-test-ilb-bs-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-ilb-hc-%s", acctest.RandString(t, 10)) + checkName2 := fmt.Sprintf("tf-test-ilb-hc2-%s", acctest.RandString(t, 10)) + negName := fmt.Sprintf("tf-test-ilb-neg-%s", acctest.RandString(t, 10)) + negName2 := fmt.Sprintf("tf-test-ilb-neg2-%s", acctest.RandString(t, 10)) + instanceName := fmt.Sprintf("tf-test-ilb-vm-%s", acctest.RandString(t, 10)) + instanceName2 := fmt.Sprintf("tf-test-ilb-vm2-%s", acctest.RandString(t, 10)) + + // subnetwork with random suffix + subnetName := fmt.Sprintf("tf-test-subnet-%s", acctest.RandString(t, 8)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + // STEP 1: base (self-link v1) + { + Config: testAccComputeRegionBackendService_zonalILB_withGroup( + testAccComputeRegionBackendService_common(checkName, negName, instanceName, 
subnetName), + serviceName, + "google_compute_network_endpoint_group.neg.id", + ), + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + }, + + // STEP 2: same NEG with /compute/beta/ (apply OK) + { + Config: fmt.Sprintf(` +%s + +locals { + neg_beta = replace(google_compute_network_endpoint_group.neg.id, "/compute/v1/", "/compute/beta/") +} + +%s +`, testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), + testAccComputeRegionBackendService_zonalILB_withGroup("", serviceName, "local.neg_beta"), + ), + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + }, + + // STEP 3: Invalid variation for API (UPPERCASE + "/") — tested only in PLAN + { + PlanOnly: true, // does not call the API; only exercises diff/canonicalization + Config: fmt.Sprintf(` +%s + +locals { + neg_slash_upper = "${google_compute_network_endpoint_group.neg.id}" +} + +%s +`, testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), + testAccComputeRegionBackendService_zonalILB_withGroup("", serviceName, "local.neg_slash_upper"), + ), + }, + + // STEP 4: Modified scenario (changes NEG/HC/VM) — continues validating real updates + { + Config: testAccComputeRegionBackendService_zonalILBModified(serviceName, checkName, negName, instanceName, checkName2, negName2, instanceName2, subnetName), + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "tf-test-net" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s" + ip_cidr_range = "10.10.0.0/16" + region = "us-central1" + network = 
google_compute_network.default.id +} + +resource "google_compute_region_health_check" "hc1" { + name = "%s" + region = "us-central1" + http_health_check { + port = 8080 + request_path = "/status" + } +} + +resource "google_compute_instance" "default" { + name = "%s" + zone = "us-central1-a" + machine_type = "e2-micro" + + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + + network_interface { + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + access_config {} + } +} + +resource "google_compute_network_endpoint_group" "neg" { + name = "%s" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" + network_endpoint_type = "GCE_VM_IP_PORT" +} + +resource "google_compute_network_endpoint" "endpoint" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + zone = "us-central1-a" + instance = google_compute_instance.default.name + ip_address = google_compute_instance.default.network_interface[0].network_ip + port = 8080 +} +`, subnetworkName, checkName, instanceName, negName) +} + +func testAccComputeRegionBackendService_zonalILB_withGroup(commonHCL string, serviceName string, groupExpr string) string { + header := commonHCL + return fmt.Sprintf(` +%s +resource "google_compute_region_backend_service" "default" { + name = "%s" + region = "us-central1" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_region_health_check.hc1.id] + + backend { + group = %s + balancing_mode = "RATE" + max_rate_per_endpoint = 100 + capacity_scaler = 1.0 + } + + session_affinity = "CLIENT_IP" + locality_lb_policy = "ROUND_ROBIN" +} +`, header, serviceName, groupExpr) +} + +func testAccComputeRegionBackendService_zonalILBModified(serviceName, checkName, negName, instanceName, checkName2, negName2, instanceName2, subnetworkName string) string { + return fmt.Sprintf(` +resource 
"google_compute_network" "default" { + name = "tf-test-net" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s" + ip_cidr_range = "10.10.0.0/16" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_region_health_check" "hc1" { + name = "%s" + region = "us-central1" + http_health_check { + port = 8080 + request_path = "/status" + } +} + +resource "google_compute_instance" "default" { + name = "%s" + zone = "us-central1-a" + machine_type = "e2-micro" + + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + + network_interface { + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + access_config {} + } +} + +resource "google_compute_network_endpoint_group" "neg" { + name = "%s" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" + network_endpoint_type = "GCE_VM_IP_PORT" +} + +resource "google_compute_network_endpoint" "endpoint" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + zone = "us-central1-a" + instance = google_compute_instance.default.name + ip_address = google_compute_instance.default.network_interface[0].network_ip + port = 8080 +} + +resource "google_compute_instance" "instance2" { + name = "%s" + zone = "us-central1-a" + machine_type = "e2-micro" + + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + + network_interface { + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + access_config {} + } +} + +resource "google_compute_region_health_check" "hc2" { + name = "%s" + region = "us-central1" + http_health_check { + port = 80 + } +} + +resource "google_compute_network_endpoint_group" "neg2" { + name = "%s" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = 
"us-central1-a" + network_endpoint_type = "GCE_VM_IP_PORT" +} + +resource "google_compute_network_endpoint" "endpoint2" { + network_endpoint_group = google_compute_network_endpoint_group.neg2.name + zone = "us-central1-a" + instance = google_compute_instance.instance2.name + ip_address = google_compute_instance.instance2.network_interface[0].network_ip + port = 8080 +} + +resource "google_compute_region_backend_service" "default" { + name = "%s" + region = "us-central1" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_region_health_check.hc2.id] + + backend { + group = google_compute_network_endpoint_group.neg2.id + balancing_mode = "RATE" + max_rate_per_endpoint = 200 + capacity_scaler = 0.5 + } +} +`, subnetworkName, checkName, instanceName, negName, instanceName2, checkName2, negName2, serviceName) +} + func TestAccComputeRegionBackendService_withDynamicBackendCount(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 93982ff5c0d1..61a8243cba8d 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -73,6 +73,40 @@ func CompareSelfLinkOrResourceName(_, old, new string, _ *schema.ResourceData) b return CompareSelfLinkRelativePaths("", old, new, nil) } +// canonicalizeSelfLink normalizes Compute API self-links by removing the version prefix (v1/beta), +// ensuring a leading "/", collapsing duplicate slashes, trimming any trailing "/", +// and lowercasing the result so logically identical links compare equal. 
+func CompareSelfLinkCanonicalPaths(_, old, new string, _ *schema.ResourceData) bool { + return canonicalizeSelfLink(old) == canonicalizeSelfLink(new) +} + +var ( + rePrefix = regexp.MustCompile(`(?i)^https?://[a-z0-9.-]*/compute/(v1|beta)/`) + reDuplicateSlashes = regexp.MustCompile(`/+`) +) + +func canonicalizeSelfLink(link string) string { + if link == "" { + return "" + } + + // Remove "https://…/compute/v1/" or "https://…/compute/beta/" + path := rePrefix.ReplaceAllString(link, "/") + + // Ensure leading "/" + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + + // Collapse "//" + path = reDuplicateSlashes.ReplaceAllString(path, "/") + + // Remove trailing "/" + path = strings.TrimSuffix(path, "/") + + return strings.ToLower(path) +} + // Hash the relative path of a self link. func SelfLinkRelativePathHash(selfLink interface{}) int { path, _ := GetRelativePath(selfLink.(string)) From ecc2f1fe70b4beb9c951f4963e0ad44ca14f2313 Mon Sep 17 00:00:00 2001 From: cardinalli_andre_cint Date: Tue, 26 Aug 2025 17:09:53 -0300 Subject: [PATCH 841/884] Add Terraform support for GCS in Global Internal Load Balancers (#14679) Co-authored-by: samir-cit Co-authored-by: Nick Elliot --- mmv1/products/compute/BackendBucket.yaml | 6 +++++- .../terraform/examples/backend_bucket_global_ilb.tf.tmpl | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index 21ad6c671212..b8c3b6e7aeb8 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -115,6 +115,7 @@ examples: backend_bucket_name: 'global-ilb-backend-bucket' bucket_name: 'global-ilb-bucket' exclude_docs: true + skip_vcr: true parameters: properties: - name: 'bucketName' @@ -273,7 +274,9 @@ properties: client when the resource is created. - name: 'enableCdn' type: Boolean - description: 'If true, enable Cloud CDN for this BackendBucket.' 
+ description: | + If true, enable Cloud CDN for this BackendBucket. + Note: This cannot be set to true when loadBalancingScheme is set to INTERNAL_MANAGED. include_empty_value_in_cai: true - name: 'name' type: String @@ -295,6 +298,7 @@ properties: description: | The value can only be INTERNAL_MANAGED for cross-region internal layer 7 load balancer. If loadBalancingScheme is not specified, the backend bucket can be used by classic global external load balancers, or global application external load balancers, or both. + Important: CDN cannot be enabled (enableCdn cannot be set to true) when loadBalancingScheme is set to INTERNAL_MANAGED. enum_values: - 'INTERNAL_MANAGED' send_empty_value: true diff --git a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl index 9497c721747b..01d3d96fc795 100644 --- a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl @@ -1,3 +1,7 @@ +# Note: This example must be run in a project without Cloud Armor tier configured, +# as it may cause conflicts with the INTERNAL_MANAGED load balancing scheme. +# This test is skipped in VCR mode due to non-determinism in project creation and resource management. 
+ resource "google_project" "unarmored" { project_id = "tf-test%{random_suffix}" name = "tf-test%{random_suffix}" @@ -14,7 +18,7 @@ resource "google_project_service" "project" { resource "google_compute_backend_bucket" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "backend_bucket_name"}}" - project = google_project.unarmored.name + project = google_project.unarmored.number bucket_name = google_storage_bucket.{{$.PrimaryResourceId}}.name load_balancing_scheme = "INTERNAL_MANAGED" From 588a39d19e5b6853beaf9394196a77086853c965 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 26 Aug 2025 13:19:23 -0700 Subject: [PATCH 842/884] add vacation for zhenhua (#14992) --- .ci/magician/github/membership_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 488dd5c70d74..1dedcf7dcc19 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -145,8 +145,8 @@ var ( "zli82016": { vacations: []Vacation{ { - startDate: newDate(2025, 1, 15), - endDate: newDate(2025, 2, 9), + startDate: newDate(2025, 8, 27), + endDate: newDate(2025, 9, 2), }, }, }, From 543be5c0b01ffc49d5649e83dba62b9a33d0aa30 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 26 Aug 2025 13:21:24 -0700 Subject: [PATCH 843/884] tgc-revival: add cloudasset feed resources (#14934) --- mmv1/api/resource.go | 14 ++++++++++++-- mmv1/products/cloudasset/FolderFeed.yaml | 5 +++++ mmv1/products/cloudasset/OrganizationFeed.yaml | 5 +++++ mmv1/products/cloudasset/ProjectFeed.yaml | 4 ++++ .../tgc_next/cai2hcl/resource_converter.go.tmpl | 4 ++-- .../decoders/backup_dr_backup_plan.go.tmpl | 2 +- .../certificatemanager_certificate.go.tmpl | 2 +- .../tgc_next/decoders/cloud_asset_feed.go.tmpl | 4 ++++ .../decoders/compute_backend_service.go.tmpl | 2 +- .../tgc_next/decoders/compute_subnetwork.go.tmpl | 2 +- .../pkg/cai2hcl/converters/convert_resource.go | 12 +++++++++++- 
11 files changed, 47 insertions(+), 9 deletions(-) create mode 100644 mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index a8b03959f0c9..f14b98e20b35 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -385,6 +385,12 @@ type TGCResource struct { // If true, the Terraform custom encoder is not applied during tfplan2cai TGCIgnoreTerraformEncoder bool `yaml:"tgc_ignore_terraform_encoder,omitempty"` + + // [Optional] The parameter that uniquely identifies the resource. + // Generally, it's safe to leave empty, in which case it defaults to `name`. + // Other values are normally useful in cases where an object has a parent + // and is identified by some non-name value, such as an ip+port pair. + CaiIdentity string `yaml:"cai_identity,omitempty"` } func (r *Resource) UnmarshalYAML(unmarshal func(any) error) error { @@ -1893,14 +1899,18 @@ func (r Resource) DefineAssetTypeForResourceInProduct() bool { // For example: //monitoring.googleapis.com/v3/projects/{{project}}/services/{{service_id}} func (r Resource) rawCaiAssetNameTemplate(productBackendName string) string { caiBaseUrl := "" + caiId := "name" + if r.CaiIdentity != "" { + caiId = r.CaiIdentity + } if r.CaiBaseUrl != "" { - caiBaseUrl = fmt.Sprintf("%s/{{name}}", r.CaiBaseUrl) + caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.CaiBaseUrl, caiId) } if caiBaseUrl == "" { caiBaseUrl = r.SelfLink } if caiBaseUrl == "" { - caiBaseUrl = fmt.Sprintf("%s/{{name}}", r.BaseUrl) + caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.BaseUrl, caiId) } return fmt.Sprintf("//%s.googleapis.com/%s", productBackendName, caiBaseUrl) } diff --git a/mmv1/products/cloudasset/FolderFeed.yaml b/mmv1/products/cloudasset/FolderFeed.yaml index f1c41eaba949..36a8de774956 100644 --- a/mmv1/products/cloudasset/FolderFeed.yaml +++ b/mmv1/products/cloudasset/FolderFeed.yaml @@ -38,7 +38,12 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 
'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/cloud_asset_feed.go.tmpl' supports_indirect_user_project_override: true +include_in_tgc_next_DO_NOT_USE: true +cai_base_url: 'folders/{{folder}}/feeds' +cai_identity: 'feed_id' +tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_folder_feed' primary_resource_id: 'folder_feed' diff --git a/mmv1/products/cloudasset/OrganizationFeed.yaml b/mmv1/products/cloudasset/OrganizationFeed.yaml index f43276faa1b0..0c2082471491 100644 --- a/mmv1/products/cloudasset/OrganizationFeed.yaml +++ b/mmv1/products/cloudasset/OrganizationFeed.yaml @@ -38,7 +38,12 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/cloud_asset_feed.go.tmpl' supports_indirect_user_project_override: true +include_in_tgc_next_DO_NOT_USE: true +cai_base_url: 'organizations/{{org_id}}/feeds' +cai_identity: 'feed_id' +tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_organization_feed' primary_resource_id: 'organization_feed' diff --git a/mmv1/products/cloudasset/ProjectFeed.yaml b/mmv1/products/cloudasset/ProjectFeed.yaml index 525bbca61a56..4132e44ff685 100644 --- a/mmv1/products/cloudasset/ProjectFeed.yaml +++ b/mmv1/products/cloudasset/ProjectFeed.yaml @@ -38,6 +38,10 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true +cai_base_url: 'projects/{{project}}/feeds' +cai_identity: 'feed_id' +tgc_ignore_terraform_encoder: true examples: - name: 
'cloud_asset_project_feed' primary_resource_id: 'project_feed' diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index f9efdc3e73fc..15a71bd749e5 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -92,7 +92,7 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass hclData := make(map[string]interface{}) {{ if $.CustomCode.TgcDecoder -}} - res, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res) + res, hclData, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res, hclData) if err != nil { return nil, err } @@ -140,7 +140,7 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass {{- end }} {{- if $.CustomCode.TgcDecoder }} -func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { +func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}, hclData map[string]interface{}) (map[string]interface{}, map[string]interface{}, error) { {{ $.CustomTemplate $.CustomCode.TgcDecoder false -}} } {{- end }} diff --git a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl index 996b7f1a1cbd..2ce5d4f233e0 100644 --- a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl @@ -17,4 +17,4 @@ if rules, ok := res["backupRules"].([]interface{}); ok { } } -return res, nil \ No newline at end of file +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl index f7e84128a130..2596316b67fc 100644 --- 
a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl @@ -6,4 +6,4 @@ if vStr, ok := res["scope"].(string); ok && vStr == "DEFAULT" { // Omit the default value. delete(res, "scope") } -return res, nil +return res, hclData, nil diff --git a/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl b/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl new file mode 100644 index 000000000000..69605bf73cd3 --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl @@ -0,0 +1,4 @@ +// billing_project is the required url_param_only property, but is not in CAI asset name or data +// TODO: handle it in a generic way +hclData["billing_project"] = "null" +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl index de931a039dea..74d3e6ab7ea5 100644 --- a/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl @@ -28,4 +28,4 @@ if v, ok := res["backends"]; ok { } } -return res, nil \ No newline at end of file +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl index 748efa32f2f4..b1d31f1ad3f9 100644 --- a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl @@ -20,4 +20,4 @@ if raw, ok := res["stackType"]; ok { } } -return res, nil \ No newline at end of file +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go index daabb215054f..18fbc46b56a4 100644 --- 
a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go @@ -21,7 +21,7 @@ func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, er } } - // Edge cases + // Handle the tdge case that multiple Terraform resources share the same CAI asset type if asset.Type == "compute.googleapis.com/Autoscaler" { if strings.Contains(asset.Name, "/zones/") { converter = ConverterMap[asset.Type]["ComputeAutoscaler"] @@ -29,5 +29,15 @@ func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, er converter = ConverterMap[asset.Type]["ComputeRegionAutoscaler"] } } + + if asset.Type == "cloudasset.googleapis.com/Feed" { + if strings.Contains(asset.Name, "/organizations/") { + converter = ConverterMap[asset.Type]["CloudAssetOrganizationFeed"] + } else if strings.Contains(asset.Name, "/folders/") { + converter = ConverterMap[asset.Type]["CloudAssetFolderFeed"] + } else { + converter = ConverterMap[asset.Type]["CloudAssetProjectFeed"] + } + } return converter.Convert(asset) } From 9bdb44af1987e050921284b17241f6ef67de9100 Mon Sep 17 00:00:00 2001 From: translucens Date: Wed, 27 Aug 2025 05:26:00 +0900 Subject: [PATCH 844/884] add GKE default disk_type notes (#14930) --- .../terraform/website/docs/r/container_cluster.html.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 4bd90c16e424..623a11241f9b 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -890,7 +890,7 @@ The `master_authorized_networks_config.cidr_blocks` block supports: Prefer configuring `boot_disk`. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 
'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-balanced' This is being migrated to `boot_disk.disk_type`, and must match if specified in both places. Prefer configuring `boot_disk`. + (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', or 'hyperdisk-balanced'). Defaults to `hyperdisk-balanced` if `hyperdisk-balanced` is supported and `pd-balanced` is not supported for the machine type; otherwise defaults to `pd-balanced`. This is being migrated to `boot_disk.disk_type`, and must match if specified in both places. Prefer configuring `boot_disk`. * `enable_confidential_storage` - (Optional) Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. @@ -1085,7 +1085,7 @@ sole_tenant_config { in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. This is being migrated from `node_config.disk_size_gb`, and must match if specified in both places. Prefer using this field. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', 'hyperdisk-balanced'). If unspecified, the default disk type is 'pd-balanced' This is being migrated from `node_config.disk_type`, and must match if specified in both places. Prefer using this field. + (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', or 'hyperdisk-balanced'). Defaults to `hyperdisk-balanced` if `hyperdisk-balanced` is supported and `pd-balanced` is not supported for the machine type; otherwise defaults to `pd-balanced`. This is being migrated from `node_config.disk_type`, and must match if specified in both places. Prefer using this field. * `provisioned_iops` - (Optional) Configure disk IOPs. This is only valid if the `disk_type` is 'hyperdisk-balanced'. See [performance limit documention](https://cloud.google.com/compute/docs/disks/hyperdisk-perf-limits) for more information about valid values. 
From 3964cf5973d7bf29e5636e94b3998995ddfa7919 Mon Sep 17 00:00:00 2001 From: Naheed <89418276+naheedtayab@users.noreply.github.com> Date: Tue, 26 Aug 2025 21:27:20 +0100 Subject: [PATCH 845/884] Fix typo on word 'Certificate' for map description (#14859) --- mmv1/products/certificatemanager/CertificateMapEntry.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/certificatemanager/CertificateMapEntry.yaml b/mmv1/products/certificatemanager/CertificateMapEntry.yaml index a8b23152a365..c7b58e3b37ba 100644 --- a/mmv1/products/certificatemanager/CertificateMapEntry.yaml +++ b/mmv1/products/certificatemanager/CertificateMapEntry.yaml @@ -59,7 +59,7 @@ parameters: - name: 'map' type: ResourceRef description: | - A map entry that is inputted into the cetrificate map + A map entry that is inputted into the certificate map url_param_only: true required: true immutable: true From b5f0c0ba4b6af7b1afde20e3c7db4ab0a09949c2 Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Wed, 27 Aug 2025 02:24:04 +0530 Subject: [PATCH 846/884] added qos support (#14929) --- mmv1/products/netapp/StoragePool.yaml | 15 +++ mmv1/products/netapp/Volume.yaml | 5 + .../resource_netapp_volume_test.go.tmpl | 101 +++++++++++++++++- 3 files changed, 120 insertions(+), 1 deletion(-) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index cc2ae1b0352e..6921fdfd36f5 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -189,6 +189,7 @@ properties: type: String description: | Optional. Custom Performance Total Throughput of the pool (in MiB/s). + default_from_api: true - name: 'totalIops' type: String description: | @@ -207,3 +208,17 @@ properties: Flag indicating that the hot-tier threshold will be auto-increased by 10% of the hot-tier when it hits 100%. Default is true. 
The increment will kick in only if the new size after increment is still less than or equal to storage pool size. min_version: 'beta' + - name: 'qosType' + type: Enum + description: | + QoS (Quality of Service) type of the storage pool. + Possible values are: AUTO, MANUAL. + enum_values: + - 'QOS_TYPE_UNSPECIFIED' + - 'AUTO' + - 'MANUAL' + - name: 'availableThroughputMibps' + type: Double + description: | + Available throughput of the storage pool (in MiB/s). + output: true diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 072e43425f6a..9bd31101c5d6 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -567,3 +567,8 @@ properties: description: | Optional. Labels to be added to the replication as the key value pairs. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. + - name: 'throughputMibps' + type: Double + description: | + Optional. Custom Performance Total Throughput of the pool (in MiB/s). 
+ default_from_api: true diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl index 88ea097e6554..a8d8f2b9b5d9 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl @@ -760,7 +760,6 @@ data "google_compute_network" "default" { `, context) } - {{ if ne $.TargetVersionName `ga` -}} func TestAccNetappVolume_flexAutoTierNetappVolume_update(t *testing.T) { context := map[string]interface{}{ @@ -871,4 +870,104 @@ data "google_compute_network" "default" { } `, context) } + +func TestAccNetappStoragePool_ManualQos(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetappVolumeDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccNetappVolume_ManualQosAuto(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_ManualQosManual(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + }, + }) +} + +func 
testAccNetappVolume_ManualQosAuto(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4" + service_level = "EXTREME" + capacity_gib = "2048" + network = data.google_compute_network.default.id + qos_type = "AUTO" +} + +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.test_pool] + create_duration = "3m" +} + +resource "google_netapp_volume" "test_volume" { + location = "us-east4" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.test_pool.name + protocols = ["NFSV3"] +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} + +func testAccNetappVolume_ManualQosManual(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4" + service_level = "EXTREME" + capacity_gib = "2048" + network = data.google_compute_network.default.id + qos_type = "MANUAL" +} + +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.test_pool] + create_duration = "3m" +} + +resource "google_netapp_volume" "test_volume" { + location = "us-east4" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + description = "This is a test description for manual qos volume" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.test_pool.name + protocols = ["NFSV3"] + throughput_mibps = 12.5 +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} {{ end }} From 8b9e627992b10a592fcbefe8e91b955dd8db41f4 Mon Sep 17 00:00:00 2001 From: HansiMou Date: Tue, 26 Aug 2025 15:08:39 -0700 Subject: [PATCH 847/884] Add unique index support in Firestore 
(#14682) --- mmv1/products/firestore/Index.yaml | 14 +++++++- .../examples/firestore_index_unique.tf.tmpl | 32 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl diff --git a/mmv1/products/firestore/Index.yaml b/mmv1/products/firestore/Index.yaml index 571e67499892..c3e29a56845a 100644 --- a/mmv1/products/firestore/Index.yaml +++ b/mmv1/products/firestore/Index.yaml @@ -92,6 +92,12 @@ examples: database_id: 'database-id-sparse-any' test_env_vars: project_id: 'PROJECT_NAME' + - name: 'firestore_index_unique' + primary_resource_id: 'my-index' + vars: + database_id: 'database-id-unique' + test_env_vars: + project_id: 'PROJECT_NAME' parameters: properties: - name: 'name' @@ -152,6 +158,12 @@ properties: definition reach or traverse an array, except via an explicit array index. Violations will result in errors. Note this field only applies to indexes with MONGODB_COMPATIBLE_API ApiScope. + - name: 'unique' + type: Boolean + default_from_api: true + description: + Whether it is an unique index. Unique index ensures all values for the + indexed field(s) are unique across documents. - name: 'fields' type: Array description: | @@ -206,7 +218,7 @@ properties: send_empty_value: true allow_empty_object: true properties: - # Meant to be an empty object with no properties. + # Meant to be an empty object with no properties. [] # Most composite indexes require at least two fields, but it is possible # for a user to require a single field index such as `__name__ DESC`. 
diff --git a/mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl b/mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl new file mode 100644 index 000000000000..4797628e90cb --- /dev/null +++ b/mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl @@ -0,0 +1,32 @@ +resource "google_firestore_database" "database" { + project = "{{index $.TestEnvVars "project_id"}}" + name = "{{index $.Vars "database_id"}}" + location_id = "nam5" + type = "FIRESTORE_NATIVE" + database_edition = "ENTERPRISE" + + delete_protection_state = "DELETE_PROTECTION_DISABLED" + deletion_policy = "DELETE" +} + +resource "google_firestore_index" "{{$.PrimaryResourceId}}" { + project = "{{index $.TestEnvVars "project_id"}}" + database = google_firestore_database.database.name + collection = "atestcollection" + + api_scope = "MONGODB_COMPATIBLE_API" + query_scope = "COLLECTION_GROUP" + multikey = true + density = "DENSE" + unique = true + + fields { + field_path = "name" + order = "ASCENDING" + } + + fields { + field_path = "description" + order = "DESCENDING" + } +} From 7d713d726035af8a2dc4a1919c88730e5d83ddd4 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 27 Aug 2025 00:09:48 +0200 Subject: [PATCH 848/884] artifactregistry: added `registry_uri` as attribute to `google_artifact_registry_repository` (#14776) --- mmv1/products/artifactregistry/Repository.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index 1241e4cac2e8..bb007704faeb 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -270,6 +270,11 @@ properties: longer than 63 characters. Label keys must begin with a lowercase letter and may only contain lowercase letters, numeric characters, underscores, and dashes. 
+ - name: 'registryUri' + type: String + output: true + description: | + The repository endpoint, for example: us-docker.pkg.dev/my-proj/my-repo. - name: 'kmsKeyName' type: String description: |- From 7266da8860038e1c4c748759ac65843d4c60078b Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 27 Aug 2025 07:38:15 -0700 Subject: [PATCH 849/884] Upgrade DCL 1.83.0 (#14935) --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 2 ++ .../resource_cloudbuild_worker_pool_test.go.tmpl | 3 ++- .../website/docs/r/cloudbuild_worker_pool.html.markdown | 6 +++++- tpgtools/go.mod | 2 +- tpgtools/go.sum | 8 ++------ 6 files changed, 13 insertions(+), 10 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 026850fd2bd3..3641e8f50c8b 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/auth v0.16.4 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index ed7608cc7167..bd42f7af505b 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -431,3 +431,5 @@ honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 h1:pvSYcI7HKOtqHTr4E9cRqVbgnh0+qnJZCrnmozltFVg= 
+github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= diff --git a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl index 9451c9c5c2ce..aa6399bb3de0 100644 --- a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl @@ -132,8 +132,9 @@ resource "google_cloudbuild_worker_pool" "pool" { location = "europe-west1" worker_config { disk_size_gb = 101 - machine_type = "e2-standard-4" + machine_type = "c3-standard-4" no_external_ip = false + enable_nested_virtualization = true } annotations = { diff --git a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown index bd9e48c1904b..50005f52291f 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown @@ -106,7 +106,11 @@ The following arguments are supported: Immutable. Subnet IP range within the peered network. This is specified in CIDR notation with a slash and the subnet prefix size. You can optionally specify an IP address before the subnet prefix value. e.g. `192.168.0.0/29` would specify an IP range starting at 192.168.0.0 with a prefix size of 29 bits. `/16` would specify a prefix size of 16 bits, with an automatically determined IP within the peered VPC. If unspecified, a value of `/24` will be used. The `worker_config` block supports: - + +* `enable_nested_virtualization` - + (Optional) + Enable nested virtualization on the worker, if supported by the machine type. 
See [Worker pool config file](https://cloud.google.com/build/docs/private-pools/worker-pool-config-file-schema). If left blank, Cloud Build will set this to false. + * `disk_size_gb` - (Optional) Size of the disk attached to the worker, in GB. See [diskSizeGb](https://cloud.google.com/build/docs/private-pools/private-pool-config-file-schema#disksizegb). Specify a value of up to 1000. If `0` is specified, Cloud Build will use a standard disk size. diff --git a/tpgtools/go.mod b/tpgtools/go.mod index d7a556fbec12..f80b39ae5daa 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index 4c05f97caca1..058e41114a23 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -6,12 +6,8 @@ cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdi cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library 
v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 h1:pvSYcI7HKOtqHTr4E9cRqVbgnh0+qnJZCrnmozltFVg= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= From e72d116fd23d28b217ca0bf684c8857611538015 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 27 Aug 2025 16:41:52 +0200 Subject: [PATCH 850/884] logging: update documentation for `include_children` in `google_logging_organization_sink` (#14990) --- .../services/logging/resource_logging_organization_sink.go | 2 +- .../website/docs/r/logging_organization_sink.html.markdown | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go b/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go index fcf750b93c86..708f370a5c4a 100644 --- a/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go +++ b/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go @@ -33,7 +33,7 @@ func ResourceLoggingOrganizationSink() *schema.Resource { Type: schema.TypeBool, Optional: true, Default: false, - Description: `Whether or not to include children organizations in the sink export. 
If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`, + Description: `Whether or not to include child folders or projects in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`, } schm.Schema["intercept_children"] = &schema.Schema{ Type: schema.TypeBool, diff --git a/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown b/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown index a0a1fc869b0c..8dfb360b8a7c 100644 --- a/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown @@ -64,7 +64,7 @@ The following arguments are supported: * `disabled` - (Optional) If set to True, then this sink is disabled and it does not export any log entries. -* `include_children` - (Optional) Whether or not to include children organizations in the sink export. If true, logs +* `include_children` - (Optional) Whether or not to include child folders or projects in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included. * `intercept_children` - (Optional) Whether or not to intercept logs from child projects. 
If true, matching logs will not From 61998a4ecd9c2d6464b25906afcd873c3ed5c73e Mon Sep 17 00:00:00 2001 From: bryan0515 Date: Wed, 27 Aug 2025 08:40:24 -0700 Subject: [PATCH 851/884] Add IAP for global forwarding rule (#14947) --- mmv1/products/iap/ForwardingRuleService.yaml | 56 +++++++++++++++++++ .../forwarding_rule_service_basic.tf.tmpl | 38 +++++++++++++ 2 files changed, 94 insertions(+) create mode 100644 mmv1/products/iap/ForwardingRuleService.yaml create mode 100644 mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl diff --git a/mmv1/products/iap/ForwardingRuleService.yaml b/mmv1/products/iap/ForwardingRuleService.yaml new file mode 100644 index 000000000000..c65f35deb029 --- /dev/null +++ b/mmv1/products/iap/ForwardingRuleService.yaml @@ -0,0 +1,56 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WebForwardingRuleService' +description: | + Only used to generate IAM resources +# This resource is only used to generate IAM resources. They do not correspond to real +# GCP resources, and should not be used to generate anything other than IAM support. 
+exclude_resource: true +docs: +id_format: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +base_url: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +self_link: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +import_format: + - 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +iam_policy: + method_name_separator: ':' + parent_resource_type: 'google_compute_global_forwarding_rule' + fetch_iam_policy_verb: 'POST' + allowed_iam_role: 'roles/iap.httpsResourceAccessor' + parent_resource_attribute: 'forwarding_rule_service_name' + iam_conditions_request_type: 'REQUEST_BODY' + example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' +custom_code: +exclude_tgc: true +examples: + - name: 'forwarding_rule_service_basic' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-forwarding-rule-service%s", context["random_suffix"])' + vars: + forwarding_rule_service_name: 'forwarding-rule-service' + target_http_proxy_name: 'target-http-proxy-name' + url_map_name: 'url-map-name' + backend_service_name: 'backend-service-name' + health_check_name: 'health-check-name' +parameters: +properties: + - name: 'name' + type: String + description: Name or self link of a forwarding rule service. 
+ required: true diff --git a/mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl b/mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl new file mode 100644 index 000000000000..7e5122104b69 --- /dev/null +++ b/mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl @@ -0,0 +1,38 @@ +resource "google_compute_health_check" "default" { + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + request_path = "/" + } +} + + +resource "google_compute_backend_service" "default" { + name = "{{index $.Vars "backend_service_name"}}" + protocol = "HTTP" + port_name = "http" + timeout_sec = 10 + health_checks = [google_compute_health_check.default.id] + load_balancing_scheme = "EXTERNAL_MANAGED" +} + + +resource "google_compute_url_map" "default" { + name = "{{index $.Vars "url_map_name"}}" + default_service = google_compute_backend_service.default.id +} + + +resource "google_compute_target_http_proxy" "default" { + name = "{{index $.Vars "target_http_proxy_name"}}" + url_map = google_compute_url_map.default.id +} + + +resource "google_compute_global_forwarding_rule" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "forwarding_rule_service_name"}}" + target = google_compute_target_http_proxy.default.id + port_range = "80" + load_balancing_scheme = "EXTERNAL_MANAGED" +} + From 6225dbd25fc06a8ec4a3987a966492595c0c3f10 Mon Sep 17 00:00:00 2001 From: Richard Belleville Date: Wed, 27 Aug 2025 08:40:42 -0700 Subject: [PATCH 852/884] GKE Multi-Subnet additional_ip_ranges_config: Change test subnet and network name from main to msc_main (#14839) --- .../resource_container_cluster_test.go.tmpl | 150 ++++++++++-------- 1 file changed, 88 insertions(+), 62 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 77e8246a06e8..9eef6abaf583 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -14323,9 +14323,77 @@ resource "google_container_cluster" "primary" { `, name, networkName, subnetworkName, config) } +type subnetRangeInfo struct { + SubnetName string + RangeNames []string +} + +func bootstrapAdditionalIpRangesNetworkConfig(t *testing.T, name string, additionalSubnetCount int, secondaryRangeCount int) (string, []subnetRangeInfo) { + sri := []subnetRangeInfo{} + + // We create our network to ensure no range collisions. + networkName := acctest.BootstrapSharedTestNetwork(t, fmt.Sprintf("%s-network", name)) + mainSubnet := acctest.BootstrapSubnetWithOverrides(t, fmt.Sprintf("%s-subnet-main", name), networkName, map[string]interface{}{ + "ipCidrRange": "10.2.0.0/24", + "secondaryIpRanges": []map[string]interface{}{ + { + "rangeName": "pods", + "ipCidrRange": "10.3.0.0/16", + }, + { + "rangeName": "services", + "ipCidrRange": "10.4.0.0/16", + }, + }, + }) + + si := subnetRangeInfo{ + SubnetName: mainSubnet, + RangeNames: []string{"pods"}, + } + sri = append(sri, si) + + cumulativeRangeIndex := 0 + for subnetIndex := 0; subnetIndex < additionalSubnetCount; subnetIndex++ { + ranges := []map[string]interface{}{} + rangeNames := []string{} + for rangeIndex := 0; rangeIndex < secondaryRangeCount; rangeIndex++ { + rangeName := fmt.Sprintf("range-%d", cumulativeRangeIndex) + r := map[string]interface{}{ + "rangeName": rangeName, + "ipCidrRange": fmt.Sprintf("10.0.%d.0/24", cumulativeRangeIndex), + } + rangeNames = append(rangeNames, rangeName) + ranges = append(ranges, r) + cumulativeRangeIndex++ + } + + subnetOverrides := map[string]interface{}{ + "ipCidrRange": fmt.Sprintf("10.1.%d.0/24", subnetIndex), + "secondaryIpRanges": ranges, + } + + subnetName := fmt.Sprintf("%s-subnet-add-%d", name, subnetIndex) + acctest.BootstrapSubnetWithOverrides(t, subnetName, networkName, 
subnetOverrides) + + si := subnetRangeInfo{ + SubnetName: subnetName, + RangeNames: rangeNames, + } + + sri = append(sri, si) + } + + return networkName, sri +} + func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) { t.Parallel() + testName := "gke-msc" + network, sri := bootstrapAdditionalIpRangesNetworkConfig(t, testName, 2, 2) + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -14333,7 +14401,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 2, 2), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), }, { ResourceName: "google_container_cluster.primary", @@ -14349,6 +14417,9 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) { t.Parallel() + testName := "gke-msc-update" + network, sri := bootstrapAdditionalIpRangesNetworkConfig(t, testName, 2, 2) + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -14356,7 +14427,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), }, { ResourceName: "google_container_cluster.primary", @@ -14366,7 +14437,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) Check: 
resource.TestCheckResourceAttrSet("google_container_cluster.primary", "node_pool.0.network_config.subnetwork"), }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 1, 1), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:len(sri)-1]), }, { ResourceName: "google_container_cluster.primary", @@ -14375,7 +14446,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:1]), }, { ResourceName: "google_container_cluster.primary", @@ -14384,7 +14455,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 2, 2), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), }, { ResourceName: "google_container_cluster.primary", @@ -14393,7 +14464,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:1]), }, { ResourceName: "google_container_cluster.primary", @@ -14445,76 +14516,31 @@ func TestAccContainerCluster_withAnonymousAuthenticationConfig(t *testing.T) { }) } -func testAccContainerCluster_additional_ip_ranges_config(name string, additionalSubnetCount int, secondaryRangeCount int) string { - var subnetStr string +func testAccContainerCluster_additional_ip_ranges_config(clusterName string, networkName string, sri []subnetRangeInfo) string { var additionalIpRangesStr string - 
cumulativeRangeIndex := 0 - for subnetIndex := 0; subnetIndex < additionalSubnetCount; subnetIndex++ { - var secondaryRangeStr string + + for _, si := range sri[1:] { var podIpv4RangeStr string - for rangeIndex := 0; rangeIndex < secondaryRangeCount; rangeIndex++ { - secondaryRangeStr += fmt.Sprintf(` - secondary_ip_range { - range_name = "range-%d" - ip_cidr_range = "10.0.%d.0/24" - } - `, cumulativeRangeIndex, cumulativeRangeIndex) - - podIpv4RangeStr += fmt.Sprintf("google_compute_subnetwork.extra_%d.secondary_ip_range[%d].range_name", subnetIndex, rangeIndex) - if rangeIndex != secondaryRangeCount - 1 { + for i, rn := range si.RangeNames { + podIpv4RangeStr += fmt.Sprintf("\"%s\"", rn) + if i != len(si.RangeNames) - 1 { podIpv4RangeStr += ", " } - cumulativeRangeIndex++ } - - subnetStr += fmt.Sprintf(` - resource "google_compute_subnetwork" "extra_%d" { - ip_cidr_range = "10.1.%d.0/24" - name = "tf-test-subnet-%d" - network = google_compute_network.main.self_link - region = "us-central1" - %s - } - `, subnetIndex, subnetIndex, subnetIndex, secondaryRangeStr) - additionalIpRangesStr += fmt.Sprintf(` additional_ip_ranges_config { - subnetwork = google_compute_subnetwork.extra_%d.id + subnetwork = "%s" pod_ipv4_range_names = [%s] } - `, subnetIndex, podIpv4RangeStr) + `, si.SubnetName, podIpv4RangeStr) } return fmt.Sprintf(` - resource "google_compute_network" "main" { - name = "%s" - auto_create_subnetworks = false - } - - resource "google_compute_subnetwork" "main" { - ip_cidr_range = "10.2.0.0/24" - name = "%s" - network = google_compute_network.main.self_link - region = "us-central1" - - secondary_ip_range { - range_name = "services" - ip_cidr_range = "10.3.0.0/16" - } - - secondary_ip_range { - range_name = "pods" - ip_cidr_range = "10.4.0.0/16" - } - } - - %s - resource "google_container_cluster" "primary" { name = "%s" location = "us-central1-a" - network = google_compute_network.main.name - subnetwork = google_compute_subnetwork.main.name + network = "%s" 
+ subnetwork = "%s" initial_node_count = 1 ip_allocation_policy { @@ -14525,7 +14551,7 @@ func testAccContainerCluster_additional_ip_ranges_config(name string, additional deletion_protection = false } - `, name, name, subnetStr, name, additionalIpRangesStr) + `, clusterName, networkName, sri[0].SubnetName, additionalIpRangesStr) } func testAccContainerCluster_withAnonymousAuthenticationConfig(name, networkName, subnetworkName string, mode string) string { From a465b89fc10558d88ae2ab4d5afcf950250cf124 Mon Sep 17 00:00:00 2001 From: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Date: Wed, 27 Aug 2025 21:10:52 +0530 Subject: [PATCH 853/884] Promote update_strategy field in google_compute_network_peering to v1 (#14820) Co-authored-by: Shrishty Chandra --- .../resource_compute_network_peering.go.tmpl | 19 ------------------- ... resource_compute_network_peering_test.go} | 2 -- .../r/compute_network_peering.html.markdown | 2 +- 3 files changed, 1 insertion(+), 22 deletions(-) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_peering_test.go.tmpl => resource_compute_network_peering_test.go} (99%) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl index 1afbaac3e4f0..0b24e3fc61fc 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl @@ -114,7 +114,6 @@ func ResourceComputeNetworkPeering() *schema.Resource { Default: "IPV4_ONLY", }, - {{ if ne $.TargetVersionName `ga` }} "update_strategy": { Type: schema.TypeString, Optional: true, @@ -122,7 +121,6 @@ func ResourceComputeNetworkPeering() *schema.Resource { Description: `The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. 
Possible values: ["INDEPENDENT", "CONSENSUS"]`, Default: "INDEPENDENT", }, - {{- end }} }, UseJSONNumber: true, } @@ -223,11 +221,9 @@ func resourceComputeNetworkPeeringRead(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error setting stack_type: %s", err) } - {{ if ne $.TargetVersionName `ga` }} if err := d.Set("update_strategy", flattenNetworkPeeringUpdateStrategy(peering.UpdateStrategy, d, config)); err != nil { return fmt.Errorf("Error setting update_strategy: %s", err) } - {{- end }} return nil } @@ -328,19 +324,6 @@ func findPeeringFromNetwork(network *compute.Network, peeringName string) *compu return nil } func expandNetworkPeering(d *schema.ResourceData) *compute.NetworkPeering { - {{ if eq $.TargetVersionName `ga` }} - return &compute.NetworkPeering{ - ExchangeSubnetRoutes: true, - Name: d.Get("name").(string), - Network: d.Get("peer_network").(string), - ExportCustomRoutes: d.Get("export_custom_routes").(bool), - ImportCustomRoutes: d.Get("import_custom_routes").(bool), - ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), - ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), - StackType: d.Get("stack_type").(string), - ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, - } - {{- else }} return &compute.NetworkPeering{ ExchangeSubnetRoutes: true, Name: d.Get("name").(string), @@ -353,8 +336,6 @@ func expandNetworkPeering(d *schema.ResourceData) *compute.NetworkPeering { UpdateStrategy: d.Get("update_strategy").(string), ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, } - {{- end }} - } func flattenNetworkPeeringStackType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go index 17d2a20537d9..f97e7d71a5d0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go @@ -141,7 +141,6 @@ func TestAccComputeNetworkPeering_stackType(t *testing.T) { } -{{ if ne $.TargetVersionName `ga` }} func TestAccComputeNetworkPeering_updateStrategy(t *testing.T) { t.Parallel() @@ -177,7 +176,6 @@ func TestAccComputeNetworkPeering_updateStrategy(t *testing.T) { }) } -{{- end }} func testAccComputeNetworkPeeringDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { diff --git a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown index 410448fb8766..03aabfac59f1 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown @@ -68,7 +68,7 @@ Whether subnet routes with public IP range are imported. The default value is fa * `stack_type` - (Optional) Which IP version(s) of traffic and routes are allowed to be imported or exported between peer networks. The default value is IPV4_ONLY. Possible values: ["IPV4_ONLY", "IPV4_IPV6"]. -* `update_strategy` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) +* `update_strategy` - (Optional) The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. 
Possible values: ["INDEPENDENT", "CONSENSUS"] ## Attributes Reference From ae29ee0784198a20ad38d7464aaf332bfc4be5a9 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 27 Aug 2025 08:45:17 -0700 Subject: [PATCH 854/884] Ignore parallelstore Instance update_time changes in importstateverify (#14987) --- mmv1/products/parallelstore/Instance.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mmv1/products/parallelstore/Instance.yaml b/mmv1/products/parallelstore/Instance.yaml index 59ae8a9cf935..d073d8aa9f04 100644 --- a/mmv1/products/parallelstore/Instance.yaml +++ b/mmv1/products/parallelstore/Instance.yaml @@ -48,12 +48,16 @@ examples: name: 'instance' network_name: 'network' address_name: 'address' + ignore_read_extra: + - "update_time" - name: 'parallelstore_instance_basic' primary_resource_id: 'instance' vars: name: 'instance' network_name: 'network' address_name: 'address' + ignore_read_extra: + - "update_time" parameters: - name: 'location' type: String From 2a9dd35a84c729f4a4f09386ac84524d7a8c4652 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wojtek=20Olesi=C5=84ski?= <64974153+wojtekolesinski@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:48:36 +0200 Subject: [PATCH 855/884] Fix non-idempotent behavior in google_compute_region_network_endpoint_group when network is omitted (#14948) Signed-off-by: wojtekolesinski --- mmv1/products/compute/RegionNetworkEndpointGroup.yaml | 1 + .../region_network_endpoint_group_psc_service_attachment.tf.tmpl | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml index 454f66a0244a..5cba46b0978e 100644 --- a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml +++ b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml @@ -163,6 +163,7 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'Network' imports: 'selfLink' + 
default_from_api: true - name: 'subnetwork' type: ResourceRef description: | diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl b/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl index 0bf3fbeec1c5..51a0d376f9b8 100644 --- a/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl +++ b/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl @@ -64,6 +64,5 @@ resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}} psc_data { producer_port = "88" } - network = google_compute_network.default.self_link subnetwork = google_compute_subnetwork.default.self_link } From 52fdd0faa59f33142dae915ba03e8b5af069f328 Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Wed, 27 Aug 2025 21:31:07 +0530 Subject: [PATCH 856/884] Add create_time field in backups (#14951) --- .../backupdr/data_source_backup_dr_backup.go.tmpl | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl index 944e547352d3..2933f3c1ea3f 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl @@ -45,6 +45,11 @@ func DataSourceGoogleCloudBackupDRBackup() *schema.Resource { Computed: true, Description: `Name of the Data Source associated with Backup.`, }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The time when the backup was created.`, + }, }, }, }, @@ -64,6 +69,11 @@ func DataSourceGoogleCloudBackupDRBackup() *schema.Resource { Type: schema.TypeString, Required: true, }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The time when the 
backup was created.`, + }, } return &schema.Resource{ @@ -146,6 +156,7 @@ func flattenDataSourceBackupDRBackups(v interface{}, d *schema.ResourceData, con "backup_id": flattenDataSourceBackupDRBackupsBackupId(original["backupId"], d, config), "backup_vault_id": flattenDataSourceBackupDRBackupsBackupVaultId(original["backupVaultId"], d, config), "data_source_id": flattenDataSourceBackupDRBackupsDataSourceId(original["dataSourceId"], d, config), + "create_time": flattenDataSourceBackupDRBackupsCreateTime(original["createTime"], d, config), }) } return transformed @@ -170,3 +181,7 @@ func flattenDataSourceBackupDRBackupsBackupVaultId(v interface{}, d *schema.Reso func flattenDataSourceBackupDRBackupsDataSourceId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return v } + +func flattenDataSourceBackupDRBackupsCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} \ No newline at end of file From c87725f7e4d53a9869bf522d111122d1384bb3cb Mon Sep 17 00:00:00 2001 From: nimish-khurana Date: Wed, 27 Aug 2025 21:44:25 +0530 Subject: [PATCH 857/884] feat: enable default_from_api flag for ODB Network related fields in Oracledatabase AutonomousDatabase resource (#14898) --- mmv1/products/oracledatabase/AutonomousDatabase.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/products/oracledatabase/AutonomousDatabase.yaml b/mmv1/products/oracledatabase/AutonomousDatabase.yaml index 3fb1691b9ddc..67e6d993c696 100644 --- a/mmv1/products/oracledatabase/AutonomousDatabase.yaml +++ b/mmv1/products/oracledatabase/AutonomousDatabase.yaml @@ -666,12 +666,14 @@ properties: projects/{project}/locations/{location}/odbNetworks/{odb_network} It is optional but if specified, this should match the parent ODBNetwork of the odb_subnet and backup_odb_subnet. 
+ default_from_api: true - name: odbSubnet type: String description: |- The name of the OdbSubnet associated with the Autonomous Database for IP allocation. Format: projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} + default_from_api: true - name: 'createTime' type: String description: 'The date and time that the Autonomous Database was created. ' From f49ba871d443dd4fcbd36897fa8958938f36592b Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 27 Aug 2025 09:46:42 -0700 Subject: [PATCH 858/884] Added missing update_time importstateverify ignore for notebooks instance (#14988) --- .../services/notebooks/resource_notebooks_instance_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go index bb7da4791158..84ac3f4f66d1 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go @@ -25,7 +25,7 @@ func TestAccNotebooksInstance_create_vm_image(t *testing.T) { ResourceName: "google_notebooks_instance.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"vm_image", "metadata"}, + ImportStateVerifyIgnore: []string{"vm_image", "metadata", "update_time"}, }, }, }) From 50b5aba2e1cb14e17b1764dab979169ec6f75051 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Wed, 27 Aug 2025 12:12:50 -0500 Subject: [PATCH 859/884] fix storage bucket retention_period migration crash (#15000) --- .../resource_storage_bucket_600_migration.go | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index 
bf4a561b50ea..22553d7bbbf3 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -2,9 +2,10 @@ package storage import ( "context" + "encoding/json" + "fmt" "log" "math" - "strconv" "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -1571,8 +1572,14 @@ func ResourceStorageBucketStateUpgradeV3(_ context.Context, rawState map[string] retentionPolicies := rawState["retention_policy"].([]interface{}) if len(retentionPolicies) > 0 { retentionPolicy := retentionPolicies[0].(map[string]interface{}) - if v, ok := retentionPolicy["retention_period"]; ok { - retentionPolicy["retention_period"] = strconv.Itoa(v.(int)) + // nil check + if v, ok := retentionPolicy["retention_period"]; ok && v != nil { + // number conversion check to error rather than crash + if num, ok := v.(json.Number); ok { + retentionPolicy["retention_period"] = num.String() + } else { + return rawState, fmt.Errorf("retention_period in state has unexpected type %T", v) + } } } } From daf559525311b95704668353e03cba3eaf78aedc Mon Sep 17 00:00:00 2001 From: Andrew Ferg Date: Wed, 27 Aug 2025 14:23:26 -0400 Subject: [PATCH 860/884] Fix HA Policy update test flakiness (#14960) --- ...ice_ha_policy_manual_leader_update_test.go | 74 +++++++++++-------- 1 file changed, 44 insertions(+), 30 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go index 085f5cc19f8b..952893dde6bb 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go @@ 
-128,21 +128,28 @@ resource "google_compute_region_backend_service" "default" { protocol = "UDP" load_balancing_scheme = "EXTERNAL" network = google_compute_network.default.id - backend { - group = google_compute_network_endpoint_group.neg.self_link - balancing_mode = "CONNECTION" - } - ha_policy { - fast_ip_move = "GARP_RA" - leader { - backend_group = google_compute_network_endpoint_group.neg.self_link - network_endpoint { - instance = google_compute_instance.endpoint-instance1.name - } - } - } - // Must explicitly disable connection draining to override default value. - connection_draining_timeout_sec = 0 + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance1.name + } + } + } + // Must explicitly disable connection draining to override default value. + connection_draining_timeout_sec = 0 + // Explicitly depend on the endpoints to prevent test flakes due to creating + // the BackendService before the endpoints have been added to the NEG. + depends_on = [ + google_compute_network_endpoint_group.neg, + google_compute_network_endpoint.endpoint1, + google_compute_network_endpoint.endpoint2 + ] } `, context) } @@ -228,21 +235,28 @@ resource "google_compute_region_backend_service" "default" { protocol = "UDP" load_balancing_scheme = "EXTERNAL" network = google_compute_network.default.id - backend { - group = google_compute_network_endpoint_group.neg.self_link - balancing_mode = "CONNECTION" - } - ha_policy { - fast_ip_move = "GARP_RA" - leader { - backend_group = google_compute_network_endpoint_group.neg.self_link - network_endpoint { - instance = google_compute_instance.endpoint-instance2.name - } - } - } - // Must explicitly disable connection draining to override default value. 
- connection_draining_timeout_sec = 0 + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance2.name + } + } + } + // Must explicitly disable connection draining to override default value. + connection_draining_timeout_sec = 0 + // Explicitly depend on the endpoints to prevent test flakes due to creating + // the BackendService before the endpoints have been added to the NEG. + depends_on = [ + google_compute_network_endpoint_group.neg, + google_compute_network_endpoint.endpoint1, + google_compute_network_endpoint.endpoint2 + ] } `, context) } From 60b2a4b150c16dac4a1f49df6e8d1382a2bd3753 Mon Sep 17 00:00:00 2001 From: Gitika Yadav <123439083+gitika-yadav@users.noreply.github.com> Date: Thu, 28 Aug 2025 02:03:18 +0530 Subject: [PATCH 861/884] Update BackupPlan.yaml to move resource type field to GA (#14984) --- mmv1/products/backupdr/BackupPlan.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index c60029f8850a..42ff4e86a235 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -98,7 +98,6 @@ properties: type: Array description: | The list of all resource types to which the `BackupPlan` can be applied. 
- min_version: beta item_type: type: String output: true From 4e3e627d9468441b14c8a2c13cbd87000afbef48 Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Thu, 28 Aug 2025 02:03:56 +0530 Subject: [PATCH 862/884] =?UTF-8?q?Changes=20to=20sql=20testAccSqlDatabase?= =?UTF-8?q?Instance=5FupdateFromBackupDR=20function=E2=80=A6=20(#14999)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../sql/resource_sql_database_instance_test.go.tmpl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 87a8ab904fa4..5bf0eeeeace0 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -6664,10 +6664,10 @@ resource "google_sql_database_instance" "instance" { region = "us-central1" settings { - tier = "db-g1-small" + tier = "db-g1-small" backup_configuration { - enabled = true - } + enabled = "false" + } } lifecycle { ignore_changes = [ From 795f154b70c8bfd12e7e378214373b38eadfc1c4 Mon Sep 17 00:00:00 2001 From: Axel Kossek Date: Wed, 27 Aug 2025 22:52:08 +0200 Subject: [PATCH 863/884] Add resource_manager_tags support to Region Backend Service api (#14837) Co-authored-by: Thomas Rodgers --- .../compute/RegionBackendService.yaml | 16 ++++++ ...ompute_region_backend_service_test.go.tmpl | 55 +++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index 312a970d5c9d..d022966f3458 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -1544,3 +1544,19 @@ properties: description: | The name of the VM instance of the leader network endpoint. 
The instance must already be attached to the NEG specified in the haPolicy.leader.backendGroup. + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the region backend service. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. + api_name: resourceManagerTags + ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index c4e16475dc75..591114bb6691 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeRegionBackendService_basic(t *testing.T) { @@ -756,6 +757,35 @@ func TestAccComputeRegionBackendService_withDynamicBackendCount(t *testing.T) { }) } +func TestAccComputeRegionBackendService_withTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-rbs-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := 
acctest.BootstrapSharedTestTagValueDetails(t, "crm-rbs-tagvalue", sharedTagkey, org) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionBackendService_withTags(serviceName, checkName, tagKeyResult["name"], tagValueResult["name"]), + }, + { + ResourceName: "google_compute_region_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} + func testAccComputeRegionBackendService_withDynamicBackendCount(serviceName, netName, hcName, igName string) string { return fmt.Sprintf(` locals { @@ -1770,3 +1800,28 @@ resource "google_compute_region_health_check" "health_check" { } `, serviceName, checkName) } + +func testAccComputeRegionBackendService_withTags(serviceName, checkName string, tagKey string, tagValue string) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + name = "%s" + health_checks = [google_compute_health_check.zero.self_link] + region = "us-central1" + params { + resource_manager_tags = { + "%s" = "%s" + } + } +} + +resource "google_compute_health_check" "zero" { + name = "%s" + check_interval_sec = 1 + timeout_sec = 1 + + tcp_health_check { + port = "80" + } +} +`, serviceName, tagKey, tagValue, checkName) +} \ No newline at end of file From 17ce31d1c67bec2db16a505cf17e3af74087102b Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Wed, 27 Aug 2025 16:53:41 -0400 Subject: [PATCH 864/884] Modify test file generation to use resource ImportPath (#15002) --- mmv1/provider/template_data.go | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 6964d5de5f9b..32df384cbc8a 100644 --- 
a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -144,7 +144,7 @@ func (td *TemplateData) GenerateTestFile(filePath string, resource api.Resource) } tmplInput := TestInput{ Res: resource, - ImportPath: td.ImportPath(), + ImportPath: resource.ImportPath, PROJECT_NAME: "my-project-name", CREDENTIALS: "my/credentials/filename.json", REGION: "us-west1", @@ -281,15 +281,6 @@ func (td *TemplateData) GenerateFile(filePath, templatePath string, input any, g } } -func (td *TemplateData) ImportPath() string { - if td.VersionName == GA_VERSION { - return "github.com/hashicorp/terraform-provider-google/google" - } else if td.VersionName == ALPHA_VERSION || td.VersionName == PRIVATE_VERSION { - return "internal/terraform-next/google-private" - } - return "github.com/hashicorp/terraform-provider-google-beta/google-beta" -} - func FixImports(outputPath string, dumpDiffs bool) { log.Printf("Fixing go import paths") From 54668fa6f415f4f780e1d804448f4ccda36df3b4 Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Wed, 27 Aug 2025 14:34:20 -0700 Subject: [PATCH 865/884] `teamcity`: set `RELEASE_DIFF` as environment variable instead of parameter (#15003) --- .../terraform/.teamcity/components/builds/build_parameters.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt index 4a2af9b93e15..393493f525df 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt @@ -210,7 +210,7 @@ fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: Str text("PARALLELISM", "%d".format(parallelism)) text("TEST_PREFIX", prefix) text("TIMEOUT", timeout) - text("RELEASE_DIFF", "true") + text("env.RELEASE_DIFF", releaseDiffTest) } // 
ParametrizedWithType.sweeperParameters sets build parameters that affect how sweepers are run From d81b8e146d518d6bb46ad5448ebeba681d8387aa Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Wed, 27 Aug 2025 22:49:25 +0000 Subject: [PATCH 866/884] feat(storagetransfer): add service_account to google_storage_transfer_job (#14961) --- .../resource_storage_transfer_job.go | 16 + .../resource_storage_transfer_job_test.go | 349 ++++++++++++++++-- .../docs/r/storage_transfer_job.html.markdown | 6 +- 3 files changed, 347 insertions(+), 24 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 2eff1acfb8d6..41bd024723ce 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -142,6 +142,11 @@ func ResourceStorageTransferJob() *schema.Resource { ForceNew: true, Description: `The project in which the resource belongs. If it is not provided, the provider project is used.`, }, + "service_account": { + Type: schema.TypeString, + Optional: true, + Description: `The user-managed service account to run the job. If this field is specified, the given service account is granted the necessary permissions to all applicable resources (e.g. 
GCS buckets) required by the job.`, + }, "event_stream": { Type: schema.TypeList, Optional: true, @@ -909,6 +914,7 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) ReplicationSpec: expandReplicationSpecs(d.Get("replication_spec").([]interface{})), LoggingConfig: expandTransferJobLoggingConfig(d.Get("logging_config").([]interface{})), NotificationConfig: expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})), + ServiceAccount: d.Get("service_account").(string), } var res *storagetransfer.TransferJob @@ -976,6 +982,9 @@ func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) er if err := d.Set("deletion_time", res.DeletionTime); err != nil { return fmt.Errorf("Error setting deletion_time: %s", err) } + if err := d.Set("service_account", res.ServiceAccount); err != nil { + return fmt.Errorf("Error setting service_account: %s", err) + } err = d.Set("schedule", flattenTransferSchedule(res.Schedule)) if err != nil { @@ -1085,6 +1094,13 @@ func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{}) } } + if d.HasChange("service_account") { + fieldMask = append(fieldMask, "service_account") + if v, ok := d.GetOk("service_account"); ok { + transferJob.ServiceAccount = v.(string) + } + } + if len(fieldMask) == 0 { return nil } diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 09647d6ff895..3dc2058b0e31 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -573,6 +573,57 @@ func TestAccStorageTransferJob_hdfsSource(t *testing.T) { }) } +func TestAccStorageTransferJob_withServiceAccount(t *testing.T) { + t.Parallel() + + testTransferJobDescription := acctest.RandString(t, 10) + 
testSourceBucketName := fmt.Sprintf("tf-acc-source-%s", acctest.RandString(t, 10)) + testSinkBucketName := fmt.Sprintf("tf-acc-sink-%s", acctest.RandString(t, 10)) + testServiceAccountId := fmt.Sprintf("tf-acc-sa1-%s", acctest.RandString(t, 10)) + testUpdatedServiceAccountId := fmt.Sprintf("tf-acc-sa2-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageTransferJobDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccStorageTransferJob_withServiceAccount(testTransferJobDescription, testSourceBucketName, testSinkBucketName, testServiceAccountId, envvar.GetTestProjectFromEnv()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_storage_transfer_job.with_sa", "service_account", fmt.Sprintf("%s@%s.iam.gserviceaccount.com", testServiceAccountId, envvar.GetTestProjectFromEnv())), + ), + }, + { + ResourceName: "google_storage_transfer_job.with_sa", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_withServiceAccount_updated(testTransferJobDescription, testSourceBucketName, testSinkBucketName, testServiceAccountId, testUpdatedServiceAccountId, envvar.GetTestProjectFromEnv()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_storage_transfer_job.with_sa", "service_account", fmt.Sprintf("%s@%s.iam.gserviceaccount.com", testUpdatedServiceAccountId, envvar.GetTestProjectFromEnv())), + ), + }, + { + ResourceName: "google_storage_transfer_job.with_sa", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_withServiceAccount_removed(testTransferJobDescription, testSourceBucketName, testSinkBucketName, envvar.GetTestProjectFromEnv()), + }, + { + ResourceName: 
"google_storage_transfer_job.with_sa", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { t.Parallel() @@ -937,7 +988,7 @@ resource "google_storage_transfer_job" "transfer_job" { } repeat_interval = "604800s" } - + logging_config { log_actions = [ "COPY", @@ -1112,7 +1163,7 @@ resource "google_storage_transfer_job" "transfer_job" { } repeat_interval = "604800s" } - + logging_config { log_actions = [ "COPY", @@ -1138,7 +1189,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN data "google_storage_transfer_project_service_account" "default" { project = "%s" } - + resource "google_storage_bucket" "data_source" { name = "%s" project = "%s" @@ -1146,13 +1197,13 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_source" { bucket = google_storage_bucket.data_source.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_bucket" "data_sink" { name = "%s" project = "%s" @@ -1160,18 +1211,18 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_sink" { bucket = google_storage_bucket.data_sink.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_transfer_job" "transfer_job" { name = "transferJobs/%s" description = "%s" project = "%s" - + transfer_spec { gcs_data_source { bucket_name = google_storage_bucket.data_source.name @@ -1182,7 +1233,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN path = "foo/bar/" } } - + schedule { 
schedule_start_date { year = 2018 @@ -1202,7 +1253,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN } repeat_interval = "604800s" } - + depends_on = [ google_storage_bucket_iam_member.data_source, google_storage_bucket_iam_member.data_sink, @@ -1387,7 +1438,7 @@ resource "google_storage_transfer_job" "transfer_job" { path = "foo/bar/" } } - + logging_config { enable_on_prem_gcs_transfer_logs = true } @@ -1522,7 +1573,7 @@ resource "google_storage_transfer_job" "transfer_job" { bucket_name = google_storage_bucket.data_source.name } } - + logging_config { enable_on_prem_gcs_transfer_logs = false } @@ -2155,7 +2206,7 @@ resource "google_storage_transfer_job" "transfer_job" { last_modified_since = "2020-01-01T00:00:00Z" last_modified_before = "2020-01-01T00:00:00Z" exclude_prefixes = [ - "a/b/c", + "a/b/c", ] include_prefixes = [ "a/b" @@ -2241,7 +2292,7 @@ resource "google_storage_transfer_job" "transfer_job" { last_modified_since = "2020-01-01T00:00:00Z" last_modified_before = "2020-01-01T00:00:00Z" exclude_prefixes = [ - "a/b/c", + "a/b/c", ] include_prefixes = [ "a/b" @@ -2431,7 +2482,7 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck data "google_storage_transfer_project_service_account" "default" { project = "%s" } - + resource "google_storage_bucket" "data_source" { name = "%s" project = "%s" @@ -2439,13 +2490,13 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_source" { bucket = google_storage_bucket.data_source.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_bucket" "data_sink" { name = "%s" project = "%s" @@ -2453,18 +2504,18 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck force_destroy = true 
uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_sink" { bucket = google_storage_bucket.data_sink.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_transfer_job" "transfer_job" { name = "transferJobs/%s" description = "%s" project = "%s" - + transfer_spec { gcs_data_source { bucket_name = google_storage_bucket.data_source.name @@ -2475,7 +2526,7 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck path = "%s" } } - + schedule { schedule_start_date { year = 2018 @@ -2495,7 +2546,7 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck } repeat_interval = "604800s" } - + depends_on = [ google_storage_bucket_iam_member.data_source, google_storage_bucket_iam_member.data_sink, @@ -2503,3 +2554,257 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck } `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project, gcsPath) } + +func testAccStorageTransferJob_withServiceAccount(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, project string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + project = "%s" + account_id = "%s" + display_name = "Test Service Account" +} + +resource "google_storage_bucket" "source" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket" "sink" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "source_iam" { + bucket = google_storage_bucket.source.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account.email}" +} + +resource "google_storage_bucket_iam_member" "sink_iam" { + bucket = 
google_storage_bucket.sink.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account.email}" +} + +data "google_storage_transfer_project_service_account" "transfer_sa" { +} + +resource "google_service_account_iam_member" "token_creator" { + service_account_id = google_service_account.test_account.name + role = "roles/iam.serviceAccountTokenCreator" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.transfer_sa.email}" +} + +resource "time_sleep" "wait_120_seconds" { + depends_on = [ + google_service_account_iam_member.token_creator, + google_storage_bucket_iam_member.source_iam, + google_storage_bucket_iam_member.sink_iam, + ] + create_duration = "120s" +} + +resource "google_storage_transfer_job" "with_sa" { + description = "%s" + project = "%s" + service_account = google_service_account.test_account.email + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.source.name + } + gcs_data_sink { + bucket_name = google_storage_bucket.sink.name + } + } + + schedule { + schedule_start_date { + year = 2023 + month = 1 + day = 15 + } + schedule_end_date { + year = 2023 + month = 1 + day = 15 + } + } + + depends_on = [ + time_sleep.wait_120_seconds, + ] +} +`, project, serviceAccountId, project, dataSourceBucketName, project, dataSinkBucketName, description, project) +} + +func testAccStorageTransferJob_withServiceAccount_updated(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, updatedServiceAccountId, project string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + project = "%s" + account_id = "%s" + display_name = "Test Service Account" +} + +resource "google_service_account" "test_account_2" { + project = "%s" + account_id = "%s" + display_name = "Test Service Account 2" +} + +resource "google_storage_bucket" "source" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource 
"google_storage_bucket" "sink" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "source_iam" { + bucket = google_storage_bucket.source.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account_2.email}" +} + +resource "google_storage_bucket_iam_member" "sink_iam" { + bucket = google_storage_bucket.sink.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account_2.email}" +} + +data "google_storage_transfer_project_service_account" "transfer_sa" { +} + +resource "google_service_account_iam_member" "token_creator" { + service_account_id = google_service_account.test_account_2.name + role = "roles/iam.serviceAccountTokenCreator" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.transfer_sa.email}" +} + +resource "time_sleep" "wait_120_seconds_2" { + depends_on = [ + google_service_account_iam_member.token_creator, + google_storage_bucket_iam_member.source_iam, + google_storage_bucket_iam_member.sink_iam, + ] + create_duration = "120s" +} + +resource "google_storage_transfer_job" "with_sa" { + description = "%s" + project = "%s" + service_account = google_service_account.test_account_2.email + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.source.name + } + gcs_data_sink { + bucket_name = google_storage_bucket.sink.name + } + } + + schedule { + schedule_start_date { + year = 2023 + month = 1 + day = 15 + } + schedule_end_date { + year = 2023 + month = 1 + day = 15 + } + } + + depends_on = [ + time_sleep.wait_120_seconds_2, + ] +} +`, project, serviceAccountId, project, updatedServiceAccountId, project, dataSourceBucketName, project, dataSinkBucketName, description, project) +} + +func testAccStorageTransferJob_withServiceAccount_removed(description, dataSourceBucketName, dataSinkBucketName, project string) string { + return fmt.Sprintf(` + +resource 
"google_storage_bucket" "source" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket" "sink" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + + +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} + +resource "google_storage_bucket_iam_member" "source_iam" { + bucket = google_storage_bucket.source.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_bucket_iam_member" "sink_iam" { + bucket = google_storage_bucket.sink.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "time_sleep" "wait_120_seconds_3" { + depends_on = [ + google_storage_bucket_iam_member.source_iam, + google_storage_bucket_iam_member.sink_iam, + ] + create_duration = "120s" +} + +resource "google_storage_transfer_job" "with_sa" { + description = "%s" + project = "%s" + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.source.name + } + gcs_data_sink { + bucket_name = google_storage_bucket.sink.name + } + } + + schedule { + schedule_start_date { + year = 2023 + month = 1 + day = 15 + } + schedule_end_date { + year = 2023 + month = 1 + day = 15 + } + } + + depends_on = [ + time_sleep.wait_120_seconds_3, + ] + +} +`, project, dataSourceBucketName, project, dataSinkBucketName, project, description, project) +} diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index 2c27448bd9c7..65bd53d0a055 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -140,6 +140,8 @@ The following arguments are supported: * `project` 
- (Optional) The project in which the resource belongs. If it is not provided, the provider project is used. +* `service_account` - (Optional) The user-managed service account to run the job. If this field is specified, the given service account is granted the necessary permissions to all applicable resources (e.g. GCS buckets) required by the job. + * `status` - (Optional) Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.** * `notification_config` - (Optional) Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure [documented below](#nested_notification_config). @@ -328,13 +330,13 @@ The `azure_credentials` block supports: The `loggin_config` block supports: -* `log_actions` - (Optional) A list of actions to be logged. If empty, no logs are generated. Not supported for transfers with PosixFilesystem data sources; use enableOnpremGcsTransferLogs instead. +* `log_actions` - (Optional) A list of actions to be logged. If empty, no logs are generated. Not supported for transfers with PosixFilesystem data sources; use enableOnpremGcsTransferLogs instead. Each action may be one of `FIND`, `DELETE`, and `COPY`. * `log_action_states` - (Optional) A list of loggable action states. If empty, no logs are generated. Not supported for transfers with PosixFilesystem data sources; use enableOnpremGcsTransferLogs instead. Each action state may be one of `SUCCEEDED`, and `FAILED`. -* `enable_on_prem_gcs_transfer` - (Optional) For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer. 
+* `enable_on_prem_gcs_transfer` - (Optional) For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer. Defaults to false. ## Attributes Reference From 44dcd4b27a4b5a1fafa5a9109a08837e70e6e75f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 27 Aug 2025 16:53:14 -0700 Subject: [PATCH 867/884] Fixed TestAccApigeeSecurityAction_apigeeSecurityActionFull test setup (#14986) --- .../resource_apigee_security_action_test.go | 42 ++++++++++++++++++- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go index c49f19b29c06..cd4dd1e382b4 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -57,7 +58,9 @@ func TestAccApigeeSecurityAction_apigeeSecurityActionFull(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -177,10 +180,45 @@ func TestAccApigeeSecurityAction_apigeeSecurityActionFull(t *testing.T) { func testAccApigeeSecurityAction_apigeeBase(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_client_config" "current" {} +resource "google_project" "project" { + 
project_id = "tf-test-%{random_suffix}" + name = "tf-test-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.project] +} + +resource "google_project_service" "apigee" { + project = google_project.project.project_id + service = "apigee.googleapis.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + depends_on = [google_project_service.apigee] +} + +resource "google_project_service" "servicenetworking" { + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" + depends_on = [google_project_service.compute] +} + +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_project_service.servicenetworking] +} resource "google_compute_network" "apigee_network" { name = "tf-test-network-%{random_suffix}" + depends_on = [time_sleep.wait_120_seconds] } resource "google_compute_global_address" "apigee_range" { From 88665fb944ff343c8f42f202698f239917248106 Mon Sep 17 00:00:00 2001 From: Jon Buckley Date: Thu, 28 Aug 2025 11:55:08 -0400 Subject: [PATCH 868/884] sql: Add `consumer_network_status`, `ip_address`, and `status` fields to `google_sql_database_instance` (#14643) --- .../sql/resource_sql_database_instance.go.tmpl | 18 ++++++++++++++++++ .../resource_sql_database_instance_meta.yaml | 3 +++ .../docs/r/sql_database_instance.html.markdown | 6 ++++++ 3 files changed, 27 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index bdae1754c38c..6abe0b4d3d4e 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -561,6 +561,21 @@ API (for read pools, effective_availability_type may differ from availability_ty Required: true, Description: `The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. The consumer host project of this network might be different from the consumer service project.`, }, + "consumer_network_status": { + Type: schema.TypeString, + Computed: true, + Description: `The connection policy status of the consumer network.`, + }, + "ip_address": { + Type: schema.TypeString, + Computed: true, + Description: `The IP address of the consumer endpoint.`, + }, + "status": { + Type: schema.TypeString, + Computed: true, + Description: `The connection status of the consumer endpoint.`, + }, }, }, Description: `A comma-separated list of networks or a comma-separated list of network-project pairs. Each project in this list is represented by a project number (numeric) or by a project ID (alphanumeric). 
This allows Private Service Connect connections to be created automatically for the specified networks.`, @@ -2701,7 +2716,10 @@ func flattenPscAutoConnections(pscAutoConnections []*sqladmin.PscAutoConnectionC for _, flag := range pscAutoConnections { data := map[string]interface{}{ "consumer_network": flag.ConsumerNetwork, + "consumer_network_status": flag.ConsumerNetworkStatus, "consumer_service_project_id": flag.ConsumerProject, + "ip_address": flag.IpAddress, + "status": flag.Status, } flags = append(flags, data) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml index df9d0644270c..18549ce5cb8b 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml @@ -100,7 +100,10 @@ fields: - field: 'settings.ip_configuration.private_network' - field: 'settings.ip_configuration.psc_config.allowed_consumer_projects' - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_network' + - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_network_status' - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_service_project_id' + - field: 'settings.ip_configuration.psc_config.psc_auto_connections.ip_address' + - field: 'settings.ip_configuration.psc_config.psc_auto_connections.status' - field: 'settings.ip_configuration.psc_config.psc_enabled' - field: 'settings.ip_configuration.server_ca_mode' - field: 'settings.ip_configuration.server_ca_pool' diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index b0675085eaf1..f3c424607478 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -698,6 +698,12 @@ performing filtering in a Terraform config. ~> **NOTE:** Users can upgrade a read replica instance to a stand-alone Cloud SQL instance with the help of `instance_type`. To promote, users have to set the `instance_type` property as `CLOUD_SQL_INSTANCE` and remove/unset `master_instance_name` and `replica_configuration` from instance configuration. This operation might cause your instance to restart. +* `settings.ip_configuration.psc_config.psc_auto_connections.consumer_network_status` - (Output) The connection policy status of the consumer network. + +* `settings.ip_configuration.psc_config.psc_auto_connections.ip_address` - (Output) The IP address of the consumer endpoint. + +* `settings.ip_configuration.psc_config.psc_auto_connections.status` - (Output) The connection status of the consumer endpoint. + * `settings.version` - Used to make sure changes to the `settings` block are atomic. From 90f6ecb2b14667537d19664d169ceb3be766ceb6 Mon Sep 17 00:00:00 2001 From: malhotrasagar2212 Date: Thu, 28 Aug 2025 12:12:29 -0400 Subject: [PATCH 869/884] Compute public delagated sub prefix list support (#14264) --- .../compute/PublicDelegatedPrefix.yaml | 55 +++++++++++++ ...e_compute_public_advertised_prefix_test.go | 79 +++++++++++++++++++ 2 files changed, 134 insertions(+) diff --git a/mmv1/products/compute/PublicDelegatedPrefix.yaml b/mmv1/products/compute/PublicDelegatedPrefix.yaml index c84125c96061..21a74f13da40 100644 --- a/mmv1/products/compute/PublicDelegatedPrefix.yaml +++ b/mmv1/products/compute/PublicDelegatedPrefix.yaml @@ -122,3 +122,58 @@ properties: The IP address range, in CIDR format, represented by this public delegated prefix. required: true + - name: 'publicDelegatedSubPrefixs' + type: Array + output: true + description: | + List of sub public delegated fixes for BYO IP functionality. 
+ Each item in this array represents a sub prefix that can be + used to create addresses or further allocations. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + The name of the sub public delegated prefix. + - name: 'description' + type: String + description: | + An optional description of this sub public delegated prefix. + - name: 'region' + type: String + description: | + Output-only. The region of the sub public delegated prefix if it is regional. If absent, the sub prefix is global. + - name: 'status' + type: Enum + description: | + The status of the sub public delegated prefix. + enum_values: + - 'INITIALIZING' + - 'READY_TO_ANNOUNCE' + - 'ANNOUNCED' + - 'DELETING' + - name: 'ipCidrRange' + type: String + description: | + The IP address range in the CIDR format represented by this sub prefix. + - name: 'isAddress' + type: Boolean + description: | + Whether the sub prefix is delegated for address creation. + - name: 'mode' + type: Enum + description: | + The PublicDelegatedSubPrefix mode for IPv6 only. + enum_values: + - 'DELEGATION' + - 'EXTERNAL_IPV6_FORWARDING_RULE_CREATION' + - 'EXTERNAL_IPV6_SUBNETWORK_CREATION' + - name: 'allocatablePrefixLength' + type: Integer + description: | + The allocatable prefix length supported by this PublicDelegatedSubPrefix. + - name: 'delegatee_project' + type: String + description: | + Name of the project scoping this PublicDelegatedSubPrefix. 
diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go index 9c4ca876645e..856a6801d044 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go @@ -21,6 +21,7 @@ func TestAccComputePublicPrefixes(t *testing.T) { "public_delegated_prefixes_ipv6": testAccComputePublicDelegatedPrefix_publicDelegatedPrefixesIpv6Test, "public_advertised_prefixes_pdp_scope": testAccComputePublicAdvertisedPrefix_publicAdvertisedPrefixesPdpScopeTest, "public_delegated_prefix_ipv6_subnet_mode": testAccComputePublicDelegatedPrefix_publicDelegatedPrefixIpv6SubnetModeTest, + "public_delgated_prefix_with_sub_prefix": TestAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample, } for name, tc := range testCases { @@ -35,6 +36,84 @@ func TestAccComputePublicPrefixes(t *testing.T) { } } +func TestAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(t *testing.T) { + t.Parallel() + subPrefixResourceName := "google_compute_public_delegated_prefix.subprefix" + parentProject := "tf-static-byoip" + parentRegion := "us-central1" + parentName := "tf-test-delegation-mode-sub-pdp" + + context := map[string]interface{}{ + "parent_pdp_id": "projects/tf-static-byoip/regions/us-central1/publicDelegatedPrefixes/tf-test-delegation-mode-sub-pdp", + "project": "tf-static-byoip", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputePublicDelegatedPrefixDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(context), + Check: resource.ComposeTestCheckFunc( + // First, a basic check that the sub-prefix was created + resource.TestCheckResourceAttrSet(subPrefixResourceName, "id"), + + // Now, the custom check function + testAccCheckParentHasSubPrefix(t, parentProject, parentRegion, parentName, subPrefixResourceName), + ), + }, + { + ResourceName: "google_compute_public_delegated_prefix.subprefix", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + }, + }) +} + +func testAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_compute_public_delegated_prefix" "subprefix" { + name = "tf-test-sub-prefix-1%{random_suffix}" + description = "A nested address" + region = "us-central1" + ip_cidr_range = "2600:1901:4500:2::/64" + parent_prefix = "%{parent_pdp_id}" + mode = "DELEGATION" +} +`, context) +} + +func testAccCheckParentHasSubPrefix(t *testing.T, project, region, parentName, subPrefixResourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[subPrefixResourceName] + if !ok { + return fmt.Errorf("Not found: %s", subPrefixResourceName) + } + newSubPrefixName := rs.Primary.Attributes["name"] + + config := acctest.GoogleProviderConfig(t) + computeService := config.NewComputeClient(config.UserAgent) + + parent, err := computeService.PublicDelegatedPrefixes.Get(project, region, parentName).Do() + if err != nil { + return err + } + + for _, sub := range parent.PublicDelegatedSubPrefixs { + if sub.Name == newSubPrefixName { + return nil + } + } + + return fmt.Errorf("Sub-Prefix %q not found in parent %q's sub-prefix list", newSubPrefixName, parentName) + } +} + func testAccComputePublicAdvertisedPrefix_publicAdvertisedPrefixesPdpScopeTest(t *testing.T) { context := 
map[string]interface{}{ "description": envvar.GetTestPublicAdvertisedPrefixDescriptionFromEnv(t), From 749a809135e0b283e5b88e160851d060740a9cce Mon Sep 17 00:00:00 2001 From: shumiao Date: Thu, 28 Aug 2025 10:08:06 -0700 Subject: [PATCH 870/884] Update unit test - AutomaticManagement is no longer supported on the gkehub api server for the ConfigManagement feature (#14995) --- .../resource_gke_hub_feature_test.go.tmpl | 62 ------------------- 1 file changed, 62 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 37e510333765..518347c9d53f 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -489,14 +489,6 @@ func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) ImportState: true, ImportStateVerify: true, }, - { - Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementEnableAutomaticManagementUpdate(context), - }, - { - ResourceName: "google_gke_hub_feature.feature", - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementRemovalUpdate(context), }, @@ -505,37 +497,10 @@ func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) ImportState: true, ImportStateVerify: true, }, - { - Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementAutomaticManagement(context), - }, - { - ResourceName: "google_gke_hub_feature.feature", - ImportState: true, - ImportStateVerify: true, - }, }, }) } -func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementAutomaticManagement(context map[string]interface{}) string { - return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_feature" "feature" { - name = "configmanagement" - 
location = "global" - fleet_default_member_config { - configmanagement { - management = "MANAGEMENT_AUTOMATIC" - config_sync { - enabled = true - } - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] - project = google_project.project.project_id -} -`, context) -} - func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(context map[string]interface{}) string { return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` resource "google_gke_hub_feature" "feature" { @@ -593,33 +558,6 @@ resource "google_gke_hub_feature" "feature" { `, context) } -func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementEnableAutomaticManagementUpdate(context map[string]interface{}) string { - return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_feature" "feature" { - name = "configmanagement" - location = "global" - fleet_default_member_config { - configmanagement { - management = "MANAGEMENT_AUTOMATIC" - config_sync { - prevent_drift = true - source_format = "unstructured" - oci { - sync_repo = "us-central1-docker.pkg.dev/corp-gke-build-artifacts/acm/configs:latest" - policy_dir = "/acm/nonprod-root/" - secret_type = "gcpserviceaccount" - sync_wait_secs = "15" - gcp_service_account_email = "gke-cluster@gke-foo-nonprod.iam.gserviceaccount.com" - } - } - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] - project = google_project.project.project_id -} -`, context) -} - func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementRemovalUpdate(context map[string]interface{}) string { return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` resource "google_gke_hub_feature" "feature" { From d5bb755f8e80b2c14ea9f7d40f901fea077f61b1 Mon Sep 17 00:00:00 2001 From: Jiongxin Ye <48576162+JessieYee@users.noreply.github.com> Date: Thu, 28 Aug 2025 10:38:00 -0700 Subject: [PATCH 871/884] 
update go.mod and go.sum (#14926) --- mmv1/third_party/terraform/go.mod | 6 +++--- mmv1/third_party/terraform/go.sum | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 3641e8f50c8b..ab0546334c5d 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,7 +3,7 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.4 + cloud.google.com/go/auth v0.16.5 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 @@ -35,8 +35,8 @@ require ( golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 golang.org/x/net v0.43.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.247.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b + google.golang.org/api v0.248.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c google.golang.org/grpc v1.74.2 google.golang.org/protobuf v1.36.7 gopkg.in/yaml.v2 v2.4.0 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index bd42f7af505b..b4176222aeaf 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -5,8 +5,8 @@ cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= -cloud.google.com/go/auth v0.16.4 h1:fXOAIQmkApVvcIn7Pc2+5J8QTMVbUGLscnSVNl11su8= -cloud.google.com/go/auth v0.16.4/go.mod h1:j10ncYwjX/g3cdX7GpEzsdM+d+ZNsXAbb6qXA7p1Y5M= +cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI= +cloud.google.com/go/auth v0.16.5/go.mod 
h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= @@ -390,8 +390,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.247.0 h1:tSd/e0QrUlLsrwMKmkbQhYVa109qIintOls2Wh6bngc= -google.golang.org/api v0.247.0/go.mod h1:r1qZOPmxXffXg6xS5uhx16Fa/UFY8QU/K4bfKrnvovM= +google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y= +google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -403,8 +403,8 @@ google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuO google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b h1:zPKJod4w6F1+nRGDI9ubnXYhU9NSWoFAijkHkUXeTK8= -google.golang.org/genproto/googleapis/rpc 
v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= From 49ebc6319ab36cbf085d0584e15f5c1854ff0a77 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Thu, 28 Aug 2025 11:44:52 -0700 Subject: [PATCH 872/884] autogeneration of tests for singular datasources (#14872) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/api/resource.go | 42 +++++--- mmv1/api/resource/datasource.go | 2 + mmv1/products/cloudrun/Service.yaml | 1 + .../FolderIntelligenceConfig.yaml | 1 + .../OrganizationIntelligenceConfig.yaml | 1 + .../ProjectIntelligenceConfig.yaml | 1 + mmv1/provider/template_data.go | 29 ++++++ mmv1/provider/terraform.go | 16 ++++ .../base_configs/datasource_test_file.go.tmpl | 96 +++++++++++++++++++ .../iap/data_source_iap_client_test.go | 70 -------------- 10 files changed, 177 insertions(+), 82 deletions(-) create mode 100644 mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index f14b98e20b35..f29a0328bfad 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1203,31 +1203,42 @@ func ImportIdFormats(importFormat, identity []string, baseUrl string) []string { return uniq } -func (r Resource) IgnoreReadPropertiesToString(e resource.Examples) string { +// IgnoreReadProperties returns a sorted slice of property names (snake_case) that should 
be ignored when reading. +// This is useful for downstream code that needs to iterate over these properties. +func (r Resource) IgnoreReadProperties(e resource.Examples) []string { var props []string for _, tp := range r.AllUserProperties() { if tp.UrlParamOnly || tp.IsA("ResourceRef") { - props = append(props, fmt.Sprintf("\"%s\"", google.Underscore(tp.Name))) + props = append(props, google.Underscore(tp.Name)) } } - for _, tp := range e.IgnoreReadExtra { - props = append(props, fmt.Sprintf("\"%s\"", tp)) - } - for _, tp := range r.IgnoreReadLabelsFields(r.PropertiesWithExcluded()) { - props = append(props, fmt.Sprintf("\"%s\"", tp)) - } - for _, tp := range ignoreReadFields(r.AllUserProperties()) { - props = append(props, fmt.Sprintf("\"%s\"", tp)) - } + props = append(props, e.IgnoreReadExtra...) + props = append(props, r.IgnoreReadLabelsFields(r.PropertiesWithExcluded())...) + props = append(props, ignoreReadFields(r.AllUserProperties())...) slices.Sort(props) + return props +} +// IgnoreReadPropertiesToString returns the ignore read properties as a Go-syntax string slice. +// This is a wrapper around IgnoreReadProperties for backwards compatibility. +func (r Resource) IgnoreReadPropertiesToString(e resource.Examples) string { + props := r.IgnoreReadProperties(e) if len(props) > 0 { - return fmt.Sprintf("[]string{%s}", strings.Join(props, ", ")) + return fmt.Sprintf("[]string{%s}", strings.Join(quoteStrings(props), ", ")) } return "" } +// quoteStrings returns a new slice with each string quoted. 
+func quoteStrings(strs []string) []string { + quoted := make([]string, len(strs)) + for i, s := range strs { + quoted[i] = fmt.Sprintf("\"%s\"", s) + } + return quoted +} + func ignoreReadFields(props []*Type) []string { var fields []string for _, tp := range props { @@ -2021,6 +2032,13 @@ func (r *Resource) ShouldGenerateSingularDataSource() bool { return r.Datasource.Generate } +func (r *Resource) ShouldGenerateSingularDataSourceTests() bool { + if r.Datasource == nil { + return false + } + return !r.Datasource.ExcludeTest +} + func (r Resource) ShouldDatasourceSetLabels() bool { for _, p := range r.Properties { if p.Name == "labels" && p.Type == "KeyValueLabels" { diff --git a/mmv1/api/resource/datasource.go b/mmv1/api/resource/datasource.go index ee87c1501f87..2fe4f4755d6f 100644 --- a/mmv1/api/resource/datasource.go +++ b/mmv1/api/resource/datasource.go @@ -16,4 +16,6 @@ package resource type Datasource struct { // boolean to determine whether the datasource file should be generated Generate bool `yaml:"generate"` + // boolean to determine whether tests should be generated for a datasource + ExcludeTest bool `yaml:"exclude_test"` } diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index 294276192445..6e9415daed92 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -31,6 +31,7 @@ import_format: - 'locations/{{location}}/namespaces/{{project}}/services/{{name}}' datasource: generate: true + exclude_test: true timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml index 83e0f7a1b098..ee9cb9e043a3 100644 --- a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml @@ -51,6 +51,7 @@ autogen_async: false datasource: generate: true + exclude_test: true examples: - name: 
storage_control_folder_intelligence_config_basic diff --git a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml index edf604dac363..547f726d0a54 100644 --- a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml @@ -51,6 +51,7 @@ autogen_async: false datasource: generate: true + exclude_test: true examples: - name: storage_control_organization_intelligence_config_basic diff --git a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml index d2c654e08b13..60962fef6c4d 100644 --- a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml @@ -52,6 +52,7 @@ autogen_async: false datasource: generate: true + exclude_test: true examples: - name: storage_control_project_intelligence_config_basic diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 32df384cbc8a..831d7fecafe1 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -165,6 +165,35 @@ func (td *TemplateData) GenerateTestFile(filePath string, resource api.Resource) td.GenerateFile(filePath, templatePath, tmplInput, true, templates...) 
} +func (td *TemplateData) GenerateDataSourceTestFile(filePath string, resource api.Resource) { + templatePath := "templates/terraform/examples/base_configs/datasource_test_file.go.tmpl" + templates := []string{ + "templates/terraform/env_var_context.go.tmpl", + templatePath, + } + tmplInput := TestInput{ + Res: resource, + ImportPath: resource.ImportPath, + PROJECT_NAME: "my-project-name", + CREDENTIALS: "my/credentials/filename.json", + REGION: "us-west1", + ORG_ID: "123456789", + ORG_DOMAIN: "example.com", + ORG_TARGET: "123456789", + PROJECT_NUMBER: "1111111111111", + BILLING_ACCT: "000000-0000000-0000000-000000", + MASTER_BILLING_ACCT: "000000-0000000-0000000-000000", + SERVICE_ACCT: "my@service-account.com", + CUST_ID: "A01b123xz", + IDENTITY_USER: "cloud_identity_user", + PAP_DESCRIPTION: "description", + CHRONICLE_ID: "00000000-0000-0000-0000-000000000000", + VMWAREENGINE_PROJECT: "my-vmwareengine-project", + } + + td.GenerateFile(filePath, templatePath, tmplInput, true, templates...) 
+} + func (td *TemplateData) GenerateIamPolicyFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/iam_policy.go.tmpl" templates := []string{ diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 3e96ce7f58ea..88f14e0c853a 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -108,6 +108,7 @@ func (t *Terraform) GenerateObject(object api.Resource, outputFolder, productPat t.GenerateResourceTests(object, *templateData, outputFolder) t.GenerateResourceSweeper(object, *templateData, outputFolder) t.GenerateSingularDataSource(object, *templateData, outputFolder) + t.GenerateSingularDataSourceTests(object, *templateData, outputFolder) // log.Printf("Generating %s metadata", object.Name) t.GenerateResourceMetadata(object, *templateData, outputFolder) } @@ -208,6 +209,21 @@ func (t *Terraform) GenerateSingularDataSource(object api.Resource, templateData templateData.GenerateDataSourceFile(targetFilePath, object) } +func (t *Terraform) GenerateSingularDataSourceTests(object api.Resource, templateData TemplateData, outputFolder string) { + if !object.ShouldGenerateSingularDataSourceTests() { + return + } + + productName := t.Product.ApiName + targetFolder := path.Join(outputFolder, t.FolderName(), "services", productName) + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + targetFilePath := path.Join(targetFolder, fmt.Sprintf("data_source_%s_test.go", t.ResourceGoFilename(object))) + templateData.GenerateDataSourceTestFile(targetFilePath, object) + +} + // GenerateProduct creates the product.go file for a given service directory. // This will be used to seed the directory and add a package-level comment // specific to the product. 
diff --git a/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl b/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl new file mode 100644 index 000000000000..84421bf40166 --- /dev/null +++ b/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl @@ -0,0 +1,96 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package {{ $.Res.PackageName }}_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "{{ $.ImportPath }}/acctest" + "{{ $.ImportPath }}/envvar" + "{{ $.ImportPath }}/tpgresource" +) + +{{ if $.Res.TestExamples }} +{{ $e := index $.Res.TestExamples 0 }} +func TestAccDataSource{{ $.Res.ResourceName }}_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + {{- template "EnvVarContext" dict "TestEnvVars" $e.TestEnvVars "HasNewLine" false}} + {{- range $varKey, $varVal := $e.TestVarsOverrides }} + "{{$varKey}}": {{$varVal}}, + {{- end }} + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + {{- if $.Res.VersionedProvider $e.MinVersion }} + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + {{- else }} + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + {{- end }} + {{- if $e.ExternalProviders }} + ExternalProviders: map[string]resource.ExternalProvider{ + 
{{- range $provider := $e.ExternalProviders }} + "{{$provider}}": {}, + {{- end }} + }, + {{- end }} + {{- if not $.Res.ExcludeDelete }} + CheckDestroy: testAccCheck{{ $.Res.ResourceName }}DestroyProducer(t), +{{- end }} + Steps: []resource.TestStep{ + { + Config: testAcc{{ $e.TestSlug $.Res.ProductMetadata.Name $.Res.Name }}DataSource(context), + Check: resource.ComposeTestCheckFunc( + {{- if gt (len ($.Res.IgnoreReadProperties $e)) 0 }} + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + "data.{{ $e.ResourceType $.Res.TerraformName }}.default", + "{{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}", + map[string]struct{}{ + {{- range $prop := $.Res.IgnoreReadProperties $e }} + "{{ $prop }}": {}, + {{- end }} + }, + ), + {{- else }} + acctest.CheckDataSourceStateMatchesResourceState("data.{{ $e.ResourceType $.Res.TerraformName }}.default", "{{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}"), + {{- end }} + ), + }, + }, + }) +} + +func testAcc{{ $e.TestSlug $.Res.ProductMetadata.Name $.Res.Name }}DataSource(context map[string]interface{}) string { + return acctest.Nprintf(` +{{ $e.TestHCLText }} + +data "{{ $.Res.TerraformName }}" "default" { +{{- range $fieldName := $.Res.DatasourceRequiredFields }} + {{ $fieldName }} = {{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}.{{ $fieldName }} +{{- end }} +}`, + context, + ) +} +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go b/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go deleted file mode 100644 index 0442f018d845..000000000000 --- a/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package iap_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - 
"github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccIapClient_Datasource_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "org_domain": envvar.GetTestOrgDomainFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccIapClientDatasourceConfig(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( - "data.google_iap_client.project_client", - "google_iap_client.project_client", - map[string]struct{}{ - "brand": {}, - }, - ), - ), - }, - }, - }) -} - -func testAccIapClientDatasourceConfig(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - deletion_policy = "DELETE" -} - -resource "google_project_service" "project_service" { - project = google_project.project.project_id - service = "iap.googleapis.com" -} - -resource "google_iap_brand" "project_brand" { - support_email = "support@%{org_domain}" - application_title = "Cloud IAP protected Application" - project = google_project_service.project_service.project -} - -resource "google_iap_client" "project_client" { - display_name = "Test Client" - brand = google_iap_brand.project_brand.name -} - -data "google_iap_client" "project_client" { - brand = google_iap_client.project_client.brand - client_id = google_iap_client.project_client.client_id -} -`, context) -} From 8e2b46b749c4cbe2447aaf0696572f2aa11983d0 Mon Sep 17 00:00:00 2001 From: Mehul3217 <44620455+Mehul3217@users.noreply.github.com> Date: Fri, 29 Aug 2025 00:44:12 +0530 Subject: [PATCH 873/884] fixing custom performance pool, since totalIops will 
be returned by sever (#15005) --- mmv1/products/netapp/StoragePool.yaml | 1 + .../resource_netapp_storage_pool_test.go.tmpl | 72 +++++++++++++++++++ 2 files changed, 73 insertions(+) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index 6921fdfd36f5..084d89c9f67f 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -194,6 +194,7 @@ properties: type: String description: | Optional. Custom Performance Total IOPS of the pool If not provided, it will be calculated based on the totalThroughputMibps + default_from_api: true - name: 'hotTierSizeGib' type: String description: | diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index 7f283395f5b4..e5b5063c461f 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -567,3 +567,75 @@ data "google_compute_network" "default" { } `, context) } + +func TestAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_full(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + description = "this is a test description" + custom_performance_enabled = true + total_throughput_mibps = "200" +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} + +func testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + description = "this is updated test description" + custom_performance_enabled = true + total_throughput_mibps = "200" + total_iops = "3500" +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} \ No newline at end of file From c7ef67ac775aaeadd6b8fee72e69790b0e02791b Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 28 Aug 2025 14:24:21 -0700 Subject: [PATCH 874/884] Removed changes that were left out of v7 release (#15013) --- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ------ 1 file changed, 6 deletions(-) diff --git 
a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index e38de6f3bb47..3ee8d0e7bf63 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -235,12 +235,6 @@ Now, destroying the resource will only remove it from Terraform's state and leav `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. -## Resource: `google_secret_manager_secret_version` - -### `secret_data_wo` and `secret_data_wo_version` must be set together - -This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. - ## Resource: `google_sql_user` ### `password_wo` and `password_wo_version` must be set together From ed64e67c43a612af0bec191f5d599b4418313c02 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Thu, 28 Aug 2025 17:41:32 -0400 Subject: [PATCH 875/884] Remove unused TerraformResourceDirectory and TerraformProviderModule fields in generator (#15001) --- mmv1/provider/template_data.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 831d7fecafe1..8946f9238e31 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -35,9 +35,6 @@ type TemplateData struct { OutputFolder string VersionName string - TerraformResourceDirectory string - TerraformProviderModule string - // TODO rewrite: is this needed? 
// # Information about the local environment // # (which formatters are enabled, start-time) @@ -53,18 +50,6 @@ var goimportFiles sync.Map func NewTemplateData(outputFolder string, versionName string) *TemplateData { td := TemplateData{OutputFolder: outputFolder, VersionName: versionName} - - if versionName == GA_VERSION { - td.TerraformResourceDirectory = "google" - td.TerraformProviderModule = "github.com/hashicorp/terraform-provider-google" - } else if versionName == ALPHA_VERSION || versionName == PRIVATE_VERSION { - td.TerraformResourceDirectory = "google-private" - td.TerraformProviderModule = "internal/terraform-next" - } else { - td.TerraformResourceDirectory = "google-beta" - td.TerraformProviderModule = "github.com/hashicorp/terraform-provider-google-beta" - } - return &td } From ef7fd53b785be9d27f8f27746c66b2681fbea6cd Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Thu, 28 Aug 2025 18:05:45 -0400 Subject: [PATCH 876/884] Update provider generation to use generic naming for imports (#15008) --- mmv1/provider/provider.go | 6 ++---- mmv1/provider/terraform.go | 11 ++++------- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/mmv1/provider/provider.go b/mmv1/provider/provider.go index 7da908c8aa5e..1f32008fff85 100644 --- a/mmv1/provider/provider.go +++ b/mmv1/provider/provider.go @@ -18,10 +18,8 @@ type Provider interface { const TERRAFORM_PROVIDER_GA = "github.com/hashicorp/terraform-provider-google" const TERRAFORM_PROVIDER_BETA = "github.com/hashicorp/terraform-provider-google-beta" const TGC_PROVIDER = "github.com/GoogleCloudPlatform/terraform-google-conversion/v6" -const TERRAFORM_PROVIDER_PRIVATE = "internal/terraform-next" const RESOURCE_DIRECTORY_GA = "google" const RESOURCE_DIRECTORY_BETA = "google-beta" -const RESOURCE_DIRECTORY_PRIVATE = "google-private" const RESOURCE_DIRECTORY_TGC = "pkg" // # TODO: Review all object interfaces and move to private methods @@ -40,8 +38,8 @@ func ImportPathFromVersion(v string) string { tpg = 
TERRAFORM_PROVIDER_BETA dir = RESOURCE_DIRECTORY_BETA default: - tpg = TERRAFORM_PROVIDER_PRIVATE - dir = RESOURCE_DIRECTORY_PRIVATE + tpg = "github.com/hashicorp/terraform-provider-google-" + v + dir = "google-" + v } return fmt.Sprintf("%s/%s", tpg, dir) } diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 88f14e0c853a..cecb60ec909d 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -302,10 +302,8 @@ func (t *Terraform) GenerateIamDocumentation(object api.Resource, templateData T func (t *Terraform) FolderName() string { if t.TargetVersionName == "ga" { return "google" - } else if t.TargetVersionName == "beta" { - return "google-beta" } - return "google-private" + return "google-" + t.TargetVersionName } // Similar to FullResourceName, but override-aware to prevent things like ending in _test. @@ -732,9 +730,8 @@ func (t Terraform) replaceImportPath(outputFolder, target string) { tpg = TERRAFORM_PROVIDER_BETA dir = RESOURCE_DIRECTORY_BETA default: - tpg = TERRAFORM_PROVIDER_PRIVATE - dir = RESOURCE_DIRECTORY_PRIVATE - + tpg = "github.com/hashicorp/terraform-provider-google-" + t.TargetVersionName + dir = "google-" + t.TargetVersionName } sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(tpg+"/"+dir), -1) @@ -764,7 +761,7 @@ func (t Terraform) ProviderFromVersion() string { case "beta": dir = RESOURCE_DIRECTORY_BETA default: - dir = RESOURCE_DIRECTORY_PRIVATE + dir = "google-" + t.TargetVersionName } return dir } From 4194145bde265ea79c263999d841b6de17164bd2 Mon Sep 17 00:00:00 2001 From: Brad Hoekstra Date: Thu, 28 Aug 2025 19:20:48 -0400 Subject: [PATCH 877/884] Document deprecated GKE Enterprise fields. 
(#14932) Signed-off-by: Brad Hoekstra --- .../services/container/resource_container_cluster.go.tmpl | 3 +++ .../terraform/website/docs/r/container_cluster.html.markdown | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 4c053deb9a46..16456ccafaf0 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2523,12 +2523,14 @@ func ResourceContainerCluster() *schema.Resource { MaxItems: 1, Computed: true, Description: `Defines the config needed to enable/disable GKE Enterprise`, + Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "cluster_tier": { Type: schema.TypeString, Computed: true, Description: `Indicates the effective cluster tier. Available options include STANDARD and ENTERPRISE.`, + Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, }, "desired_tier": { Type: schema.TypeString, @@ -2536,6 +2538,7 @@ func ResourceContainerCluster() *schema.Resource { Computed: true, ValidateFunc: validation.StringInSlice([]string{"STANDARD", "ENTERPRISE"}, false), Description: `Indicates the desired cluster tier. Available options include STANDARD and ENTERPRISE.`, + Deprecated: `GKE Enterprise features are now available without an Enterprise tier. 
This field is deprecated and will be removed in a future major release`, DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("CLUSTER_TIER_UNSPECIFIED"), }, }, diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 623a11241f9b..bd0d63961dac 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -421,7 +421,7 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle Configuration for [direct-path (via ALTS) with workload identity.](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1beta1/projects.locations.clusters#workloadaltsconfig). Structure is [documented below](#nested_workload_alts_config). * `enterprise_config` - (Optional) - Configuration for [Enterprise edition].(https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config). + (DEPRECATED) Configuration for [Enterprise edition].(https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config). Deprecated as GKE Enterprise features are now available without an Enterprise tier. See https://cloud.google.com/blog/products/containers-kubernetes/gke-gets-new-pricing-and-capabilities-on-10th-birthday for the announcement of this change. * `anonymous_authentication_config` - (Optional) Configuration for [anonymous authentication restrictions](https://cloud.google.com/kubernetes-engine/docs/how-to/hardening-your-cluster#restrict-anon-access). Structure is [documented below](#anonymous_authentication_config). @@ -1643,7 +1643,7 @@ linux_node_config { The `enterprise_config` block supports: -* `desired_tier` - (Optional) Sets the tier of the cluster. 
Available options include `STANDARD` and `ENTERPRISE`. +* `desired_tier` - (Optional) (DEPRECATED) Sets the tier of the cluster. Available options include `STANDARD` and `ENTERPRISE`. Deprecated as GKE Enterprise features are now available without an Enterprise tier. See https://cloud.google.com/blog/products/containers-kubernetes/gke-gets-new-pricing-and-capabilities-on-10th-birthday for the announcement of this change. The `anonymous_authentication_config` block supports: From cbca94896cc9d13bbd2db91a20691465380bc7f1 Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Thu, 28 Aug 2025 16:50:26 -0700 Subject: [PATCH 878/884] `teamcity`: `RELEASE_DIFF` == "true" (#15016) --- mmv1/third_party/terraform/acctest/diff_utils.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/acctest/diff_utils.go b/mmv1/third_party/terraform/acctest/diff_utils.go index d23f226180f4..0a91f3ebed29 100644 --- a/mmv1/third_party/terraform/acctest/diff_utils.go +++ b/mmv1/third_party/terraform/acctest/diff_utils.go @@ -20,7 +20,7 @@ const diffTag = "[Diff]" func isReleaseDiffEnabled() bool { releaseDiff := os.Getenv("RELEASE_DIFF") - return releaseDiff != "" + return releaseDiff == "true" } func initializeReleaseDiffTest(c resource.TestCase, testName string, tempOutputFile *os.File) resource.TestCase { From a3ec8653cfe867ab11f8e947908f76b893ba749d Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Thu, 28 Aug 2025 19:29:27 -0500 Subject: [PATCH 879/884] Fix for desired_auto_created_endpoints incorrect update (#14777) Co-authored-by: Riley Karson Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/memorystore/Instance.yaml | 4 +- .../decoders/memorystore_instance.go.tmpl | 24 +- .../resource_memorystore_instance_test.go | 233 ++++++++++++++++++ 3 files changed, 256 insertions(+), 5 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml 
b/mmv1/products/memorystore/Instance.yaml index ec40528f2814..9d937470dc09 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -108,8 +108,8 @@ examples: 'secondary_instance_prevent_destroy': 'false' virtual_fields: - name: 'desired_psc_auto_connections' - description: "`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead." - deprecation_message: '`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead.' + description: "`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead `terraform import` will only work with desired_auto_created_endpoints`." + deprecation_message: '`desired_psc_auto_connections` is deprecated. Use `desired_auto_created_endpoints` instead. `terraform import` will only work with desired_auto_created_endpoints`.' type: Array immutable: true conflicts: diff --git a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl index 68ce254147ec..3918e6238c31 100644 --- a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl @@ -85,10 +85,28 @@ } } } + // We want to make these fields detect API-side drift, so if the API returns a value for them and they're set in config, we set them in state. + // On import, we only set `desired_auto_created_endpoints` because that's the non-deprecated field. 
if len(transformed) > 0 { - d.Set("desired_auto_created_endpoints", transformed) - log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder for %#v", transformed) - + _, okEndpoint := d.GetOk("desired_auto_created_endpoints") + _, okPsc := d.GetOk("desired_psc_auto_connections") + if okEndpoint { + d.Set("desired_auto_created_endpoints", transformed) + log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder within endpoints for %#v", transformed) + } else if okPsc { + d.Set("desired_auto_created_endpoints", []interface{}{}) + } + if okPsc { + d.Set("desired_psc_auto_connections", transformed) + log.Printf("[DEBUG] Setting desired_psc_auto_connections in decoder within endpoints for %#v", transformed) + } else if okEndpoint { + d.Set("desired_psc_auto_connections", []interface{}{}) + } + // Set preferred field on import + if !okPsc && !okEndpoint { + d.Set("desired_auto_created_endpoints", transformed) + log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder within endpoints for %#v", transformed) + } } } diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go index 12b182cfdea1..7636dfc388e6 100644 --- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go @@ -1537,3 +1537,236 @@ data "google_project" "project" { } `, context) } + +func TestAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "location": "us-central1", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckMemorystoreInstanceDestroyProducer(t), + 
Steps: []resource.TestStep{ + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_bothConnections(context), + }, + { + ResourceName: "google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabledPscAutoConnections(context), + }, + { + ResourceName: "google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_onlyAutoCreatedEndpoints(context), + }, + { + ResourceName: "google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_neitherConnection(context), + }, + { + ResourceName: 
"google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + }, + }) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabledPscAutoConnections(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + desired_psc_auto_connections { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_bothConnections(context 
map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + desired_psc_auto_connections { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + desired_auto_created_endpoints { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_onlyAutoCreatedEndpoints(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + desired_auto_created_endpoints { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "%{location}" + deletion_protection_enabled = false + mode = 
"CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_neitherConnection(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource 
"google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} From 8711bfab49e8151417969e5e80a02cfc1efd853d Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 29 Aug 2025 19:04:35 +0200 Subject: [PATCH 880/884] MMv1 field: changed `write_only` to `write_only_legacy` and marked field as deprecated (#15022) --- docs/content/reference/field.md | 6 ++++-- mmv1/api/resource.go | 2 +- mmv1/api/type.go | 14 ++++++++------ mmv1/products/bigquerydatatransfer/Config.yaml | 2 +- mmv1/products/monitoring/UptimeCheckConfig.yaml | 2 +- .../custom_flatten/dataplex_entry_aspects.go.tmpl | 2 +- .../terraform/flatten_property_method.go.tmpl | 6 +++--- ...rty_write_only_documentation.html.markdown.tmpl | 2 +- .../property_documentation.html.markdown.tmpl | 2 +- .../terraform/resource.html.markdown.tmpl | 6 +++--- mmv1/templates/terraform/schema_property.go.tmpl | 2 +- 11 files changed, 25 insertions(+), 21 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index 1f24ec8abc06..ae40c0b3bd4d 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -107,7 +107,7 @@ Example: sensitive: true ``` -### `write_only` +### `write_only_legacy` (deprecated) If true, the field is considered "write-only", which means that its value will be obscured in Terraform output as well as not be stored in state. This field is meant to replace `sensitive` as it doesn't store the value in state. See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) @@ -121,9 +121,11 @@ This field cannot be used in conjuction with `immutable` or `sensitive`. Example: ```yaml -write_only: true +write_only_legacy: true ``` +**Deprecated**: This field is deprecated and will be removed in a future release. 
+ ### `ignore_read` If true, the provider sets the field's value in the resource state based only on the user's configuration. If false or unset, the provider sets the field's diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index f29a0328bfad..5468905cddae 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -617,7 +617,7 @@ func (r Resource) SensitiveProps() []*Type { func (r Resource) WriteOnlyProps() []*Type { props := r.AllNestedProperties(r.RootProperties()) return google.Select(props, func(p *Type) bool { - return p.WriteOnly + return p.WriteOnlyLegacy }) } diff --git a/mmv1/api/type.go b/mmv1/api/type.go index ca664c774a8c..341b31d44ee7 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -171,7 +171,9 @@ type Type struct { Sensitive bool `yaml:"sensitive,omitempty"` // Adds `Sensitive: true` to the schema - WriteOnly bool `yaml:"write_only,omitempty"` // Adds `WriteOnly: true` to the schema + // TODO: remove this field after all references are migrated + // see: https://github.com/GoogleCloudPlatform/magic-modules/pull/14933#pullrequestreview-3166578379 + WriteOnlyLegacy bool `yaml:"write_only_legacy,omitempty"` // Adds `WriteOnlyLegacy: true` to the schema // Does not set this value to the returned API value. Useful for fields // like secrets where the returned API value is not helpful. 
@@ -395,11 +397,11 @@ func (t *Type) Validate(rName string) { log.Fatalf("'default_value' and 'default_from_api' cannot be both set in resource %s", rName) } - if t.WriteOnly && (t.DefaultFromApi || t.Output) { + if t.WriteOnlyLegacy && (t.DefaultFromApi || t.Output) { log.Fatalf("Property %s cannot be write_only and default_from_api or output at the same time in resource %s", t.Name, rName) } - if t.WriteOnly && t.Sensitive { + if t.WriteOnlyLegacy && t.Sensitive { log.Fatalf("Property %s cannot be write_only and sensitive at the same time in resource %s", t.Name, rName) } @@ -750,7 +752,7 @@ func (t Type) WriteOnlyProperties() []*Type { } case t.IsA("NestedObject"): props = google.Select(t.UserProperties(), func(p *Type) bool { - return p.WriteOnly + return p.WriteOnlyLegacy }) case t.IsA("Map"): props = google.Reject(t.ValueType.WriteOnlyProperties(), func(p *Type) bool { @@ -1224,8 +1226,8 @@ func (t *Type) IsForceNew() bool { return t.Immutable } - // WriteOnly fields are never immutable - if t.WriteOnly { + // WriteOnlyLegacy fields are never immutable + if t.WriteOnlyLegacy { return false } diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index 4bafa3b99dc5..91b3c82333b1 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -243,7 +243,7 @@ properties: type: String description: | The Secret Access Key of the AWS account transferring data from. 
- write_only: true + write_only_legacy: true at_least_one_of: - 'sensitive_params.0.secret_access_key_wo' - 'sensitive_params.0.secret_access_key' diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 7f6bf290d24e..7cfac9e299c3 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -259,7 +259,7 @@ properties: - 'http_check.0.auth_info.0.password' required_with: - 'http_check.0.auth_info.0.password_wo_version' - write_only: true + write_only_legacy: true - name: 'passwordWoVersion' type: String immutable: true diff --git a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl index de8dd922f1f9..fcd51234760c 100644 --- a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl @@ -15,7 +15,7 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso transformed = append(transformed, map[string]interface{}{ {{- range $prop := $.ItemType.UserProperties }} - {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} + {{- if not (or $prop.IgnoreRead $prop.WriteOnlyLegacy) }} "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), {{- end }} {{- end }} diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index 7c24debc4096..e36259baebb5 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -13,7 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} {{- define "flattenPropertyMethod" }} -{{- if $.WriteOnly }} +{{- if $.WriteOnlyLegacy }} {{- else if and $.CustomFlatten (not $.ShouldIgnoreCustomFlatten) }} {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} @@ -34,7 +34,7 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso {{- end }} transformed := make(map[string]interface{}) {{- range $prop := $.UserProperties }} - {{- if $prop.WriteOnly }} + {{- if $prop.WriteOnlyLegacy }} {{- else if $prop.FlattenObject }} if {{ $prop.ApiName }} := flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config); {{ $prop.ApiName }} != nil { obj := {{ $prop.ApiName }}.([]interface{})[0] @@ -80,7 +80,7 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso {{- end }} {{- range $prop := $.ItemType.UserProperties }} - {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} + {{- if not (or $prop.IgnoreRead $prop.WriteOnlyLegacy) }} "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), {{- end }} {{- end }} diff --git a/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl b/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl index 4c4aa286648b..92e426750b56 100644 --- a/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl +++ b/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl @@ -3,7 +3,7 @@ {{ "" }} {{- if $.NestedProperties }} {{- range $np := $.NestedProperties }} - {{- if $np.WriteOnly }} + {{- if $np.WriteOnlyLegacy }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $np -}} {{- end -}} {{- end -}} diff --git a/mmv1/templates/terraform/property_documentation.html.markdown.tmpl b/mmv1/templates/terraform/property_documentation.html.markdown.tmpl index 
a6f89d8bb946..ee1399b228ef 100644 --- a/mmv1/templates/terraform/property_documentation.html.markdown.tmpl +++ b/mmv1/templates/terraform/property_documentation.html.markdown.tmpl @@ -36,7 +36,7 @@ {{- if $.Sensitive }} **Note**: This property is sensitive and will not be displayed in the plan. {{- end }} - {{- if $.WriteOnly }} + {{- if $.WriteOnlyLegacy }} **Note**: This property is write-only and will not be read from the API. {{- end }} {{- if and (not $.FlattenObject) $.NestedProperties }} diff --git a/mmv1/templates/terraform/resource.html.markdown.tmpl b/mmv1/templates/terraform/resource.html.markdown.tmpl index ae1026a4b46f..b848ff6b1680 100644 --- a/mmv1/templates/terraform/resource.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource.html.markdown.tmpl @@ -88,13 +88,13 @@ The following arguments are supported: {{ "" }} {{ "" }} {{- range $p := $.RootProperties }} - {{- if and $p.Required (not $p.WriteOnly) }} + {{- if and $p.Required (not $p.WriteOnlyLegacy) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} {{- end }} {{- end }} {{ "" }} {{- range $p := $.RootProperties }} - {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnly) }} + {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnlyLegacy) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} {{- end }} {{- end }} @@ -129,7 +129,7 @@ The following arguments are supported: The following write-only attributes are supported: {{ range $p := $.RootProperties }} - {{- if $p.WriteOnly }} + {{- if $p.WriteOnlyLegacy }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p }} {{- end}} {{- end }} diff --git a/mmv1/templates/terraform/schema_property.go.tmpl b/mmv1/templates/terraform/schema_property.go.tmpl index 180e49cf6312..b046c585f13c 100644 --- a/mmv1/templates/terraform/schema_property.go.tmpl +++ b/mmv1/templates/terraform/schema_property.go.tmpl @@ -161,7 +161,7 @@ Default value: {{ .ItemType.DefaultValue -}} {{ if .Sensitive -}} 
Sensitive: true, {{ end -}} -{{ if .WriteOnly -}} +{{ if .WriteOnlyLegacy -}} WriteOnly: true, {{ end -}} {{ if not (eq .DefaultValue nil ) -}} From 73429fd7bc3a38eda38f22e5e034ca4d4c9fff6f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 29 Aug 2025 17:23:25 -0700 Subject: [PATCH 881/884] Bumped version for GKE Hub Feature test to fix test failures (#15032) --- .../services/gkehub2/resource_gke_hub_feature_test.go.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 518347c9d53f..c1bb69f57339 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -508,7 +508,7 @@ resource "google_gke_hub_feature" "feature" { location = "global" fleet_default_member_config { configmanagement { - version = "1.19.1" + version = "1.21.2" config_sync { source_format = "hierarchy" git { @@ -535,7 +535,7 @@ resource "google_gke_hub_feature" "feature" { location = "global" fleet_default_member_config { configmanagement { - version = "1.19.2" + version = "1.21.3" management = "MANAGEMENT_MANUAL" config_sync { enabled = true From b2df4da0fd4dc2452a72371560b17d0ffea62f9e Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 29 Aug 2025 17:26:24 -0700 Subject: [PATCH 882/884] Fixed typo in TestAccApigeeSecurityAction_apigeeSecurityActionFull config (#15030) --- .../services/apigee/resource_apigee_security_action_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go index cd4dd1e382b4..e18f05fa5a66 100644 --- 
a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go @@ -195,7 +195,7 @@ resource "time_sleep" "wait_60_seconds" { resource "google_project_service" "apigee" { project = google_project.project.project_id - service = "apigee.googleapis.com"" + service = "apigee.googleapis.com" depends_on = [time_sleep.wait_60_seconds] } @@ -237,7 +237,7 @@ resource "google_service_networking_connection" "apigee_vpc_connection" { resource "google_apigee_organization" "apigee_org" { analytics_region = "us-central1" - project_id = data.google_client_config.current.project + project_id = google_project.project.project_id authorized_network = google_compute_network.apigee_network.id depends_on = [google_service_networking_connection.apigee_vpc_connection] } From e3bf7ddec0efa33e1ca5508b221506ca5190e802 Mon Sep 17 00:00:00 2001 From: NandiniAgrawal15 Date: Thu, 4 Sep 2025 10:27:33 +0000 Subject: [PATCH 883/884] Add Interconnect Locations Data Source --- .../provider/provider_mmv1_resources.go.tmpl | 2 + ...ce_google_compute_interconnect_location.go | 189 ++++++++++++++++++ ...ogle_compute_interconnect_location_test.go | 99 +++++++++ ...e_google_compute_interconnect_locations.go | 137 +++++++++++++ ...gle_compute_interconnect_locations_test.go | 31 +++ ...ompute_interconnect_location.html.markdown | 54 +++++ ...mpute_interconnect_locations.html.markdown | 47 +++++ 7 files changed, 559 insertions(+) create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go create 
mode 100644 mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown create mode 100644 mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 06f8767048a9..a9a0a2f2d81f 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -101,6 +101,8 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_compute_instance_serial_port": compute.DataSourceGoogleComputeInstanceSerialPort(), "google_compute_instance_template": compute.DataSourceGoogleComputeInstanceTemplate(), "google_compute_instance_guest_attributes": compute.DataSourceGoogleComputeInstanceGuestAttributes(), + "google_compute_interconnect_location": compute.DataSourceGoogleComputeInterconnectLocation(), + "google_compute_interconnect_locations": compute.DataSourceGoogleComputeInterconnectLocations(), "google_compute_lb_ip_ranges": compute.DataSourceGoogleComputeLbIpRanges(), "google_compute_machine_types": compute.DataSourceGoogleComputeMachineTypes(), "google_compute_network": compute.DataSourceGoogleComputeNetwork(), diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go new file mode 100644 index 000000000000..4558e116d16a --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go @@ -0,0 +1,189 @@ +package compute + +import ( + "fmt" + "regexp" + "strings" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + 
+var ( + computeInterconnectLocationIdTemplate = "projects/%s/global/interconnectlocations/%s" + computeInterconnectLocationLinkRegex = regexp.MustCompile(`projects/(.+)/global/interconnectlocations/(.+)$`) +) + +type ComputeInterconnectLocationId struct { + Project string + Name string +} + +func (s ComputeInterconnectLocationId) CanonicalId() string { + return fmt.Sprintf(computeInterconnectLocationIdTemplate, s.Project, s.Name) +} + +// ParseComputeInterconnectLocationId parses IDs of the form: +// - projects/{project}/global/interconnectlocations/{name} +// - {project}/{name} +// - {name} (requires config.Project) +func ParseComputeInterconnectLocationId(id string, config *transport_tpg.Config) (*ComputeInterconnectLocationId, error) { + var parts []string + if computeInterconnectLocationLinkRegex.MatchString(id) { + parts = computeInterconnectLocationLinkRegex.FindStringSubmatch(id) + return &ComputeInterconnectLocationId{ + Project: parts[1], + Name: parts[2], + }, nil + } else { + parts = strings.Split(id, "/") + } + + if len(parts) == 2 { + return &ComputeInterconnectLocationId{ + Project: parts[0], + Name: parts[1], + }, nil + } else if len(parts) == 1 { + if config.Project == "" { + return nil, fmt.Errorf("The default project for the provider must be set when using the `{name}` id format.") + } + return &ComputeInterconnectLocationId{ + Project: config.Project, + Name: parts[0], + }, nil + } + + return nil, fmt.Errorf("Invalid interconnect location id. 
Expecting resource link, `{project}/{name}` or `{name}` format.") +} + +func DataSourceGoogleComputeInterconnectLocation() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleComputeInterconnectLocationRead, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + + "self_link": { + Type: schema.TypeString, + Computed: true, + }, + + "description": { + Type: schema.TypeString, + Computed: true, + }, + + "peeringdb_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + + "address": { + Type: schema.TypeString, + Computed: true, + }, + + "facility_provider": { + Type: schema.TypeString, + Computed: true, + }, + + "facility_provider_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + + "continent": { + Type: schema.TypeString, + Computed: true, + }, + + "city": { + Type: schema.TypeString, + Computed: true, + }, + + "availability_zone": { + Type: schema.TypeString, + Computed: true, + }, + + "status": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceGoogleComputeInterconnectLocationRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + name := d.Get("name").(string) + + id := fmt.Sprintf("projects/%s/global/interconnectlocations/%s", project, name) + + location, err := config.NewComputeClient(userAgent).InterconnectLocations.Get(project, name).Do() + if err != nil { + return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("InterconnectLocation Not Found : %s", name), id) + } + + d.SetId(location.Name) + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", 
err) + } + if err := d.Set("self_link", location.SelfLink); err != nil { + return fmt.Errorf("Error setting self_link: %s", err) + } + if err := d.Set("description", location.Description); err != nil { + return fmt.Errorf("Error setting description: %s", err) + } + if err := d.Set("peeringdb_facility_id", location.PeeringdbFacilityId); err != nil { + return fmt.Errorf("Error setting peeringdb_facility_id: %s", err) + } + if err := d.Set("address", location.Address); err != nil { + return fmt.Errorf("Error setting address: %s", err) + } + if err := d.Set("facility_provider", location.FacilityProvider); err != nil { + return fmt.Errorf("Error setting facility_provider: %s", err) + } + if err := d.Set("facility_provider_facility_id", location.FacilityProviderFacilityId); err != nil { + return fmt.Errorf("Error setting facility_provider_facility_id: %s", err) + } + if err := d.Set("continent", location.Continent); err != nil { + return fmt.Errorf("Error setting continent: %s", err) + } + if err := d.Set("city", location.City); err != nil { + return fmt.Errorf("Error setting city: %s", err) + } + if err := d.Set("availability_zone", location.AvailabilityZone); err != nil { + return fmt.Errorf("Error setting availability_zone: %s", err) + } + if err := d.Set("status", location.Status); err != nil { + return fmt.Errorf("Error setting status: %s", err) + } + + d.SetId(id) + return nil +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go new file mode 100644 index 000000000000..0d5f07d12ac5 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go @@ -0,0 +1,99 @@ +package compute_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" 
+ + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/services/compute" + "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var interconnectLoc = "z2z-us-west8-zone2-ncphxk-z" + +func testAccDataSourceCheckInterconnectLocation() func(s *terraform.State) error { + return func(s *terraform.State) error { + data_source_name := "data.google_compute_interconnect_location.my_location" + ds, ok := s.RootModule().Resources[data_source_name] + if !ok { + return fmt.Errorf("root module has no resource called %s", data_source_name) + } + + ds_attr := ds.Primary.Attributes + expected := map[string]string{ + "name": interconnectLoc, + "description": "Zakim-to-Zakim location", + "facility_provider": "Google", + } + for attr, expect_value := range expected { + if ds_attr[attr] != expect_value { + return fmt.Errorf("%s is %s; want %s", attr, ds_attr[attr], expect_value) + } + } + + if ds_attr["self_link"] == "" { + return fmt.Errorf("self_link is not set") + } + + return nil + } +} + +func TestAccDataSourceGoogleComputeInterconnectLocation_basic(t *testing.T) { + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleComputeInterconnectLocationConfig(interconnectLoc), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceCheckInterconnectLocation(), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleComputeInterconnectLocationConfig(locationName string) string { + return fmt.Sprintf(` +data "google_compute_interconnect_location" "my_location" { + name = "%s" +} +`, locationName) +} + +func TestParseComputeInterconnectLocationId(t *testing.T) { + config := &transport.Config{Project: "my-project"} + + cases := []struct { + id string + wantProj string + wantName string + wantErr bool + }{ + 
{"projects/my-project/global/interconnectlocations/z2z-us-west8-zone2-ncphxk-z", "my-project", interconnectLoc, false}, + {"my-project/z2z-us-west8-zone2-ncphxk-z", "my-project", interconnectLoc, false}, + {interconnectLoc, "my-project", interconnectLoc, false}, + {"invalid/format/extra", "", "", true}, + } + + for _, tc := range cases { + got, err := compute.ParseComputeInterconnectLocationId(tc.id, config) + if tc.wantErr { + if err == nil { + t.Errorf("ParseComputeInterconnectLocationId(%q) expected error, got nil", tc.id) + } + continue + } + if err != nil { + t.Errorf("ParseComputeInterconnectLocationId(%q) unexpected error: %v", tc.id, err) + continue + } + if got.Project != tc.wantProj || got.Name != tc.wantName { + t.Errorf("ParseComputeInterconnectLocationId(%q) = (%q, %q), want (%q, %q)", tc.id, got.Project, got.Name, tc.wantProj, tc.wantName) + } + } +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go new file mode 100644 index 000000000000..9c7ebcd5fd7d --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go @@ -0,0 +1,137 @@ +package compute + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleComputeInterconnectLocations() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleComputeInterconnectLocationsRead, + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "locations": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + 
"description": { + Type: schema.TypeString, + Computed: true, + }, + "self_link": { + Type: schema.TypeString, + Computed: true, + }, + "peeringdb_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + "address": { + Type: schema.TypeString, + Computed: true, + }, + "facility_provider": { + Type: schema.TypeString, + Computed: true, + }, + "facility_provider_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + "status": { + Type: schema.TypeString, + Computed: true, + }, + "continent": { + Type: schema.TypeString, + Computed: true, + }, + "city": { + Type: schema.TypeString, + Computed: true, + }, + "availability_zone": { + Type: schema.TypeString, + Computed: true, + }, + "supports_pzs": { + Type: schema.TypeBool, + Computed: true, + }, + "available_features": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "available_link_types": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + }, + } +} + +func dataSourceGoogleComputeInterconnectLocationsRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + d.SetId(fmt.Sprintf("projects/%s", project)) + + list, err := config.NewComputeClient(userAgent).InterconnectLocations.List(project).Do() + if err != nil { + return fmt.Errorf("Error retrieving list of interconnect locations: %s", err) + } + + var locations []map[string]interface{} + for _, location := range list.Items { + locations = append(locations, map[string]interface{}{ + "name": location.Name, + "description": location.Description, + "self_link": location.SelfLink, + "peeringdb_facility_id": location.PeeringdbFacilityId, + "address": location.Address, + 
"facility_provider": location.FacilityProvider, + "facility_provider_facility_id": location.FacilityProviderFacilityId, + "status": location.Status, + "continent": location.Continent, + "city": location.City, + "availability_zone": location.AvailabilityZone, + "supports_pzs": location.SupportsPzs, + "available_features": location.AvailableFeatures, + "available_link_types": location.AvailableLinkTypes, + }) + } + if err := d.Set("locations", locations); err != nil { + return fmt.Errorf("Error setting locations: %s", err) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go new file mode 100644 index 000000000000..67d00491521e --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go @@ -0,0 +1,31 @@ +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceGoogleComputeInterconnectLocations_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleComputeInterconnectLocations_basic(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_compute_interconnect_locations.all", "locations.0.self_link"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleComputeInterconnectLocations_basic() string { + return ` +data "google_compute_interconnect_locations" "all" {} +` +} diff --git a/mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown 
b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown new file mode 100644 index 000000000000..3809d5c67806 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Compute Engine" +description: |- + Get the details of a single interconnect location. +--- + +# google_compute_interconnect_location + +Get the details of a single interconnect location. For more information see +the official [API](https://cloud.google.com/compute/docs/reference/rest/v1/interconnectLocations/get) documentation. + +## Example Usage + +```hcl +data "google_compute_interconnect_location" "this" { + name = "iad-zone1-1" +} + +resource "google_compute_interconnect" "this" { + project = data.google_compute_interconnect_location.this.project + location = data.google_compute_interconnect_location.this.self_link + name = "my-dedicated-connection-1" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) The name of the interconnect location. + +- - - + +* `project` - (Optional) The project in which the resource belongs. If it + is not provided, the provider project is used. + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are +exported: + +* `self_link` - The URI of the created resource. +* `description` - A textual description of the resource. +* `peeringdb_facility_id` - The PeeringDB facility ID for this facility. +* `address` - The postal address of the Point of Presence. +* `facility_provider` - The name of the provider for this facility. +* `facility_provider_facility_id` - A provider-assigned Identifier for this facility. +* `status` - The status of this InterconnectLocation. +* `continent` - The continent for this location. +* `city` - The city for this location. +* `availability_zone` - The availability zone for this InterconnectLocation. 
+* `supports_pzs` - Reserved for future use. +* `available_features` - A list of features available at this InterconnectLocation. +* `available_link_types` - A list of link types available at this InterconnectLocation. diff --git a/mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown new file mode 100644 index 000000000000..6da98be9d0e0 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "Compute Engine" +description: |- + Get a list of interconnect locations. +--- + +# google_compute_interconnect_locations + +Get a list of interconnect locations. For more information see +the official [API](https://cloud.google.com/compute/docs/reference/rest/v1/interconnectLocations/list) documentation. + +## Example Usage + +```hcl +data "google_compute_interconnect_locations" "all" {} + +output "interconnect_locations" { + value = data.google_compute_interconnect_locations.all.locations +} +``` + +## Argument Reference + +The following arguments are supported: + +* `project` - (Optional) The project in which the resource belongs. If it + is not provided, the provider project is used. + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are +exported: + +* `locations` - A list of interconnect locations. Each location will have the following attributes: + * `self_link` - The URI of the created resource. + * `description` - A textual description of the resource. + * `peeringdb_facility_id` - The PeeringDB facility ID for this facility. + * `address` - The postal address of the Point of Presence. + * `facility_provider` - The name of the provider for this facility. + * `facility_provider_facility_id` - A provider-assigned Identifier for this facility. + * `status` - The status of this InterconnectLocation. 
+ * `continent` - The continent for this location. + * `city` - The city for this location. + * `availability_zone` - The availability zone for this InterconnectLocation. + * `supports_pzs` - Reserved for future use. + * `available_features` - A list of features available at this InterconnectLocation. + * `available_link_types` - A list of link types available at this InterconnectLocation. From bfdbf8885f286a1cafce10075ca9aaada1dae609 Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Thu, 4 Sep 2025 10:43:31 +0000 Subject: [PATCH 884/884] Add Interconnect Locations Data Source --- .ci/README.md | 2 +- .ci/changelog2.tmpl | 50 - .ci/containers/build-environment/Dockerfile | 2 +- .ci/containers/go-plus/Dockerfile | 4 +- .ci/infra/terraform/README.md | 5 +- .ci/infra/terraform/main.tf | 2 - .ci/magician/cmd/check_cassettes.go | 2 +- .../cmd/collect_nightly_test_status.go | 53 +- .../cmd/create_test_failure_ticket.go | 8 +- .ci/magician/cmd/generate_comment.go | 112 +- .ci/magician/cmd/generate_comment_test.go | 144 +- .ci/magician/cmd/generate_downstream.go | 24 +- .ci/magician/cmd/mock_runner_test.go | 7 +- .ci/magician/cmd/scheduled_pr_reminders.go | 2 +- .../cmd/scheduled_pr_reminders_test.go | 2 +- .../cmd/templates/DIFF_COMMENT.md.tmpl | 9 +- .../templates/vcr/non_exercised_tests.tmpl | 13 + .../cmd/templates/vcr/post_replay.tmpl | 56 - .../cmd/templates/vcr/post_replay_eap.tmpl | 46 - .../cmd/templates/vcr/record_replay.tmpl | 14 +- .../cmd/templates/vcr/test_analytics.tmpl | 20 + .../vcr/with_replay_failed_tests.tmpl | 12 + .../vcr/without_replay_failed_tests.tmpl | 7 + .ci/magician/cmd/test_eap_vcr.go | 134 +- .ci/magician/cmd/test_eap_vcr_test.go | 252 -- .ci/magician/cmd/test_terraform_vcr.go | 172 +- .ci/magician/cmd/test_terraform_vcr_test.go | 259 +- .ci/magician/cmd/vcr_cassette_update_test.go | 7 +- .ci/magician/cmd/vcr_merge_eap.go | 11 +- .ci/magician/exec/runner.go | 3 - 
.ci/magician/github/README.md | 24 - .../github/REVIEWER_ASSIGNMENT_COMMENT.md | 4 +- .ci/magician/github/get.go | 194 +- .ci/magician/github/init.go | 62 - .ci/magician/github/integration_test.go | 355 --- .ci/magician/github/interface_conversion.go | 70 - .ci/magician/github/membership.go | 18 +- .ci/magician/github/membership_data.go | 41 +- .ci/magician/github/set.go | 134 +- .ci/magician/go.mod | 4 +- .ci/magician/go.sum | 8 +- .ci/magician/teamcity/get.go | 7 +- .ci/magician/utility/utils.go | 66 +- .ci/magician/vcr/tester.go | 44 +- .ci/magician/vcr/tester_test.go | 58 - .ci/release-note2.tmpl | 3 - .github/actions/build-downstream/action.yml | 1 + .github/workflows/basic-pr-checks.yml | 69 - .github/workflows/build-downstream.yml | 1 + .github/workflows/disallow-submodules.yml | 22 + .github/workflows/mmv1-check-templates.yml | 30 + .github/workflows/mmv1-lint-product-yaml.yml | 36 + .github/workflows/override-labels.yml | 29 - .github/workflows/reassign-reviewer.yml | 4 +- .github/workflows/request-reviewer.yml | 6 - .github/workflows/unit-test-magician.yml | 2 +- .github/workflows/unit-test-mmv1.yml | 111 - .github/workflows/unit-test-tools.yml | 12 +- .gitignore | 1 - GNUmakefile | 78 +- LICENSE | 377 --- .../common-resource-patterns.md | 4 +- docs/content/best-practices/validation.md | 34 - .../breaking-changes/breaking-changes.md | 7 +- .../make-a-breaking-change.md | 10 +- docs/content/code-review/review-pr.md | 1 - docs/content/develop/add-fields.md | 16 +- docs/content/develop/add-iam-support.md | 17 +- docs/content/develop/add-resource.md | 21 +- docs/content/develop/generate-providers.md | 36 +- docs/content/develop/promote-to-ga.md | 18 +- docs/content/document/add-documentation.md | 18 +- docs/content/reference/field.md | 86 +- docs/content/reference/make-commands.md | 6 +- docs/content/reference/resource.md | 5 +- docs/content/test/run-tests.md | 38 - docs/content/test/test.md | 77 +- mmv1/api/product.go | 11 - mmv1/api/product/version.go | 7 
+- mmv1/api/resource.go | 358 +-- mmv1/api/resource/custom_code.go | 7 - mmv1/api/resource/datasource.go | 21 - mmv1/api/resource/examples.go | 69 +- mmv1/api/resource/iam_policy.go | 3 - mmv1/api/resource_test.go | 20 +- mmv1/api/type.go | 189 +- mmv1/google/string_utils.go | 3 +- mmv1/google/string_utils_test.go | 5 - mmv1/main.go | 115 +- mmv1/openapi_generate/parser.go | 9 +- .../accesscontextmanager/AccessPolicy.yaml | 1 - .../AuthorizedOrgsDesc.yaml | 2 - .../GcpUserAccessBinding.yaml | 6 +- mmv1/products/alloydb/Backup.yaml | 6 +- mmv1/products/alloydb/Cluster.yaml | 33 +- mmv1/products/alloydb/Instance.yaml | 70 +- mmv1/products/alloydb/User.yaml | 4 +- mmv1/products/apigee/AddonsConfig.yaml | 25 +- mmv1/products/apigee/ApiProduct.yaml | 423 --- mmv1/products/apigee/EnvgroupAttachment.yaml | 3 - mmv1/products/apigee/Environment.yaml | 32 - mmv1/products/apigee/Instance.yaml | 22 - mmv1/products/apigee/InstanceAttachment.yaml | 3 - mmv1/products/apigee/SecurityAction.yaml | 246 -- .../apigee/SecurityMonitoringCondition.yaml | 132 - mmv1/products/apigee/TargetServer.yaml | 4 - mmv1/products/apihub/Curation.yaml | 165 -- mmv1/products/apihub/Plugin.yaml | 261 -- mmv1/products/apihub/PluginInstance.yaml | 332 --- .../appengine/FlexibleAppVersion.yaml | 4 +- .../appengine/StandardAppVersion.yaml | 4 +- mmv1/products/apphub/Application.yaml | 1 - mmv1/products/apphub/Service.yaml | 1 - mmv1/products/apphub/Workload.yaml | 1 - .../products/artifactregistry/Repository.yaml | 28 +- mmv1/products/backupdr/BackupPlan.yaml | 40 +- .../backupdr/BackupPlanAssociation.yaml | 10 +- mmv1/products/backupdr/BackupVault.yaml | 13 - mmv1/products/backupdr/ServiceConfig.yaml | 59 - mmv1/products/beyondcorp/AppConnection.yaml | 1 - mmv1/products/beyondcorp/AppConnector.yaml | 1 - mmv1/products/beyondcorp/AppGateway.yaml | 13 +- mmv1/products/beyondcorp/Application.yaml | 6 +- mmv1/products/beyondcorp/SecurityGateway.yaml | 13 +- .../SecurityGatewayApplication.yaml | 151 -- 
mmv1/products/bigquery/Dataset.yaml | 8 - mmv1/products/bigquery/Job.yaml | 28 +- mmv1/products/bigquery/Routine.yaml | 6 - mmv1/products/bigquery/RowAccessPolicy.yaml | 111 - mmv1/products/bigquery/Table.yaml | 10 +- .../bigqueryanalyticshub/DataExchange.yaml | 25 - .../DataExchangeSubscription.yaml | 257 -- .../bigqueryanalyticshub/Listing.yaml | 135 +- .../ListingSubscription.yaml | 20 - .../products/bigquerydatatransfer/Config.yaml | 25 +- .../bigqueryreservation/Reservation.yaml | 97 - mmv1/products/bigtable/LogicalView.yaml | 9 - mmv1/products/bigtable/SchemaBundle.yaml | 98 - mmv1/products/billingbudget/Budget.yaml | 9 +- .../binaryauthorization/Attestor.yaml | 5 +- .../blockchainnodeengine/BlockchainNodes.yaml | 1 - .../certificatemanager/Certificate.yaml | 8 +- .../CertificateMapEntry.yaml | 2 +- mmv1/products/cloudasset/FolderFeed.yaml | 5 - .../products/cloudasset/OrganizationFeed.yaml | 5 - mmv1/products/cloudasset/ProjectFeed.yaml | 4 - mmv1/products/cloudbuild/Trigger.yaml | 84 +- mmv1/products/clouddeploy/DeployPolicy.yaml | 50 +- mmv1/products/cloudfunctions2/Function.yaml | 8 +- .../cloudidentity/GroupMembership.yaml | 7 - .../cloudquotas/QuotaAdjusterSettings.yaml | 14 +- mmv1/products/cloudrun/Service.yaml | 6 - mmv1/products/cloudrunv2/Job.yaml | 136 +- mmv1/products/cloudrunv2/Service.yaml | 31 - mmv1/products/cloudrunv2/WorkerPool.yaml | 813 ------ mmv1/products/cloudtasks/Queue.yaml | 24 - mmv1/products/colab/RuntimeTemplate.yaml | 26 +- mmv1/products/colab/Schedule.yaml | 2 - .../composer/UserWorkloadsConfigMap.yaml | 1 - mmv1/products/compute/Address.yaml | 3 +- mmv1/products/compute/Autoscaler.yaml | 1 - mmv1/products/compute/BackendBucket.yaml | 44 +- mmv1/products/compute/BackendService.yaml | 139 +- mmv1/products/compute/CrossSiteNetwork.yaml | 13 - mmv1/products/compute/Disk.yaml | 23 +- mmv1/products/compute/DiskType.yaml | 4 +- mmv1/products/compute/ExternalVpnGateway.yaml | 1 - mmv1/products/compute/Firewall.yaml | 20 +- 
mmv1/products/compute/FirewallPolicy.yaml | 2 - mmv1/products/compute/FirewallPolicyRule.yaml | 58 - .../compute/FirewallPolicyWithRules.yaml | 192 +- mmv1/products/compute/ForwardingRule.yaml | 9 +- mmv1/products/compute/FutureReservation.yaml | 608 ----- mmv1/products/compute/GlobalAddress.yaml | 2 - .../compute/GlobalForwardingRule.yaml | 38 +- mmv1/products/compute/HealthCheck.yaml | 79 - mmv1/products/compute/Image.yaml | 5 +- mmv1/products/compute/Instance.yaml | 10 +- .../compute/InstanceGroupManager.yaml | 2 +- mmv1/products/compute/InstanceSettings.yaml | 3 +- mmv1/products/compute/Interconnect.yaml | 111 - .../compute/InterconnectAttachment.yaml | 44 +- .../compute/InterconnectAttachmentGroup.yaml | 280 -- mmv1/products/compute/InterconnectGroup.yaml | 265 -- mmv1/products/compute/Network.yaml | 25 +- mmv1/products/compute/NetworkAttachment.yaml | 4 +- .../compute/NetworkEndpointGroup.yaml | 6 - .../compute/NetworkFirewallPolicy.yaml | 11 - .../NetworkFirewallPolicyWithRules.yaml | 80 +- .../compute/NetworkPeeringRoutesConfig.yaml | 13 + mmv1/products/compute/NodeGroup.yaml | 1 - mmv1/products/compute/NodeTemplate.yaml | 1 - .../compute/OrganizationSecurityPolicy.yaml | 17 +- mmv1/products/compute/PacketMirroring.yaml | 6 - mmv1/products/compute/PreviewFeature.yaml | 84 - .../compute/PublicDelegatedPrefix.yaml | 55 - mmv1/products/compute/RegionAutoscaler.yaml | 3 +- .../compute/RegionBackendService.yaml | 145 +- mmv1/products/compute/RegionDisk.yaml | 41 +- mmv1/products/compute/RegionHealthCheck.yaml | 76 - .../compute/RegionInstanceGroupManager.yaml | 2 +- .../compute/RegionNetworkEndpointGroup.yaml | 1 - .../compute/RegionNetworkFirewallPolicy.yaml | 17 - .../RegionNetworkFirewallPolicyWithRules.yaml | 87 +- .../compute/RegionSecurityPolicy.yaml | 58 - .../compute/RegionSecurityPolicyRule.yaml | 4 - mmv1/products/compute/RegionUrlMap.yaml | 460 +--- mmv1/products/compute/Reservation.yaml | 109 +- mmv1/products/compute/ResourcePolicy.yaml | 6 + 
mmv1/products/compute/Route.yaml | 17 - mmv1/products/compute/Router.yaml | 20 +- mmv1/products/compute/RouterNat.yaml | 31 - mmv1/products/compute/RouterRoutePolicy.yaml | 3 +- mmv1/products/compute/SecurityPolicyRule.yaml | 4 - mmv1/products/compute/ServiceAttachment.yaml | 20 +- mmv1/products/compute/Snapshot.yaml | 2 +- mmv1/products/compute/SnapshotSettings.yaml | 92 - mmv1/products/compute/StoragePool.yaml | 4 - mmv1/products/compute/Subnetwork.yaml | 32 +- mmv1/products/compute/TargetHttpProxy.yaml | 16 - mmv1/products/compute/TargetHttpsProxy.yaml | 17 - mmv1/products/compute/UrlMap.yaml | 204 +- mmv1/products/compute/VpnTunnel.yaml | 77 - mmv1/products/compute/WireGroup.yaml | 212 -- mmv1/products/compute/product.yaml | 2 - .../contactcenterinsights/AnalysisRule.yaml | 178 -- mmv1/products/contactcenterinsights/View.yaml | 65 - .../contactcenterinsights/product.yaml | 21 - .../PrivateConnection.yaml | 11 +- mmv1/products/datafusion/Instance.yaml | 1 - mmv1/products/dataplex/Datascan.yaml | 129 +- mmv1/products/dataplex/Entry.yaml | 276 -- mmv1/products/dataplex/Glossary.yaml | 112 - mmv1/products/dataplex/GlossaryCategory.yaml | 99 - mmv1/products/dataplex/GlossaryTerm.yaml | 99 - mmv1/products/dataproc/Batch.yaml | 12 - mmv1/products/dataproc/SessionTemplate.yaml | 256 -- .../products/dataprocgdc/ServiceInstance.yaml | 1 - .../datastream/ConnectionProfile.yaml | 5 +- .../datastream/PrivateConnection.yaml | 42 +- mmv1/products/datastream/Stream.yaml | 1 - .../developerconnect/InsightsConfig.yaml | 248 -- .../dialogflow/ConversationProfile.yaml | 654 ----- mmv1/products/dialogflow/EncryptionSpec.yaml | 78 - mmv1/products/dialogflow/EntityType.yaml | 2 +- mmv1/products/dialogflowcx/Agent.yaml | 115 - mmv1/products/dialogflowcx/Flow.yaml | 400 --- .../dialogflowcx/GenerativeSettings.yaml | 170 -- mmv1/products/dialogflowcx/Generator.yaml | 138 - mmv1/products/dialogflowcx/Page.yaml | 389 --- mmv1/products/dialogflowcx/Playbook.yaml | 166 -- 
.../dialogflowcx/SecuritySettings.yaml | 1 - mmv1/products/dialogflowcx/Tool.yaml | 307 --- mmv1/products/dialogflowcx/Webhook.yaml | 303 +-- mmv1/products/discoveryengine/CmekConfig.yaml | 142 - mmv1/products/discoveryengine/DataStore.yaml | 102 +- .../discoveryengine/RecommendationEngine.yaml | 212 -- mmv1/products/dlp/DeidentifyTemplate.yaml | 3 +- mmv1/products/dlp/DiscoveryConfig.yaml | 1 - mmv1/products/dns/ManagedZone.yaml | 11 +- mmv1/products/dns/Policy.yaml | 14 - mmv1/products/eventarc/Pipeline.yaml | 19 +- mmv1/products/eventarc/Trigger.yaml | 3 +- mmv1/products/filestore/Instance.yaml | 83 +- .../firebaseapphosting/DefaultDomain.yaml | 125 - mmv1/products/firebaseapphosting/Domain.yaml | 425 --- mmv1/products/firebaseapphosting/Traffic.yaml | 200 -- .../products/firebaseextensions/Instance.yaml | 1 - mmv1/products/firebasehosting/Site.yaml | 11 - mmv1/products/firestore/Database.yaml | 30 - mmv1/products/firestore/Index.yaml | 21 +- mmv1/products/gemini/CodeRepositoryIndex.yaml | 6 +- .../gemini/DataSharingWithGoogleSetting.yaml | 5 +- .../DataSharingWithGoogleSettingBinding.yaml | 6 +- .../gemini/GeminiGcpEnablementSetting.yaml | 13 - .../GeminiGcpEnablementSettingBinding.yaml | 6 +- .../gemini/LoggingSettingBinding.yaml | 1 - .../gemini/ReleaseChannelSettingBinding.yaml | 1 - mmv1/products/gemini/RepositoryGroup.yaml | 6 + mmv1/products/gkebackup/BackupChannel.yaml | 2 +- mmv1/products/gkebackup/BackupPlan.yaml | 6 +- mmv1/products/gkebackup/RestoreChannel.yaml | 2 +- mmv1/products/gkehub/Membership.yaml | 6 + mmv1/products/gkehub/product.yaml | 2 +- mmv1/products/gkehub2/Feature.yaml | 15 - .../gkehub2/ScopeRBACRoleBinding.yaml | 16 - .../gkeonprem/BareMetalAdminCluster.yaml | 2 - mmv1/products/gkeonprem/BareMetalCluster.yaml | 4 +- .../products/gkeonprem/BareMetalNodePool.yaml | 2 - .../gkeonprem/VmwareAdminCluster.yaml | 20 +- mmv1/products/gkeonprem/VmwareCluster.yaml | 2 - mmv1/products/gkeonprem/VmwareNodePool.yaml | 3 - 
.../iambeta/WorkloadIdentityPool.yaml | 166 +- .../WorkloadIdentityPoolManagedIdentity.yaml | 140 - .../WorkloadIdentityPoolNamespace.yaml | 116 - .../iambeta/WorkloadIdentityPoolProvider.yaml | 1 - .../OauthClientCredential.yaml | 1 - .../iamworkforcepool/WorkforcePool.yaml | 16 +- .../WorkforcePoolProviderKey.yaml | 158 -- mmv1/products/iap/Brand.yaml | 3 +- mmv1/products/iap/Client.yaml | 5 +- .../iap/ForwardingRuleRegionalService.yaml | 43 - mmv1/products/iap/ForwardingRuleService.yaml | 56 - mmv1/products/identityplatform/Tenant.yaml | 2 + .../integrationconnectors/Connection.yaml | 40 +- mmv1/products/kms/AutokeyConfig.yaml | 8 +- mmv1/products/kms/CryptoKey.yaml | 3 +- mmv1/products/kms/KeyHandle.yaml | 6 +- mmv1/products/logging/Metric.yaml | 1 + mmv1/products/looker/Instance.yaml | 6 - mmv1/products/lustre/Instance.yaml | 93 +- mmv1/products/managedkafka/Acl.yaml | 152 -- mmv1/products/managedkafka/Cluster.yaml | 36 - mmv1/products/memorystore/Instance.yaml | 71 +- mmv1/products/metastore/Service.yaml | 10 +- mmv1/products/modelarmor/Template.yaml | 266 -- mmv1/products/modelarmor/product.yaml | 25 - .../modelarmorglobal/Floorsetting.yaml | 248 -- mmv1/products/modelarmorglobal/product.yaml | 26 - .../products/monitoring/MetricDescriptor.yaml | 5 + .../monitoring/UptimeCheckConfig.yaml | 10 +- mmv1/products/netapp/Backup.yaml | 13 +- mmv1/products/netapp/BackupVault.yaml | 54 - mmv1/products/netapp/StoragePool.yaml | 42 +- mmv1/products/netapp/Volume.yaml | 53 +- mmv1/products/netapp/VolumeQuotaRule.yaml | 2 +- mmv1/products/netapp/VolumeReplication.yaml | 49 +- mmv1/products/netapp/VolumeSnapshot.yaml | 2 +- .../networkconnectivity/InternalRange.yaml | 39 - .../networkconnectivity/PolicyBasedRoute.yaml | 5 +- .../networkconnectivity/RegionalEndpoint.yaml | 2 +- .../ServiceConnectionPolicy.yaml | 23 - mmv1/products/networkconnectivity/Spoke.yaml | 7 +- .../networkmanagement/VpcFlowLogsConfig.yaml | 58 +- .../networksecurity/AddressGroup.yaml | 3 +- 
.../BackendAuthenticationConfig.yaml | 7 +- .../networksecurity/InterceptDeployment.yaml | 12 + .../InterceptDeploymentGroup.yaml | 16 + .../InterceptEndpointGroup.yaml | 19 + .../InterceptEndpointGroupAssociation.yaml | 17 + .../networksecurity/ServerTlsPolicy.yaml | 10 - .../networkservices/EdgeCacheOrigin.yaml | 23 - .../networkservices/EdgeCacheService.yaml | 1 - mmv1/products/networkservices/Gateway.yaml | 2 - .../networkservices/LbTrafficExtension.yaml | 1 - mmv1/products/notebooks/Instance.yaml | 10 - mmv1/products/notebooks/Location.yaml | 43 + mmv1/products/notebooks/Runtime.yaml | 5 - .../oracledatabase/AutonomousDatabase.yaml | 54 +- .../CloudExadataInfrastructure.yaml | 4 +- .../oracledatabase/CloudVmCluster.yaml | 57 +- mmv1/products/oracledatabase/OdbNetwork.yaml | 116 - mmv1/products/oracledatabase/OdbSubnet.yaml | 126 - mmv1/products/osconfig/GuestPolicies.yaml | 34 +- mmv1/products/parallelstore/Instance.yaml | 4 - .../parametermanager/ParameterVersion.yaml | 16 - .../RegionalParameterVersion.yaml | 16 - .../privateca/CertificateAuthority.yaml | 16 +- .../privateca/CertificateTemplate.yaml | 112 +- mmv1/products/pubsub/Subscription.yaml | 77 - mmv1/products/pubsub/Topic.yaml | 72 +- mmv1/products/redis/Cluster.yaml | 22 +- .../redis/ClusterUserCreatedConnections.yaml | 2 +- mmv1/products/redis/Instance.yaml | 18 +- mmv1/products/resourcemanager/Lien.yaml | 1 + .../products/resourcemanager3/Capability.yaml | 79 - mmv1/products/saasservicemgmt/Saas.yaml | 109 - mmv1/products/saasservicemgmt/product.yaml | 23 - mmv1/products/secretmanager/Secret.yaml | 13 +- .../products/secretmanager/SecretVersion.yaml | 12 +- .../secretmanagerregional/RegionalSecret.yaml | 19 - .../RegionalSecretVersion.yaml | 1 - .../securesourcemanager/BranchRule.yaml | 16 +- .../securesourcemanager/Instance.yaml | 57 +- .../securesourcemanager/Repository.yaml | 43 +- .../OrganizationSccBigQueryExport.yaml | 2 +- .../OrganizationSccBigQueryExport.yaml | 2 +- 
.../OrganizationSccBigQueryExports.yaml | 2 +- mmv1/products/spanner/BackupSchedule.yaml | 11 - mmv1/products/spanner/Database.yaml | 6 - mmv1/products/spanner/Instance.yaml | 43 +- mmv1/products/sql/Database.yaml | 1 + mmv1/products/storage/Bucket.yaml | 2 + .../FolderIntelligenceConfig.yaml | 4 - .../OrganizationIntelligenceConfig.yaml | 4 - .../ProjectIntelligenceConfig.yaml | 4 - .../storageinsights/DatasetConfig.yaml | 290 -- mmv1/products/storagetransfer/AgentPool.yaml | 1 - mmv1/products/tpu/Node.yaml | 182 ++ .../{resourcemanager3 => tpu}/product.yaml | 11 +- mmv1/products/vertexai/Endpoint.yaml | 5 +- .../EndpointWithModelGardenDeployment.yaml | 1034 ------- mmv1/products/vertexai/Index.yaml | 8 +- .../vertexai/IndexEndpointDeployedIndex.yaml | 5 - mmv1/products/vertexai/RagEngineConfig.yaml | 110 - .../vmwareengine/ExternalAddress.yaml | 2 - .../products/vmwareengine/NetworkPeering.yaml | 1 - mmv1/products/vmwareengine/PrivateCloud.yaml | 4 +- mmv1/products/vpcaccess/Connector.yaml | 19 +- mmv1/products/workbench/Instance.yaml | 51 - mmv1/products/workbench/product.yaml | 2 - .../workstations/WorkstationCluster.yaml | 2 +- .../workstations/WorkstationConfig.yaml | 4 +- mmv1/provider/provider.go | 10 +- mmv1/provider/template_data.go | 99 +- mmv1/provider/terraform.go | 134 +- mmv1/provider/terraform_tgc.go | 5 +- mmv1/provider/terraform_tgc_cai2hcl.go | 3 +- mmv1/provider/terraform_tgc_next.go | 224 +- .../artifact_registry_repository.go.tmpl | 70 +- .../beyondcorp_security_gateway.go.tmpl | 24 - .../constants/bigquery_dataset.go.tmpl | 33 - .../constants/dataplex_entry.go.tmpl | 193 -- .../datastream_connection_profile.go.tmpl | 23 - mmv1/templates/terraform/constants/disk.tmpl | 25 +- .../iam_workforce_pool_provider_key.go.tmpl | 18 - ...oad_identity_pool_managed_identity.go.tmpl | 44 - ...m_workload_identity_pool_namespace.go.tmpl | 44 - .../constants/region_security_policy.go.tmpl | 31 - .../constants/regional_secret_version.go.tmpl | 35 - 
.../terraform/constants/router.go.tmpl | 2 +- .../terraform/constants/router_nat.go.tmpl | 2 +- .../constants/secret_version.go.tmpl | 35 - .../terraform/constants/subnetwork.tmpl | 12 + .../constants/vpc_access_connector.go.tmpl | 8 - .../constants/workbench_instance.go.tmpl | 74 +- .../firebasehosting_default_site.go.tmpl | 23 - ...oad_identity_pool_managed_identity.go.tmpl | 22 - ...m_workload_identity_pool_namespace.go.tmpl | 22 - .../custom_delete/clear_iap_settings.go.tmpl | 8 +- ...point_with_model_garden_deployment.go.tmpl | 102 - .../vertex_ai_rag_engine_config.go.tmpl | 44 - .../array_resourceref_with_validation.go.tmpl | 5 - .../bigquery_row_access_policy_ref.go.tmpl | 11 - ..._object.go.tmpl => bool_to_object.go.tmpl} | 3 - ...compute_instance_settings_fingerprint.tmpl | 30 + ...ca_certificate_template_509_config.go.tmpl | 53 - .../regional_secret_version_enable.go.tmpl | 37 +- .../resourceref_with_validation.go.tmpl | 5 - .../secret_version_enable.go.tmpl | 39 +- .../service_attachment_target_service.go.tmpl | 11 +- .../compute_router_range.go.tmpl | 42 + ...on_profile_context_filter_settings.go.tmpl | 26 - ...rofile_suggestion_trigger_settings.go.tmpl | 24 - .../dataplex_entry_aspects.go.tmpl | 44 - ...oorsetting_multilanguage_detection.go.tmpl | 19 - ...r_template_multilanguage_detection.go.tmpl | 19 - .../custom_flatten/name_from_self_link.tmpl | 2 +- ...ca_certificate_template_509_config.go.tmpl | 20 - .../custom_import/apigee_api_product.go.tmpl | 42 - ...igee_security_monitoring_condition.go.tmpl | 32 - .../custom_import/dataplex_entry.go.tmpl | 20 - .../dialogflowcx_generative_settings.go.tmpl | 18 - .../dialogflowcx_generator.go.tmpl | 18 - .../dialogflowcx_playbook.go.tmpl | 18 - .../custom_import/dialogflowcx_tool.go.tmpl | 18 - .../custom_import/extract_taxonomy.go.tmpl | 2 +- .../custom_import/kms_crypto_key.go.tmpl | 4 + .../modelarmorglobal_floorsetting.go.tmpl | 19 - .../resource_manager_capability.go.tmpl | 16 - 
.../vertex_ai_tensorboard_import.go.tmpl | 8 +- ...ticshub_data_exchange_subscription.go.tmpl | 19 - .../regional_secret_version.go.tmpl | 2 +- .../custom_update/secret_version.go.tmpl | 4 +- mmv1/templates/terraform/datasource.go.tmpl | 107 - .../datasource_iam.html.markdown.tmpl | 20 +- ...ticshub_data_exchange_subscription.go.tmpl | 14 - .../terraform/decoders/dataplex_entry.go.tmpl | 41 - .../decoders/memorystore_instance.go.tmpl | 184 +- .../terraform/decoders/redis_cluster.go.tmpl | 48 - ...ticshub_data_exchange_subscription.go.tmpl | 36 - .../compute_service_attachment.go.tmpl | 8 - .../compute_snapshot_settings.go.tmpl | 7 - .../terraform/encoders/dataplex_entry.go.tmpl | 17 - ...tion_engine_hardcode_solution_type.go.tmpl | 3 - .../terraform/encoders/health_check_type.tmpl | 16 - .../encoders/memorystore_instance.go.tmpl | 107 +- ...etwork_services_edge_cache_service.go.tmpl | 53 - .../encoders/spanner_instance.go.tmpl | 14 +- .../examples/Storage_pool_create.tf.tmpl | 33 +- .../examples/Storage_pool_create_doc.tf.tmpl | 43 - .../examples/alloydb_backup_basic.tf.tmpl | 2 - .../alloydb_backup_basic_test.tf.tmpl | 2 - .../examples/alloydb_backup_full.tf.tmpl | 2 - .../examples/alloydb_backup_full_test.tf.tmpl | 2 - .../alloydb_cluster_after_upgrade.tf.tmpl | 25 +- .../examples/alloydb_cluster_basic.tf.tmpl | 2 - .../alloydb_cluster_before_upgrade.tf.tmpl | 25 +- .../examples/alloydb_cluster_full.tf.tmpl | 2 - .../examples/alloydb_cluster_restore.tf.tmpl | 6 - .../examples/alloydb_instance_basic.tf.tmpl | 2 - .../alloydb_instance_basic_test.tf.tmpl | 2 - .../alloydb_instance_psc_test.tf.tmpl | 2 - .../alloydb_secondary_cluster_basic.tf.tmpl | 3 - ...loydb_secondary_cluster_basic_test.tf.tmpl | 3 - .../alloydb_secondary_instance_basic.tf.tmpl | 3 - ...oydb_secondary_instance_basic_test.tf.tmpl | 3 - .../examples/alloydb_user_builtin.tf.tmpl | 2 - .../alloydb_user_builtin_test.tf.tmpl | 2 - .../examples/alloydb_user_iam.tf.tmpl | 2 - 
.../examples/alloydb_user_iam_test.tf.tmpl | 2 - .../examples/apigee_api_product_basic.tf.tmpl | 45 - .../apigee_api_product_basic_test.tf.tmpl | 86 - ...apigee_api_product_with_attributes.tf.tmpl | 193 -- ...e_api_product_with_attributes_test.tf.tmpl | 234 -- ..._api_product_with_legacy_operation.tf.tmpl | 68 - ...product_with_legacy_operation_test.tf.tmpl | 109 - .../examples/apigee_environment_basic.tf.tmpl | 6 - ...t_client_ip_resolution_config_test.tf.tmpl | 81 - .../apigee_instance_full_test.tf.tmpl | 5 - ...ization_cloud_basic_data_residency.tf.tmpl | 8 +- ...on_cloud_basic_data_residency_test.tf.tmpl | 10 +- .../apigee_security_action_basic.tf.tmpl | 70 - ...ecurity_monitoring_condition_basic.tf.tmpl | 53 - ...ty_monitoring_condition_basic_test.tf.tmpl | 89 - .../examples/apihub_curation_basic.tf.tmpl | 17 - .../examples/apihub_plugin_full.tf.tmpl | 65 - .../apihub_plugin_instance_basic.tf.tmpl | 10 - ...act_registry_repository_remote_apt.tf.tmpl | 4 +- .../backend_bucket_global_ilb.tf.tmpl | 36 - .../backend_service_custom_metrics.tf.tmpl | 5 - ...backend_service_dynamic_forwarding.tf.tmpl | 10 - .../backend_service_external_managed.tf.tmpl | 1 - .../backend_service_tls_settings.tf.tmpl | 3 + ...p_dr_backup_plan_for_csql_resource.tf.tmpl | 29 - ...p_dr_backup_plan_for_disk_resource.tf.tmpl | 31 - .../backup_dr_backup_vault_full.tf.tmpl | 1 - .../examples/backup_dr_service_config.tf.tmpl | 4 - .../base_configs/datasource_test_file.go.tmpl | 96 - .../base_configs/iam_test_file.go.tmpl | 2 +- ...security_gateway_application_basic.tf.tmpl | 4 +- ...p_security_gateway_application_vpc.tf.tmpl | 4 +- ...ange_log_linked_dataset_query_user.tf.tmpl | 7 - ...ub_dataexchange_subscription_basic.tf.tmpl | 94 - ...y_analyticshub_listing_dcr_routine.tf.tmpl | 58 - ...ting_log_linked_dataset_query_user.tf.tmpl | 26 - ...y_analyticshub_listing_marketplace.tf.tmpl | 27 - ...gquery_analyticshub_listing_pubsub.tf.tmpl | 26 - ...ticshub_listing_subscription_basic.tf.tmpl | 6 +- 
..._analyticshub_public_data_exchange.tf.tmpl | 7 - ...gquery_analyticshub_public_listing.tf.tmpl | 28 - .../examples/bigquery_dataset_basic.tf.tmpl | 2 +- .../bigquery_job_query_continuous.tf.tmpl | 7 +- .../examples/bigquery_routine_basic.tf.tmpl | 1 - .../bigquery_row_access_policy_basic.tf.tmpl | 22 - .../examples/bigtable_logical_view.tf.tmpl | 1 - .../examples/bigtable_schema_bundle.tf.tmpl | 30 - ...ucket_server_config_peered_network.tf.tmpl | 1 + ...ild_trigger_developer_connect_pull.tf.tmpl | 13 - ...ild_trigger_developer_connect_push.tf.tmpl | 12 - ...gger_developer_connect_push_branch.tf.tmpl | 11 - .../clouddeploy_deploy_policy_basic.tf.tmpl | 6 +- .../clouddeploy_deploy_policy_full.tf.tmpl | 16 +- ...e.tf.tmpl => cloudfunctions2_abiu.tf.tmpl} | 0 ...=> cloudfunctions2_abiu_on_deploy.tf.tmpl} | 0 .../examples/cloudrunv2_job_gpu.tf.tmpl | 17 - .../cloudrunv2_job_multicontainer.tf.tmpl | 18 - .../examples/cloudrunv2_service_basic.tf.tmpl | 4 - .../cloudrunv2_service_function.tf.tmpl | 1 + .../examples/cloudrunv2_service_gpu.tf.tmpl | 7 +- .../examples/cloudrunv2_service_sql.tf.tmpl | 8 +- .../cloudrunv2_worker_pool_basic.tf.tmpl | 12 - ...runv2_worker_pool_custom_audiences.tf.tmpl | 13 - .../cloudrunv2_worker_pool_directvpc.tf.tmpl | 19 - .../cloudrunv2_worker_pool_gpu.tf.tmpl | 23 - .../cloudrunv2_worker_pool_limits.tf.tmpl | 18 - .../cloudrunv2_worker_pool_mount_gcs.tf.tmpl | 31 - .../cloudrunv2_worker_pool_mount_nfs.tf.tmpl | 48 - ...udrunv2_worker_pool_multicontainer.tf.tmpl | 28 - .../cloudrunv2_worker_pool_secret.tf.tmpl | 51 - .../cloudrunv2_worker_pool_sql.tf.tmpl | 77 - .../colab_runtime_template_full.tf.tmpl | 17 +- .../examples/colab_schedule_full.tf.tmpl | 6 - ...er_user_workloads_config_map_basic.tf.tmpl | 17 - ...te_firewall_policy_with_rules_full.tf.tmpl | 62 +- ...erconnect_attachment_custom_ranges.tf.tmpl | 29 - .../compute_interconnect_basic.tf.tmpl | 2 +- .../compute_interconnect_basic_test.tf.tmpl | 2 +- 
...rk_firewall_policy_with_rules_full.tf.tmpl | 50 + .../compute_packet_mirroring_full.tf.tmpl | 3 - ...rk_firewall_policy_with_rules_full.tf.tmpl | 49 + ...rk_firewall_policy_with_rules_roce.tf.tmpl | 22 - .../compute_storage_pool_basic.tf.tmpl | 7 - .../compute_storage_pool_full.tf.tmpl | 7 - .../examples/compute_wire_group_basic.tf.tmpl | 27 - ...enter_insights_analysis_rule_basic.tf.tmpl | 4 - ...center_insights_analysis_rule_full.tf.tmpl | 28 - ...ter_insights_analysis_rule_profile.tf.tmpl | 28 - ...contact_center_insights_view_basic.tf.tmpl | 5 - .../contact_center_insights_view_full.tf.tmpl | 5 - .../data_fusion_instance_cmek.tf.tmpl | 11 +- ...onnection_profile_existing_alloydb.tf.tmpl | 2 - ..._migration_job_postgres_to_alloydb.tf.tmpl | 2 - ...gration_service_private_connection.tf.tmpl | 2 - .../dataplex_datascan_basic_discovery.tf.tmpl | 24 - .../dataplex_datascan_full_discovery.tf.tmpl | 68 - .../dataplex_datascan_full_quality.tf.tmpl | 1 - ...ataplex_datascan_full_quality_test.tf.tmpl | 182 -- .../examples/dataplex_entry_basic.tf.tmpl | 20 - .../examples/dataplex_entry_full.tf.tmpl | 133 - .../examples/dataplex_glossary_basic.tf.tmpl | 4 - .../dataplex_glossary_category_basic.tf.tmpl | 11 - .../dataplex_glossary_category_full.tf.tmpl | 15 - .../examples/dataplex_glossary_full.tf.tmpl | 8 - .../dataplex_glossary_term_basic.tf.tmpl | 11 - .../dataplex_glossary_term_full.tf.tmpl | 15 - .../dataproc_batch_spark_full.tf.tmpl | 5 +- ...dataproc_session_templates_jupyter.tf.tmpl | 25 - ...roc_session_templates_jupyter_full.tf.tmpl | 116 - ...oc_session_templates_spark_connect.tf.tmpl | 17 - ...m_private_connection_psc_interface.tf.tmpl | 37 - ...oper_connect_insights_config_basic.tf.tmpl | 135 - ...logflow_conversation_profile_basic.tf.tmpl | 18 - .../dialogflow_encryption_spec_basic.tf.tmpl | 73 - .../examples/dialogflowcx_agent_full.tf.tmpl | 39 +- .../dialogflowcx_flow_custom_endpoint.tf.tmpl | 47 - .../examples/dialogflowcx_flow_full.tf.tmpl | 132 - 
...logflowcx_generative_settings_full.tf.tmpl | 44 - .../dialogflowcx_generator_basic.tf.tmpl | 24 - .../examples/dialogflowcx_page_full.tf.tmpl | 126 +- .../dialogflowcx_playbook_basic.tf.tmpl | 44 - .../dialogflowcx_playbook_fulfillment.tf.tmpl | 86 - .../dialogflowcx_tool_data_store.tf.tmpl | 41 - .../dialogflowcx_tool_function.tf.tmpl | 40 - .../dialogflowcx_tool_open_api.tf.tmpl | 78 - ...tmpl => dialogflowcx_webhook_full.tf.tmpl} | 11 - ...webhook_service_directory_flexible.tf.tmpl | 37 - ...webhook_service_directory_standard.tf.tmpl | 45 - .../dialogflowcx_webhook_standard.tf.tmpl | 42 - ...discoveryengine_cmekconfig_default.tf.tmpl | 14 - ...ment_processing_config_layout_full.tf.tmpl | 27 - ...overyengine_datastore_kms_key_name.tf.tmpl | 11 - ...ngine_recommendationengine_generic.tf.tmpl | 20 - ...yengine_recommendationengine_media.tf.tmpl | 30 - ...discovery_config_org_folder_paused.tf.tmpl | 4 +- .../dns_managed_zone_cloud_logging.tf.tmpl | 2 +- ...iner_local_control_plane_node_pool.tf.tmpl | 2 +- .../examples/eventarc_basic_tf.tf.tmpl | 2 + ...rc_pipeline_with_topic_destination.tf.tmpl | 3 + ...pipeline_with_workflow_destination.tf.tmpl | 3 + .../examples/eventarc_workflows.tf.tmpl | 3 + ...p_check_play_integrity_config_full.tf.tmpl | 1 + ...heck_play_integrity_config_minimal.tf.tmpl | 3 + ..._recaptcha_enterprise_config_basic.tf.tmpl | 3 + ..._app_check_service_config_enforced.tf.tmpl | 1 + ...ebase_app_check_service_config_off.tf.tmpl | 1 + ...pp_check_service_config_unenforced.tf.tmpl | 1 + .../firebase_app_hosting_backend_full.tf.tmpl | 2 + ...irebase_app_hosting_backend_github.tf.tmpl | 1 + ...rebase_app_hosting_backend_minimal.tf.tmpl | 2 + .../firebase_app_hosting_build_full.tf.tmpl | 2 + .../firebase_app_hosting_build_github.tf.tmpl | 4 + ...firebase_app_hosting_build_minimal.tf.tmpl | 2 + ...pp_hosting_default_domain_disabled.tf.tmpl | 30 - ...se_app_hosting_default_domain_full.tf.tmpl | 30 - ...app_hosting_default_domain_minimal.tf.tmpl | 28 - 
.../firebase_app_hosting_domain_full.tf.tmpl | 35 - ...irebase_app_hosting_domain_minimal.tf.tmpl | 28 - ...app_hosting_traffic_rollout_policy.tf.tmpl | 48 - ...ng_traffic_rollout_policy_disabled.tf.tmpl | 49 - ...irebase_app_hosting_traffic_target.tf.tmpl | 64 - ...database_instance_default_database.tf.tmpl | 4 + .../firebasedataconnect_service_basic.tf.tmpl | 1 + ...onnect_service_with_force_deletion.tf.tmpl | 1 + .../firebasehosting_site_default.tf.tmpl | 5 - .../firestore_database_with_tags.tf.tmpl | 11 - .../examples/firestore_index_unique.tf.tmpl | 32 - .../examples/firewall_policy_rule.tf.tmpl | 17 - .../firewall_policy_rule_secure_tags.tf.tmpl | 60 - ...rwarding_rule_region_service_basic.tf.tmpl | 49 - .../forwarding_rule_service_basic.tf.tmpl | 38 - ..._reservation_aggregate_reservation.tf.tmpl | 28 - .../examples/future_reservation_basic.tf.tmpl | 19 - ..._sharing_with_google_setting_basic.tf.tmpl | 1 - ..._with_google_setting_binding_basic.tf.tmpl | 1 - ...emini_gcp_enablement_setting_basic.tf.tmpl | 1 - ...p_enablement_setting_binding_basic.tf.tmpl | 1 - .../gkebackup_backupchannel_basic.tf.tmpl | 2 +- .../gkebackup_restorechannel_basic.tf.tmpl | 2 +- ..._feature_rbacrolebinding_actuation.tf.tmpl | 9 - ...ope_rbac_custom_role_binding_basic.tf.tmpl | 26 - ...ehub_scope_rbac_role_binding_basic.tf.tmpl | 1 + ...eonprem_vmware_admin_cluster_basic.tf.tmpl | 5 +- ...keonprem_vmware_admin_cluster_full.tf.tmpl | 5 +- ...nprem_vmware_admin_cluster_metallb.tf.tmpl | 8 +- .../health_check_grpc_with_tls.tf.tmpl | 12 - .../health_check_grpc_with_tls_full.tf.tmpl | 17 - ...force_pool_provider_saml_key_basic.tf.tmpl | 29 - .../iam_workload_identity_pool_full.tf.tmpl | 6 + ...ity_pool_full_federation_only_mode.tf.tmpl | 9 - ...entity_pool_full_trust_domain_mode.tf.tmpl | 38 - ...entity_pool_managed_identity_basic.tf.tmpl | 21 - ...dentity_pool_managed_identity_full.tf.tmpl | 29 - ...load_identity_pool_namespace_basic.tf.tmpl | 13 - 
...kload_identity_pool_namespace_full.tf.tmpl | 15 - .../terraform/examples/instance_basic.tf.tmpl | 2 +- ...nterconnect_attachment_group_basic.tf.tmpl | 6 - .../examples/interconnect_group_basic.tf.tmpl | 6 - .../examples/kms_autokey_config_all.tf.tmpl | 1 + .../examples/kms_key_handle_basic.tf.tmpl | 1 + .../examples/lustre_instance_basic.tf.tmpl | 15 +- .../examples/managedkafka_acl_basic.tf.tmpl | 37 - .../managedkafka_cluster_mtls.tf.tmpl | 36 - .../memorystore_instance_basic.tf.tmpl | 4 +- ...ed_user_and_auto_created_endpoints.tf.tmpl | 2 +- .../memorystore_instance_full.tf.tmpl | 45 +- ...morystore_instance_persistence_aof.tf.tmpl | 4 +- ...ystore_instance_secondary_instance.tf.tmpl | 4 +- ...morystore_instance_standalone_full.tf.tmpl | 4 +- ..._floorsetting_ai_platform_metadata.tf.tmpl | 21 - .../modelarmor_floorsetting_basic.tf.tmpl | 8 - ...elarmor_floorsetting_filter_config.tf.tmpl | 27 - .../modelarmor_template_basic.tf.tmpl | 12 - .../modelarmor_template_filter_config.tf.tmpl | 30 - .../modelarmor_template_label.tf.tmpl | 28 - ...elarmor_template_template_metadata.tf.tmpl | 26 - ...ternal_ranges_allocation_algoritms.tf.tmpl | 18 - ...llocation_algoritms_random_first_n.tf.tmpl | 19 - ...ty_regional_endpoint_global_access.tf.tmpl | 2 +- ..._regional_endpoint_regional_access.tf.tmpl | 4 +- ...flow_logs_config_interconnect_full.tf.tmpl | 33 + ...vpc_flow_logs_config_network_basic.tf.tmpl | 15 - ..._vpc_flow_logs_config_subnet_basic.tf.tmpl | 24 - ...ment_vpc_flow_logs_config_vpn_full.tf.tmpl | 70 + .../network_peering_routes_config_gke.tf.tmpl | 55 + ...ackend_authentication_config_basic.tf.tmpl | 1 + ...backend_authentication_config_full.tf.tmpl | 3 + ...ecurity_intercept_deployment_basic.tf.tmpl | 27 +- ...y_intercept_deployment_group_basic.tf.tmpl | 2 + ...t_endpoint_group_association_basic.tf.tmpl | 11 +- ...ity_intercept_endpoint_group_basic.tf.tmpl | 11 +- .../examples/notebook_instance_basic.tf.tmpl | 4 +- .../notebook_instance_basic_gpu.tf.tmpl | 4 
+- .../notebook_instance_basic_stopped.tf.tmpl | 4 +- .../examples/notebook_instance_full.tf.tmpl | 6 +- ...ase_autonomous_database_odbnetwork.tf.tmpl | 17 - ...abase_autonomous_database_publicip.tf.tmpl | 16 - ...atabase_cloud_vmcluster_odbnetwork.tf.tmpl | 34 - .../oracledatabase_odbnetwork.tf.tmpl | 15 - .../examples/oracledatabase_odbsubnet.tf.tmpl | 12 - ...version_with_json_format_with_file.tf.tmpl | 10 - ...version_with_yaml_format_with_file.tf.tmpl | 10 - .../examples/preview_feature_basic.tf.tmpl | 10 - ...certificate_authority_basic_no_org.tf.tmpl | 35 - ...ero_max_issuer_path_length_null_ca.tf.tmpl | 85 - .../pubsub_subscription_multiple_smts.tf.tmpl | 41 - .../pubsub_subscription_single_smt.tf.tmpl | 21 - .../pubsub_topic_multiple_smts.tf.tmpl | 36 - .../examples/pubsub_topic_single_smt.tf.tmpl | 15 - .../examples/redis_cluster_aof.tf.tmpl | 1 + .../redis_cluster_tls_enabled.tf.tmpl | 57 - .../examples/redis_instance_cmek.tf.tmpl | 3 +- .../examples/redis_instance_full.tf.tmpl | 2 +- .../examples/redis_instance_mrr.tf.tmpl | 3 +- .../redis_instance_private_service.tf.tmpl | 2 +- ...edis_instance_private_service_test.tf.tmpl | 2 +- ...ion_backend_service_balancing_mode.tf.tmpl | 2 +- ...backend_service_dynamic_forwarding.tf.tmpl | 11 - .../region_backend_service_ha_policy.tf.tmpl | 17 - ...nd_service_ha_policy_manual_leader.tf.tmpl | 72 - ...k_hyperdisk_balanced_ha_write_many.tf.tmpl | 7 - .../region_health_check_grpc_with_tls.tf.tmpl | 12 - ...on_health_check_grpc_with_tls_full.tf.tmpl | 17 - ...n_network_endpoint_group_functions.tf.tmpl | 2 +- ...point_group_psc_service_attachment.tf.tmpl | 1 + ...egion_network_firewall_policy_roce.tf.tmpl | 6 - ...urity_policy_with_advanced_options.tf.tmpl | 14 - ...ion_url_map_default_mirror_percent.tf.tmpl | 59 - ...ath_matcher_default_mirror_percent.tf.tmpl | 59 - ..._path_matcher_default_route_action.tf.tmpl | 179 -- ...n_url_map_path_rule_mirror_percent.tf.tmpl | 59 - ..._url_map_route_rule_mirror_percent.tf.tmpl 
| 63 - ...version_with_json_format_with_file.tf.tmpl | 11 - ...version_with_yaml_format_with_file.tf.tmpl | 11 - .../regional_secret_config_basic.tf.tmpl | 1 - .../examples/reservation_basic_beta.tf.tmpl | 16 - .../reservation_sharing_policy.tf.tmpl | 50 - ...servation_source_instance_template.tf.tmpl | 48 - .../resource_manager_capability.tf.tmpl | 17 - ...licy_placement_policy_gpu_topology.tf.tmpl | 1 + .../resource_policy_workload_policy.tf.tmpl | 1 + ...rkload_policy_accelerator_topology.tf.tmpl | 1 + ...kload_policy_max_topology_distance.tf.tmpl | 1 + .../examples/saas_runtime_saas_basic.tf.tmpl | 12 - .../examples/secret_config_basic.tf.tmpl | 1 - ...e_source_manager_branch_rule_basic.tf.tmpl | 10 +- ...ce_manager_branch_rule_with_fields.tf.tmpl | 10 +- ...cure_source_manager_instance_basic.tf.tmpl | 4 +- ...ecure_source_manager_instance_cmek.tf.tmpl | 4 +- ...re_source_manager_instance_private.tf.tmpl | 4 +- ...nager_instance_private_psc_backend.tf.tmpl | 4 +- ...ager_instance_private_psc_endpoint.tf.tmpl | 4 +- ...ager_instance_private_trusted_cert.tf.tmpl | 8 - ...ance_workforce_identity_federation.tf.tmpl | 4 +- ...re_source_manager_repository_basic.tf.tmpl | 8 +- ..._manager_repository_initial_config.tf.tmpl | 10 +- ...ervice_attachment_cross_region_ilb.tf.tmpl | 77 - .../shared_future_reservation.tf.tmpl | 53 - .../examples/shared_reservation_basic.tf.tmpl | 16 +- .../examples/shared_reservation_beta.tf.tmpl | 55 - ...apshot_settings_specific_locations.tf.tmpl | 9 - .../examples/spanner_database_basic.tf.tmpl | 1 - ...e_insights_dataset_config_excludes.tf.tmpl | 20 - ...e_insights_dataset_config_includes.tf.tmpl | 25 - .../target_http_proxy_fingerprint.tf.tmpl | 45 - .../target_https_proxy_fingerprint.tf.tmpl | 54 - .../terraform/examples/tpu_node_basic.tf.tmpl | 15 + .../terraform/examples/tpu_node_full.tf.tmpl | 50 + .../examples/tpu_node_full_test.tf.tmpl | 29 + ...l_map_custom_error_response_policy.tf.tmpl | 5 + 
.../url_map_default_mirror_percent.tf.tmpl | 54 - .../url_map_http_filter_configs.tf.tmpl | 78 - .../url_map_http_filter_metadata.tf.tmpl | 111 - ...ath_matcher_default_mirror_percent.tf.tmpl | 54 - .../url_map_path_rule_mirror_percent.tf.tmpl | 54 - .../url_map_route_rule_mirror_percent.tf.tmpl | 58 - .../url_map_test_expected_output_url.tf.tmpl | 52 - .../examples/url_map_test_headers.tf.tmpl | 58 - ...rl_map_test_redirect_response_code.tf.tmpl | 73 - .../examples/vertex_ai_deploy_basic.tf.tmpl | 7 - ...vertex_ai_deploy_huggingface_model.tf.tmpl | 7 - ...deploy_multiple_models_in_parallel.tf.tmpl | 53 - ...deploy_multiple_models_in_sequence.tf.tmpl | 55 - .../vertex_ai_deploy_with_configs.tf.tmpl | 17 - ...i_endpoint_private_service_connect.tf.tmpl | 1 + ...inestore_featureview_cross_project.tf.tmpl | 1 + ...deployed_index_automatic_resources.tf.tmpl | 76 +- ...ndex_endpoint_deployed_index_basic.tf.tmpl | 88 +- ..._endpoint_deployed_index_basic_two.tf.tmpl | 85 +- ...deployed_index_dedicated_resources.tf.tmpl | 80 +- .../vertex_ai_rag_engine_config_basic.tf.tmpl | 7 - ...vertex_ai_rag_engine_config_scaled.tf.tmpl | 7 - ...ai_rag_engine_config_unprovisioned.tf.tmpl | 7 - .../examples/vpn_tunnel_cipher_suite.tf.tmpl | 84 - .../workbench_instance_basic_gpu.tf.tmpl | 28 - .../examples/workbench_instance_euc.tf.tmpl | 28 - .../examples/workbench_instance_full.tf.tmpl | 30 +- .../terraform/expand_property_method.go.tmpl | 10 +- .../terraform/flatten_property_method.go.tmpl | 30 +- mmv1/templates/terraform/metadata.yaml.tmpl | 3 - ...rite_only_documentation.html.markdown.tmpl | 2 +- .../cloud_tasks_queue_state.go.tmpl | 16 - .../colab_runtime_template.go.tmpl | 10 + .../post_create/colab_schedule.go.tmpl | 16 + .../terraform/post_create/gcip_tenant.go.tmpl | 14 + .../terraform/post_create/group.tmpl | 4 + ...oad_identity_pool_managed_identity.go.tmpl | 32 - .../terraform/post_create/iap_client.go.tmpl | 7 + .../index.go.tmpl} | 12 +- .../interconnect_attachment.go.tmpl 
| 2 +- .../terraform/post_create/labels.tmpl | 6 +- .../templates/terraform/post_create/lien.tmpl | 11 + .../modelarmor_floorsetting_sleep.go.tmpl | 4 - .../regional_secret_version.go.tmpl | 2 +- ...point_with_model_garden_deployment.go.tmpl | 114 - .../post_create/secret_version.go.tmpl | 4 +- .../post_create/spanner_database.go.tmpl | 15 +- .../storage_insights_dataset_config.go.tmpl | 33 - .../post_delete/dialogflowcx_agent.go.tmpl | 34 - ...ticshub_data_exchange_subscription.go.tmpl | 51 - ...oad_identity_pool_managed_identity.go.tmpl | 18 - .../cloud_tasks_queue_state.go.tmpl | 35 - ...rtifact_registry_remote_repository.go.tmpl | 18 + .../cloud_identity_group_membership.go.tmpl | 69 - ...pute_instance_settings_fingerprint.go.tmpl | 21 - .../datastream_connection_profile.go.tmpl | 18 - .../dialogflow_set_endpoint.go.tmpl | 6 - .../dialogflow_set_location.go.tmpl | 5 +- .../pre_create/dialogflowcx_generator.go.tmpl | 24 - ...owcx_set_location_skip_default_obj.go.tmpl | 5 +- .../gkehub_existing_feature.go.tmpl | 25 - ...oad_identity_pool_managed_identity.go.tmpl | 7 - .../pre_delete/alloydb_cluster.go.tmpl | 4 - .../bigquery_analytics_hub_listing.go.tmpl | 4 - .../bigquery_row_access_policy.go.tmpl | 3 - ...drunv2_worker_pool_deletion_policy.go.tmpl | 3 - ...code_repository_index_force_delete.go.tmpl | 6 + .../pre_delete/dialogflowcx_agent.go.tmpl | 20 - .../pre_delete/firebasehosting_site.go.tmpl | 4 - .../gkehub_existing_feature.go.tmpl | 55 - .../oracledatabase_odbnetwork.go.tmpl | 3 - .../oracledatabase_odbsubnet.go.tmpl | 3 - .../pre_delete/regional_secret.go.tmpl | 3 - .../pre_delete/secret_manager_secret.go.tmpl | 4 - ...ecuresourcemanager_deletion_policy.go.tmpl | 7 - .../terraform/pre_read/dataplex_entry.go.tmpl | 4 - .../sql_database_activation_policy.tmpl | 12 +- .../bigqueryanalyticshub_listing.go.tmpl | 20 - .../bigquerydatatransfer_config.tmpl | 9 - .../pre_update/dataplex_entry.go.tmpl | 39 - .../fw_datafusion_instance_update.go.tmpl | 35 - 
.../storage_insights_dataset_config.go.tmpl | 116 - .../vpc_access_connector_instances.go.tmpl | 15 - mmv1/templates/terraform/product.go.tmpl | 22 - .../property_documentation.html.markdown.tmpl | 2 +- mmv1/templates/terraform/resource.go.tmpl | 55 +- .../terraform/resource.html.markdown.tmpl | 20 +- mmv1/templates/terraform/resource_fw.go.tmpl | 764 ------ .../terraform/resource_iam.html.markdown.tmpl | 21 +- .../terraform/schema_property.go.tmpl | 2 +- .../terraform/schema_property_fw.go.tmpl | 52 - .../update_encoder/compute_network.go.tmpl | 14 +- .../compute_service_attachment.go.tmpl | 7 - .../discoveryengine_cmekconfig_kmskey.go.tmpl | 4 - .../update_encoder/reservation.go.tmpl | 28 - .../terraform/update_encoder/ssl_policy.tmpl | 4 +- .../terraform/update_mask_fw.go.tmpl | 27 - .../bigquery_data_transfer_config.go.tmpl | 1 + .../validation/secret_version.go.tmpl | 1 + mmv1/templates/tgc/resource_converter.go.tmpl | 10 +- .../flatten_property_method_tgc.go.tmpl | 32 - .../cai2hcl/full_to_relative_path.go.tmpl | 10 - .../cai2hcl/resource_converter.go.tmpl | 152 -- .../cai2hcl/resource_converters.go.tmpl | 28 +- .../pubsub_subscription_attributes.go.tmpl | 22 - .../custom_expand/set_to_slice_or_nil.go.tmpl | 7 - ...rvice_signed_url_cache_max_age_sec.go.tmpl | 20 - .../decoders/backup_dr_backup_plan.go.tmpl | 20 - .../certificatemanager_certificate.go.tmpl | 9 - .../decoders/cloud_asset_feed.go.tmpl | 4 - .../decoders/compute_backend_service.go.tmpl | 31 - .../decoders/compute_subnetwork.go.tmpl | 23 - .../certificatemanager_certificate.go.tmpl | 7 - .../tgc_next/encoders/compute_disk.go.tmpl | 5 - .../provider/provider_mmv1_resources.go.tmpl | 26 - .../tgc_next/services/resource.go.tmpl | 73 - .../templates/tgc_next/test/test_file.go.tmpl | 44 - .../expand_property_method_tgc.go.tmpl | 21 - .../tfplan2cai/resource_converter.go.tmpl | 130 - .../tfplan2cai/resource_converters.go.tmpl | 20 +- mmv1/third_party/cai2hcl/convert_test.go | 12 +- 
mmv1/third_party/cai2hcl/converter_map.go | 5 - .../compute/compute_forwarding_rule.go | 2 +- .../compute/compute_region_backend_service.go | 2 +- .../compute/compute_region_health_check.go | 2 +- .../networksecurity/server_tls_policy.go | 173 -- .../networksecurity/server_tls_policy_test.go | 13 - .../testdata/server_tls_policy.json | 361 --- .../testdata/server_tls_policy.tf | 194 -- .../builds/build_configuration_per_package.kt | 26 +- .../builds/build_configuration_sweepers.kt | 5 +- .../build_configuration_vcr_recording.kt | 3 +- .../components/builds/build_parameters.kt | 5 +- .../components/builds/build_steps.kt | 52 - .../.teamcity/components/constants.kt | 6 +- .../components/inputs/services_beta.kt | 30 +- .../components/inputs/services_ga.kt | 30 +- .../FEATURE_BRANCH_resource_identity.kt | 1 - .../projects/google_beta_subproject.kt | 4 - .../projects/reused/nightly_tests.kt | 3 - .../projects/reused/weekly_diff_tests.kt | 72 - .../terraform/.teamcity/tests/test_utils.kt | 1 - .../.teamcity/tests/weekly_diff_project.kt | 50 - .../acctest/bootstrap_test_utils.go.tmpl | 168 +- .../terraform/acctest/diff_utils.go | 204 -- .../terraform/acctest/diff_utils_test.go | 206 -- .../terraform/acctest/provider_test_utils.go | 8 - .../acctest/resource_inventory_reader.go | 212 -- .../acctest/resource_inventory_test.go | 113 - .../terraform/acctest/tgc_utils.go | 296 +- .../terraform/acctest/vcr_utils.go | 117 +- .../terraform/envvar/envvar_utils.go | 10 +- .../terraform/fwmodels/provider_model.go.tmpl | 3 + .../fwprovider/framework_provider.go.tmpl | 42 +- .../terraform/fwresource/field_helpers.go | 23 +- .../terraform/fwresource/framework_import.go | 192 -- .../fwresource/framework_import_test.go | 183 -- .../fwresource/framework_location.go | 4 +- .../terraform/fwtransport/framework_utils.go | 321 +-- .../fwvalidators/framework_validators.go | 80 - .../fwvalidators/framework_validators_test.go | 138 - mmv1/third_party/terraform/go.mod | 104 +- 
mmv1/third_party/terraform/go.sum | 698 ++++- .../terraform/provider/provider.go.tmpl | 31 +- .../provider/provider_billing_project_test.go | 3 + .../provider/provider_mmv1_resources.go.tmpl | 52 +- ...idators.go => provider_validators.go.tmpl} | 2 +- .../universe/universe_domain_compute_test.go | 47 - .../universe/universe_domain_storage_test.go | 9 +- .../scripts/teamcitytestscripts/main.go | 125 - .../scripts/teamcitytestscripts/teamcity.go | 125 - ...ext_manager_access_level_condition_test.go | 4 +- ..._context_manager_access_level_test.go.tmpl | 41 +- ...xt_manager_gcp_user_access_binding_test.go | 93 +- ...ext_manager_service_perimeter_test.go.tmpl | 12 +- ...ontext_manager_services_perimeters_test.go | 8 +- .../alloydb/data_source_alloydb_cluster.go | 5 - .../data_source_alloydb_cluster_test.go | 4 +- ...a_source_alloydb_database_instance_test.go | 4 +- .../alloydb/resource_alloydb_backup_test.go | 14 +- .../resource_alloydb_cluster_restore_test.go | 46 +- .../alloydb/resource_alloydb_cluster_test.go | 167 +- .../alloydb/resource_alloydb_instance_test.go | 285 +- ...resource_alloydb_secondary_cluster_test.go | 168 +- ...esource_alloydb_secondary_instance_test.go | 42 +- .../alloydb/resource_alloydb_user_test.go | 12 +- .../resource_api_gateway_gateway_test.go.tmpl | 18 +- ..._apigee_keystores_aliases_key_cert_file.go | 533 ---- .../services/apigee/resource_apigee_api.go | 4 +- ...resource_apigee_api_product_update_test.go | 518 ---- ...resource_apigee_environment_update_test.go | 6 - .../apigee/resource_apigee_flowhook.go | 4 +- .../resource_apigee_instance_update_test.go | 10 - ..._apigee_keystores_aliases_key_cert_file.go | 692 +++++ ...esource_apigee_keystores_aliases_pkcs12.go | 4 +- .../resource_apigee_security_action_test.go | 649 ----- .../apigee/resource_apigee_sharedflow.go | 4 +- .../resource_apigee_sharedflow_deployment.go | 5 +- .../resource_apigee_target_server_test.go | 2 - .../apihub/resource_apihub_curation_test.go | 87 - 
..._source_artifact_registry_docker_images.go | 191 -- ...ce_artifact_registry_docker_images_test.go | 42 - ...ta_source_artifact_registry_npm_package.go | 295 -- ...urce_artifact_registry_npm_package_test.go | 67 - .../data_source_artifact_registry_package.go | 136 - ...a_source_artifact_registry_package_test.go | 37 - ...a_source_artifact_registry_repositories.go | 173 -- ...rce_artifact_registry_repositories_test.go | 127 - .../data_source_artifact_registry_tag.go | 122 - .../data_source_artifact_registry_tag_test.go | 38 - .../data_source_artifact_registry_tags.go | 169 -- ...data_source_artifact_registry_tags_test.go | 44 - .../data_source_artifact_registry_version.go | 206 -- ...a_source_artifact_registry_version_test.go | 38 - ..._artifact_registry_repository_test.go.tmpl | 68 - .../data_source_backup_dr_backup.go.tmpl | 15 - .../data_source_backup_dr_backup_plan_test.go | 77 +- ..._source_backup_dr_data_source_test.go.tmpl | 16 +- ..._backup_dr_backup_plan_association_test.go | 251 -- .../resource_backup_dr_backup_plan_test.go | 219 -- ...ource_backup_dr_backup_vault_test.go.tmpl} | 12 +- ...urce_google_beyondcorp_security_gateway.go | 53 - ...google_beyondcorp_security_gateway_test.go | 82 - .../resource_beyondcorp_application_test.go | 85 + ...dcorp_security_gateway_application_test.go | 98 - .../data_source_google_bigquery_datasets.go | 161 -- ...ta_source_google_bigquery_datasets_test.go | 106 - .../data_source_google_bigquery_table.go | 51 - .../data_source_google_bigquery_table_test.go | 190 -- .../bigquery/iam_bigquery_member_dataset.go | 7 +- ...source_bigquery_dataset_iam_member_test.go | 78 +- .../resource_bigquery_routine_test.go | 2 - ...esource_bigquery_row_access_policy_test.go | 96 - .../bigquery/resource_bigquery_table.go.tmpl | 185 +- .../resource_bigquery_table_meta.yaml.tmpl | 2 - .../bigquery/resource_bigquery_table_test.go | 411 +-- ...igquery_analytics_hub_dataexchange_test.go | 54 - ..._hub_dataexchangesubscription_test.go.tmpl | 153 
-- ...analytics_hub_listing_subscription_test.go | 1 + ...rce_bigquery_analytics_hub_listing_test.go | 202 -- ... => resource_bigquery_reservation_test.go} | 77 - .../services/bigtable/iam_bigtable_table.go | 34 +- .../resource_bigtable_authorized_view.go | 6 +- .../bigtable/resource_bigtable_instance.go | 58 +- ...esource_bigtable_instance_internal_test.go | 28 +- .../resource_bigtable_instance_test.go | 97 +- .../resource_bigtable_logical_view_test.go | 11 +- .../resource_bigtable_schema_bundle_test.go | 98 - .../bigtable/resource_bigtable_table.go | 78 +- .../resource_bigtable_table_iam_test.go | 34 +- .../resource_bigtable_table_meta.yaml | 2 - .../bigtable/resource_bigtable_table_test.go | 188 -- .../test-fixtures/proto_schema_bundle.pb | 6 - .../test-fixtures/proto_schema_bundle.proto | 22 - .../updated_proto_schema_bundle.pb | 7 - .../updated_proto_schema_bundle.proto | 23 - .../data_source_google_billing_account.go | 7 - ...data_source_google_billing_account_test.go | 1 - .../billing/resource_billing_budget_test.go | 132 +- ..._binary_authorization_policy_test.go.tmpl} | 8 +- ...e_certificate_manager_dns_authorization.go | 46 - ...tificate_manager_dns_authorization_test.go | 48 - ..._chronicle_data_access_label_test.go.tmpl} | 0 ..._chronicle_data_access_scope_test.go.tmpl} | 0 ...rce_chronicle_reference_list_test.go.tmpl} | 0 ...ce_chronicle_rule_deployment_test.go.tmpl} | 0 ...o => resource_chronicle_rule_test.go.tmpl} | 0 ...resource_chronicle_watchlist_test.go.tmpl} | 0 ...e_cloud_asset_resources_search_all.go.tmpl | 9 +- ..._cloud_asset_search_all_resources.go.tmpl} | 12 +- ...d_asset_search_all_resources_test.go.tmpl} | 2 +- ...source_cloudbuild_worker_pool_test.go.tmpl | 12 +- ...resource_clouddeploy_deploy_policy_test.go | 14 +- .../resource_cloudfunctions_function.go | 119 +- ...resource_cloudfunctions_function_meta.yaml | 2 - ...ource_cloudfunctions_function_test.go.tmpl | 148 - ...ource_cloud_identity_group_lookup.go.tmpl} | 0 
...oud_identity_group_membership_test.go.tmpl | 175 -- .../data_source_cloud_run_service.go} | 16 +- .../resource_cloud_run_service_test.go.tmpl | 171 -- ..._source_google_cloud_run_v2_worker_pool.go | 52 - ...ce_google_cloud_run_v2_worker_pool_test.go | 65 - .../resource_cloud_run_v2_job_test.go.tmpl | 498 +--- ...resource_cloud_run_v2_service_test.go.tmpl | 216 +- ...urce_cloud_run_v2_worker_pool_test.go.tmpl | 670 ----- ...> resource_cloud_tasks_queue_test.go.tmpl} | 81 +- ...source_google_composer_environment_test.go | 15 +- ...er_user_workloads_config_map_test.go.tmpl} | 15 - ...mposer_user_workloads_secret_test.go.tmpl} | 21 +- .../resource_composer_environment.go.tmpl | 6 +- ...esource_composer_environment_test.go.tmpl} | 2370 ++++++++++------- ...er_user_workloads_config_map_test.go.tmpl} | 64 +- ...rce_composer_user_workloads_secret_test.go | 230 -- ...omposer_user_workloads_secret_test.go.tmpl | 179 ++ ... => compute_instance_helpers_test.go.tmpl} | 0 ...rce_compute_network_endpoint_group_test.go | 5 - ...data_source_google_compute_images.go.tmpl} | 0 ...ta_source_google_compute_instance.go.tmpl} | 2 +- ...oogle_compute_instance_group_test.go.tmpl} | 4 +- ...urce_google_compute_instance_test.go.tmpl} | 2 +- ...ce_google_compute_interconnect_location.go | 143 +- ...ogle_compute_interconnect_location_test.go | 95 +- ...e_google_compute_interconnect_locations.go | 28 +- ...gle_compute_interconnect_locations_test.go | 11 +- ...ata_source_google_compute_network.go.tmpl} | 4 +- ...ource_google_compute_network_attachment.go | 70 - ..._google_compute_network_attachment_test.go | 202 -- ...ompute_region_instance_group_test.go.tmpl} | 6 +- ...ce_google_compute_resource_policy.go.tmpl} | 2 +- .../data_source_google_compute_subnetworks.go | 7 - ...data_source_google_compute_network.go.tmpl | 187 -- ...data_source_google_compute_network_test.go | 86 - .../terraform/services/compute/image.go | 8 +- .../resource_compute_backend_bucket_test.go | 49 - 
...ource_compute_backend_service_test.go.tmpl | 363 +-- ...rce_compute_disk_async_replication.go.tmpl | 2 +- .../compute/resource_compute_disk_sweeper.go | 108 +- .../resource_compute_disk_test.go.tmpl | 4 +- ...compute_firewall_policy_rule_test.go.tmpl} | 373 +-- ...urce_compute_firewall_policy_test.go.tmpl} | 6 +- ...e_firewall_policy_with_rules_test.go.tmpl} | 216 +- ...=> resource_compute_firewall_test.go.tmpl} | 67 +- ...ce_compute_future_reservation_test.go.tmpl | 101 - ...ource_compute_global_address_test.go.tmpl} | 14 +- ...ompute_global_forwarding_rule_test.go.tmpl | 179 -- ...pute_global_network_endpoint_test.go.tmpl} | 3 +- ...resource_compute_health_check_test.go.tmpl | 133 +- .../compute/resource_compute_instance.go.tmpl | 59 +- ...e_instance_from_machine_image_test.go.tmpl | 8 +- ...ompute_instance_from_template_test.go.tmpl | 170 +- .../resource_compute_instance_group.go.tmpl | 6 +- ...rce_compute_instance_group_manager.go.tmpl | 56 - ...ompute_instance_group_manager_test.go.tmpl | 333 --- ...ce_compute_instance_settings_test.go.tmpl} | 8 +- ...resource_compute_instance_template.go.tmpl | 47 +- ...compute_instance_template_internal_test.go | 11 +- ...rce_compute_instance_template_test.go.tmpl | 83 +- .../resource_compute_instance_test.go.tmpl | 1131 ++++---- ...connect_application_awareness_test.go.tmpl | 131 - ...pute_interconnect_attachment_group_test.go | 95 - ...esource_compute_interconnect_group_test.go | 95 - ...esource_compute_network_attachment_test.go | 204 -- ...mpute_network_endpoint_group_test.go.tmpl} | 2 +- ...network_firewall_policy_rule_test.go.tmpl} | 282 +- ...k_firewall_policy_with_rules_test.go.tmpl} | 22 +- .../resource_compute_network_peering.go.tmpl | 44 +- .../resource_compute_network_peering_test.go | 77 - .../resource_compute_network_test.go.tmpl | 173 +- ... 
resource_compute_node_group_test.go.tmpl} | 0 ..._organization_security_policy_test.go.tmpl | 34 - ..._compute_per_instance_config_test.go.tmpl} | 14 +- .../resource_compute_preview_features_test.go | 55 - ...urce_compute_project_metadata_item.go.tmpl | 4 +- ...e_compute_public_advertised_prefix_test.go | 79 - ...ice_ha_policy_manual_leader_update_test.go | 262 -- ...ompute_region_backend_service_test.go.tmpl | 373 --- .../resource_compute_region_disk_test.go.tmpl | 113 - ...e_compute_region_health_check_test.go.tmpl | 142 +- ...e_compute_region_instance_template.go.tmpl | 29 - ...on_instance_template_internal_test.go.tmpl | 9 +- ...pute_region_instance_template_test.go.tmpl | 84 +- ...network_firewall_policy_rule_test.go.tmpl} | 18 +- ...k_firewall_policy_with_rules_test.go.tmpl} | 15 +- ...e_region_per_instance_config_test.go.tmpl} | 3 +- ...ompute_region_security_policy_test.go.tmpl | 244 -- ...ute_region_target_http_proxy_test.go.tmpl} | 2 +- ...pute_region_target_tcp_proxy_test.go.tmpl} | 4 +- ...ource_compute_region_url_map_test.go.tmpl} | 4 +- .../resource_compute_reservation_test.go | 120 - .../compute/resource_compute_route_test.go | 50 - ...e_compute_router_nat_address_test.go.tmpl} | 2 +- ... 
resource_compute_router_nat_test.go.tmpl} | 524 +--- .../resource_compute_router_peer.go.tmpl | 41 +- ..._compute_router_route_policy_test.go.tmpl} | 2 +- .../resource_compute_router_test.go.tmpl | 83 +- .../resource_compute_security_policy.go.tmpl | 39 +- ...compute_security_policy_rule_test.go.tmpl} | 101 +- ...ource_compute_security_policy_test.go.tmpl | 88 - ...e_compute_service_attachment_test.go.tmpl} | 29 +- ..._compute_shared_reservation_update_test.go | 38 +- ...resource_compute_snapshot_settings_test.go | 116 - .../resource_compute_subnetwork_test.go.tmpl | 59 - .../resource_compute_target_pool.go.tmpl | 8 +- .../resource_compute_wire_group_test.go.tmpl | 117 - ...tact_center_insights_analysis_rule_test.go | 138 - ...ource_contact_center_insights_view_test.go | 72 - ...source_google_container_engine_versions.go | 10 - ...e_google_container_engine_versions_test.go | 5 - .../services/container/node_config.go.tmpl | 572 +--- .../resource_container_cluster.go.tmpl | 682 +---- .../resource_container_cluster_meta.yaml.tmpl | 6 - ...source_container_cluster_migratev1.go.tmpl | 8 - .../resource_container_cluster_test.go.tmpl | 1356 ++-------- .../resource_container_node_pool.go.tmpl | 125 +- ...esource_container_node_pool_meta.yaml.tmpl | 3 - .../resource_container_node_pool_test.go.tmpl | 973 ++----- ...esource_dataflow_flex_template_job.go.tmpl | 66 +- ..._dataflow_flex_template_job_meta.yaml.tmpl | 1 - ...ce_dataflow_flex_template_job_test.go.tmpl | 296 +- ...w_job.go => resource_dataflow_job.go.tmpl} | 39 +- .../resource_data_fusion_instance_test.go | 64 +- ...data_source_dataplex_data_quality_rules.go | 334 --- ...source_dataplex_data_quality_rules_test.go | 190 -- .../resource_dataplex_datascan_test.go | 232 -- .../resource_dataplex_entry_meta.yaml | 36 - .../dataplex/resource_dataplex_entry_test.go | 838 ------ ...esource_dataplex_glossary_category_test.go | 83 - .../resource_dataplex_glossary_term_test.go | 83 - .../resource_dataplex_glossary_test.go.tmpl 
| 73 - .../dataplex/resource_dataplex_task_test.go | 4 +- .../dataproc/resource_dataproc_cluster.go | 90 +- ...resource_dataproc_cluster_internal_test.go | 5 - .../resource_dataproc_cluster_meta.yaml | 2 - ...=> resource_dataproc_cluster_test.go.tmpl} | 255 +- ...c_job.go => resource_dataproc_job.go.tmpl} | 12 +- .../resource_dataproc_job_test.go.tmpl | 14 +- ...resource_dataproc_session_template_test.go | 198 -- ...taproc_gdc_application_environment_test.go | 1 - ...e_dataproc_metastore_service_test.go.tmpl} | 2 +- ...oc_metastore_service_diff_supress.go.tmpl} | 0 ...e_dataproc_metastore_service_test.go.tmpl} | 56 + ...urce_datastream_connection_profile_test.go | 8 +- ...eveloper_connect_account_connector_test.go | 524 ---- ...per_connect_account_connector_test.go.tmpl | 545 ++++ ...developer_connect_connection_test.go.tmpl} | 223 +- ..._developer_connect_insights_config_test.go | 341 --- .../dialogflow/dialogflow_operation.go | 87 - ...=> resource_dialogflow_agent_test.go.tmpl} | 2 +- ...ce_dialogflow_conversation_profile_test.go | 411 --- ...ource_dialogflow_entity_type_test.go.tmpl} | 0 ...ource_dialogflow_fulfillment_test.go.tmpl} | 0 ...> resource_dialogflow_intent_test.go.tmpl} | 0 .../resource_dialogflow_cx_generator_test.go | 88 - .../resource_dialogflow_cx_tool_test.go | 310 --- ... 
resource_dialogflowcx_agent_test.go.tmpl} | 0 .../resource_dialogflowcx_flow_test.go | 31 - ...e_dialogflowcx_generative_settings_test.go | 125 - .../resource_dialogflowcx_page_test.go | 29 - ...ource_discovery_engine_cmek_config_test.go | 112 - ...overy_engine_recommendation_engine_test.go | 115 - ...st.go => data_source_dns_key_test.go.tmpl} | 1 + ...> data_source_dns_record_set_test.go.tmpl} | 0 .../resource_dns_managed_zone_test.go.tmpl | 8 +- .../services/dns/resource_dns_policy_test.go | 171 -- .../dns/resource_dns_policy_test.go.tmpl | 72 + .../services/dns/resource_dns_record_set.go | 6 +- ...ument_ai_warehouse_document_schema_test.go | 1 + .../resource_eventarc_message_bus_test.go | 35 +- .../resource_eventarc_pipeline_test.go | 16 +- .../resource_filestore_backup_test.go | 4 +- ...pl => resource_filestore_instance_test.go} | 259 +- ...google_firebase_android_app_config.go.tmpl | 2 +- ...e_google_firebase_apple_app_config.go.tmpl | 2 +- ...rce_google_firebase_web_app_config.go.tmpl | 2 +- ...base_app_check_service_config_test.go.tmpl | 3 + ...urce_firebase_data_connect_service_test.go | 1 + .../data_source_google_firestore_document.go | 51 - ...a_source_google_firestore_document_test.go | 98 - .../resource_firestore_database_test.go | 54 - ...ce_firestore_database_update_test.go.tmpl} | 38 +- ... 
iam_gemini_repository_group_test.go.tmpl} | 0 ...gemini_code_repository_index_test.go.tmpl} | 0 ...i_code_tools_setting_binding_test.go.tmpl} | 6 +- ...ce_gemini_code_tools_setting_test.go.tmpl} | 2 +- ..._with_google_setting_binding_test.go.tmpl} | 6 +- ..._sharing_with_google_setting_test.go.tmpl} | 4 +- ...p_enablement_setting_binding_test.go.tmpl} | 8 +- ...emini_gcp_enablement_setting_test.go.tmpl} | 4 +- ...mini_logging_setting_binding_test.go.tmpl} | 4 +- ...ease_channel_setting_binding_test.go.tmpl} | 4 +- ...mini_release_channel_setting_test.go.tmpl} | 0 ...urce_gemini_repository_group_test.go.tmpl} | 2 +- ...ce_gke_backup_backup_channel_test.go.tmpl} | 0 ...ource_gke_backup_backup_plan_test.go.tmpl} | 42 +- ...e_gke_backup_restore_channel_test.go.tmpl} | 0 ...urce_gke_backup_restore_plan_test.go.tmpl} | 9 +- .../data_source_google_gke_hub_membership.go | 39 - ...a_source_google_gke_hub_membership_test.go | 144 - ..._gke_hub_feature_membership_meta.yaml.tmpl | 1 + ...ce_gke_hub_feature_membership_test.go.tmpl | 33 +- ...data_source_google_gke_hub_feature_test.go | 6 +- ...st.go => iam_gke_hub_feature_test.go.tmpl} | 5 + .../resource_gke_hub_feature_test.go.tmpl | 145 +- .../resource_gke_hub_fleet_test.go.tmpl | 2 + ...ce_gke_hub_scope_rbac_role_binding_test.go | 140 +- ...eration.go => gkeonprem_operation.go.tmpl} | 8 +- ...gkeonprem_bare_metal_cluster_test.go.tmpl} | 179 +- ...eonprem_bare_metal_node_pool_test.go.tmpl} | 62 +- ...rce_gkeonprem_vmware_cluster_test.go.tmpl} | 182 +- ...e_gkeonprem_vmware_node_pool_test.go.tmpl} | 62 +- ... 
=> resource_iam_deny_policy_test.go.tmpl} | 2 +- ...e_iam_folders_policy_binding_test.go.tmpl} | 2 +- ...organizations_policy_binding_test.go.tmpl} | 2 +- ...cipal_access_boundary_policy_test.go.tmpl} | 1 - ..._iam_projects_policy_binding_test.go.tmpl} | 7 +- ...kload_identity_pool_provider_test.go.tmpl} | 2 +- ...e_iam_workload_identity_pool_test.go.tmpl} | 2 +- ...am_workload_identity_pool_id_test.go.tmpl} | 0 ...tity_pool_managed_identity_id_test.go.tmpl | 38 - ...dentity_pool_managed_identity_test.go.tmpl | 180 -- ...ad_identity_pool_namespace_id_test.go.tmpl | 38 - ...kload_identity_pool_namespace_test.go.tmpl | 148 - ...ad_identity_pool_provider_id_test.go.tmpl} | 0 ...kload_identity_pool_provider_test.go.tmpl} | 4 +- ...ce_iam_workload_identity_pool_test.go.tmpl | 140 - .../iambeta/test-fixtures/trust_anchor_1.pem | 3 - .../iambeta/test-fixtures/trust_anchor_2.pem | 3 - .../iambeta/test-fixtures/trust_anchor_3.pem | 3 - .../iambeta/test-fixtures/trust_anchor_4.pem | 3 - ..._iam_oauth_client_credential_test.go.tmpl} | 3 +- ...=> resource_iam_oauth_client_test.go.tmpl} | 4 +- ...ce_iam_workforce_pool_provider_key_test.go | 136 - ... 
resource_iam_workforce_pool_test.go.tmpl} | 2 +- ...force_pool_workforce_pool_id_test.go.tmpl} | 0 ...l_workforce_pool_provider_id_test.go.tmpl} | 0 ...ool_workforce_pool_provider_key_id_test.go | 33 - .../services/iap/data_source_iap_client.go | 39 + .../iap/data_source_iap_client_test.go | 70 + ..._integration_connectors_connection_test.go | 1 - ...source_google_kms_auotokey_config.go.tmpl} | 4 + ...e_google_kms_auotokey_config_test.go.tmpl} | 3 + ...data_source_google_kms_key_handle.go.tmpl} | 8 +- ...source_google_kms_key_handle_test.go.tmpl} | 3 + ...ource_google_kms_key_handles_test.go.tmpl} | 4 + .../kms/resource_kms_crypto_key_test.go.tmpl | 45 +- .../resource_logging_organization_sink.go | 2 +- .../lustre/data_source_lustre_instance.go | 76 - .../data_source_lustre_instance_test.go | 67 - .../lustre/resource_lustre_instance_test.go | 38 +- .../resource_managed_kafka_acl_test.go | 130 - ...source_managed_kafka_cluster_test.go.tmpl} | 60 - ...resource_managed_kafka_topic_test.go.tmpl} | 0 .../data_source_memorystore_instance_test.go | 40 +- .../resource_memorystore_instance_test.go | 405 +-- .../resource_model_armor_template_test.go | 185 -- .../resource_model_armor_floorsetting_test.go | 131 - .../resource_monitoring_dashboard_meta.yaml | 1 - ...ource_monitoring_metric_descriptor_test.go | 42 +- .../netapp/resource_netapp_backup_test.go | 284 +- .../resource_netapp_storage_pool_test.go.tmpl | 328 +-- .../resource_netapp_volume_quotaRule_test.go | 2 +- ...resource_netapp_volume_replication_test.go | 2 +- .../resource_netapp_volume_snapshot_test.go | 2 +- ...go.tmpl => resource_netapp_volume_test.go} | 229 +- ...etwork_connectivity_internal_range_test.go | 58 - ...tivity_service_connection_policies_test.go | 10 +- ...esource_network_connectivity_spoke_test.go | 55 +- ...etwork_management_connectivity_test_run.go | 469 ---- ...k_management_connectivity_test_run_test.go | 113 - ...nagement_vpc_flow_logs_config_test.go.tmpl | 256 +- 
...ackend_authentication_config_test.go.tmpl} | 15 +- ...rk_security_client_tls_policy_test.go.tmpl | 5 + ...urity_gateway_security_policy_rule_test.go | 2 + ...tercept_deployment_generated_test.go.tmpl} | 59 +- ...t_deployment_group_generated_test.go.tmpl} | 9 +- ..._group_association_generated_test.go.tmpl} | 27 +- ...ept_endpoint_group_generated_test.go.tmpl} | 27 +- ...urity_security_profile_group_test.go.tmpl} | 0 ...rk_security_security_profile_test.go.tmpl} | 2 +- ...network_services_edge_cache_origin_test.go | 6 - ...etwork_services_edge_cache_service_test.go | 201 -- .../resource_network_services_gateway_test.go | 1 - ...source_notebooks_instance_gpu_test.go.tmpl | 4 +- ...urce_notebooks_instance_state_test.go.tmpl | 8 +- ... resource_notebooks_instance_test.go.tmpl} | 18 +- ...> resource_notebooks_runtime_test.go.tmpl} | 3 +- ...racle_database_autonomous_database_test.go | 2 +- ...acle_database_autonomous_databases_test.go | 2 +- ...abase_cloud_exadata_infrastructure_test.go | 4 +- ...base_cloud_exadata_infrastructures_test.go | 2 +- ...e_oracle_database_cloud_vm_cluster_test.go | 2 +- ..._oracle_database_cloud_vm_clusters_test.go | 2 +- ...ta_source_oracle_database_db_nodes_test.go | 2 +- ..._source_oracle_database_db_servers_test.go | 2 +- ...org_policy_custom_constraint_test.go.tmpl} | 2 +- ...resource_os_config_os_policy_assignment.go | 8 +- ...olicy_orchestrator_for_folder_test.go.tmpl | 2 - ...rchestrator_for_organization_test.go.tmpl} | 7 +- ...onfig_v2_policy_orchestrator_test.go.tmpl} | 1 + ..._parameter_manager_parameter_test.go.tmpl} | 0 ...ger_parameter_version_render_test.go.tmpl} | 0 ...er_manager_parameter_version_test.go.tmpl} | 2 +- ...parameter_manager_parameters_test.go.tmpl} | 0 ..._parameter_manager_parameter_test.go.tmpl} | 6 +- ...er_manager_parameter_version_test.go.tmpl} | 0 .../parameter_data_json_format.json | 6 - .../parameter_data_yaml_format.yaml | 4 - ...r_manager_regional_parameter_test.go.tmpl} | 0 
...nal_parameter_version_render_test.go.tmpl} | 0 ...r_regional_parameter_version_test.go.tmpl} | 2 +- ..._manager_regional_parameters_test.go.tmpl} | 0 ...r_manager_regional_parameter_test.go.tmpl} | 6 +- ...r_regional_parameter_version_test.go.tmpl} | 0 .../regional_parameter_data_json_format.json | 6 - .../regional_parameter_data_yaml_format.yaml | 4 - .../services/privateca/privateca_utils.go | 79 +- ...rce_privateca_certificate_template_test.go | 341 --- ...d_access_manager_entitlement_test.go.tmpl} | 2 +- .../resource_pubsub_subscription_test.go | 83 - .../pubsub/resource_pubsub_topic_test.go | 48 - .../fw_resource_pubsub_lite_reservation.go | 383 --- ...w_resource_pubsub_lite_reservation_test.go | 56 - .../redis/data_source_redis_cluster.go | 48 - .../redis/data_source_redis_cluster_test.go | 46 - .../redis/data_source_redis_instance.go | 17 - .../redis/data_source_redis_instance_test.go | 1 - ...go => resource_redis_cluster_test.go.tmpl} | 97 +- .../redis/resource_redis_instance_test.go | 96 + .../data_source_google_folder.go | 9 - ... 
=> data_source_google_iam_policy.go.tmpl} | 16 +- .../data_source_google_netblock_ip_ranges.go | 44 +- ...a_source_google_netblock_ip_ranges_test.go | 54 +- .../data_source_google_projects.go | 3 +- .../data_source_google_service_account_key.go | 4 + .../resourcemanager/resource_google_folder.go | 17 - ...ource_google_folder_organization_policy.go | 6 +- .../resource_google_folder_test.go | 4 +- .../resource_google_project.go | 17 +- ...e_google_project_iam_binding_test.go.tmpl} | 4 +- ...resource_google_project_iam_custom_role.go | 6 +- ...ce_google_project_iam_member_test.go.tmpl} | 4 +- ...ce_google_project_iam_policy_test.go.tmpl} | 12 +- ...urce_google_project_organization_policy.go | 6 +- .../resource_google_project_service.go.tmpl | 9 +- ...ce_google_project_service_internal_test.go | 4 + ...source_google_project_service_test.go.tmpl | 6 +- .../resource_google_project_test.go | 17 +- .../resource_google_service_account.go | 71 +- ...e_resource_manager_capability_test.go.tmpl | 76 - .../resource_runtimeconfig_variable.go.tmpl | 2 +- .../resource_saas_runtime_saas_test.go.tmpl | 99 - ...ta_source_secret_manager_secret_version.go | 56 +- ...urce_secret_manager_secret_version_test.go | 46 - ...=> iam_secret_manager_secret_test.go.tmpl} | 2 +- ...source_secret_manager_secret_test.go.tmpl} | 121 +- ...ecret_manager_secret_version_test.go.tmpl} | 20 +- ...cret_manager_regional_secret_test.go.tmpl} | 0 ...cret_manager_regional_secret_test.go.tmpl} | 181 +- ...ager_regional_secret_version_test.go.tmpl} | 0 ..._source_manager_branch_rule_update_test.go | 24 +- ...e_source_manager_repository_update_test.go | 92 - ...le_service_networking_peered_dns_domain.go | 2 +- .../resource_service_networking_connection.go | 8 +- ...rce_site_verification_web_resource_test.go | 2 +- ...=> resource_spanner_database_test.go.tmpl} | 104 +- .../spanner/resource_spanner_instance_test.go | 63 +- .../resource_spanner_schedule_backup_test.go | 173 -- 
.../sql/data_source_sql_database_instances.go | 2 +- .../services/sql/fw_resource_sql_user.go | 507 ---- .../services/sql/fw_resource_sql_user_test.go | 90 - .../resource_sql_database_instance.go.tmpl | 226 +- .../resource_sql_database_instance_meta.yaml | 6 - ...esource_sql_database_instance_test.go.tmpl | 993 +------ .../services/sql/resource_sql_user.go | 5 +- .../services/sql/resource_sql_user_test.go | 3 +- ...ta_source_storage_bucket_object_content.go | 27 - ...urce_storage_bucket_object_content_test.go | 2 - .../fw_resource_storage_notification.go | 325 --- ...fw_storage_notification_state_upgraders.go | 100 - .../storage/iam_storage_bucket_test.go | 16 +- .../iam_storage_managed_folder_test.go | 2 +- .../storage/resource_storage_bucket.go.tmpl | 346 +-- .../resource_storage_bucket_600_migration.go | 556 +--- .../storage/resource_storage_bucket_object.go | 66 +- .../resource_storage_bucket_object_test.go | 295 -- ...o => resource_storage_bucket_test.go.tmpl} | 319 +-- .../storage/resource_storage_notification.go | 196 ++ .../resource_storage_notification_test.go | 16 +- .../resource_storage_object_acl_test.go | 2 +- ...rage_control_folder_intelligence_config.go | 40 + ...ontrol_organization_intelligence_config.go | 40 + ...age_control_project_intelligence_config.go | 40 + ...ontrol_project_intelligence_config_test.go | 2 +- ...ontrol_project_intelligence_config_test.go | 2 +- ...ce_storage_insights_dataset_config_test.go | 331 --- ... 
=> resource_storage_transfer_job.go.tmpl} | 190 +- ...ource_storage_transfer_job_meta.yaml.tmpl} | 6 +- .../resource_storage_transfer_job_test.go | 409 --- ...resource_tags_location_tag_binding.go.tmpl | 1 - .../services/tags/resource_tags_test.go | 70 - .../data_source_tpu_tensorflow_versions.go | 93 + ...ata_source_tpu_tensorflow_versions_test.go | 68 + .../services/tpu/resource_tpu_node_test.go | 58 + .../resource_vertex_ai_deploy_test.go | 229 -- ...source_vertex_ai_rag_engine_config_test.go | 51 - .../resource_vpc_access_connector_test.go | 207 -- ...workbench_instance_shielded_config_test.go | 228 -- ...ench_instance_shielded_config_test.go.tmpl | 228 ++ ... resource_workbench_instance_test.go.tmpl} | 253 +- ... resource_workflows_workflow_test.go.tmpl} | 4 +- ... => terraform-registry-manifest.json.tmpl} | 0 ...ppress.go => common_diff_suppress.go.tmpl} | 15 +- .../terraform/tpgresource/field_helpers.go | 22 - .../tpgresource/self_link_helpers.go | 74 +- .../tpgresource/self_link_helpers_test.go | 7 +- .../terraform/transport/config.go.tmpl | 138 +- .../transport/error_retry_predicates.go | 21 +- .../provider_handwritten_endpoint.go.tmpl | 40 +- .../terraform/transport/transport.go | 4 - .../terraform/verify/validation.go | 48 + .../terraform/verify/validation_test.go | 40 + ...ifact_registry_docker_images.html.markdown | 56 - ...rtifact_registry_npm_package.html.markdown | 65 - .../d/artifact_registry_package.html.markdown | 41 - .../d/artifact_registry_repositories.markdown | 48 - .../d/artifact_registry_tag.html.markdown | 41 - .../d/artifact_registry_tags.html.markdown | 47 - .../d/artifact_registry_version.html.markdown | 51 - .../beyondcorp_security_gateway.html.markdown | 32 - .../docs/d/bigquery_datasets.html.markdown | 39 - .../docs/d/bigquery_table.html.markdown | 36 - .../d/bigtable_table_iam_policy.html.markdown | 4 +- .../docs/d/billing_account.html.markdown | 1 - ...te_manager_dns_authorization.html.markdown | 42 - 
.../d/cloud_run_v2_worker_pool.html.markdown | 37 - .../compute_network_attachment.html.markdown | 38 - .../docs/d/compute_subnetworks.html.markdown | 1 - .../d/container_engine_versions.html.markdown | 1 - .../dataplex_data_quality_rules.html.markdown | 38 - .../docs/d/firestore_document.html.markdown | 43 - .../website/docs/d/folder.html.markdown | 2 - .../docs/d/gke_hub_feature.html.markdown | 30 - .../docs/d/gke_hub_membership.html.markdown | 35 - .../docs/d/kms_autokey_config.html.markdown | 4 + .../docs/d/kms_key_handle.html.markdown | 3 + .../docs/d/kms_key_handles.html.markdown | 3 + .../docs/d/lustre_instance.html.markdown | 32 - .../docs/d/netblock_ip_ranges.html.markdown | 10 +- ...gement_connectivity_test_run.html.markdown | 205 -- .../docs/d/redis_cluster.html.markdown | 38 - ...ecret_manager_secret_version.html.markdown | 5 +- ...torage_bucket_object_content.html.markdown | 5 - ...rage_insights_dataset_config.html.markdown | 36 - .../d/tpu_tensorflow_versions.html.markdown | 4 - .../external_credentials_stacks.html.markdown | 1 + .../sql_instance_switchover.html.markdown | 24 +- .../using_gke_with_terraform.html.markdown | 2 +- .../guides/version_7_upgrade.html.markdown | 315 --- ...stores_aliases_key_cert_file.html.markdown | 2 +- .../docs/r/bigquery_table.html.markdown | 19 +- .../docs/r/bigtable_instance.html.markdown | 2 - .../docs/r/bigtable_table.html.markdown | 2 +- .../docs/r/bigtable_table_iam.html.markdown | 26 +- .../r/cloudbuild_worker_pool.html.markdown | 5 +- .../r/cloudfunctions_function.html.markdown | 8 - .../docs/r/composer_environment.html.markdown | 2 - .../docs/r/compute_instance.html.markdown | 10 +- ...mpute_instance_group_manager.html.markdown | 64 - .../r/compute_instance_template.html.markdown | 15 +- .../r/compute_network_peering.html.markdown | 3 - ...ute_region_instance_template.html.markdown | 13 +- .../r/compute_security_policy.html.markdown | 6 +- .../docs/r/container_cluster.html.markdown | 172 +- 
.../docs/r/container_node_pool.html.markdown | 10 +- .../dataflow_flex_template_job.html.markdown | 2 - .../docs/r/dataproc_cluster.html.markdown | 18 +- .../docs/r/dns_record_set.html.markdown | 1 - .../gke_hub_feature_membership.html.markdown | 14 +- .../docs/r/google_folder.html.markdown | 2 - .../r/google_project_service.html.markdown | 7 +- .../r/logging_organization_sink.html.markdown | 2 +- .../r/sql_database_instance.html.markdown | 87 +- .../website/docs/r/sql_user.html.markdown | 2 +- .../docs/r/storage_bucket.html.markdown | 31 +- .../r/storage_bucket_object.html.markdown | 10 - .../docs/r/storage_transfer_job.html.markdown | 18 +- .../tgc/ancestrymanager/ancestrymanager.go | 19 +- .../ancestrymanager/ancestrymanager_test.go | 51 +- mmv1/third_party/tgc/caiasset/asset.go | 132 - mmv1/third_party/tgc/dcl.go | 3 + .../tgc/resource_converters.go.tmpl | 1 - .../tgc/services/storage/storage_bucket.go | 5 +- ..._access_context_manager_access_policy.json | 6 +- .../tests/data/example_alloydb_instance.tf | 2 - .../example_org_policy_custom_constraint.json | 20 - .../example_org_policy_custom_constraint.tf | 38 - .../tgc/tests/source/environment_test.go | 2 +- .../tgc/tfdata/fake_resource_data_test.go | 2 - mmv1/third_party/tgc_next/Makefile | 38 - mmv1/third_party/tgc_next/go.mod | 125 - mmv1/third_party/tgc_next/go.sum | 431 --- .../tgc_next/pkg/cai2hcl/convert.go | 2 + .../cai2hcl/converters/convert_resource.go | 33 +- .../services/compute/compute_instance.go} | 87 +- .../compute/compute_instance_helpers.go | 330 +++ .../services/resourcemanager/project.go} | 21 +- .../pkg/cai2hcl/converters/utils/utils.go | 84 +- .../cai2hcl/converters/utils/utils_test.go | 126 +- .../tgc_next/pkg/cai2hcl/models/converter.go | 2 +- .../tgc_next/pkg/cai2hcl/models/hcl_block.go | 8 +- .../pkg/services/compute/compute_instance.go | 1612 ----------- .../pkg/services/resourcemanager/project.go | 117 - .../ancestrymanager/ancestrymanager.go | 33 +- 
.../ancestrymanager/ancestryutil.go | 4 +- .../pkg/tfplan2cai/converters/cai/cai.go | 4 +- .../converters/cai/resource_converter.go | 6 +- .../tfplan2cai/converters/convert_resource.go | 2 +- .../services/compute/compute_instance.go} | 271 +- .../services/resourcemanager/project.go} | 8 +- .../fake_resource_data_with_meta_test.go | 151 +- .../resolvers/default_pre_resolver.go | 2 +- .../pkg/tfplan2cai/transport/getconfig.go | 2 +- .../tgc_next/pkg/tgcresource/utils.go | 78 - .../tgc_next/pkg/transport/config_tgc.go | 5 - .../tgc_next/test/assert_test_files.go | 484 ---- mmv1/third_party/tgc_next/test/hcl.go | 164 -- mmv1/third_party/tgc_next/test/hcl_test.go | 180 -- mmv1/third_party/tgc_next/test/setup.go | 249 -- mmv1/third_party/tgc_next/test/utils.go | 104 - mmv1/validate_third_party_test.go | 86 - .../breaking_changes/breaking_changes.go | 3 +- .../breaking_changes/field_diff.go | 61 +- .../breaking_changes/field_diff_test.go | 132 +- .../breaking_changes/mock_schema_diff_test.go | 36 - .../breaking_changes/resource_diff.go | 124 +- .../breaking_changes/resource_diff_test.go | 24 - tools/diff-processor/diff/diff.go | 51 +- tools/diff-processor/diff/diff_test.go | 364 --- tools/diff-processor/diff/sets.go | 97 +- tools/diff-processor/diff/sets_test.go | 5 - tools/issue-labeler/go.mod | 2 +- tools/issue-labeler/go.sum | 4 +- tools/issue-labeler/labeler/backfill.go | 2 +- tools/issue-labeler/labeler/backfill_test.go | 50 +- .../issue-labeler/labeler/enrolled_teams.yml | 18 +- tools/issue-labeler/labeler/github.go | 2 +- tools/issue-labeler/labeler/labels.go | 4 +- tools/issue-labeler/labeler/labels_test.go | 50 +- tools/template-check/cmd/root.go | 43 - tools/template-check/cmd/versionguard.go | 61 - tools/template-check/go.mod | 15 +- tools/template-check/go.sum | 15 - tools/template-check/main.go | 63 +- tpgtools/go.mod | 2 +- tpgtools/go.sum | 4 +- tpgtools/ignored_handwritten/custom_import.go | 8 +- tpgtools/main.go | 1 + 
.../apikeys/beta/tpgtools_product.yaml | 3 - .../samples/key/service_account_key.tf.tmpl | 19 - .../samples/key/service_account_key.yaml | 11 - .../overrides/apikeys/tpgtools_product.yaml | 3 - .../beta/tpgtools_product.yaml | 3 - .../assuredworkloads/tpgtools_product.yaml | 3 - .../cloudbuild/beta/tpgtools_product.yaml | 3 +- .../cloudbuild/tpgtools_product.yaml | 3 +- .../beta/tpgtools_product.yaml | 3 - .../tpgtools_product.yaml | 3 - .../firebaserules/beta/tpgtools_product.yaml | 3 - .../firebaserules/tpgtools_product.yaml | 3 - .../gkehub/beta/tpgtools_product.yaml | 3 +- .../overrides/gkehub/tpgtools_product.yaml | 3 +- .../beta/tpgtools_product.yaml | 3 - .../recaptchaenterprise/tpgtools_product.yaml | 3 - tpgtools/property.go | 2 +- tpgtools/sample.go | 2 +- .../templates/provider_dcl_endpoints.go.tmpl | 105 + tpgtools/templates/resource.go.tmpl | 2 +- tpgtools/templates/serialization.go.tmpl | 4 +- 1668 files changed, 13692 insertions(+), 85877 deletions(-) delete mode 100644 .ci/changelog2.tmpl create mode 100644 .ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl delete mode 100644 .ci/magician/cmd/templates/vcr/post_replay.tmpl delete mode 100644 .ci/magician/cmd/templates/vcr/post_replay_eap.tmpl create mode 100644 .ci/magician/cmd/templates/vcr/test_analytics.tmpl create mode 100644 .ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl create mode 100644 .ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl delete mode 100644 .ci/magician/cmd/test_eap_vcr_test.go delete mode 100644 .ci/magician/github/README.md delete mode 100644 .ci/magician/github/integration_test.go delete mode 100644 .ci/magician/github/interface_conversion.go delete mode 100644 .ci/magician/vcr/tester_test.go delete mode 100644 .ci/release-note2.tmpl delete mode 100644 .github/workflows/basic-pr-checks.yml create mode 100644 .github/workflows/disallow-submodules.yml create mode 100644 .github/workflows/mmv1-check-templates.yml create mode 100644 
.github/workflows/mmv1-lint-product-yaml.yml delete mode 100644 .github/workflows/override-labels.yml delete mode 100644 .github/workflows/unit-test-mmv1.yml delete mode 100644 docs/content/best-practices/validation.md delete mode 100644 mmv1/api/resource/datasource.go delete mode 100644 mmv1/products/apigee/ApiProduct.yaml delete mode 100644 mmv1/products/apigee/SecurityAction.yaml delete mode 100644 mmv1/products/apigee/SecurityMonitoringCondition.yaml delete mode 100644 mmv1/products/apihub/Curation.yaml delete mode 100644 mmv1/products/apihub/Plugin.yaml delete mode 100644 mmv1/products/apihub/PluginInstance.yaml delete mode 100644 mmv1/products/backupdr/ServiceConfig.yaml delete mode 100644 mmv1/products/beyondcorp/SecurityGatewayApplication.yaml delete mode 100644 mmv1/products/bigquery/RowAccessPolicy.yaml delete mode 100644 mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml delete mode 100644 mmv1/products/bigtable/SchemaBundle.yaml delete mode 100644 mmv1/products/cloudrunv2/WorkerPool.yaml delete mode 100644 mmv1/products/compute/FutureReservation.yaml delete mode 100644 mmv1/products/compute/InterconnectAttachmentGroup.yaml delete mode 100644 mmv1/products/compute/InterconnectGroup.yaml delete mode 100644 mmv1/products/compute/PreviewFeature.yaml delete mode 100644 mmv1/products/compute/SnapshotSettings.yaml delete mode 100644 mmv1/products/compute/WireGroup.yaml delete mode 100644 mmv1/products/contactcenterinsights/AnalysisRule.yaml delete mode 100644 mmv1/products/contactcenterinsights/View.yaml delete mode 100644 mmv1/products/contactcenterinsights/product.yaml delete mode 100644 mmv1/products/dataplex/Entry.yaml delete mode 100644 mmv1/products/dataplex/Glossary.yaml delete mode 100644 mmv1/products/dataplex/GlossaryCategory.yaml delete mode 100644 mmv1/products/dataplex/GlossaryTerm.yaml delete mode 100644 mmv1/products/dataproc/SessionTemplate.yaml delete mode 100644 mmv1/products/developerconnect/InsightsConfig.yaml delete mode 
100644 mmv1/products/dialogflow/ConversationProfile.yaml delete mode 100644 mmv1/products/dialogflow/EncryptionSpec.yaml delete mode 100644 mmv1/products/dialogflowcx/GenerativeSettings.yaml delete mode 100644 mmv1/products/dialogflowcx/Generator.yaml delete mode 100644 mmv1/products/dialogflowcx/Playbook.yaml delete mode 100644 mmv1/products/dialogflowcx/Tool.yaml delete mode 100644 mmv1/products/discoveryengine/CmekConfig.yaml delete mode 100644 mmv1/products/discoveryengine/RecommendationEngine.yaml delete mode 100644 mmv1/products/firebaseapphosting/DefaultDomain.yaml delete mode 100644 mmv1/products/firebaseapphosting/Domain.yaml delete mode 100644 mmv1/products/firebaseapphosting/Traffic.yaml delete mode 100644 mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml delete mode 100644 mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml delete mode 100644 mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml delete mode 100644 mmv1/products/iap/ForwardingRuleRegionalService.yaml delete mode 100644 mmv1/products/iap/ForwardingRuleService.yaml delete mode 100644 mmv1/products/managedkafka/Acl.yaml delete mode 100644 mmv1/products/modelarmor/Template.yaml delete mode 100644 mmv1/products/modelarmor/product.yaml delete mode 100644 mmv1/products/modelarmorglobal/Floorsetting.yaml delete mode 100644 mmv1/products/modelarmorglobal/product.yaml create mode 100644 mmv1/products/notebooks/Location.yaml delete mode 100644 mmv1/products/oracledatabase/OdbNetwork.yaml delete mode 100644 mmv1/products/oracledatabase/OdbSubnet.yaml delete mode 100644 mmv1/products/resourcemanager3/Capability.yaml delete mode 100644 mmv1/products/saasservicemgmt/Saas.yaml delete mode 100644 mmv1/products/saasservicemgmt/product.yaml delete mode 100644 mmv1/products/storageinsights/DatasetConfig.yaml create mode 100644 mmv1/products/tpu/Node.yaml rename mmv1/products/{resourcemanager3 => tpu}/product.yaml (80%) delete mode 100644 
mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml delete mode 100644 mmv1/products/vertexai/RagEngineConfig.yaml delete mode 100644 mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/region_security_policy.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/regional_secret_version.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/secret_version.go.tmpl delete mode 100644 mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl rename mmv1/templates/terraform/custom_expand/{deprecated_bool_to_object.go.tmpl => bool_to_object.go.tmpl} (92%) create mode 100644 mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl delete mode 100644 mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl create mode 100644 
mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl delete mode 100644 mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl delete mode 100644 mmv1/templates/terraform/datasource.go.tmpl delete mode 100644 mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl delete mode 100644 mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/decoders/redis_cluster.go.tmpl delete mode 100644 
mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl delete mode 100644 mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl delete mode 100644 mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl delete mode 100644 mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl delete mode 100644 mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl delete mode 100644 mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl delete mode 
100644 mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_dcr_routine.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_log_linked_dataset_query_user.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_marketplace.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_listing_pubsub.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_public_data_exchange.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_analyticshub_public_listing.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigquery_row_access_policy_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/bigtable_schema_bundle.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_pull.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudbuild_trigger_developer_connect_push_branch.tf.tmpl rename mmv1/templates/terraform/examples/{cloudfunctions2_automatic_base_image_update.tf.tmpl => cloudfunctions2_abiu.tf.tmpl} (100%) rename 
mmv1/templates/terraform/examples/{cloudfunctions2_on_deploy_base_image_update.tf.tmpl => cloudfunctions2_abiu_on_deploy.tf.tmpl} (100%) delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_job_gpu.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_job_multicontainer.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_custom_audiences.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_directvpc.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_limits.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_gcs.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_mount_nfs.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_multicontainer.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_secret.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/cloudrunv2_worker_pool_sql.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/compute_interconnect_attachment_custom_ranges.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/compute_region_network_firewall_policy_with_rules_roce.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/compute_wire_group_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/contact_center_insights_analysis_rule_profile.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/contact_center_insights_view_basic.tf.tmpl delete mode 100644 
mmv1/templates/terraform/examples/contact_center_insights_view_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_datascan_basic_discovery.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_datascan_full_discovery.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_datascan_full_quality_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_entry_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_entry_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_category_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_category_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_term_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataplex_glossary_term_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataproc_session_templates_jupyter.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataproc_session_templates_jupyter_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dataproc_session_templates_spark_connect.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/datastream_private_connection_psc_interface.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/developer_connect_insights_config_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflow_conversation_profile_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflow_encryption_spec_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_flow_custom_endpoint.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_generative_settings_full.tf.tmpl delete mode 100644 
mmv1/templates/terraform/examples/dialogflowcx_generator_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_playbook_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_playbook_fulfillment.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_tool_function.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_tool_open_api.tf.tmpl rename mmv1/templates/terraform/examples/{dialogflowcx_webhook_flexible.tf.tmpl => dialogflowcx_webhook_full.tf.tmpl} (60%) delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_flexible.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_webhook_service_directory_standard.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/dialogflowcx_webhook_standard.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/discoveryengine_cmekconfig_default.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/discoveryengine_datastore_document_processing_config_layout_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/discoveryengine_datastore_kms_key_name.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/discoveryengine_recommendationengine_generic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/discoveryengine_recommendationengine_media.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_disabled.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_default_domain_minimal.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_domain_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_domain_minimal.tf.tmpl delete 
mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firebasehosting_site_default.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firestore_database_with_tags.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/firewall_policy_rule_secure_tags.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/forwarding_rule_region_service_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/future_reservation_aggregate_reservation.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/future_reservation_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/gkehub_feature_rbacrolebinding_actuation.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/gkehub_scope_rbac_custom_role_binding_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/health_check_grpc_with_tls.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/health_check_grpc_with_tls_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/iam_workforce_pool_provider_saml_key_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl delete mode 100644 
mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl create mode 100644 
mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/pubsub_subscription_single_smt.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/pubsub_topic_multiple_smts.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/pubsub_topic_single_smt.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/redis_cluster_tls_enabled.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_backend_service_dynamic_forwarding.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_backend_service_ha_policy.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_backend_service_ha_policy_manual_leader.tf.tmpl delete mode 100644 
mmv1/templates/terraform/examples/region_disk_hyperdisk_balanced_ha_write_many.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_health_check_grpc_with_tls.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_health_check_grpc_with_tls_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_network_firewall_policy_roce.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_security_policy_with_advanced_options.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_url_map_default_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_url_map_path_matcher_default_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_url_map_path_matcher_default_route_action.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_url_map_path_rule_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/region_url_map_route_rule_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/regional_parameter_version_with_json_format_with_file.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/regional_parameter_version_with_yaml_format_with_file.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/reservation_basic_beta.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/reservation_sharing_policy.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/reservation_source_instance_template.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/resource_manager_capability.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/saas_runtime_saas_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/secure_source_manager_instance_private_trusted_cert.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/service_attachment_cross_region_ilb.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl delete mode 100644 
mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/snapshot_settings_specific_locations.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/storage_insights_dataset_config_excludes.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/storage_insights_dataset_config_includes.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/target_http_proxy_fingerprint.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/target_https_proxy_fingerprint.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_default_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_http_filter_configs.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_http_filter_metadata.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_path_matcher_default_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_path_rule_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_route_rule_mirror_percent.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_test_expected_output_url.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_test_headers.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/url_map_test_redirect_response_code.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_huggingface_model.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_parallel.tf.tmpl delete mode 100644 
mmv1/templates/terraform/examples/vertex_ai_deploy_multiple_models_in_sequence.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_deploy_with_configs.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_scaled.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vertex_ai_rag_engine_config_unprovisioned.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/workbench_instance_euc.tf.tmpl delete mode 100644 mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl create mode 100644 mmv1/templates/terraform/post_create/colab_runtime_template.go.tmpl create mode 100644 mmv1/templates/terraform/post_create/gcip_tenant.go.tmpl create mode 100644 mmv1/templates/terraform/post_create/group.tmpl delete mode 100644 mmv1/templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl create mode 100644 mmv1/templates/terraform/post_create/iap_client.go.tmpl rename mmv1/templates/terraform/{update_encoder/future_reservation.go.tmpl => post_create/index.go.tmpl} (63%) create mode 100644 mmv1/templates/terraform/post_create/lien.tmpl delete mode 100644 mmv1/templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl delete mode 100644 mmv1/templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl delete mode 100644 mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl delete mode 100644 mmv1/templates/terraform/post_delete/dialogflowcx_agent.go.tmpl delete mode 100644 mmv1/templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl delete mode 100644 mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl delete mode 100644 
mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl create mode 100644 mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl create mode 100644 mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl delete mode 100644 
mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl delete mode 100644 mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl delete mode 100644 mmv1/templates/terraform/product.go.tmpl delete mode 100644 mmv1/templates/terraform/resource_fw.go.tmpl delete mode 100644 mmv1/templates/terraform/schema_property_fw.go.tmpl delete mode 100644 mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl delete mode 100644 mmv1/templates/terraform/update_mask_fw.go.tmpl create mode 100644 mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl create mode 100644 mmv1/templates/terraform/validation/secret_version.go.tmpl delete mode 100644 mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl delete mode 100644 mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl delete mode 100644 mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl delete mode 100644 mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl delete mode 100644 mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl delete mode 100644 mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl delete mode 100644 mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl delete mode 100644 mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl delete mode 100644 mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl delete mode 100644 mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl delete mode 100644 mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl delete mode 100644 
mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl delete mode 100644 mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl delete mode 100644 mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl delete mode 100644 mmv1/templates/tgc_next/services/resource.go.tmpl delete mode 100644 mmv1/templates/tgc_next/test/test_file.go.tmpl delete mode 100644 mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl delete mode 100644 mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl delete mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go delete mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go delete mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json delete mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf delete mode 100644 mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt delete mode 100644 mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt delete mode 100644 mmv1/third_party/terraform/acctest/diff_utils.go delete mode 100644 mmv1/third_party/terraform/acctest/diff_utils_test.go delete mode 100644 mmv1/third_party/terraform/acctest/resource_inventory_reader.go delete mode 100644 mmv1/third_party/terraform/acctest/resource_inventory_test.go delete mode 100644 mmv1/third_party/terraform/fwresource/framework_import.go delete mode 100644 mmv1/third_party/terraform/fwresource/framework_import_test.go rename mmv1/third_party/terraform/provider/{provider_validators.go => provider_validators.go.tmpl} (99%) delete mode 100644 mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go delete mode 100644 mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go delete mode 100644 mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go delete mode 100644 
mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go create mode 100644 mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go delete mode 100644 mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go delete mode 100644 mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go delete mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go delete mode 100644 
mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go delete mode 100644 mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go delete mode 100644 mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go rename mmv1/third_party/terraform/services/backupdr/{resource_backup_dr_backup_vault_test.go => resource_backup_dr_backup_vault_test.go.tmpl} (85%) delete mode 100644 mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go delete mode 100644 mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go create mode 100644 mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go delete mode 100644 mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go delete mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go delete mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go delete mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go delete mode 100644 mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go delete mode 100644 mmv1/third_party/terraform/services/bigquery/resource_bigquery_row_access_policy_test.go delete mode 100644 mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchange_test.go delete mode 100644 mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_dataexchangesubscription_test.go.tmpl rename mmv1/third_party/terraform/services/bigqueryreservation/{resource_bigquery_reservation_test.go.tmpl => resource_bigquery_reservation_test.go} (51%) delete mode 100644 mmv1/third_party/terraform/services/bigtable/resource_bigtable_schema_bundle_test.go delete 
mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.pb delete mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/proto_schema_bundle.proto delete mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.pb delete mode 100644 mmv1/third_party/terraform/services/bigtable/test-fixtures/updated_proto_schema_bundle.proto rename mmv1/third_party/terraform/services/binaryauthorization/{resource_binary_authorization_policy_test.go => resource_binary_authorization_policy_test.go.tmpl} (98%) delete mode 100644 mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization.go delete mode 100644 mmv1/third_party/terraform/services/certificatemanager/data_source_google_certificate_manager_dns_authorization_test.go rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_data_access_label_test.go => resource_chronicle_data_access_label_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_data_access_scope_test.go => resource_chronicle_data_access_scope_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_reference_list_test.go => resource_chronicle_reference_list_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_rule_deployment_test.go => resource_chronicle_rule_deployment_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_rule_test.go => resource_chronicle_rule_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/chronicle/{resource_chronicle_watchlist_test.go => resource_chronicle_watchlist_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/cloudasset/{data_source_google_cloud_asset_search_all_resources.go => data_source_google_cloud_asset_search_all_resources.go.tmpl} (94%) rename 
mmv1/third_party/terraform/services/cloudasset/{data_source_google_cloud_asset_search_all_resources_test.go => data_source_google_cloud_asset_search_all_resources_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/cloudidentity/{data_source_cloud_identity_group_lookup.go => data_source_cloud_identity_group_lookup.go.tmpl} (100%) rename mmv1/third_party/terraform/services/{storageinsights/data_source_storage_insights_dataset_config.go => cloudrun/data_source_cloud_run_service.go} (53%) delete mode 100644 mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go delete mode 100644 mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go delete mode 100644 mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl rename mmv1/third_party/terraform/services/cloudtasks/{resource_cloud_tasks_queue_test.go => resource_cloud_tasks_queue_test.go.tmpl} (83%) rename mmv1/third_party/terraform/services/composer/{data_source_google_composer_user_workloads_config_map_test.go => data_source_google_composer_user_workloads_config_map_test.go.tmpl} (74%) rename mmv1/third_party/terraform/services/composer/{data_source_google_composer_user_workloads_secret_test.go => data_source_google_composer_user_workloads_secret_test.go.tmpl} (75%) rename mmv1/third_party/terraform/services/composer/{resource_composer_environment_test.go => resource_composer_environment_test.go.tmpl} (61%) rename mmv1/third_party/terraform/services/composer/{resource_composer_user_workloads_config_map_test.go => resource_composer_user_workloads_config_map_test.go.tmpl} (67%) delete mode 100644 mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go create mode 100644 mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl rename mmv1/third_party/terraform/services/compute/{compute_instance_helpers_test.go => 
compute_instance_helpers_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_images.go => data_source_google_compute_images.go.tmpl} (100%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_instance.go => data_source_google_compute_instance.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_instance_group_test.go => data_source_google_compute_instance_group_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_instance_test.go => data_source_google_compute_instance_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_network.go => data_source_google_compute_network.go.tmpl} (98%) delete mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go delete mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_region_instance_group_test.go => data_source_google_compute_region_instance_group_test.go.tmpl} (97%) rename mmv1/third_party/terraform/services/compute/{data_source_google_compute_resource_policy.go => data_source_google_compute_resource_policy.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_policy_rule_test.go => resource_compute_firewall_policy_rule_test.go.tmpl} (70%) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_policy_test.go => resource_compute_firewall_policy_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_policy_with_rules_test.go 
=> resource_compute_firewall_policy_with_rules_test.go.tmpl} (52%) rename mmv1/third_party/terraform/services/compute/{resource_compute_firewall_test.go => resource_compute_firewall_test.go.tmpl} (88%) delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl rename mmv1/third_party/terraform/services/compute/{resource_compute_global_address_test.go => resource_compute_global_address_test.go.tmpl} (93%) rename mmv1/third_party/terraform/services/compute/{resource_compute_global_network_endpoint_test.go => resource_compute_global_network_endpoint_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_instance_settings_test.go => resource_compute_instance_settings_test.go.tmpl} (95%) delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go rename mmv1/third_party/terraform/services/compute/{resource_compute_network_endpoint_group_test.go => resource_compute_network_endpoint_group_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_firewall_policy_rule_test.go => resource_compute_network_firewall_policy_rule_test.go.tmpl} (84%) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_firewall_policy_with_rules_test.go => resource_compute_network_firewall_policy_with_rules_test.go.tmpl} (93%) rename mmv1/third_party/terraform/services/compute/{resource_compute_node_group_test.go => resource_compute_node_group_test.go.tmpl} (100%) rename 
mmv1/third_party/terraform/services/compute/{resource_compute_per_instance_config_test.go => resource_compute_per_instance_config_test.go.tmpl} (99%) delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go rename mmv1/third_party/terraform/services/compute/{resource_compute_region_network_firewall_policy_rule_test.go => resource_compute_region_network_firewall_policy_rule_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_network_firewall_policy_with_rules_test.go => resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl} (94%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_per_instance_config_test.go => resource_compute_region_per_instance_config_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_target_http_proxy_test.go => resource_compute_region_target_http_proxy_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_target_tcp_proxy_test.go => resource_compute_region_target_tcp_proxy_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_region_url_map_test.go => resource_compute_region_url_map_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_nat_address_test.go => resource_compute_router_nat_address_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_nat_test.go => resource_compute_router_nat_test.go.tmpl} (79%) rename mmv1/third_party/terraform/services/compute/{resource_compute_router_route_policy_test.go => resource_compute_router_route_policy_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/compute/{resource_compute_security_policy_rule_test.go => 
resource_compute_security_policy_rule_test.go.tmpl} (94%) rename mmv1/third_party/terraform/services/compute/{resource_compute_service_attachment_test.go => resource_compute_service_attachment_test.go.tmpl} (94%) delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go delete mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go delete mode 100644 mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go rename mmv1/third_party/terraform/services/dataflow/{resource_dataflow_job.go => resource_dataflow_job.go.tmpl} (95%) delete mode 100644 mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules.go delete mode 100644 mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_meta.yaml delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_entry_test.go delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_category_test.go delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_term_test.go delete mode 100644 mmv1/third_party/terraform/services/dataplex/resource_dataplex_glossary_test.go.tmpl rename mmv1/third_party/terraform/services/dataproc/{resource_dataproc_cluster_test.go => resource_dataproc_cluster_test.go.tmpl} (92%) rename mmv1/third_party/terraform/services/dataproc/{resource_dataproc_job.go => resource_dataproc_job.go.tmpl} (99%) delete mode 100644 mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go rename 
mmv1/third_party/terraform/services/dataprocmetastore/{data_source_dataproc_metastore_service_test.go => data_source_dataproc_metastore_service_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/dataprocmetastore/{dataproc_metastore_service_diff_supress.go => dataproc_metastore_service_diff_supress.go.tmpl} (100%) rename mmv1/third_party/terraform/services/dataprocmetastore/{resource_dataproc_metastore_service_test.go => resource_dataproc_metastore_service_test.go.tmpl} (74%) delete mode 100644 mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go create mode 100644 mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl rename mmv1/third_party/terraform/services/developerconnect/{resource_developer_connect_connection_test.go => resource_developer_connect_connection_test.go.tmpl} (83%) delete mode 100644 mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go delete mode 100644 mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_agent_test.go => resource_dialogflow_agent_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_entity_type_test.go => resource_dialogflow_entity_type_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_fulfillment_test.go => resource_dialogflow_fulfillment_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/dialogflow/{resource_dialogflow_intent_test.go => resource_dialogflow_intent_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go delete mode 100644 
mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go rename mmv1/third_party/terraform/services/dialogflowcx/{resource_dialogflowcx_agent_test.go => resource_dialogflowcx_agent_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_generative_settings_test.go delete mode 100644 mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_cmek_config_test.go delete mode 100644 mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_recommendation_engine_test.go rename mmv1/third_party/terraform/services/dns/{data_source_dns_key_test.go => data_source_dns_key_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/dns/{data_source_dns_record_set_test.go => data_source_dns_record_set_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go create mode 100644 mmv1/third_party/terraform/services/dns/resource_dns_policy_test.go.tmpl rename mmv1/third_party/terraform/services/filestore/{resource_filestore_instance_test.go.tmpl => resource_filestore_instance_test.go} (63%) delete mode 100644 mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go delete mode 100644 mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go delete mode 100644 mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go rename mmv1/third_party/terraform/services/firestore/{resource_firestore_database_update_test.go => resource_firestore_database_update_test.go.tmpl} (83%) rename mmv1/third_party/terraform/services/gemini/{iam_gemini_repository_group_test.go => iam_gemini_repository_group_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_code_repository_index_test.go => resource_gemini_code_repository_index_test.go.tmpl} (100%) rename 
mmv1/third_party/terraform/services/gemini/{resource_gemini_code_tools_setting_binding_test.go => resource_gemini_code_tools_setting_binding_test.go.tmpl} (97%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_code_tools_setting_test.go => resource_gemini_code_tools_setting_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_data_sharing_with_google_setting_binding_test.go => resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl} (95%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_data_sharing_with_google_setting_test.go => resource_gemini_data_sharing_with_google_setting_test.go.tmpl} (97%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_gemini_gcp_enablement_setting_binding_test.go => resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl} (92%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_gemini_gcp_enablement_setting_test.go => resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl} (95%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_logging_setting_binding_test.go => resource_gemini_logging_setting_binding_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_release_channel_setting_binding_test.go => resource_gemini_release_channel_setting_binding_test.go.tmpl} (96%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_release_channel_setting_test.go => resource_gemini_release_channel_setting_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/gemini/{resource_gemini_repository_group_test.go => resource_gemini_repository_group_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_backup_channel_test.go => resource_gke_backup_backup_channel_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_backup_plan_test.go => 
resource_gke_backup_backup_plan_test.go.tmpl} (90%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_restore_channel_test.go => resource_gke_backup_restore_channel_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/gkebackup/{resource_gke_backup_restore_plan_test.go => resource_gke_backup_restore_plan_test.go.tmpl} (99%) delete mode 100644 mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go delete mode 100644 mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go rename mmv1/third_party/terraform/services/gkehub2/{iam_gke_hub_feature_test.go => iam_gke_hub_feature_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/gkeonprem/{gkeonprem_operation.go => gkeonprem_operation.go.tmpl} (97%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_bare_metal_cluster_test.go => resource_gkeonprem_bare_metal_cluster_test.go.tmpl} (79%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_bare_metal_node_pool_test.go => resource_gkeonprem_bare_metal_node_pool_test.go.tmpl} (81%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_vmware_cluster_test.go => resource_gkeonprem_vmware_cluster_test.go.tmpl} (77%) rename mmv1/third_party/terraform/services/gkeonprem/{resource_gkeonprem_vmware_node_pool_test.go => resource_gkeonprem_vmware_node_pool_test.go.tmpl} (81%) rename mmv1/third_party/terraform/services/iam2/{resource_iam_deny_policy_test.go => resource_iam_deny_policy_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_folders_policy_binding_test.go => resource_iam_folders_policy_binding_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_organizations_policy_binding_test.go => resource_iam_organizations_policy_binding_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_principal_access_boundary_policy_test.go => 
resource_iam_principal_access_boundary_policy_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/iam3/{resource_iam_projects_policy_binding_test.go => resource_iam_projects_policy_binding_test.go.tmpl} (96%) rename mmv1/third_party/terraform/services/iambeta/{data_source_iam_workload_identity_pool_provider_test.go => data_source_iam_workload_identity_pool_provider_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/iambeta/{data_source_iam_workload_identity_pool_test.go => data_source_iam_workload_identity_pool_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_id_test.go => resource_iam_workload_identity_pool_id_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl rename mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_provider_id_test.go => resource_iam_workload_identity_pool_provider_id_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/iambeta/{resource_iam_workload_identity_pool_provider_test.go => resource_iam_workload_identity_pool_provider_test.go.tmpl} (99%) delete mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem delete mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem delete mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem delete mode 100644 mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem rename 
mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_oauth_client_credential_test.go => resource_iam_oauth_client_credential_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_oauth_client_test.go => resource_iam_oauth_client_test.go.tmpl} (99%) delete mode 100644 mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_workforce_pool_test.go => resource_iam_workforce_pool_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_workforce_pool_workforce_pool_id_test.go => resource_iam_workforce_pool_workforce_pool_id_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/iamworkforcepool/{resource_iam_workforce_pool_workforce_pool_provider_id_test.go => resource_iam_workforce_pool_workforce_pool_provider_id_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go create mode 100644 mmv1/third_party/terraform/services/iap/data_source_iap_client.go create mode 100644 mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_auotokey_config.go => data_source_google_kms_auotokey_config.go.tmpl} (94%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_auotokey_config_test.go => data_source_google_kms_auotokey_config_test.go.tmpl} (95%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_key_handle.go => data_source_google_kms_key_handle.go.tmpl} (91%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_key_handle_test.go => data_source_google_kms_key_handle_test.go.tmpl} (96%) rename mmv1/third_party/terraform/services/kms/{data_source_google_kms_key_handles_test.go => 
data_source_google_kms_key_handles_test.go.tmpl} (97%) delete mode 100644 mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go delete mode 100644 mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go delete mode 100644 mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go rename mmv1/third_party/terraform/services/managedkafka/{resource_managed_kafka_cluster_test.go => resource_managed_kafka_cluster_test.go.tmpl} (63%) rename mmv1/third_party/terraform/services/managedkafka/{resource_managed_kafka_topic_test.go => resource_managed_kafka_topic_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go delete mode 100644 mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go rename mmv1/third_party/terraform/services/netapp/{resource_netapp_volume_test.go.tmpl => resource_netapp_volume_test.go} (76%) delete mode 100644 mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go delete mode 100644 mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_backend_authentication_config_test.go => resource_network_security_backend_authentication_config_test.go.tmpl} (92%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_deployment_test.go => resource_network_security_intercept_deployment_generated_test.go.tmpl} (74%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_deployment_group_test.go => resource_network_security_intercept_deployment_group_generated_test.go.tmpl} (90%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_endpoint_group_association_test.go => 
resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl} (80%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_intercept_endpoint_group_test.go => resource_network_security_intercept_endpoint_group_generated_test.go.tmpl} (75%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_security_profile_group_test.go => resource_network_security_security_profile_group_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/networksecurity/{resource_network_security_security_profile_test.go => resource_network_security_security_profile_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/notebooks/{resource_notebooks_instance_test.go => resource_notebooks_instance_test.go.tmpl} (89%) rename mmv1/third_party/terraform/services/notebooks/{resource_notebooks_runtime_test.go => resource_notebooks_runtime_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/orgpolicy/{resource_org_policy_custom_constraint_test.go => resource_org_policy_custom_constraint_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/osconfigv2/{resource_os_config_v2_policy_orchestrator_for_organization_test.go => resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl} (96%) rename mmv1/third_party/terraform/services/osconfigv2/{resource_os_config_v2_policy_orchestrator_test.go => resource_os_config_v2_policy_orchestrator_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameter_test.go => data_source_parameter_manager_parameter_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameter_version_render_test.go => data_source_parameter_manager_parameter_version_render_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameter_version_test.go => 
data_source_parameter_manager_parameter_version_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/parametermanager/{data_source_parameter_manager_parameters_test.go => data_source_parameter_manager_parameters_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/parametermanager/{resource_parameter_manager_parameter_test.go => resource_parameter_manager_parameter_test.go.tmpl} (95%) rename mmv1/third_party/terraform/services/parametermanager/{resource_parameter_manager_parameter_version_test.go => resource_parameter_manager_parameter_version_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json delete mode 100644 mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameter_test.go => data_source_parameter_manager_regional_parameter_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameter_version_render_test.go => data_source_parameter_manager_regional_parameter_version_render_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameter_version_test.go => data_source_parameter_manager_regional_parameter_version_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/parametermanagerregional/{data_source_parameter_manager_regional_parameters_test.go => data_source_parameter_manager_regional_parameters_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/parametermanagerregional/{resource_parameter_manager_regional_parameter_test.go => resource_parameter_manager_regional_parameter_test.go.tmpl} (96%) rename mmv1/third_party/terraform/services/parametermanagerregional/{resource_parameter_manager_regional_parameter_version_test.go => 
resource_parameter_manager_regional_parameter_version_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json delete mode 100644 mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml rename mmv1/third_party/terraform/services/privilegedaccessmanager/{resource_privileged_access_manager_entitlement_test.go => resource_privileged_access_manager_entitlement_test.go.tmpl} (98%) delete mode 100644 mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go delete mode 100644 mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go delete mode 100644 mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go delete mode 100644 mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go rename mmv1/third_party/terraform/services/redis/{resource_redis_cluster_test.go => resource_redis_cluster_test.go.tmpl} (94%) rename mmv1/third_party/terraform/services/resourcemanager/{data_source_google_iam_policy.go => data_source_google_iam_policy.go.tmpl} (97%) rename mmv1/third_party/terraform/services/resourcemanager/{resource_google_project_iam_binding_test.go => resource_google_project_iam_binding_test.go.tmpl} (99%) rename mmv1/third_party/terraform/services/resourcemanager/{resource_google_project_iam_member_test.go => resource_google_project_iam_member_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/resourcemanager/{resource_google_project_iam_policy_test.go => resource_google_project_iam_policy_test.go.tmpl} (97%) delete mode 100644 mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl rename 
mmv1/third_party/terraform/services/secretmanager/{iam_secret_manager_secret_test.go => iam_secret_manager_secret_test.go.tmpl} (98%) rename mmv1/third_party/terraform/services/secretmanager/{resource_secret_manager_secret_test.go => resource_secret_manager_secret_test.go.tmpl} (92%) rename mmv1/third_party/terraform/services/secretmanager/{resource_secret_manager_secret_version_test.go => resource_secret_manager_secret_version_test.go.tmpl} (94%) rename mmv1/third_party/terraform/services/secretmanagerregional/{iam_secret_manager_regional_secret_test.go => iam_secret_manager_regional_secret_test.go.tmpl} (100%) rename mmv1/third_party/terraform/services/secretmanagerregional/{resource_secret_manager_regional_secret_test.go => resource_secret_manager_regional_secret_test.go.tmpl} (87%) rename mmv1/third_party/terraform/services/secretmanagerregional/{resource_secret_manager_regional_secret_version_test.go => resource_secret_manager_regional_secret_version_test.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go rename mmv1/third_party/terraform/services/spanner/{resource_spanner_database_test.go => resource_spanner_database_test.go.tmpl} (86%) delete mode 100644 mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go delete mode 100644 mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go delete mode 100644 mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go delete mode 100644 mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go rename mmv1/third_party/terraform/services/storage/{resource_storage_bucket_test.go => resource_storage_bucket_test.go.tmpl} (87%) create mode 100644 mmv1/third_party/terraform/services/storage/resource_storage_notification.go create mode 100644 mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go create 
mode 100644 mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go create mode 100644 mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go delete mode 100644 mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job.go => resource_storage_transfer_job.go.tmpl} (90%) rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job_meta.yaml => resource_storage_transfer_job_meta.yaml.tmpl} (92%) create mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go create mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go create mode 100644 mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go delete mode 100644 mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go delete mode 100644 mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go delete mode 100644 mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go create mode 100644 mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl rename mmv1/third_party/terraform/services/workbench/{resource_workbench_instance_test.go => resource_workbench_instance_test.go.tmpl} (79%) rename mmv1/third_party/terraform/services/workflows/{resource_workflows_workflow_test.go => resource_workflows_workflow_test.go.tmpl} (99%) rename mmv1/third_party/terraform/{terraform-registry-manifest.json => terraform-registry-manifest.json.tmpl} (100%) rename mmv1/third_party/terraform/tpgresource/{common_diff_suppress.go => common_diff_suppress.go.tmpl} (88%) delete mode 100644 
mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/bigquery_table.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown delete mode 100644 
mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown delete mode 100644 mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown delete mode 100644 mmv1/third_party/tgc/caiasset/asset.go create mode 100644 mmv1/third_party/tgc/dcl.go delete mode 100644 mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json delete mode 100644 mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf delete mode 100644 mmv1/third_party/tgc_next/Makefile delete mode 100644 mmv1/third_party/tgc_next/go.mod delete mode 100644 mmv1/third_party/tgc_next/go.sum rename mmv1/third_party/tgc_next/pkg/{services/compute/compute_instance_cai2hcl.go => cai2hcl/converters/services/compute/compute_instance.go} (65%) create mode 100644 mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go rename mmv1/third_party/tgc_next/pkg/{services/resourcemanager/project_cai2hcl.go => cai2hcl/converters/services/resourcemanager/project.go} (69%) delete mode 100644 mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go delete mode 100644 mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go rename mmv1/third_party/tgc_next/pkg/{services/compute/compute_instance_tfplan2cai.go => tfplan2cai/converters/services/compute/compute_instance.go} (59%) rename mmv1/third_party/tgc_next/pkg/{services/resourcemanager/project_tfplan2cai.go => tfplan2cai/converters/services/resourcemanager/project.go} (93%) delete mode 100644 mmv1/third_party/tgc_next/pkg/tgcresource/utils.go delete mode 100644 mmv1/third_party/tgc_next/pkg/transport/config_tgc.go delete mode 100644 mmv1/third_party/tgc_next/test/assert_test_files.go delete mode 100644 mmv1/third_party/tgc_next/test/hcl.go delete mode 100644 mmv1/third_party/tgc_next/test/hcl_test.go delete mode 100644 mmv1/third_party/tgc_next/test/setup.go delete mode 100644 mmv1/third_party/tgc_next/test/utils.go delete mode 100644 
mmv1/validate_third_party_test.go delete mode 100644 tools/diff-processor/breaking_changes/mock_schema_diff_test.go delete mode 100644 tools/template-check/cmd/root.go delete mode 100644 tools/template-check/cmd/versionguard.go delete mode 100644 tools/template-check/go.sum delete mode 100644 tpgtools/overrides/apikeys/beta/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl delete mode 100755 tpgtools/overrides/apikeys/samples/key/service_account_key.yaml delete mode 100644 tpgtools/overrides/apikeys/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/assuredworkloads/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/firebaserules/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml delete mode 100644 tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml create mode 100644 tpgtools/templates/provider_dcl_endpoints.go.tmpl diff --git a/.ci/README.md b/.ci/README.md index ad5fd3dd5276..08160b06ca23 100644 --- a/.ci/README.md +++ b/.ci/README.md @@ -22,7 +22,7 @@ The sync branches allow downstream generation for each downstream to wait until Run the following command to verify what commits the sync branches are pointing to: ``` -git fetch origin && git rev-parse origin/tpg-sync origin/tpgb-sync origin/tf-oics-sync origin/tgc-sync +`git fetch origin && git rev-parse origin/tpg-sync origin/tpgb-sync origin/tf-oics-sync origin/tgc-sync` ``` ### Transient GitHub failures diff --git a/.ci/changelog2.tmpl b/.ci/changelog2.tmpl deleted file mode 100644 index cea067f2a8d9..000000000000 --- a/.ci/changelog2.tmpl +++ 
/dev/null @@ -1,50 +0,0 @@ -{{- if .NotesByType.unknown -}} -UNKNOWN CHANGELOG TYPE: -{{range .NotesByType.unknown -}} -* {{ template "note" .}} -{{ end -}} -{{- end -}} - -{{- if .NotesByType.note -}} -NOTES: -{{range .NotesByType.note -}} -* {{ template "note" .}} -{{ end -}} -{{- end -}} - -{{- if .NotesByType.deprecation -}} -DEPRECATIONS: -{{range .NotesByType.deprecation -}} -* {{ template "note" .}} -{{ end -}} -{{- end -}} - -{{- if index .NotesByType "breaking-change" -}} -BREAKING CHANGES: -{{range index .NotesByType "breaking-change" -}} -* {{ template "note" .}} -{{ end -}} -{{- end -}} - -{{- $features := combineTypes .NotesByType.feature (index .NotesByType "new-resource" ) (index .NotesByType "new-datasource") (index .NotesByType "new-data-source") (index .NotesByType "new-function" ) (index .NotesByType "new-ephemeral" ) -}} -{{- if $features }} -FEATURES: -{{range $features | sort -}} -* {{ template "note" . }} -{{ end -}} -{{- end -}} - -{{- $improvements := combineTypes .NotesByType.improvement .NotesByType.enhancement -}} -{{- if $improvements }} -IMPROVEMENTS: -{{range $improvements | sort -}} -* {{ template "note" . }} -{{ end -}} -{{- end -}} - -{{- if .NotesByType.bug }} -BUG FIXES: -{{range .NotesByType.bug -}} -* {{ template "note" . 
}} -{{ end -}} -{{- end -}} \ No newline at end of file diff --git a/.ci/containers/build-environment/Dockerfile b/.ci/containers/build-environment/Dockerfile index ab6ad04fdd31..93e0c125c4ab 100644 --- a/.ci/containers/build-environment/Dockerfile +++ b/.ci/containers/build-environment/Dockerfile @@ -25,7 +25,7 @@ ENV GO111MODULE "on" ENV LC_ALL=C.UTF-8 ENV LANG=C.UTF-8 -RUN apt-get update && apt-get install -y --no-install-recommends git openssh-client apt-transport-https ca-certificates curl netbase wget gcc make jq libjq1 unzip zip +RUN apt-get update && apt-get install -y --no-install-recommends git openssh-client apt-transport-https ca-certificates curl netbase wget gcc make jq libjq1 RUN git config --global user.name "Modular Magician" RUN git config --global user.email "magic-modules@google.com" diff --git a/.ci/containers/go-plus/Dockerfile b/.ci/containers/go-plus/Dockerfile index b8348b3a61ea..dbe2c2e672a9 100644 --- a/.ci/containers/go-plus/Dockerfile +++ b/.ci/containers/go-plus/Dockerfile @@ -21,7 +21,7 @@ COPY --from=builder /go/pkg/mod /go/pkg/mod COPY --from=builder /go/cache /go/cache RUN apt-get update && \ - apt-get install -y git jq unzip zip parallel curl && \ + apt-get install -y git jq unzip parallel curl && \ echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && \ apt-get update -y && \ @@ -32,4 +32,4 @@ RUN apt-get update && \ RUN wget https://releases.hashicorp.com/terraform/1.11.0/terraform_1.11.0_linux_amd64.zip \ && unzip terraform_1.11.0_linux_amd64.zip \ && rm terraform_1.11.0_linux_amd64.zip \ - && mv ./terraform /bin/terraform \ No newline at end of file + && mv ./terraform /bin/terraform diff --git a/.ci/infra/terraform/README.md b/.ci/infra/terraform/README.md index 
bc93932c0749..a600ed2534cc 100644 --- a/.ci/infra/terraform/README.md +++ b/.ci/infra/terraform/README.md @@ -12,8 +12,8 @@ Prerequisites: - A BeyondCorp subscription on the organization After applying this configuration: -- (Internal setup) Enable stubbed calls for GKE MultiCloud resources -- (Internal setup) Verify ownership of `hashicorptest.com` for new service account +- (Internal only) Enable stubbed calls for GKE MultiCloud resources +- (Internal only) Verify ownership of `hashicorptest.com` for new service account - Enable Media CDN - Enable Access Boundary permissions - Enable BigQuery Table IAM conditions @@ -73,4 +73,3 @@ Quotas that will need to be adjusted to support all tests: - compute.googleapis.com/c2_cpus (us-central1) - compute.googleapis.com/n2_cpus (us-central1) to 36+ - VMware Engine standard 72 vCPUs nodes per region - southamerica-east1 to 21 -- logging.googleapis.com/log_buckets_count to 200 diff --git a/.ci/infra/terraform/main.tf b/.ci/infra/terraform/main.tf index 5bdd941a1d67..3e5c829db476 100644 --- a/.ci/infra/terraform/main.tf +++ b/.ci/infra/terraform/main.tf @@ -306,7 +306,6 @@ module "project-services" { "migrationcenter.googleapis.com", "ml.googleapis.com", "mobilecrashreporting.googleapis.com", - "modelarmor.googleapis.com", "monitoring.googleapis.com", "multiclustermetering.googleapis.com", "netapp.googleapis.com", @@ -334,7 +333,6 @@ module "project-services" { "resourceviews.googleapis.com", "run.googleapis.com", "runtimeconfig.googleapis.com", - "saasservicemgmt.googleapis.com", "secretmanager.googleapis.com", "securesourcemanager.googleapis.com", "securetoken.googleapis.com", diff --git a/.ci/magician/cmd/check_cassettes.go b/.ci/magician/cmd/check_cassettes.go index bbf94d800933..74b5ec3ce3d6 100644 --- a/.ci/magician/cmd/check_cassettes.go +++ b/.ci/magician/cmd/check_cassettes.go @@ -130,7 +130,7 @@ func execCheckCassettes(commit string, vt *vcr.Tester, ctlr *source.Controller) return fmt.Errorf("error uploading logs: 
%w", err) } fmt.Println(len(result.FailedTests), " failed tests: ", result.FailedTests) - // TODO report these failures to bigquery + // TODO(trodge) report these failures to bigquery fmt.Println(len(result.PassedTests), " passed tests: ", result.PassedTests) fmt.Println(len(result.SkippedTests), " skipped tests: ", result.SkippedTests) diff --git a/.ci/magician/cmd/collect_nightly_test_status.go b/.ci/magician/cmd/collect_nightly_test_status.go index ad20ddf56213..8882c6299d3e 100644 --- a/.ci/magician/cmd/collect_nightly_test_status.go +++ b/.ci/magician/cmd/collect_nightly_test_status.go @@ -30,8 +30,7 @@ import ( ) const ( - nightlyDataBucket = "nightly-test-data" - tcTimeFormat = "20060102T150405Z0700" + NightlyDataBucket = "nightly-test-data" ) var cntsRequiredEnvironmentVariables = [...]string{ @@ -39,16 +38,11 @@ var cntsRequiredEnvironmentVariables = [...]string{ } type TestInfo struct { - Name string `json:"name"` - Status string `json:"status"` - Service string `json:"service"` - ErrorMessage string `json:"error_message"` - LogLink string `json:"log_link"` - ProviderVersion string `json:"provider_version"` - QueuedDate time.Time `json:"queued_date"` - StartDate time.Time `json:"start_date"` - FinishDate time.Time `json:"finish_date"` - Duration int `json:"duration"` + Name string `json:"name"` + Status string `json:"status"` + Service string `json:"service"` + ErrorMessage string `json:"error_message"` + LogLink string `json"log_link` } // collectNightlyTestStatusCmd represents the collectNightlyTestStatus command @@ -93,8 +87,8 @@ var collectNightlyTestStatusCmd = &cobra.Command{ // check if a specific date is provided if customDate != "" { parsedDate, err := time.Parse("2006-01-02", customDate) // input format YYYY-MM-DD - // Set the time to 7pm PT - date = time.Date(parsedDate.Year(), parsedDate.Month(), parsedDate.Day(), 19, 0, 0, 0, loc) + // Set the time to 6pm PT + date = time.Date(parsedDate.Year(), parsedDate.Month(), parsedDate.Day(), 18, 0, 0, 
0, loc) if err != nil { return fmt.Errorf("invalid input time format: %w", err) } @@ -169,31 +163,12 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto if testResult.Status == "FAILURE" || testResult.Status == "UNKNOWN" { errorMessage = convertErrorMessage(testResult.ErrorMessage) } - - queuedTime, err := time.Parse(tcTimeFormat, build.QueuedDate) - if err != nil { - return fmt.Errorf("failed to parse QueuedDate: %v", err) - } - startTime, err := time.Parse(tcTimeFormat, build.StartDate) - if err != nil { - return fmt.Errorf("failed to parse StartDate: %v", err) - } - finishTime, err := time.Parse(tcTimeFormat, build.FinishDate) - if err != nil { - return fmt.Errorf("failed to parse FinishDate: %v", err) - } - testInfoList = append(testInfoList, TestInfo{ - Name: testResult.Name, - Status: testResult.Status, - Service: serviceName, - ErrorMessage: errorMessage, - LogLink: logLink, - ProviderVersion: strings.ToUpper(pVersion.String()), - Duration: testResult.Duration, - QueuedDate: queuedTime, - StartDate: startTime, - FinishDate: finishTime, + Name: testResult.Name, + Status: testResult.Status, + Service: serviceName, + ErrorMessage: errorMessage, + LogLink: logLink, }) } } @@ -208,7 +183,7 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto // Upload test status data file to gcs bucket objectName := fmt.Sprintf("test-metadata/%s/%s", pVersion.String(), testStatusFileName) - err = gcs.WriteToGCSBucket(nightlyDataBucket, objectName, testStatusFileName) + err = gcs.WriteToGCSBucket(NightlyDataBucket, objectName, testStatusFileName) if err != nil { return err } diff --git a/.ci/magician/cmd/create_test_failure_ticket.go b/.ci/magician/cmd/create_test_failure_ticket.go index ab60d1aa7ed0..7576ace9d918 100644 --- a/.ci/magician/cmd/create_test_failure_ticket.go +++ b/.ci/magician/cmd/create_test_failure_ticket.go @@ -268,7 +268,7 @@ func getTestInfoList(pVersion provider.Version, date time.Time, gcs 
Cloudstorage objectName := fmt.Sprintf("test-metadata/%s/%s", pVersion.String(), testStatusFileName) var testInfoList []TestInfo - err := gcs.DownloadFile(nightlyDataBucket, objectName, testStatusFileName) + err := gcs.DownloadFile(NightlyDataBucket, objectName, testStatusFileName) if err != nil { return testInfoList, err } @@ -506,13 +506,13 @@ func storeErrorMessage(pVersion provider.Version, gcs CloudstorageClient, errorM // upload file to GCS objectName := fmt.Sprintf("test-errors/%s/%s/%s", pVersion.String(), date, fileName) - err = gcs.WriteToGCSBucket(nightlyDataBucket, objectName, fileName) + err = gcs.WriteToGCSBucket(NightlyDataBucket, objectName, fileName) if err != nil { return "", fmt.Errorf("failed to upload error message file %s to GCS bucket: %w", objectName, err) } // compute object view path - link := fmt.Sprintf("https://storage.cloud.google.com/%s/%s", nightlyDataBucket, objectName) + link := fmt.Sprintf("https://storage.cloud.google.com/%s/%s", NightlyDataBucket, objectName) return link, nil } @@ -521,7 +521,7 @@ func init() { } var ( - // TODO: add all mismatch resource names + // TODO(shuyama1): add all mismatch resource names resourceNameConverter = map[string]string{ "google_iam3_projects_policy_binding": "google_iam_projects_policy_binding", "google_iam3_organizations_policy_binding": "google_iam_organizations_policy_binding", diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index 976435500c75..9c526cc51e1b 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -16,13 +16,11 @@ package cmd import ( - "bytes" "encoding/json" "fmt" "os" "path/filepath" "regexp" - "slices" "sort" "strconv" "strings" @@ -36,9 +34,6 @@ import ( "github.com/GoogleCloudPlatform/magic-modules/tools/issue-labeler/labeler" "github.com/spf13/cobra" - "github.com/yuin/goldmark" - "github.com/yuin/goldmark/parser" - "go.abhg.dev/goldmark/frontmatter" "golang.org/x/exp/maps" _ "embed" @@ 
-50,11 +45,9 @@ var ( ) type Diff struct { - Title string - Repo string - ShortStat string - CommitSHA string - OldCommitSHA string + Title string + Repo string + ShortStat string } type BreakingChange struct { @@ -84,12 +77,12 @@ type Errors struct { } type diffCommentData struct { + PrNumber int Diffs []Diff BreakingChanges []BreakingChange MissingServiceLabels []string MissingTests map[string]*MissingTestInfo MissingDocs *MissingDocsSummary - AddedResources []string Errors []Errors } @@ -99,7 +92,6 @@ type simpleSchemaDiff struct { const allowBreakingChangesLabel = "override-breaking-change" const allowMissingServiceLabelsLabel = "override-missing-service-labels" -const allowMultipleResourcesLabel = "override-multiple-resources" var gcEnvironmentVariables = [...]string{ "BUILD_ID", @@ -180,7 +172,7 @@ func listGCEnvironmentVariables() string { func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, projectId, commitSha string, gh GithubClient, rnr ExecRunner, ctlr *source.Controller) error { errors := map[string][]string{"Other": []string{}} - // TODO - temporary fix to ensure the label is removed. + // TODO(ScottSuarez) - temporary fix to ensure the label is removed. // Once we migrate to the new trigger there is an explicit task // for this and this line can be removed. 
gh.RemoveLabel(fmt.Sprint(prNumber), "awaiting-approval") @@ -222,7 +214,9 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, } // Initialize repos - data := diffCommentData{} + data := diffCommentData{ + PrNumber: prNumber, + } for _, repo := range []*source.Repo{&tpgRepo, &tpgbRepo, &tgcRepo, &tfoicsRepo} { errors[repo.Title] = []string{} repo.Branch = newBranch @@ -268,24 +262,10 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, errors[repo.Title] = append(errors[repo.Title], "Failed to compute repo diff shortstats") } if shortStat != "" { - variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo.Name) - oldVariablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo.Name) - commitSHA, err := rnr.ReadFile(variablePath) - if err != nil { - errors[repo.Title] = append(errors[repo.Title], "Failed to read commit sha from file") - continue - } - oldCommitSHA, err := rnr.ReadFile(oldVariablePath) - if err != nil { - errors[repo.Title] = append(errors[repo.Title], "Failed to read old commit sha from file") - continue - } diffs = append(diffs, Diff{ - Title: repo.Title, - Repo: repo.Name, - ShortStat: shortStat, - CommitSHA: commitSHA, - OldCommitSHA: oldCommitSHA, + Title: repo.Title, + Repo: repo.Name, + ShortStat: shortStat, }) repo.ChangedFiles, err = ctlr.DiffNameOnly(repo, oldBranch, newBranch) if err != nil { @@ -349,11 +329,6 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, errors[repo.Title] = append(errors[repo.Title], "The missing doc detector failed to run.") } data.MissingDocs = missingDocs - - errStrs := checkDocumentFrontmatter(repo) - if len(errStrs) > 0 { - errors[repo.Title] = append(errors[repo.Title], errStrs...) 
- } } simpleDiff, err := computeAffectedResources(diffProcessorPath, rnr, repo) @@ -375,25 +350,6 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, }) data.BreakingChanges = breakingChangesSlice - // Check if multiple resources were added. - multipleResourcesState := "success" - if len(uniqueAddedResources) > 1 { - multipleResourcesState = "failure" - for _, label := range pullRequest.Labels { - if label.Name == allowMultipleResourcesLabel { - multipleResourcesState = "success" - break - } - } - } - targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", buildId, buildStep, projectId) - if err = gh.PostBuildStatus(strconv.Itoa(prNumber), "terraform-provider-multiple-resources", multipleResourcesState, targetURL, commitSha); err != nil { - fmt.Printf("Error posting terraform-provider-multiple-resources build status for pr %d commit %s: %v\n", prNumber, commitSha, err) - errors["Other"] = append(errors["Other"], "Failed to update missing-service-labels status check with state: "+multipleResourcesState) - } - data.AddedResources = maps.Keys(uniqueAddedResources) - slices.Sort(data.AddedResources) - // Compute affected resources based on changed files changedFilesAffectedResources := map[string]struct{}{} for _, repo := range []source.Repo{tpgRepo, tpgbRepo} { @@ -458,6 +414,7 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, } } } + targetURL := fmt.Sprintf("https://console.cloud.google.com/cloud-build/builds;region=global/%s;step=%s?project=%s", buildId, buildStep, projectId) if err = gh.PostBuildStatus(strconv.Itoa(prNumber), "terraform-provider-breaking-change-test", breakingState, targetURL, commitSha); err != nil { fmt.Printf("Error posting terraform-provider-breaking-change-test build status for pr %d commit %s: %v\n", prNumber, commitSha, err) errors["Other"] = append(errors["Other"], "Failed to update breaking-change status check with 
state: "+breakingState) @@ -679,48 +636,3 @@ func pathChanged(path string, changedFiles []string) bool { func init() { rootCmd.AddCommand(generateCommentCmd) } - -// checkDocumentFrontmatter checks changed markdown files' frontmatter -// structure in the repo and returns error strings when applicable. -func checkDocumentFrontmatter(repo source.Repo) []string { - var errs []string - for _, f := range repo.ChangedFiles { - if !strings.HasPrefix(f, "website/docs/r/") && !strings.HasPrefix(f, "website/docs/d/") { - continue - } - if !strings.HasSuffix(f, ".markdown") { - continue - } - src, err := os.ReadFile(filepath.Join(repo.Path, f)) - if err != nil { - errs = append(errs, "Error reading file "+f) - continue - } - - md := goldmark.New( - goldmark.WithExtensions(&frontmatter.Extender{}), - ) - - ctx := parser.NewContext() - var buff bytes.Buffer - - err = md.Convert(src, &buff, parser.WithContext(ctx)) - if err != nil { - errs = append(errs, "Error parsing file "+f) - continue - } - data := frontmatter.Get(ctx) - if data == nil { - errs = append(errs, fmt.Sprintf("No frontmatter found in file %s. This is usually due to an incorrect structure in the frontmatter.", f)) - continue - } - - var metadata struct { - Subcategory string - } - if err := data.Decode(&metadata); err != nil { - errs = append(errs, fmt.Sprintf("Failed to decode frontmatter in file %s. 
This is usually due to an incorrect structure in the frontmatter.", f)) - } - } - return errs -} diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index bd51b87c60ff..25d91879db76 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -16,9 +16,7 @@ package cmd import ( - "fmt" "os" - "path/filepath" "reflect" "testing" @@ -40,22 +38,6 @@ func TestExecGenerateComment(t *testing.T) { "GOPATH": os.Getenv("GOPATH"), "HOME": os.Getenv("HOME"), } - for _, repo := range []string{ - "terraform-provider-google", - "terraform-provider-google-beta", - "terraform-google-conversion", - } { - variablePathOld := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", repo) - variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", repo) - err := mr.WriteFile(variablePathOld, "1a2a3a4a") - if err != nil { - t.Errorf("Error writing file: %s", err) - } - err = mr.WriteFile(variablePath, "1a2a3a4b") - if err != nil { - t.Errorf("Error writing file: %s", err) - } - } execGenerateComment( 123456, "*******", @@ -130,11 +112,10 @@ func TestExecGenerateComment(t *testing.T) { for method, expectedCalls := range map[string][][]any{ "PostBuildStatus": { - {"123456", "terraform-provider-multiple-resources", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-breaking-change-test", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, {"123456", "terraform-provider-missing-service-labels", "success", "https://console.cloud.google.com/cloud-build/builds;region=global/build1;step=17?project=project1", "sha1"}, }, - "PostComment": {{"123456", "Hi there, I'm the Modular magician. 
I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/1a2a3a4a..1a2a3a4b) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/1a2a3a4a..1a2a3a4b) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, + "PostComment": {{"123456", "Hi there, I'm the Modular magician. 
I've detected the following information about your changes:\n\n## Diff report\n\nYour PR generated some diffs in downstreams - here they are.\n\n`google` provider: [Diff](https://github.com/modular-magician/terraform-provider-google/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`google-beta` provider: [Diff](https://github.com/modular-magician/terraform-provider-google-beta/compare/auto-pr-123456-old..auto-pr-123456) ( 2 files changed, 40 insertions(+))\n`terraform-google-conversion`: [Diff](https://github.com/modular-magician/terraform-google-conversion/compare/auto-pr-123456-old..auto-pr-123456) ( 1 file changed, 10 insertions(+))\n\n\n\n## Missing test report\nYour PR includes resource fields which are not covered by any test.\n\nResource: `google_folder_access_approval_settings` (3 total tests)\nPlease add an acceptance test which includes these fields. The test should include the following:\n\n```hcl\nresource \"google_folder_access_approval_settings\" \"primary\" {\n uncovered_field = # value needed\n}\n\n```\n\n\n"}}, "AddLabels": {{"123456", []string{"service/alloydb"}}}, } { if actualCalls, ok := gh.calledMethods[method]; !ok { @@ -189,27 +170,24 @@ func TestFormatDiffComment(t *testing.T) { }, "diffs are displayed": { data: diffCommentData{ + PrNumber: 1234567890, Diffs: []Diff{ { - Title: "Repo 1", - Repo: "repo-1", - ShortStat: "+1 added, -1 removed", - CommitSHA: "1a2a3a4b", - OldCommitSHA: "1a2a3a4a", + Title: "Repo 1", + Repo: "repo-1", + ShortStat: "+1 added, -1 removed", }, { - Title: "Repo 2", - Repo: "repo-2", - ShortStat: "+2 added, -2 removed", - CommitSHA: "1a2a3a4d", - OldCommitSHA: "1a2a3a4c", + Title: "Repo 2", + Repo: "repo-2", + ShortStat: "+2 added, -2 removed", }, }, }, expectedStrings: []string{ "## Diff report", "generated some diffs", - "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/1a2a3a4a..1a2a3a4b) (+1 added, -1 removed)\nRepo 2: 
[Diff](https://github.com/modular-magician/repo-2/compare/1a2a3a4c..1a2a3a4d) (+2 added, -2 removed)", + "Repo 1: [Diff](https://github.com/modular-magician/repo-1/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+1 added, -1 removed)\nRepo 2: [Diff](https://github.com/modular-magician/repo-2/compare/auto-pr-1234567890-old..auto-pr-1234567890) (+2 added, -2 removed)", }, notExpectedStrings: []string{ "hasn't generated any diffs", @@ -244,25 +222,6 @@ func TestFormatDiffComment(t *testing.T) { "## Missing test report", }, }, - "multiple resources are displayed": { - data: diffCommentData{ - AddedResources: []string{"google_redis_instance", "google_alloydb_cluster"}, - }, - expectedStrings: []string{ - "## Diff report", - "## Multiple resources added", - "`override-multiple-resources`", - "split it into multiple PRs", - "`google_redis_instance`, `google_alloydb_cluster`.", - }, - notExpectedStrings: []string{ - "generated some diffs", - "## Errors", - "## Missing test report", - "## Missing doc report", - "## Breaking Change(s) Detected", - }, - }, "missing tests are displayed": { data: diffCommentData{ MissingTests: map[string]*MissingTestInfo{ @@ -551,88 +510,3 @@ func TestPathChanged(t *testing.T) { }) } } - -func TestCheckDocumentFrontmatter(t *testing.T) { - tmpDir := t.TempDir() - files := map[string]string{ - "malformed.markdown": ` -subcategory: Example Subcategory ---- -`, - "sample.markdown": ` ---- -subcategory: Example Subcategory ---- -`, - "missingsubcategory.markdown": ` ---- -random: Example Subcategory ---- -`, - } - - folderPath := filepath.Join(tmpDir, "website", "docs", "r") - if err := os.MkdirAll(folderPath, 0755); err != nil { - t.Fatal(err) - } - for name, content := range files { - fullPath := filepath.Join(folderPath, name) - err := os.WriteFile(fullPath, []byte(content), 0644) - if err != nil { - t.Fatalf("Failed to create file %s: %v", name, err) - } - } - - // write a file in other folders - if err := 
os.WriteFile(filepath.Join(tmpDir, "abc.md"), []byte("random"), 0644); err != nil { - t.Fatalf("Failed to create file %s: %v", filepath.Join(tmpDir, "abc.md"), err) - } - - tests := []struct { - name string - changedFiles []string - wantErr bool - }{ - { - name: "not in relevant doc folder", - changedFiles: []string{"abc.md"}, - wantErr: false, - }, - { - name: "not markdown files", - changedFiles: []string{"website/docs/r/abc.txt"}, - wantErr: false, - }, - { - name: "malformed markdown", - changedFiles: []string{"website/docs/r/malformed.markdown"}, - wantErr: true, - }, - { - name: "markdown not exist", - changedFiles: []string{"website/docs/d/sample.markdown"}, - wantErr: true, - }, - { - name: "correct format", - changedFiles: []string{"website/docs/r/sample.markdown"}, - wantErr: false, - }, - } - - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - repo := source.Repo{ - Path: tmpDir, - ChangedFiles: tc.changedFiles, - } - got := checkDocumentFrontmatter(repo) - if tc.wantErr && len(got) == 0 { - t.Errorf("checkDocumentFrontmatter() = %v, want error", got) - } - if !tc.wantErr && len(got) > 0 { - t.Errorf("checkDocumentFrontmatter() = %v, want no error", got) - } - }) - } -} diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index 3f88ad6c5daf..402bf704e2dc 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -300,11 +300,9 @@ func runMake(downstreamRepo *source.Repo, command string, rnr ExecRunner) error return err } case "terraform": - // --- legacy -- can be cleaned up after go/mm-pull/13722 is submitted if _, err := rnr.Run("make", []string{"clean-provider", "OUTPUT_PATH=" + downstreamRepo.Path}, nil); err != nil { return err } - // ------------------------------------------------------------------- if _, err := rnr.Run("make", []string{"provider", "OUTPUT_PATH=" + downstreamRepo.Path, fmt.Sprintf("VERSION=%s", downstreamRepo.Version)}, nil); err 
!= nil { return err } @@ -340,9 +338,8 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner return "", err } - _, commitErr := rnr.Run("git", []string{"commit", "--signoff", "-m", commitMessage}, nil) - if commitErr != nil && !strings.Contains(commitErr.Error(), "nothing to commit") { - return "", commitErr + if _, err := rnr.Run("git", []string{"commit", "--signoff", "-m", commitMessage}, nil); err != nil { + return "", err } commitSha, err := rnr.Run("git", []string{"rev-parse", "HEAD"}, nil) @@ -355,13 +352,8 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner // auto-pr's use commitSHA_modular-magician__.txt file to communicate commmit hash // across cloudbuild steps. Used in test-tpg to execute unit tests for the HEAD commit - if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") { - var variablePath string - if strings.HasSuffix(scratchRepo.Branch, "-old") { - variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s-old.txt", scratchRepo.Name) - } else { - variablePath = fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) - } + if strings.HasPrefix(scratchRepo.Branch, "auto-pr-") && !strings.HasSuffix(scratchRepo.Branch, "-old") { + variablePath := fmt.Sprintf("/workspace/commitSHA_modular-magician_%s.txt", scratchRepo.Name) fmt.Println("variablePath: ", variablePath) err = rnr.WriteFile(variablePath, commitSha) if err != nil { @@ -369,7 +361,7 @@ func createCommit(scratchRepo *source.Repo, commitMessage string, rnr ExecRunner } } - return commitSha, commitErr + return commitSha, err } func addChangelogEntry(downstreamRepo *source.Repo, pullRequest *github.PullRequest, rnr ExecRunner) error { @@ -383,12 +375,6 @@ func addChangelogEntry(downstreamRepo *source.Repo, pullRequest *github.PullRequ return err } } - // If changelog entry is missing, add an entry "unknown: ". 
- if matches == nil { - if err := rnr.WriteFile(filepath.Join(".changelog", fmt.Sprintf("%d.txt", pullRequest.Number)), "unknown: "+pullRequest.Title); err != nil { - return err - } - } return rnr.PopDir() } diff --git a/.ci/magician/cmd/mock_runner_test.go b/.ci/magician/cmd/mock_runner_test.go index 742ca0617dba..79bc206f2c43 100644 --- a/.ci/magician/cmd/mock_runner_test.go +++ b/.ci/magician/cmd/mock_runner_test.go @@ -41,7 +41,6 @@ type mockRunner struct { cwd string dirStack *list.List notifyError bool - fileContents map[string]string } func sortedEnvString(env map[string]string) string { @@ -108,14 +107,10 @@ func (mr *mockRunner) Walk(root string, fn filepath.WalkFunc) error { } func (mr *mockRunner) ReadFile(name string) (string, error) { - return mr.fileContents[name], nil + return "", nil } func (mr *mockRunner) WriteFile(name, data string) error { - if mr.fileContents == nil { - mr.fileContents = make(map[string]string) - } - mr.fileContents[name] = data return nil } diff --git a/.ci/magician/cmd/scheduled_pr_reminders.go b/.ci/magician/cmd/scheduled_pr_reminders.go index 097310244897..f75cd3ddb72c 100644 --- a/.ci/magician/cmd/scheduled_pr_reminders.go +++ b/.ci/magician/cmd/scheduled_pr_reminders.go @@ -26,7 +26,7 @@ import ( membership "magician/github" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" "github.com/spf13/cobra" "golang.org/x/exp/slices" diff --git a/.ci/magician/cmd/scheduled_pr_reminders_test.go b/.ci/magician/cmd/scheduled_pr_reminders_test.go index 8e1d1d146d3e..60719116cb39 100644 --- a/.ci/magician/cmd/scheduled_pr_reminders_test.go +++ b/.ci/magician/cmd/scheduled_pr_reminders_test.go @@ -6,7 +6,7 @@ import ( membership "magician/github" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" "github.com/stretchr/testify/assert" ) diff --git a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl index 
5c2942060dc0..f50fdee4626a 100644 --- a/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl +++ b/.ci/magician/cmd/templates/DIFF_COMMENT.md.tmpl @@ -7,7 +7,7 @@ Your PR hasn't generated any diffs, but I'll let you know if a future commit doe Your PR generated some diffs in downstreams - here they are. {{range .Diffs -}} -{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/{{.OldCommitSHA}}..{{.CommitSHA}}) ({{.ShortStat}}) +{{.Title}}: [Diff](https://github.com/modular-magician/{{.Repo}}/compare/auto-pr-{{$.PrNumber}}-old..auto-pr-{{$.PrNumber}}) ({{.ShortStat}}) {{end -}} {{end -}} @@ -51,13 +51,6 @@ If you believe this detection to be incorrect please raise the concern with your An `override-missing-service-label` label can be added to allow merging. {{end}} -{{- if gt (len .AddedResources) 1 }} -## Multiple resources added - -This PR adds multiple new resources: {{range $i, $resource := .AddedResources}}{{ if gt $i 0}}, {{end}}`{{$resource}}`{{end}}. This makes review significantly more difficult. Please split it into multiple PRs, one per resource. -An `override-multiple-resources` label can be added to allow merging. -{{end}} - {{- if and (.MissingDocs) (or .MissingDocs.Resource .MissingDocs.DataSource) }} ## Missing doc report (experimental) diff --git a/.ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl b/.ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl new file mode 100644 index 000000000000..9efae64310a5 --- /dev/null +++ b/.ci/magician/cmd/templates/vcr/non_exercised_tests.tmpl @@ -0,0 +1,13 @@ +{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0)}} +#### Non-exercised tests + +{{if gt (len .NotRunBetaTests) 0 -}} +{{color "red" "Tests were added that are skipped in VCR:"}} +{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} +{{end}} + +{{if gt (len .NotRunGATests) 0 -}} +{{color "red" "Tests were added that are GA-only additions and require manual runs:"}} +{{range .NotRunGATests}}{{. 
| printf "- %s\n"}}{{end}} +{{end}} +{{end}} diff --git a/.ci/magician/cmd/templates/vcr/post_replay.tmpl b/.ci/magician/cmd/templates/vcr/post_replay.tmpl deleted file mode 100644 index b914d0286884..000000000000 --- a/.ci/magician/cmd/templates/vcr/post_replay.tmpl +++ /dev/null @@ -1,56 +0,0 @@ -{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0)}} -#### Non-exercised tests - -{{if gt (len .NotRunBetaTests) 0 -}} -{{color "red" "Tests were added that are skipped in VCR:"}} -{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} -{{end}} - -{{if gt (len .NotRunGATests) 0 -}} -{{color "red" "Tests were added that are GA-only additions and require manual runs:"}} -{{range .NotRunGATests}}{{. | printf "- %s\n"}}{{end}} -{{end}} -{{end}} -#### Tests analytics -Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} -Passed tests: {{len .ReplayingResult.PassedTests}} -Skipped tests: {{len .ReplayingResult.SkippedTests}} -Affected tests: {{len .ReplayingResult.FailedTests}} - -
-Click here to see the affected service packages -
-{{if .RunFullVCR}} -All service packages are affected -{{else if gt (len .AffectedServices) 0}} -
    -{{range .AffectedServices}}{{. | printf "
  • %s
  • \n"}}{{end}} -
-{{else}} -None -{{end}} -
-
- -{{ if gt (len .ReplayingResult.FailedTests) 0 -}} -#### Action taken -
-Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests - -
-
    -{{range .ReplayingResult.FailedTests}}{{. | printf "
  • %s
  • \n"}}{{end}} -
-
-
- -[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/) -{{ else -}} -{{- if .ReplayingErr -}} -{{color "red" "Errors occurred during REPLAYING mode. Please fix them to complete your PR."}} -{{- else -}} -{{color "green" "All tests passed!"}} -{{- end}} - -View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) -{{- end}} diff --git a/.ci/magician/cmd/templates/vcr/post_replay_eap.tmpl b/.ci/magician/cmd/templates/vcr/post_replay_eap.tmpl deleted file mode 100644 index 5b5e5475527e..000000000000 --- a/.ci/magician/cmd/templates/vcr/post_replay_eap.tmpl +++ /dev/null @@ -1,46 +0,0 @@ -{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0)}} -#### Non-exercised tests - -{{if gt (len .NotRunBetaTests) 0 -}} -{{color "red" "Tests were added that are skipped in VCR:"}} -{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} -{{end}} - -{{if gt (len .NotRunGATests) 0 -}} -{{color "red" "Tests were added that are GA-only additions and require manual runs:"}} -{{range .NotRunGATests}}{{. | printf "- %s\n"}}{{end}} -{{end}} -{{end}} -#### Tests analytics -Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} -Passed tests: {{len .ReplayingResult.PassedTests}} -Skipped tests: {{len .ReplayingResult.SkippedTests}} -Affected tests: {{len .ReplayingResult.FailedTests}} - -Affected service packages: -{{if .RunFullVCR}} -All service packages are affected -{{else if gt (len .AffectedServices) 0}} -{{range .AffectedServices}} -`{{.}}` {{/* remove trailing whitespace */ -}} -{{end}} -{{else}} -None -{{end}} -{{ if gt (len .ReplayingResult.FailedTests) 0 -}} -#### Action taken -Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. 
Affected tests: -{{range .ReplayingResult.FailedTests}} -`{{.}}` {{/* remove trailing whitespace */ -}} -{{end}} - -[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/) -{{ else -}} -{{- if .ReplayingErr -}} -{{color "red" "Errors occurred during REPLAYING mode. Please fix them to complete your PR."}} -{{- else -}} -{{color "green" "All tests passed!"}} -{{- end}} - -View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) -{{- end}} \ No newline at end of file diff --git a/.ci/magician/cmd/templates/vcr/record_replay.tmpl b/.ci/magician/cmd/templates/vcr/record_replay.tmpl index e618d2d63b4a..9a8b2859ac6a 100644 --- a/.ci/magician/cmd/templates/vcr/record_replay.tmpl +++ b/.ci/magician/cmd/templates/vcr/record_replay.tmpl @@ -2,7 +2,7 @@ {{color "green" "Tests passed during RECORDING mode:"}} {{range .RecordingResult.PassedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Debug log]({{$.LogBaseUrl}}/recording/{{.}}.log)] + [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} @@ -11,8 +11,8 @@ {{color "red" "Tests failed when rerunning REPLAYING mode:"}} {{range .ReplayingAfterRecordingResult.FailedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Error message]({{$.LogBaseUrl}}/build-log/replaying_build_after_recording/{{compoundTest .}}_replaying_test.log)] {{/* remove trailing whitespace */ -}} - [[Debug log]({{$.LogBaseUrl}}/replaying_after_recording/{{.}}.log)] + [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/replaying_build_after_recording/{{.}}_replaying_test.log)] {{/* remove trailing whitespace */ -}} + [[Debug 
log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/replaying_after_recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} @@ -30,8 +30,8 @@ Please fix these to complete your PR. If you believe these test failures to be i {{color "red" "Tests failed during RECORDING mode:"}} {{range .RecordingResult.FailedTests -}} `{{.}}` {{/* remove trailing whitespace */ -}} - [[Error message]({{$.LogBaseUrl}}/build-log/recording_build/{{compoundTest .}}_recording_test.log)] {{/* remove trailing whitespace */ -}} - [[Debug log]({{$.LogBaseUrl}}/recording/{{.}}.log)] + [[Error message](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/build-log/recording_build/{{.}}_recording_test.log)] {{/* remove trailing whitespace */ -}} + [[Debug log](https://storage.cloud.google.com/{{$.LogBucket}}/{{$.Version}}/refs/heads/{{$.Head}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] {{/* remove trailing whitespace */ -}} {{end}} {{end}} {{- /* end of if gt (len .RecordingResult.FailedTests) 0 */ -}} @@ -42,5 +42,5 @@ Please fix these to complete your PR. 
If you believe these test failures to be i {{if .AllRecordingPassed}}{{color "green" "All tests passed!"}}{{end}} -View the [build log]({{.LogBaseUrl}}/build-log/recording_test.log) {{/* remove trailing whitespace */ -}} -or the [debug log]({{.BrowseLogBaseUrl}}/recording) for each test +View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/recording_test.log) {{/* remove trailing whitespace */ -}} +or the [debug log](https://console.cloud.google.com/storage/browser/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/recording) for each test diff --git a/.ci/magician/cmd/templates/vcr/test_analytics.tmpl b/.ci/magician/cmd/templates/vcr/test_analytics.tmpl new file mode 100644 index 000000000000..9c4006541946 --- /dev/null +++ b/.ci/magician/cmd/templates/vcr/test_analytics.tmpl @@ -0,0 +1,20 @@ +#### Tests analytics +Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} +Passed tests: {{len .ReplayingResult.PassedTests}} +Skipped tests: {{len .ReplayingResult.SkippedTests}} +Affected tests: {{len .ReplayingResult.FailedTests}} + +
+Click here to see the affected service packages +
+{{if .RunFullVCR}} +All service packages are affected +{{else if gt (len .AffectedServices) 0}} +
    +{{range .AffectedServices}}{{. | printf "
  • %s
  • \n"}}{{end}} +
+{{else}} +None +{{end}} +
+
diff --git a/.ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl b/.ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl new file mode 100644 index 000000000000..d6d14bb4175f --- /dev/null +++ b/.ci/magician/cmd/templates/vcr/with_replay_failed_tests.tmpl @@ -0,0 +1,12 @@ +#### Action taken +
+Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests + +
+
    +{{range .ReplayingResult.FailedTests}}{{. | printf "
  • %s
  • \n"}}{{end}} +
+
+
+ +[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/) diff --git a/.ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl b/.ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl new file mode 100644 index 000000000000..256d1652fc4f --- /dev/null +++ b/.ci/magician/cmd/templates/vcr/without_replay_failed_tests.tmpl @@ -0,0 +1,7 @@ +{{- if .ReplayingErr -}} +{{color "red" "Errors occurred during REPLAYING mode. Please fix them to complete your PR."}} +{{- else -}} +{{color "green" "All tests passed!"}} +{{- end}} + +View the [build log](https://storage.cloud.google.com/{{.LogBucket}}/{{.Version}}/refs/heads/{{.Head}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) diff --git a/.ci/magician/cmd/test_eap_vcr.go b/.ci/magician/cmd/test_eap_vcr.go index 7f288d0d9f8a..867c1a7ac800 100644 --- a/.ci/magician/cmd/test_eap_vcr.go +++ b/.ci/magician/cmd/test_eap_vcr.go @@ -2,7 +2,6 @@ package cmd import ( _ "embed" - "encoding/json" "fmt" "magician/exec" "magician/provider" @@ -15,11 +14,6 @@ import ( "github.com/spf13/cobra" ) -var ( - //go:embed templates/vcr/post_replay_eap.tmpl - postReplayEAPTmplText string -) - var tevRequiredEnvironmentVariables = [...]string{ "GEN_PATH", "GOCACHE", @@ -36,6 +30,7 @@ var tevRequiredEnvironmentVariables = [...]string{ "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT", "KOKORO_ARTIFACTS_DIR", "HOME", + "MODIFIED_FILE_PATH", "PATH", "USER", } @@ -51,42 +46,6 @@ var tevOptionalEnvironmentVariables = [...]string{ "GOOGLE_VMWAREENGINE_PROJECT", } -// GerritComment is a single inline comment for a Gerrit CL. -// See go/kokoro-gob-scm#gerrit-inline-comments. -type GerritComment struct { - Path string `json:"path"` - Message string `json:"message"` -} - -// GerritCommenter is used to add comments to a Gerrit CL. 
-type GerritCommenter struct { - gerritCommentsFilename string - rnr ExecRunner - comments []GerritComment -} - -func NewGerritCommenter(gerritCommentsFilename string, rnr ExecRunner) *GerritCommenter { - return &GerritCommenter{ - gerritCommentsFilename: gerritCommentsFilename, - rnr: rnr, - } -} - -// Add adds a comment to the gerrit_comments_file json file. If a path is not -// specified, the comment is added at the patchset level, just like other -// kokoro messages. -func (g *GerritCommenter) Add(c GerritComment) error { - if c.Path == "" { - c.Path = "/PATCHSET_LEVEL" - } - g.comments = append(g.comments, c) - b, err := json.Marshal(g.comments) - if err != nil { - return err - } - return g.rnr.WriteFile(g.gerritCommentsFilename, string(b)) -} - var testEAPVCRCmd = &cobra.Command{ Use: "test-eap-vcr", Short: "Run vcr tests for affected packages in EAP", @@ -129,7 +88,7 @@ The following environment variables are required: return fmt.Errorf("wrong number of arguments %d, expected 1", len(args)) } - return execTestEAPVCR(args[0], env["GEN_PATH"], env["KOKORO_ARTIFACTS_DIR"], rnr, vt) + return execTestEAPVCR(args[0], env["GEN_PATH"], env["KOKORO_ARTIFACTS_DIR"], env["MODIFIED_FILE_PATH"], rnr, vt) }, } @@ -141,7 +100,7 @@ func listTEVEnvironmentVariables() string { return result } -func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRunner, vt *vcr.Tester) error { +func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir, modifiedFilePath string, rnr ExecRunner, vt *vcr.Tester) error { vt.SetRepoPath(provider.Private, genPath) if err := rnr.PushDir(genPath); err != nil { return fmt.Errorf("error changing to gen path: %w", err) @@ -172,10 +131,7 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRu return fmt.Errorf("error uploading replaying logs: %w", err) } - // Comments for VCR must go in the gerrit_comments_acctest.json json file. 
- commenter := NewGerritCommenter(filepath.Join(kokoroArtifactsDir, "gerrit_comments_acctest.json"), rnr) - - if hasPanics, err := handleEAPVCRPanics(head, replayingResult, vcr.Replaying, commenter); err != nil { + if hasPanics, err := handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath, replayingResult, vcr.Replaying, rnr); err != nil { return fmt.Errorf("error handling panics: %w", err) } else if hasPanics { return nil @@ -185,26 +141,29 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRu for s := range services { servicesArr = append(servicesArr, s) } - postReplayData := postReplay{ + analyticsData := analytics{ + ReplayingResult: replayingResult, RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), - ReplayingResult: replayingResult, - ReplayingErr: replayingErr, - LogBucket: "ci-vcr-logs", - Version: provider.Private.String(), - Head: head, } - comment, err := formatPostReplayEAP(postReplayData) + testsAnalyticsComment, err := formatTestsAnalytics(analyticsData) if err != nil { - return fmt.Errorf("error formatting post replay comment: %w", err) - } - c := GerritComment{ - Message: comment, - } - if err := commenter.Add(c); err != nil { - return fmt.Errorf("error adding comment: %w", err) + return fmt.Errorf("error formatting test_analytics comment: %w", err) } if len(replayingResult.FailedTests) > 0 { + withReplayFailedTestsData := withReplayFailedTests{ + ReplayingResult: replayingResult, + } + + withReplayFailedTestsComment, err := formatWithReplayFailedTests(withReplayFailedTestsData) + if err != nil { + return fmt.Errorf("error formatting action taken comment: %w", err) + } + comment := strings.Join([]string{testsAnalyticsComment, withReplayFailedTestsComment}, "\n") + if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { + return fmt.Errorf("error posting comment: %w", err) + } + recordingResult, recordingErr := vt.RunParallel(vcr.RunOptions{ Mode: 
vcr.Recording, Version: provider.Private, @@ -220,16 +179,7 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRu return fmt.Errorf("error uploading cassettes: %w", err) } - if err := vt.UploadLogs(vcr.UploadLogsOptions{ - Head: head, - Parallel: true, - Mode: vcr.Recording, - Version: provider.Private, - }); err != nil { - return fmt.Errorf("error uploading recording logs: %w", err) - } - - if hasPanics, err := handleEAPVCRPanics(head, recordingResult, vcr.Recording, commenter); err != nil { + if hasPanics, err := handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath, recordingResult, vcr.Recording, rnr); err != nil { return fmt.Errorf("error handling panics: %w", err) } else if hasPanics { return nil @@ -247,10 +197,10 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRu Head: head, Parallel: true, AfterRecording: true, - Mode: vcr.Replaying, + Mode: vcr.Recording, Version: provider.Private, }); err != nil { - return fmt.Errorf("error uploading replaying after recording logs: %w", err) + return fmt.Errorf("error uploading recording logs: %w", err) } } hasTerminatedTests := (len(recordingResult.PassedTests) + len(recordingResult.FailedTests)) < len(replayingResult.FailedTests) @@ -269,33 +219,43 @@ func execTestEAPVCR(changeNumber, genPath, kokoroArtifactsDir string, rnr ExecRu if err != nil { return fmt.Errorf("error formatting record replay comment: %w", err) } - c = GerritComment{ - Message: recordReplayComment, + if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, recordReplayComment, rnr); err != nil { + return fmt.Errorf("error posting comment: %w", err) + } + } else { // len(replayingResult.FailedTests) == 0 + withoutReplayFailedTestsData := withoutReplayFailedTests{ + ReplayingErr: replayingErr, + } + withoutReplayFailedTestsComment, err := formatWithoutReplayFailedTests(withoutReplayFailedTestsData) + if err != nil { + return fmt.Errorf("error formatting action taken 
comment: %w", err) } - if err := commenter.Add(c); err != nil { - return fmt.Errorf("error adding comment: %w", err) + comment := strings.Join([]string{testsAnalyticsComment, withoutReplayFailedTestsComment}, "\n") + if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { + return fmt.Errorf("error posting comment: %w", err) } } return nil } -func handleEAPVCRPanics(head string, result vcr.Result, mode vcr.Mode, commenter *GerritCommenter) (bool, error) { +func handleEAPVCRPanics(head, kokoroArtifactsDir, modifiedFilePath string, result vcr.Result, mode vcr.Mode, rnr ExecRunner) (bool, error) { if len(result.Panics) > 0 { - c := GerritComment{ - Message: fmt.Sprintf(`The provider crashed while running the VCR tests in %s mode. + comment := fmt.Sprintf(`The provider crashed while running the VCR tests in %s mode. Please fix it to complete your CL View the [build log](https://storage.cloud.google.com/ci-vcr-logs/%s/refs/heads/%s/build-log/%s_test.log)`, - provider.Private.String(), mode.Upper(), head, mode.Lower()), + provider.Private.String(), mode.Upper(), head, mode.Lower()) + if err := postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment, rnr); err != nil { + return true, fmt.Errorf("error posting comment: %v", err) } - return true, commenter.Add(c) + return true, nil } return false, nil } -func init() { - rootCmd.AddCommand(testEAPVCRCmd) +func postGerritComment(kokoroArtifactsDir, modifiedFilePath, comment string, rnr ExecRunner) error { + return rnr.AppendFile(filepath.Join(kokoroArtifactsDir, "gerrit_comments.json"), fmt.Sprintf("\n{path: \"%s\", message: \"%s\"}", modifiedFilePath, comment)) } -func formatPostReplayEAP(data postReplay) (string, error) { - return formatComment("post_replay_eap.tmpl", postReplayEAPTmplText, data) +func init() { + rootCmd.AddCommand(testEAPVCRCmd) } diff --git a/.ci/magician/cmd/test_eap_vcr_test.go b/.ci/magician/cmd/test_eap_vcr_test.go deleted file mode 100644 index 
63bbe584e923..000000000000 --- a/.ci/magician/cmd/test_eap_vcr_test.go +++ /dev/null @@ -1,252 +0,0 @@ -package cmd - -import ( - "fmt" - "strings" - "testing" - - "magician/provider" - "magician/vcr" -) - -func TestAnalyticsCommentEAP(t *testing.T) { - tests := []struct { - name string - data postReplay - wantContains []string - }{ - { - name: "run full vcr is false and no affected services", - data: postReplay{ - ReplayingResult: vcr.Result{ - PassedTests: []string{"a", "b", "c"}, - SkippedTests: []string{"d", "e"}, - FailedTests: []string{"f"}, - }, - RunFullVCR: false, - AffectedServices: []string{}, - }, - wantContains: []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "Affected service packages", - "None", - }, - }, - { - name: "run full vcr is false and has affected services", - data: postReplay{ - ReplayingResult: vcr.Result{ - PassedTests: []string{"a", "b", "c"}, - SkippedTests: []string{"d", "e"}, - FailedTests: []string{"f"}, - }, - RunFullVCR: false, - AffectedServices: []string{"svc-a", "svc-b"}, - }, - wantContains: []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "Affected service packages", - "`svc-a`", - "`svc-b`", - }, - }, - { - name: "run full vcr is true", - data: postReplay{ - ReplayingResult: vcr.Result{ - PassedTests: []string{"a", "b", "c"}, - SkippedTests: []string{"d", "e"}, - FailedTests: []string{"f"}, - }, - RunFullVCR: true, - AffectedServices: []string{}, - }, - wantContains: []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "Affected service packages", - "All service packages are affected", - }, - }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplayEAP(tc.data) - if err != nil { - t.Fatalf("Failed to format comment: %v", err) - } - for _, wc := range 
tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) - } - } - }) - } -} - -func TestNonExercisedTestsCommentEAP(t *testing.T) { - tests := []struct { - name string - data postReplay - wantContains []string - }{ - { - name: "with not run beta tests", - data: postReplay{ - NotRunBetaTests: []string{"beta-1", "beta-2"}, - }, - wantContains: []string{ - "#### Non-exercised tests", - "", - color("red", "Tests were added that are skipped in VCR:"), - "- beta-1", - "- beta-2", - }, - }, - { - name: "with not run ga tests", - data: postReplay{ - NotRunGATests: []string{"ga-1", "ga-2"}, - }, - wantContains: []string{ - "#### Non-exercised tests", - "", - "", - "", - color("red", "Tests were added that are GA-only additions and require manual runs:"), - "- ga-1", - "- ga-2", - }, - }, - { - name: "with not run ga tests and not run beta tests", - data: postReplay{ - NotRunGATests: []string{"ga-1", "ga-2"}, - NotRunBetaTests: []string{"beta-1", "beta-2"}, - }, - wantContains: []string{ - "#### Non-exercised tests", - "", - color("red", "Tests were added that are skipped in VCR:"), - "- beta-1", - "- beta-2", - "", - "", - "", - color("red", "Tests were added that are GA-only additions and require manual runs:"), - "- ga-1", - "- ga-2", - }, - }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplayEAP(tc.data) - if err != nil { - t.Fatalf("Failed to format comment: %v", err) - } - for _, wc := range tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) - } - } - }) - } -} - -func TestWithReplayFailedTestsEAP(t *testing.T) { - tests := []struct { - name string - data postReplay - wantContains []string - }{ - { - name: "with failed tests", - data: postReplay{ - ReplayingResult: vcr.Result{ - FailedTests: []string{"a", "b"}, - }, - }, - wantContains: []string{ - "#### 
Action taken", - "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Affected tests", - "`a`", - "`b`", - "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/)", - }, - }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplayEAP(tc.data) - if err != nil { - t.Fatalf("Failed to format comment: %v", err) - } - for _, wc := range tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) - } - } - }) - } -} - -func TestWithoutReplayFailedTestsEAP(t *testing.T) { - tests := []struct { - name string - data postReplay - wantContains []string - }{ - { - name: "with replay error", - data: postReplay{ - ReplayingErr: fmt.Errorf("some error"), - BuildID: "build-123", - Head: "auto-pr-123", - LogBucket: "ci-vcr-logs", - Version: provider.Beta.String(), - }, - wantContains: []string{ - color("red", "Errors occurred during REPLAYING mode. 
Please fix them to complete your PR."), - "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-123/artifacts/build-123/build-log/replaying_test.log)", - }, - }, - { - name: "without replay error", - data: postReplay{ - BuildID: "build-123", - Head: "auto-pr-123", - LogBucket: "ci-vcr-logs", - Version: provider.Beta.String(), - }, - wantContains: []string{ - color("green", "All tests passed!"), - "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-123/artifacts/build-123/build-log/replaying_test.log)", - }, - }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplayEAP(tc.data) - if err != nil { - t.Fatalf("Failed to format comment: %v", err) - } - for _, wc := range tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplayEAP() returned %q, which does not contain %q", got, wc) - } - } - }) - } -} diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 1cca9513652e..3dd30d9a3d1d 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -21,8 +21,14 @@ import ( ) var ( - //go:embed templates/vcr/post_replay.tmpl - postReplayTmplText string + //go:embed templates/vcr/test_analytics.tmpl + testsAnalyticsTmplText string + //go:embed templates/vcr/non_exercised_tests.tmpl + nonExercisedTestsTmplText string + //go:embed templates/vcr/with_replay_failed_tests.tmpl + withReplayFailedTestsTmplText string + //go:embed templates/vcr/without_replay_failed_tests.tmpl + withoutReplayFailedTestsTmplText string //go:embed templates/vcr/record_replay.tmpl recordReplayTmplText string ) @@ -54,17 +60,27 @@ var ttvOptionalEnvironmentVariables = [...]string{ "GOOGLE_VMWAREENGINE_PROJECT", } -type postReplay struct { +type analytics struct { + ReplayingResult vcr.Result RunFullVCR bool AffectedServices []string - NotRunBetaTests []string - NotRunGATests 
[]string - ReplayingResult vcr.Result - ReplayingErr error - LogBucket string - Version string - Head string - BuildID string +} + +type nonExercisedTests struct { + NotRunBetaTests []string + NotRunGATests []string +} + +type withReplayFailedTests struct { + ReplayingResult vcr.Result +} + +type withoutReplayFailedTests struct { + ReplayingErr error + LogBucket string + Version string + Head string + BuildID string } type recordReplay struct { @@ -77,8 +93,6 @@ type recordReplay struct { Version string Head string BuildID string - LogBaseUrl string - BrowseLogBaseUrl string } var testTerraformVCRCmd = &cobra.Command{ @@ -235,29 +249,41 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, for s := range services { servicesArr = append(servicesArr, s) } - - notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) - postReplayData := postReplay{ + analyticsData := analytics{ + ReplayingResult: replayingResult, RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), - NotRunBetaTests: notRunBeta, - NotRunGATests: notRunGa, - ReplayingResult: subtestResult(replayingResult), - ReplayingErr: replayingErr, - LogBucket: "ci-vcr-logs", - Version: provider.Beta.String(), - Head: newBranch, - BuildID: buildID, } - - comment, err := formatPostReplay(postReplayData) + testsAnalyticsComment, err := formatTestsAnalytics(analyticsData) if err != nil { - return fmt.Errorf("error formatting post replay comment: %w", err) + return fmt.Errorf("error formatting test_analytics comment: %w", err) + } + + notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) + + nonExercisedTestsData := nonExercisedTests{ + NotRunBetaTests: notRunBeta, + NotRunGATests: notRunGa, } - if err := gh.PostComment(prNumber, comment); err != nil { - return fmt.Errorf("error posting comment: %w", err) + nonExercisedTestsComment, err := 
formatNonExercisedTests(nonExercisedTestsData) + if err != nil { + return fmt.Errorf("error formatting non exercised tests comment: %w", err) } + if len(replayingResult.FailedTests) > 0 { + withReplayFailedTestsData := withReplayFailedTests{ + ReplayingResult: replayingResult, + } + withReplayFailedTestsComment, err := formatWithReplayFailedTests(withReplayFailedTestsData) + if err != nil { + return fmt.Errorf("error formatting action taken comment: %w", err) + } + + comment := strings.Join([]string{testsAnalyticsComment, nonExercisedTestsComment, withReplayFailedTestsComment}, "\n") + if err := gh.PostComment(prNumber, comment); err != nil { + return fmt.Errorf("error posting comment: %w", err) + } + recordingResult, recordingErr := vt.RunParallel(vcr.RunOptions{ Mode: vcr.Recording, Version: provider.Beta, @@ -320,8 +346,8 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, allRecordingPassed := len(recordingResult.FailedTests) == 0 && !hasTerminatedTests && recordingErr == nil recordReplayData := recordReplay{ - RecordingResult: subtestResult(recordingResult), - ReplayingAfterRecordingResult: subtestResult(replayingAfterRecordingResult), + RecordingResult: recordingResult, + ReplayingAfterRecordingResult: replayingAfterRecordingResult, RecordingErr: recordingErr, HasTerminatedTests: hasTerminatedTests, AllRecordingPassed: allRecordingPassed, @@ -337,6 +363,24 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, if err := gh.PostComment(prNumber, recordReplayComment); err != nil { return fmt.Errorf("error posting comment: %w", err) } + + } else { // len(replayingResult.FailedTests) == 0 + withoutReplayFailedTestsData := withoutReplayFailedTests{ + ReplayingErr: replayingErr, + LogBucket: "ci-vcr-logs", + Version: provider.Beta.String(), + Head: newBranch, + BuildID: buildID, + } + withoutReplayFailedTestsComment, err := formatWithoutReplayFailedTests(withoutReplayFailedTestsData) + if err != nil { + 
return fmt.Errorf("error formatting action taken comment: %w", err) + } + + comment := strings.Join([]string{testsAnalyticsComment, nonExercisedTestsComment, withoutReplayFailedTestsComment}, "\n") + if err := gh.PostComment(prNumber, comment); err != nil { + return fmt.Errorf("error posting comment: %w", err) + } } if err := gh.PostBuildStatus(prNumber, "VCR-test", testState, buildStatusTargetURL, mmCommitSha); err != nil { @@ -388,43 +432,6 @@ func notRunTests(gaDiff, betaDiff string, result vcr.Result) ([]string, []string return notRunBeta, notRunGa } -func subtestResult(original vcr.Result) vcr.Result { - return vcr.Result{ - PassedTests: excludeCompoundTests(original.PassedTests, original.PassedSubtests), - FailedTests: excludeCompoundTests(original.FailedTests, original.FailedSubtests), - SkippedTests: excludeCompoundTests(original.SkippedTests, original.SkippedSubtests), - Panics: original.Panics, - } -} - -// Returns the name of the compound test that the given subtest belongs to. -func compoundTest(subtest string) string { - parts := strings.Split(subtest, "__") - if len(parts) != 2 { - return subtest - } - return parts[0] -} - -// Returns subtests and tests that are not compound tests. 
-func excludeCompoundTests(allTests, subtests []string) []string { - res := make([]string, 0, len(allTests)+len(subtests)) - compoundTests := make(map[string]struct{}, len(subtests)) - for _, subtest := range subtests { - if compound := compoundTest(subtest); compound != subtest { - compoundTests[compound] = struct{}{} - res = append(res, subtest) - } - } - for _, test := range allTests { - if _, ok := compoundTests[test]; !ok { - res = append(res, test) - } - } - sort.Strings(res) - return res -} - func modifiedPackages(changedFiles []string, version provider.Version) (map[string]struct{}, bool) { var goFiles []string for _, line := range changedFiles { @@ -507,10 +514,9 @@ func init() { func formatComment(fileName string, tmplText string, data any) (string, error) { funcs := template.FuncMap{ - "join": strings.Join, - "add": func(i, j int) int { return i + j }, - "color": color, - "compoundTest": compoundTest, + "join": strings.Join, + "add": func(i, j int) int { return i + j }, + "color": color, } tmpl, err := template.New(fileName).Funcs(funcs).Parse(tmplText) if err != nil { @@ -524,16 +530,22 @@ func formatComment(fileName string, tmplText string, data any) (string, error) { return strings.TrimSpace(sb.String()), nil } -func formatPostReplay(data postReplay) (string, error) { - return formatComment("post_replay.tmpl", postReplayTmplText, data) +func formatTestsAnalytics(data analytics) (string, error) { + return formatComment("test_analytics.tmpl", testsAnalyticsTmplText, data) +} + +func formatNonExercisedTests(data nonExercisedTests) (string, error) { + return formatComment("non_exercised_tests.tmpl", nonExercisedTestsTmplText, data) +} + +func formatWithReplayFailedTests(data withReplayFailedTests) (string, error) { + return formatComment("with_replay_failed_tests.tmpl", withReplayFailedTestsTmplText, data) +} + +func formatWithoutReplayFailedTests(data withoutReplayFailedTests) (string, error) { + return formatComment("without_replay_failed_tests.tmpl", 
withoutReplayFailedTestsTmplText, data) } func formatRecordReplay(data recordReplay) (string, error) { - logBasePath := fmt.Sprintf("%s/%s/refs/heads/%s/artifacts/%s", data.LogBucket, data.Version, data.Head, data.BuildID) - if data.BuildID == "" { - logBasePath = fmt.Sprintf("%s/%s/refs/heads/%s", data.LogBucket, data.Version, data.Head) - } - data.LogBaseUrl = fmt.Sprintf("https://storage.cloud.google.com/%s", logBasePath) - data.BrowseLogBaseUrl = fmt.Sprintf("https://console.cloud.google.com/storage/browser/%s", logBasePath) return formatComment("record_replay.tmpl", recordReplayTmplText, data) } diff --git a/.ci/magician/cmd/test_terraform_vcr_test.go b/.ci/magician/cmd/test_terraform_vcr_test.go index 835d9d302d32..cf7e4b019327 100644 --- a/.ci/magician/cmd/test_terraform_vcr_test.go +++ b/.ci/magician/cmd/test_terraform_vcr_test.go @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "magician/provider" @@ -219,13 +220,13 @@ func TestNotRunTests(t *testing.T) { func TestAnalyticsComment(t *testing.T) { tests := []struct { - name string - data postReplay - wantContains []string + name string + data analytics + want string }{ { name: "run full vcr is false and no affected services", - data: postReplay{ + data: analytics{ ReplayingResult: vcr.Result{ PassedTests: []string{"a", "b", "c"}, SkippedTests: []string{"d", "e"}, @@ -234,26 +235,29 @@ func TestAnalyticsComment(t *testing.T) { RunFullVCR: false, AffectedServices: []string{}, }, - wantContains: []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "", - "
", - "Click here to see the affected service packages", - "
", - "", - "None", - "", - "
", - "
", - }, + want: strings.Join( + []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "None", + "", + "
", + "
", + }, + "\n", + ), }, { name: "run full vcr is false and has affected services", - data: postReplay{ + data: analytics{ ReplayingResult: vcr.Result{ PassedTests: []string{"a", "b", "c"}, SkippedTests: []string{"d", "e"}, @@ -262,30 +266,33 @@ func TestAnalyticsComment(t *testing.T) { RunFullVCR: false, AffectedServices: []string{"svc-a", "svc-b"}, }, - wantContains: []string{ - "#### Tests analytics", - "Total tests: 6", - "Passed tests: 3", - "Skipped tests: 2", - "Affected tests: 1", - "", - "
", - "Click here to see the affected service packages", - "
", - "", - "
    ", - "
  • svc-a
  • ", - "
  • svc-b
  • ", - "", - "
", - "", - "
", - "
", - }, + want: strings.Join( + []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "
    ", + "
  • svc-a
  • ", + "
  • svc-b
  • ", + "", + "
", + "", + "
", + "
", + }, + "\n", + ), }, { name: "run full vcr is true", - data: postReplay{ + data: analytics{ ReplayingResult: vcr.Result{ PassedTests: []string{"a", "b", "c"}, SkippedTests: []string{"d", "e"}, @@ -294,7 +301,7 @@ func TestAnalyticsComment(t *testing.T) { RunFullVCR: true, AffectedServices: []string{}, }, - wantContains: []string{ + want: strings.Join([]string{ "#### Tests analytics", "Total tests: 6", "Passed tests: 3", @@ -310,18 +317,18 @@ func TestAnalyticsComment(t *testing.T) { "", "", }, + "\n", + ), }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplay(tc.data) + got, err := formatTestsAnalytics(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } - for _, wc := range tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) - } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatTestsAnalytics() returned unexpected difference (-want +got):\n%s", diff) } }) } @@ -329,69 +336,84 @@ func TestAnalyticsComment(t *testing.T) { func TestNonExercisedTestsComment(t *testing.T) { tests := []struct { - name string - data postReplay - wantContains []string + name string + data nonExercisedTests + want string }{ + { + name: "without non exercised tests", + data: nonExercisedTests{}, + want: strings.Join( + []string{}, + "\n", + ), + }, { name: "with not run beta tests", - data: postReplay{ + data: nonExercisedTests{ NotRunBetaTests: []string{"beta-1", "beta-2"}, }, - wantContains: []string{ - "#### Non-exercised tests", - "", - color("red", "Tests were added that are skipped in VCR:"), - "- beta-1", - "- beta-2", - }, + want: strings.Join( + []string{ + "#### Non-exercised tests", + "", + color("red", "Tests were added that are skipped in VCR:"), + "- beta-1", + "- beta-2", + }, + "\n", + ), }, { name: "with not run ga tests", - data: postReplay{ + data: nonExercisedTests{ NotRunGATests: []string{"ga-1", "ga-2"}, 
}, - wantContains: []string{ - "#### Non-exercised tests", - "", - "", - "", - color("red", "Tests were added that are GA-only additions and require manual runs:"), - "- ga-1", - "- ga-2", - }, + want: strings.Join( + []string{ + "#### Non-exercised tests", + "", + "", + "", + color("red", "Tests were added that are GA-only additions and require manual runs:"), + "- ga-1", + "- ga-2", + }, + "\n", + ), }, { name: "with not run ga tests and not run beta tests", - data: postReplay{ + data: nonExercisedTests{ NotRunGATests: []string{"ga-1", "ga-2"}, NotRunBetaTests: []string{"beta-1", "beta-2"}, }, - wantContains: []string{ - "#### Non-exercised tests", - "", - color("red", "Tests were added that are skipped in VCR:"), - "- beta-1", - "- beta-2", - "", - "", - "", - color("red", "Tests were added that are GA-only additions and require manual runs:"), - "- ga-1", - "- ga-2", - }, + want: strings.Join( + []string{ + "#### Non-exercised tests", + "", + color("red", "Tests were added that are skipped in VCR:"), + "- beta-1", + "- beta-2", + "", + "", + "", + color("red", "Tests were added that are GA-only additions and require manual runs:"), + "- ga-1", + "- ga-2", + }, + "\n", + ), }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplay(tc.data) + got, err := formatNonExercisedTests(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } - for _, wc := range tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) - } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatNonExercisedTests() returned unexpected difference (-want +got):\n%s", diff) } }) } @@ -399,45 +421,46 @@ func TestNonExercisedTestsComment(t *testing.T) { func TestWithReplayFailedTests(t *testing.T) { tests := []struct { - name string - data postReplay - wantContains []string + name string + data withReplayFailedTests + want string }{ { name: "with 
failed tests", - data: postReplay{ + data: withReplayFailedTests{ ReplayingResult: vcr.Result{ FailedTests: []string{"a", "b"}, }, }, - wantContains: []string{ - "#### Action taken", - "
", - "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests", - "", - "
", - "
    ", - "
  • a
  • ", - "
  • b
  • ", - "", // Empty line - "
", - "
", - "
", - "", - "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/)", - }, + want: strings.Join( + []string{ + "#### Action taken", + "
", + "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests", + "", + "
", + "
    ", + "
  • a
  • ", + "
  • b
  • ", + "", // Empty line + "
", + "
", + "
", + "", + "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/develop/test/test/)", + }, + "\n", + ), }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplay(tc.data) + got, err := formatWithReplayFailedTests(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } - for _, wc := range tc.wantContains { - if !strings.Contains(got, wc) { - t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) - } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatWithReplayFailedTests() returned unexpected difference (-want +got):\n%s", diff) } }) } @@ -446,12 +469,12 @@ func TestWithReplayFailedTests(t *testing.T) { func TestWithoutReplayFailedTests(t *testing.T) { tests := []struct { name string - data postReplay + data withoutReplayFailedTests wantContains []string }{ { name: "with replay error", - data: postReplay{ + data: withoutReplayFailedTests{ ReplayingErr: fmt.Errorf("some error"), BuildID: "build-123", Head: "auto-pr-123", @@ -465,7 +488,7 @@ func TestWithoutReplayFailedTests(t *testing.T) { }, { name: "without replay error", - data: postReplay{ + data: withoutReplayFailedTests{ BuildID: "build-123", Head: "auto-pr-123", LogBucket: "ci-vcr-logs", @@ -479,13 +502,13 @@ func TestWithoutReplayFailedTests(t *testing.T) { } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - got, err := formatPostReplay(tc.data) + got, err := formatWithoutReplayFailedTests(tc.data) if err != nil { t.Fatalf("Failed to format comment: %v", err) } for _, wc := range tc.wantContains { if !strings.Contains(got, wc) { - t.Errorf("formatPostReplay() returned %q, which does not contain %q", got, wc) + t.Errorf("formatWithoutReplayFailedTests() returned %q, which does not contain %q", got, wc) } } }) diff --git a/.ci/magician/cmd/vcr_cassette_update_test.go b/.ci/magician/cmd/vcr_cassette_update_test.go index e06fa239ed95..0d391e0f5d45 100644 --- 
a/.ci/magician/cmd/vcr_cassette_update_test.go +++ b/.ci/magician/cmd/vcr_cassette_update_test.go @@ -325,7 +325,6 @@ func TestExecVCRCassetteUpdate(t *testing.T) { "SA_KEY": "sa_key", "TF_ACC": "1", "TF_LOG": "DEBUG", - "TF_LOG_CORE": "WARN", "TF_LOG_PATH_MASK": "/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_SCHEMA_PANIC_ON_ERROR": "1", @@ -340,8 +339,8 @@ func TestExecVCRCassetteUpdate(t *testing.T) { { name: "replay failed then record", cmdResults: map[string]string{ - "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 32 -v -run=TestAcc -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:32 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_CORE:WARN TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:REPLAYING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- FAIL: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", - "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 1 -v -run=TestAccContainerNodePool_defaultDriverInstallation$ -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:1 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_CORE:WARN TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/recording/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:RECORDING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- PASS: 
TestAccContainerNodePool_defaultDriverInstallation (590.29s)", + "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 32 -v -run=TestAcc -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:32 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:REPLAYING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- FAIL: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", + "gopath/src/github.com/hashicorp/terraform-provider-google-beta go [test -parallel 1 -v -run=TestAccContainerNodePool_defaultDriverInstallation$ -timeout 240m -ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc -vet=off] map[ACCTEST_PARALLELISM:1 GOOGLE_APPLICATION_CREDENTIALS:/mock/dir/magic-modules/.ci/magician/sa_key.json GOOGLE_CREDENTIALS:sa_key GOOGLE_TEST_DIRECTORY: SA_KEY:sa_key TF_ACC:1 TF_LOG:DEBUG TF_LOG_PATH_MASK:/mock/dir/magic-modules/.ci/magician/testlogs/recording/beta/%s.log TF_LOG_SDK_FRAMEWORK:INFO TF_SCHEMA_PANIC_ON_ERROR:1 VCR_MODE:RECORDING VCR_PATH:/mock/dir/magic-modules/.ci/magician/cassettes/beta]": "--- PASS: TestAccContainerNodePool_defaultDriverInstallation (590.29s)", }, expectedCalls: map[string][]ParameterList{ "Run": { @@ -358,7 +357,6 @@ func TestExecVCRCassetteUpdate(t *testing.T) { "SA_KEY": "sa_key", "TF_ACC": "1", "TF_LOG": "DEBUG", - "TF_LOG_CORE": "WARN", "TF_LOG_PATH_MASK": "/mock/dir/magic-modules/.ci/magician/testlogs/replaying/beta/%s.log", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_SCHEMA_PANIC_ON_ERROR": "1", @@ -377,7 +375,6 @@ func TestExecVCRCassetteUpdate(t *testing.T) { "SA_KEY": "sa_key", "TF_ACC": "1", "TF_LOG": "DEBUG", - 
"TF_LOG_CORE": "WARN", "TF_LOG_PATH_MASK": "/mock/dir/magic-modules/.ci/magician/testlogs/recording/beta/%s.log", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_SCHEMA_PANIC_ON_ERROR": "1", diff --git a/.ci/magician/cmd/vcr_merge_eap.go b/.ci/magician/cmd/vcr_merge_eap.go index 2c6d68436454..5797f0aaf52d 100644 --- a/.ci/magician/cmd/vcr_merge_eap.go +++ b/.ci/magician/cmd/vcr_merge_eap.go @@ -3,6 +3,7 @@ package cmd import ( "fmt" "magician/exec" + "magician/github" "magician/source" "os" @@ -25,6 +26,11 @@ var vcrMergeEapCmd = &cobra.Command{ clNumber := args[0] fmt.Println("CL number:", clNumber) + githubToken, ok := os.LookupEnv("GITHUB_TOKEN_CLASSIC") + if !ok { + return fmt.Errorf("did not provide GITHUB_TOKEN_CLASSIC environment variable") + } + baseBranch := os.Getenv("BASE_BRANCH") if baseBranch == "" { return fmt.Errorf("environment variable BASE_BRANCH is empty") @@ -35,11 +41,12 @@ var vcrMergeEapCmd = &cobra.Command{ return fmt.Errorf("error creating Runner: %w", err) } - return execVCRMergeEAP(clNumber, baseBranch, rnr) + gh := github.NewClient(githubToken) + return execVCRMergeEAP(gh, clNumber, baseBranch, rnr) }, } -func execVCRMergeEAP(clNumber, baseBranch string, runner source.Runner) error { +func execVCRMergeEAP(gh GithubClient, clNumber, baseBranch string, runner source.Runner) error { head := "auto-cl-" + clNumber mergeCassettes("gs://ci-vcr-cassettes/private", baseBranch, fmt.Sprintf("refs/heads/%s", head), runner) return nil diff --git a/.ci/magician/exec/runner.go b/.ci/magician/exec/runner.go index dd79898d46f0..2de978885e98 100644 --- a/.ci/magician/exec/runner.go +++ b/.ci/magician/exec/runner.go @@ -131,9 +131,6 @@ func (ar *Runner) Run(name string, args []string, env map[string]string) (string return "", fmt.Errorf("path error running %s: %v", name, typedErr) } - if err != nil { - return "", fmt.Errorf("error running %q: %v", name, err) - } return string(out), nil } diff --git a/.ci/magician/github/README.md b/.ci/magician/github/README.md 
deleted file mode 100644 index 987b87408af2..000000000000 --- a/.ci/magician/github/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# GitHub Integration Tests - -## Overview -This directory contains an interface for the GitHub client that make real API calls to GitHub's API. The tests in `integration_test.go` are isolated with build tags to prevent accidental execution. - -## Build Tags -This file uses Go build tags (`//go:build integration`) which: -- Exclude these tests from normal test execution (`go test ./...`) -- Require explicit opt-in (`go test -tags=integration`) -- Prevent accidental execution of tests that make real API calls and may have side effects - -## Usage - -### Requirements -- GitHub API token with appropriate permissions -- Token set as environment variable: `GITHUB_API_TOKEN` - -### Running Tests -```bash -# Run all integration tests -GITHUB_API_TOKEN=your_token_here go test -v -tags=integration ./github - -# Run specific test -GITHUB_API_TOKEN=your_token_here go test -v -tags=integration -run TestIntegrationGetPullRequest ./github \ No newline at end of file diff --git a/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md b/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md index 6954d07d906f..784465ddba9a 100644 --- a/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md +++ b/.ci/magician/github/REVIEWER_ASSIGNMENT_COMMENT.md @@ -1,6 +1,4 @@ -Hello! I am a robot. Tests will require approval from a repository maintainer to run. - -**Googlers:** For automatic test runs see go/terraform-auto-test-runs. +Hello! I am a robot. Tests will require approval from a repository maintainer to run. Googlers: see go/terraform-auto-test-runs to set up automatic test runs. @{{.reviewer}}, a repository maintainer, has been assigned to [review your changes](https://googlecloudplatform.github.io/magic-modules/contribute/review-pr/). If you have not received review feedback within 2 business days, please leave a comment on this PR asking them to take a look. 
diff --git a/.ci/magician/github/get.go b/.ci/magician/github/get.go index 8df94827a5f9..50fe8ce89b64 100644 --- a/.ci/magician/github/get.go +++ b/.ci/magician/github/get.go @@ -17,18 +17,10 @@ package github import ( "fmt" - "strconv" + utils "magician/utility" "time" - - gh "github.com/google/go-github/v68/github" -) - -const ( - defaultOwner = "GoogleCloudPlatform" - defaultRepo = "magic-modules" ) -// Types for external interface compatibility type User struct { Login string `json:"login"` } @@ -55,179 +47,101 @@ type PullRequestComment struct { CreatedAt time.Time `json:"created_at"` } -// GetPullRequest fetches a single pull request -func (c *Client) GetPullRequest(prNumber string) (PullRequest, error) { - num, err := strconv.Atoi(prNumber) - if err != nil { - return PullRequest{}, err - } +func (gh *Client) GetPullRequest(prNumber string) (PullRequest, error) { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s", prNumber) - pr, _, err := c.gh.PullRequests.Get(c.ctx, defaultOwner, defaultRepo, num) - if err != nil { - return PullRequest{}, err - } + var pullRequest PullRequest - return convertGHPullRequest(pr), nil + err := utils.RequestCallWithRetry(url, "GET", gh.token, &pullRequest, nil) + + return pullRequest, err } -// GetPullRequests fetches multiple pull requests -func (c *Client) GetPullRequests(state, base, sort, direction string) ([]PullRequest, error) { - opts := &gh.PullRequestListOptions{ - State: state, - Base: base, - Sort: sort, - Direction: direction, - } +func (gh *Client) GetPullRequests(state, base, sort, direction string) ([]PullRequest, error) { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls?state=%s&base=%s&sort=%s&direction=%s", state, base, sort, direction) - prs, _, err := c.gh.PullRequests.List(c.ctx, defaultOwner, defaultRepo, opts) - if err != nil { - return nil, err - } + var pullRequests []PullRequest - result := make([]PullRequest, 
len(prs)) - for i, pr := range prs { - result[i] = convertGHPullRequest(pr) - } + err := utils.RequestCallWithRetry(url, "GET", gh.token, &pullRequests, nil) - return result, nil + return pullRequests, err } -// GetPullRequestRequestedReviewers gets requested reviewers for a PR -func (c *Client) GetPullRequestRequestedReviewers(prNumber string) ([]User, error) { - num, err := strconv.Atoi(prNumber) - if err != nil { - return nil, err +func (gh *Client) GetPullRequestRequestedReviewers(prNumber string) ([]User, error) { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) + + var requestedReviewers struct { + Users []User `json:"users"` } - reviewers, _, err := c.gh.PullRequests.ListReviewers(c.ctx, defaultOwner, defaultRepo, num, nil) + err := utils.RequestCallWithRetry(url, "GET", gh.token, &requestedReviewers, nil) if err != nil { return nil, err } - return convertGHUsers(reviewers.Users), nil + return requestedReviewers.Users, nil } -// GetPullRequestPreviousReviewers gets previous reviewers for a PR -func (c *Client) GetPullRequestPreviousReviewers(prNumber string) ([]User, error) { - num, err := strconv.Atoi(prNumber) - if err != nil { - return nil, err +func (gh *Client) GetPullRequestPreviousReviewers(prNumber string) ([]User, error) { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/reviews", prNumber) + + var reviews []struct { + User User `json:"user"` } - reviews, _, err := c.gh.PullRequests.ListReviews(c.ctx, defaultOwner, defaultRepo, num, nil) + + err := utils.RequestCallWithRetry(url, "GET", gh.token, &reviews, nil) if err != nil { return nil, err } - // Use a map to deduplicate reviewers - reviewerMap := make(map[string]*gh.User) - + previousAssignedReviewers := map[string]User{} for _, review := range reviews { - if review.User != nil && review.User.Login != nil { - login := review.User.GetLogin() - reviewerMap[login] = review.User 
- } + previousAssignedReviewers[review.User.Login] = review.User } - // Convert map to slice - reviewers := make([]*gh.User, 0, len(reviewerMap)) - for _, user := range reviewerMap { - reviewers = append(reviewers, user) + result := []User{} + for _, user := range previousAssignedReviewers { + result = append(result, user) } - return convertGHUsers(reviewers), nil + return result, nil } -// GetCommitMessage gets a commit message -func (c *Client) GetCommitMessage(owner, repo, sha string) (string, error) { - commit, _, err := c.gh.Repositories.GetCommit(c.ctx, owner, repo, sha, nil) - if err != nil { - return "", err - } +func (gh *Client) GetCommitMessage(owner, repo, sha string) (string, error) { + url := fmt.Sprintf("https://api.github.com/repos/%s/%s/commits/%s", owner, repo, sha) - if commit.Commit != nil && commit.Commit.Message != nil { - return *commit.Commit.Message, nil + var commit struct { + Commit struct { + Message string `json:"message"` + } `json:"commit"` } - return "", fmt.Errorf("no commit message found") -} - -// GetPullRequestComments gets all comments on a PR, handling pagination -func (c *Client) GetPullRequestComments(prNumber string) ([]PullRequestComment, error) { - num, err := strconv.Atoi(prNumber) + err := utils.RequestCall(url, "GET", gh.token, &commit, nil) if err != nil { - return nil, err - } - - var allComments []*gh.IssueComment - opts := &gh.IssueListCommentsOptions{ - ListOptions: gh.ListOptions{ - PerPage: 100, - }, - } - - for { - comments, resp, err := c.gh.Issues.ListComments(c.ctx, defaultOwner, defaultRepo, num, opts) - if err != nil { - return nil, err - } - - allComments = append(allComments, comments...) 
- - if resp.NextPage == 0 { - break // No more pages - } - - // Set up for the next page - opts.Page = resp.NextPage + return "", err } - return convertGHComments(allComments), nil + return commit.Commit.Message, nil } -// GetTeamMembers gets all members of a team, handling pagination -func (c *Client) GetTeamMembers(organization, team string) ([]User, error) { - var allMembers []*gh.User - opts := &gh.TeamListTeamMembersOptions{ - ListOptions: gh.ListOptions{ - PerPage: 100, - }, - } +func (gh *Client) GetPullRequestComments(prNumber string) ([]PullRequestComment, error) { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/comments", prNumber) - for { - members, resp, err := c.gh.Teams.ListTeamMembersBySlug(c.ctx, organization, team, opts) - if err != nil { - return nil, err - } - - allMembers = append(allMembers, members...) - - if resp.NextPage == 0 { - break // No more pages - } - - // Set up for the next page - opts.Page = resp.NextPage - } - - return convertGHUsers(allMembers), nil -} - -// IsOrgMember checks if a user is a member of an organization -func (c *Client) IsOrgMember(username, org string) bool { - isMember, _, err := c.gh.Organizations.IsMember(c.ctx, org, username) + var comments []PullRequestComment + err := utils.RequestCallWithRetry(url, "GET", gh.token, &comments, nil) if err != nil { - return false + return nil, err } - - return isMember + return comments, nil } -// IsTeamMember checks if a user is a member of a team -func (c *Client) IsTeamMember(organization, teamSlug, username string) bool { - membership, _, err := c.gh.Teams.GetTeamMembershipBySlug(c.ctx, organization, teamSlug, username) +func (gh *Client) GetTeamMembers(organization, team string) ([]User, error) { + url := fmt.Sprintf("https://api.github.com/orgs/%s/teams/%s/members", organization, team) + + var members []User + err := utils.RequestCallWithRetry(url, "GET", gh.token, &members, nil) if err != nil { - return false + return nil, 
err } - - return membership != nil && membership.State != nil && *membership.State == "active" + return members, nil } diff --git a/.ci/magician/github/init.go b/.ci/magician/github/init.go index c09217803075..d64995aa46b7 100644 --- a/.ci/magician/github/init.go +++ b/.ci/magician/github/init.go @@ -15,75 +15,13 @@ */ package github -import ( - "bytes" - "context" - "io" - "net/http" - - utils "magician/utility" - - gh "github.com/google/go-github/v68/github" -) - // Client for GitHub interactions. type Client struct { token string - gh *gh.Client - ctx context.Context -} - -// retryTransport is a custom RoundTripper that adds retry and logging -type retryTransport struct { - underlyingTransport http.RoundTripper - token string } func NewClient(token string) *Client { - ctx := context.Background() - - // Create a custom transport with retry logic - rt := &retryTransport{ - underlyingTransport: http.DefaultTransport, - token: token, - } - - // Use this custom transport with OAuth2 - tc := &http.Client{Transport: rt} - - // Create the GitHub client with our custom transport - ghClient := gh.NewClient(tc) - return &Client{ - gh: ghClient, token: token, - ctx: ctx, - } -} - -// RoundTrip implements the http.RoundTripper interface -func (rt *retryTransport) RoundTrip(req *http.Request) (*http.Response, error) { - // Extract information from the request - method := req.Method - urlStr := req.URL.String() - - // Read and log the request body if present - var bodyBytes []byte - if req.Body != nil { - bodyBytes, _ = io.ReadAll(req.Body) - req.Body.Close() - req.Body = io.NopCloser(bytes.NewReader(bodyBytes)) } - - resp, respBody, err := utils.RequestCallWithRetryRaw(urlStr, method, rt.token, bodyBytes) - if err != nil { - return nil, err - } - - // Replace the response body with our captured body - resp.Body.Close() // Close the original body - resp.Body = io.NopCloser(bytes.NewReader(respBody)) - resp.ContentLength = int64(len(respBody)) - - return resp, nil } diff --git 
a/.ci/magician/github/integration_test.go b/.ci/magician/github/integration_test.go deleted file mode 100644 index 64ed13e0ba64..000000000000 --- a/.ci/magician/github/integration_test.go +++ /dev/null @@ -1,355 +0,0 @@ -//go:build integration -// +build integration - -/* -* Copyright 2025 Google LLC. All Rights Reserved. -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -* -* Integration tests - makes real GitHub API calls. -* NOT run during normal test execution (go test). -* Requires: -* - GITHUB_API_TOKEN environment variable -* - Run with: go test -tags=integration - */ - -package github - -import ( - "fmt" - "os" - "testing" - "time" -) - -// https://github.com/GoogleCloudPlatform/magic-modules -const ( - testNonMember = "bananaman5000" - testRepo = "magic-modules" - testOwner = "GoogleCloudPlatform" - testReviewer = "melinath" - testPRNumber = "14031" - testOldPRNumber = "13969" // replace this with an actual PR Number - testPRCommitSha = "4a8409686810551655eea2533e939cc5344e83e2" // replace this with an actual SHA - testMainCommitSha = "fd910977cf24595d2c04e3f0a369a82c79fdb8f8" // replace this with an actual SHA - testLabel = "terraform-3.0" - testOrg = "GoogleCloudPlatform" - testTeam = "terraform" - workflowFileName = "test-tpg.yml" -) - -func skipIfNoToken(t *testing.T) *Client { - token := os.Getenv("GITHUB_API_TOKEN") - if token == "" { - t.Skip("Skipping integration test: GITHUB_API_TOKEN environment variable not set") - } - - return NewClient(token) -} - 
-func TestIntegrationGetPullRequest(t *testing.T) { - client := skipIfNoToken(t) - - pr, err := client.GetPullRequest(testPRNumber) - if err != nil { - t.Fatalf("GetPullRequest failed: %v", err) - } - - t.Logf("PR Title: %s", pr.Title) - if pr.Number == 0 { - t.Error("Expected PR number to be non-zero") - } - if pr.Title == "" { - t.Error("Expected PR title to be non-empty") - } -} - -func TestIntegrationGetPullRequests(t *testing.T) { - client := skipIfNoToken(t) - - prs, err := client.GetPullRequests("open", "main", "created", "desc") - if err != nil { - t.Fatalf("GetPullRequests failed: %v", err) - } - - t.Logf("Found %d PRs", len(prs)) -} - -func TestIntegrationGetCommitMessage(t *testing.T) { - client := skipIfNoToken(t) - - // You'll need a valid commit SHA for this test - if testMainCommitSha == "HEAD" { - t.Skip("Skipping GetCommitMessage test: need a valid commit SHA") - } - - message, err := client.GetCommitMessage(testOwner, testRepo, testMainCommitSha) - if err != nil { - t.Fatalf("GetCommitMessage failed: %v", err) - } - - t.Logf("Commit message: %s", message) - if message == "" { - t.Error("Expected commit message to be non-empty") - } -} - -func TestIntegrationGetPullRequestComments(t *testing.T) { - client := skipIfNoToken(t) - - comments, err := client.GetPullRequestComments(testPRNumber) - if err != nil { - t.Fatalf("GetPullRequestComments failed: %v", err) - } - - t.Logf("Found %d comments", len(comments)) - for i, comment := range comments { - t.Logf("Comment %d: %s by %s", i+1, comment.Body[:min(len(comment.Body), 50)], comment.User.Login) - } -} - -func TestIntegrationGetTeamMembers(t *testing.T) { - client := skipIfNoToken(t) - - members, err := client.GetTeamMembers(testOrg, testTeam) - if err != nil { - t.Logf("GetTeamMembers failed: %v", err) - t.Skip("Skipping team member test - might not have access to the specified team") - } - - t.Logf("Found %d team members", len(members)) - for i, member := range members { - t.Logf("Member %d: %s", 
i+1, member.Login) - } -} - -func TestIntegrationIsOrgMember(t *testing.T) { - client := skipIfNoToken(t) - - isMember := client.IsOrgMember(testOwner, testOrg) - t.Logf("Is %s a member of %s: %v", testOwner, testOrg, isMember) - - if !isMember { - t.Errorf("Note: Expected %s to be a member of %s, but they're not", testOwner, testOrg) - } - - isMember = client.IsOrgMember(testNonMember, testOrg) - if isMember { - t.Errorf("Expected %s to not be a member of %s, but they are", testNonMember, testOrg) - } else { - t.Logf("Is %s not a member of %s: %v", testNonMember, testOrg, isMember) - } -} - -func TestIntegrationIsTeamMember(t *testing.T) { - client := skipIfNoToken(t) - - isMember := client.IsTeamMember(testOrg, testTeam, testOwner) - if !isMember { - t.Errorf("Expected %s to be a member of team %s in org %s, but they're not", testOwner, testTeam, testOrg) - } else { - t.Logf("Is %s a member of team %s in org %s: %v", testOwner, testTeam, testOrg, isMember) - } - - isMember = client.IsTeamMember(testOrg, testTeam, testNonMember) - if isMember { - t.Errorf("Expected %s to not be a member of team %s in org %s, but they are", testNonMember, testTeam, testOrg) - } else { - t.Logf("Is %s not a member of team %s in org %s: %v", testNonMember, testTeam, testOrg, isMember) - } -} - -func TestIntegrationPostAndUpdateComment(t *testing.T) { - client := skipIfNoToken(t) - - // First post a comment - comment := fmt.Sprintf("Test comment from integration test at %s", time.Now().Format(time.RFC3339)) - err := client.PostComment(testPRNumber, comment) - if err != nil { - t.Fatalf("PostComment failed: %v", err) - } - - // Get the comment ID - comments, err := client.GetPullRequestComments(testPRNumber) - if err != nil { - t.Fatalf("GetPullRequestComments failed: %v", err) - } - - var commentID int - for _, c := range comments { - if c.Body == comment { - commentID = c.ID - break - } - } - - if commentID == 0 { - t.Fatal("Could not find the comment we just posted") - } - - // 
Update the comment - updatedComment := fmt.Sprintf("Updated test comment from integration test at %s", time.Now().Format(time.RFC3339)) - err = client.UpdateComment(testPRNumber, updatedComment, commentID) - if err != nil { - t.Fatalf("UpdateComment failed: %v", err) - } - - t.Logf("Successfully posted and updated comment with ID: %d", commentID) -} - -func TestIntegrationAddAndRemoveLabels(t *testing.T) { - client := skipIfNoToken(t) - - err := client.AddLabels(testPRNumber, []string{testLabel}) - if err != nil { - t.Fatalf("AddLabels failed: %v", err) - } - - // Then remove the label - err = client.RemoveLabel(testPRNumber, testLabel) - if err != nil { - t.Fatalf("RemoveLabel failed: %v", err) - } - - t.Logf("Successfully added and removed label: %s", testLabel) -} - -func TestIntegrationPostBuildStatus(t *testing.T) { - client := skipIfNoToken(t) - - // You'll need a valid commit SHA for this test - if testPRCommitSha == "HEAD" { - t.Skip("Skipping PostBuildStatus test: need a valid commit SHA") - } - - err := client.PostBuildStatus( - testPRNumber, - "integration-test", - "success", - "https://example.com/integration-test", - testPRCommitSha, - ) - if err != nil { - t.Errorf("PostBuildStatus failed: %v", err) - } - - err = client.PostBuildStatus( - testPRNumber, - "integration-test-failed", - "failure", - "https://example.com/integration-test-fail", - testPRCommitSha, - ) - if err != nil { - t.Errorf("PostBuildStatus failed: %v", err) - } - - t.Logf("Successfully posted build status") -} - -func TestIntegrationCreateWorkflowDispatchEvent(t *testing.T) { - client := skipIfNoToken(t) - - // Skip this test by default as it can have side effects - if os.Getenv("RUN_WORKFLOW_DISPATCH_TEST") != "true" { - t.Skip("Skipping workflow dispatch test: set RUN_WORKFLOW_DISPATCH_TEST=true to run") - } - - if err := client.CreateWorkflowDispatchEvent("test-tpg.yml", map[string]any{ - "owner": "modular-magician", - "repo": testRepo, - "branch": "main", - "pr-number": 
testPRNumber, - "sha": testPRCommitSha, - }); err != nil { - t.Errorf("error creating workflow dispatch event: %v", err) - } - - t.Logf("Successfully triggered workflow dispatch event") -} - -func TestIntegrationRequestAndRemovePullRequestReviewers(t *testing.T) { - client := skipIfNoToken(t) - - // Request a reviewer - reviewers := []string{testReviewer} // Request the owner to review their own PR - err := client.RequestPullRequestReviewers(testPRNumber, reviewers) - if err != nil { - t.Fatalf("RequestPullRequestReviewers failed: %v", err) - } - - // Remove the reviewer - err = client.RemovePullRequestReviewers(testPRNumber, reviewers) - if err != nil { - t.Fatalf("RemovePullRequestReviewers failed: %v", err) - } - - t.Logf("Successfully requested and removed reviewers: %v", reviewers) -} - -func TestIntegrationGetPullRequestRequestedReviewers(t *testing.T) { - client := skipIfNoToken(t) - - reviewers, err := client.GetPullRequestRequestedReviewers(testPRNumber) - if err != nil { - t.Fatalf("GetPullRequestRequestedReviewers failed: %v", err) - } - - t.Logf("Found %d requested reviewers", len(reviewers)) - for i, reviewer := range reviewers { - t.Logf("Reviewer %d: %s", i+1, reviewer.Login) - } -} - -func TestIntegrationGetPullRequestPreviousReviewers(t *testing.T) { - client := skipIfNoToken(t) - - reviewers, err := client.GetPullRequestPreviousReviewers(testOldPRNumber) - if err != nil { - t.Fatalf("GetPullRequestPreviousReviewers failed: %v", err) - } - - t.Logf("Found %d previous reviewers", len(reviewers)) - for i, reviewer := range reviewers { - t.Logf("Previous reviewer %d: %s", i+1, reviewer.Login) - } -} - -// TestIntegrationMergePullRequest is commented out as it has permanent effects -// Uncomment and run only if you're sure you want to merge the PR -/* - func TestIntegrationMergePullRequest(t *testing.T) { - client := skipIfNoToken(t) - // Skip this test by default as it has permanent effects - if os.Getenv("RUN_MERGE_PR_TEST") != "true" { - 
t.Skip("Skipping merge PR test: set RUN_MERGE_PR_TEST=true to run") - } - // You'll need a valid commit SHA for this test - if testPRCommitSha == "HEAD" { - t.Skip("Skipping MergePullRequest test: need a valid commit SHA") - } - err := client.MergePullRequest(testOwner, testRepo, testPRNumber, testPRCommitSha) - if err != nil { - t.Fatalf("MergePullRequest failed: %v", err) - } - t.Logf("Successfully merged pull request") - } -*/ - -// Helper function to get minimum of two integers -func min(a, b int) int { - if a < b { - return a - } - return b -} diff --git a/.ci/magician/github/interface_conversion.go b/.ci/magician/github/interface_conversion.go deleted file mode 100644 index 3ca9f35a1ba2..000000000000 --- a/.ci/magician/github/interface_conversion.go +++ /dev/null @@ -1,70 +0,0 @@ -package github - -import ( - gh "github.com/google/go-github/v68/github" -) - -// Convert from GitHub types to our types -func convertGHPullRequest(pr *gh.PullRequest) PullRequest { - if pr == nil { - return PullRequest{} - } - - var labels []Label - if pr.Labels != nil { - for _, l := range pr.Labels { - if l.Name != nil { - labels = append(labels, Label{Name: *l.Name}) - } - } - } - - return PullRequest{ - HTMLUrl: pr.GetHTMLURL(), - Number: pr.GetNumber(), - Title: pr.GetTitle(), - User: User{Login: pr.GetUser().GetLogin()}, - Body: pr.GetBody(), - Labels: labels, - MergeCommitSha: pr.GetMergeCommitSHA(), - Merged: pr.GetMerged(), - } -} - -func convertGHUser(user *gh.User) User { - if user == nil { - return User{} - } - return User{ - Login: user.GetLogin(), - } -} - -func convertGHUsers(users []*gh.User) []User { - result := make([]User, len(users)) - for i, u := range users { - result[i] = convertGHUser(u) - } - return result -} - -func convertGHComment(comment *gh.IssueComment) PullRequestComment { - if comment == nil { - return PullRequestComment{} - } - - return PullRequestComment{ - User: convertGHUser(comment.User), - Body: comment.GetBody(), - ID: int(comment.GetID()), - 
CreatedAt: comment.GetCreatedAt().Time, - } -} - -func convertGHComments(comments []*gh.IssueComment) []PullRequestComment { - result := make([]PullRequestComment, len(comments)) - for i, c := range comments { - result[i] = convertGHComment(c) - } - return result -} diff --git a/.ci/magician/github/membership.go b/.ci/magician/github/membership.go index 84d67b0e5d17..a45b42bb30e5 100644 --- a/.ci/magician/github/membership.go +++ b/.ci/magician/github/membership.go @@ -50,19 +50,12 @@ func (gh *Client) GetUserType(user string) UserType { return CoreContributorUserType } - if gh.IsTeamMember("GoogleCloudPlatform", "terraform", user) { - fmt.Println("User is an active member of the 'terraform' team in 'GoogleCloudPlatform' organization") - return GooglerUserType - } else { - fmt.Printf("User '%s' is not an active member of the 'terraform' team in 'GoogleCloudPlatform' organization\n", user) - } - - if gh.IsOrgMember(user, "GoogleCloudPlatform") { + if isOrgMember(user, "GoogleCloudPlatform", gh.token) { fmt.Println("User is a GCP org member") return GooglerUserType } - if gh.IsOrgMember(user, "googlers") { + if isOrgMember(user, "googlers", gh.token) { fmt.Println("User is a googlers org member") return GooglerUserType } @@ -81,6 +74,13 @@ func IsCoreReviewer(user string) bool { return isCoreReviewer } +func isOrgMember(author, org, githubToken string) bool { + url := fmt.Sprintf("https://api.github.com/orgs/%s/members/%s", org, author) + err := utils.RequestCallWithRetry(url, "GET", githubToken, nil, nil) + + return err == nil +} + // GetRandomReviewer returns a random available reviewer (optionally excluding some people from the reviewer pool) func GetRandomReviewer(excludedReviewers []string) string { availableReviewers := AvailableReviewers(excludedReviewers) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 1dedcf7dcc19..28bcaba31972 100644 --- a/.ci/magician/github/membership_data.go +++ 
b/.ci/magician/github/membership_data.go @@ -64,8 +64,8 @@ var ( "BBBmau": { vacations: []Vacation{ { - startDate: newDate(2025, 7, 1), - endDate: newDate(2025, 7, 17), + startDate: newDate(2025, 4, 7), + endDate: newDate(2025, 4, 11), }, }, }, @@ -81,12 +81,7 @@ var ( vacations: []Vacation{}, }, "melinath": { - vacations: []Vacation{ - { - startDate: newDate(2025, 6, 26), - endDate: newDate(2025, 7, 7), - }, - }, + vacations: []Vacation{}, }, "NickElliot": { vacations: []Vacation{}, @@ -100,12 +95,7 @@ var ( }, }, "roaks3": { - vacations: []Vacation{ - { - startDate: newDate(2025, 8, 1), - endDate: newDate(2025, 8, 11), - }, - }, + vacations: []Vacation{}, }, "ScottSuarez": { vacations: []Vacation{}, @@ -113,8 +103,8 @@ var ( "shuyama1": { vacations: []Vacation{ { - startDate: newDate(2025, 5, 23), - endDate: newDate(2025, 5, 30), + startDate: newDate(2025, 3, 26), + endDate: newDate(2025, 4, 1), }, }, }, @@ -127,26 +117,16 @@ var ( }, }, "slevenick": { - vacations: []Vacation{ - { - startDate: newDate(2025, 5, 22), - endDate: newDate(2025, 6, 7), - }, - }, + vacations: []Vacation{}, }, "trodge": { - vacations: []Vacation{ - { - startDate: newDate(2025, 8, 7), - endDate: newDate(2025, 8, 10), - }, - }, + vacations: []Vacation{}, }, "zli82016": { vacations: []Vacation{ { - startDate: newDate(2025, 8, 27), - endDate: newDate(2025, 9, 2), + startDate: newDate(2025, 1, 15), + endDate: newDate(2025, 2, 9), }, }, }, @@ -155,6 +135,7 @@ var ( // This is for new team members who are onboarding trustedContributors = map[string]struct{}{ "bbasata": struct{}{}, + "jaylonmcshan03": struct{}{}, "malhotrasagar2212": struct{}{}, } ) diff --git a/.ci/magician/github/set.go b/.ci/magician/github/set.go index c04e7daf936b..15ac9996ca22 100644 --- a/.ci/magician/github/set.go +++ b/.ci/magician/github/set.go @@ -17,139 +17,120 @@ package github import ( "fmt" - "strconv" + utils "magician/utility" "strings" "time" - - utils "magician/utility" - - gh 
"github.com/google/go-github/v68/github" ) -// PostBuildStatus creates a commit status for a specific SHA -func (c *Client) PostBuildStatus(prNumber, title, state, targetURL, commitSha string) error { - repoStatus := &gh.RepoStatus{ - Context: gh.Ptr(title), - State: gh.Ptr(state), - TargetURL: gh.Ptr(targetURL), +func (gh *Client) PostBuildStatus(prNumber, title, state, targetURL, commitSha string) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/%s", commitSha) + + postBody := map[string]string{ + "context": title, + "state": state, + "target_url": targetURL, } - _, _, err := c.gh.Repositories.CreateStatus(c.ctx, defaultOwner, defaultRepo, commitSha, repoStatus) + err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, postBody) if err != nil { return err } fmt.Printf("Successfully posted build status to pull request %s\n", prNumber) + return nil } -// PostComment adds a comment to a pull request -func (c *Client) PostComment(prNumber, comment string) error { - num, err := strconv.Atoi(prNumber) - if err != nil { - return err - } +func (gh *Client) PostComment(prNumber, comment string) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/comments", prNumber) - issueComment := &gh.IssueComment{ - Body: gh.Ptr(comment), + body := map[string]string{ + "body": comment, } - _, _, err = c.gh.Issues.CreateComment(c.ctx, defaultOwner, defaultRepo, num, issueComment) + err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, body) if err != nil { return err } fmt.Printf("Successfully posted comment to pull request %s\n", prNumber) + return nil } -// UpdateComment updates an existing comment -func (c *Client) UpdateComment(prNumber, comment string, id int) error { - issueComment := &gh.IssueComment{ - Body: gh.Ptr(comment), +func (gh *Client) UpdateComment(prNumber, comment string, id int) error { + url := 
fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/comments/%d", id) + + body := map[string]string{ + "body": comment, } - _, _, err := c.gh.Issues.EditComment(c.ctx, defaultOwner, defaultRepo, int64(id), issueComment) + err := utils.RequestCallWithRetry(url, "PATCH", gh.token, nil, body) if err != nil { return err } fmt.Printf("Successfully updated comment %d in pull request %s\n", id, prNumber) + return nil } -// RequestPullRequestReviewers adds reviewers to a pull request -func (c *Client) RequestPullRequestReviewers(prNumber string, reviewers []string) error { - if len(reviewers) == 0 { - return nil - } +func (gh *Client) RequestPullRequestReviewers(prNumber string, reviewers []string) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) - num, err := strconv.Atoi(prNumber) - if err != nil { - return err - } - - // Create the reviewers request - reviewersRequest := gh.ReviewersRequest{ - Reviewers: reviewers, + body := map[string][]string{ + "reviewers": reviewers, + "team_reviewers": {}, } - _, _, err = c.gh.PullRequests.RequestReviewers(c.ctx, defaultOwner, defaultRepo, num, reviewersRequest) + err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, body) if err != nil { return err } fmt.Printf("Successfully added reviewers %v to pull request %s\n", reviewers, prNumber) + return nil } -// RemovePullRequestReviewers removes reviewers from a pull request -func (c *Client) RemovePullRequestReviewers(prNumber string, reviewers []string) error { - if len(reviewers) == 0 { - return nil - } - - num, err := strconv.Atoi(prNumber) - if err != nil { - return err - } +func (gh *Client) RemovePullRequestReviewers(prNumber string, reviewers []string) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls/%s/requested_reviewers", prNumber) - reviewersRequest := gh.ReviewersRequest{ - Reviewers: reviewers, + body 
:= map[string][]string{ + "reviewers": reviewers, + "team_reviewers": {}, } - _, err = c.gh.PullRequests.RemoveReviewers(c.ctx, defaultOwner, defaultRepo, num, reviewersRequest) + err := utils.RequestCall(url, "DELETE", gh.token, nil, body) if err != nil { return err } - fmt.Printf("Successfully removed reviewers %v from pull request %s\n", reviewers, prNumber) + fmt.Printf("Successfully removed reviewers %v to pull request %s\n", reviewers, prNumber) + return nil } -// AddLabels adds labels to an issue or pull request -func (c *Client) AddLabels(prNumber string, labels []string) error { - num, err := strconv.Atoi(prNumber) - if err != nil { - return err +func (gh *Client) AddLabels(prNumber string, labels []string) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/labels", prNumber) + + body := map[string][]string{ + "labels": labels, } + err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, body) - _, _, err = c.gh.Issues.AddLabelsToIssue(c.ctx, defaultOwner, defaultRepo, num, labels) if err != nil { return fmt.Errorf("failed to add %q labels: %s", labels, err) } return nil + } -// RemoveLabel removes a label from an issue or pull request -func (c *Client) RemoveLabel(prNumber, label string) error { - num, err := strconv.Atoi(prNumber) - if err != nil { - return err - } +func (gh *Client) RemoveLabel(prNumber, label string) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/issues/%s/labels/%s", prNumber, label) + err := utils.RequestCallWithRetry(url, "DELETE", gh.token, nil, nil) - _, err = c.gh.Issues.RemoveLabelForIssue(c.ctx, defaultOwner, defaultRepo, num, label) if err != nil { return fmt.Errorf("failed to remove %s label: %s", label, err) } @@ -157,24 +138,19 @@ func (c *Client) RemoveLabel(prNumber, label string) error { return nil } -// CreateWorkflowDispatchEvent triggers a workflow run -func (c *Client) CreateWorkflowDispatchEvent(workflowFileName 
string, inputs map[string]any) error { - stringInputs := make(map[string]interface{}) - for k, v := range inputs { - stringInputs[k] = v - } - - event := gh.CreateWorkflowDispatchEventRequest{ - Ref: "main", - Inputs: stringInputs, - } +func (gh *Client) CreateWorkflowDispatchEvent(workflowFileName string, inputs map[string]any) error { + url := fmt.Sprintf("https://api.github.com/repos/GoogleCloudPlatform/magic-modules/actions/workflows/%s/dispatches", workflowFileName) + err := utils.RequestCallWithRetry(url, "POST", gh.token, nil, map[string]any{ + "ref": "main", + "inputs": inputs, + }) - _, err := c.gh.Actions.CreateWorkflowDispatchEventByFileName(c.ctx, defaultOwner, defaultRepo, workflowFileName, event) if err != nil { return fmt.Errorf("failed to create workflow dispatch event: %s", err) } fmt.Printf("Successfully created workflow dispatch event for %s with inputs %v\n", workflowFileName, inputs) + return nil } diff --git a/.ci/magician/go.mod b/.ci/magician/go.mod index f422afca2320..4a19d6ff1ba7 100644 --- a/.ci/magician/go.mod +++ b/.ci/magician/go.mod @@ -16,11 +16,10 @@ require ( require ( cloud.google.com/go/storage v1.50.0 github.com/google/go-cmp v0.6.0 + github.com/google/go-github/v61 v61.0.0 github.com/google/go-github/v68 v68.0.0 github.com/otiai10/copy v1.12.0 github.com/stretchr/testify v1.10.0 - github.com/yuin/goldmark v1.7.8 - go.abhg.dev/goldmark/frontmatter v0.2.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -32,7 +31,6 @@ require ( cloud.google.com/go/compute/metadata v0.6.0 // indirect cloud.google.com/go/iam v1.2.2 // indirect cloud.google.com/go/monitoring v1.21.2 // indirect - github.com/BurntSushi/toml v1.2.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 // indirect diff --git a/.ci/magician/go.sum 
b/.ci/magician/go.sum index d5db267de18d..733652cf458d 100644 --- a/.ci/magician/go.sum +++ b/.ci/magician/go.sum @@ -22,8 +22,6 @@ cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv cloud.google.com/go/trace v1.11.2 h1:4ZmaBdL8Ng/ajrgKqY5jfvzqMXbrDcBsUGXOT9aqTtI= cloud.google.com/go/trace v1.11.2/go.mod h1:bn7OwXd4pd5rFuAnTrzBuoZ4ax2XQeG3qNgYmfCy0Io= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= -github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0/go.mod h1:obipzmGjfSjam60XLwGfqUkJsfiheAl+TUjG+4yzyPM= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 h1:UQ0AhxogsIRZDkElkblfnwjc3IaltCm2HUMvezQaL7s= @@ -87,6 +85,8 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go= +github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY= github.com/google/go-github/v68 v68.0.0 h1:ZW57zeNZiXTdQ16qrDiZ0k6XucrxZ2CGmoTvcCyQG6s= github.com/google/go-github/v68 v68.0.0/go.mod h1:K9HAUBovM2sLwM408A18h+wd9vqdLOEqTUCbnRIcx68= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= @@ -132,10 +132,6 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod 
h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic= -github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= -go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= -go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/detectors/gcp v1.29.0 h1:TiaiXB4DpGD3sdzNlYQxruQngn5Apwzi1X0DRhuGvDQ= diff --git a/.ci/magician/teamcity/get.go b/.ci/magician/teamcity/get.go index 0126dc2ed9f4..37f0ec8d4c17 100644 --- a/.ci/magician/teamcity/get.go +++ b/.ci/magician/teamcity/get.go @@ -28,9 +28,6 @@ type Build struct { BuildConfName string `json:"buildConfName"` WebUrl string `json:"webUrl"` Number string `json:"number"` - QueuedDate string `json:"queuedDate"` - StartDate string `json:"startDate"` - FinishDate string `json:"finishDate"` } type Builds struct { @@ -55,7 +52,7 @@ type FirstFailed struct { } func (tc *Client) GetBuilds(project, finishCut, startCut string) (Builds, error) { - url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/builds?locator=count:500,tag:cron-trigger,project:%s,branch:refs/heads/nightly-test,queuedDate:(date:%s,condition:before),queuedDate:(date:%s,condition:after)&fields=build(id,buildTypeId,buildConfName,webUrl,number,queuedDate,startDate,finishDate)", project, finishCut, startCut) + url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/builds?locator=count:500,tag:cron-trigger,project:%s,branch:refs/heads/nightly-test,finishDate:(date:%s,condition:before),startDate:(date:%s,condition:after)", project, 
finishCut, startCut) var builds Builds @@ -65,7 +62,7 @@ func (tc *Client) GetBuilds(project, finishCut, startCut string) (Builds, error) } func (tc *Client) GetTestResults(build Build) (TestResults, error) { - url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/testOccurrences?locator=count:5000,build:(id:%d)&fields=testOccurrence(id,name,status,duration,firstFailed(href),details)", build.Id) + url := fmt.Sprintf("https://hashicorp.teamcity.com/app/rest/testOccurrences?locator=count:5000,build:(id:%d)&fields=testOccurrence(id,name,status,duration,firstFailed(href),details,build(webUrl))", build.Id) var testResults TestResults diff --git a/.ci/magician/utility/utils.go b/.ci/magician/utility/utils.go index 1c55326bc84a..02534af9b0a4 100644 --- a/.ci/magician/utility/utils.go +++ b/.ci/magician/utility/utils.go @@ -23,7 +23,6 @@ import ( "math" "net/http" "os" - "strings" "time" "golang.org/x/exp/slices" @@ -52,37 +51,21 @@ func defaultRetryConfig() retryConfig { // makeHTTPRequest performs the actual HTTP request and returns the response func makeHTTPRequest(url, method, credentials string, body any) (*http.Response, []byte, error) { client := &http.Client{} - - fmt.Println("") - fmt.Println("request url: ", url) - - var reqBody io.Reader - if body != nil { - switch v := body.(type) { - case []byte: - // Body is already serialized, use directly - reqBody = bytes.NewBuffer(v) - rbString := strings.TrimSpace(string(v)) - fmt.Println("request body (raw bytes): ", rbString) - default: - // Body needs serialization - jsonBody, err := json.Marshal(body) - if err != nil { - return nil, nil, fmt.Errorf("error marshaling JSON: %s", err) - } - reqBody = bytes.NewBuffer(jsonBody) - fmt.Println("request body (serialized): ", string(jsonBody)) - } + jsonBody, err := json.Marshal(body) + if err != nil { + return nil, nil, fmt.Errorf("error marshaling JSON: %s", err) } - - req, err := http.NewRequest(method, url, reqBody) + req, err := http.NewRequest(method, url, 
bytes.NewBuffer(jsonBody)) if err != nil { return nil, nil, fmt.Errorf("error creating request: %s", err) } - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", credentials)) req.Header.Set("Content-Type", "application/json") req.Header.Set("Accept", "application/json") + + fmt.Println("") + fmt.Println("request url: ", url) + fmt.Println("request body: ", string(jsonBody)) fmt.Println("") resp, err := client.Do(req) @@ -160,11 +143,9 @@ func calculateBackoff(attempt int, config retryConfig) time.Duration { return backoff } -// RequestCallWithRetryRaw raw version of the retry function that returns the response and body bytes -func requestCallWithRetryRaw(url, method, credentials string, body any, config retryConfig) (*http.Response, []byte, error) { +// RequestCallWithRetry makes an HTTP request with retry capability +func requestCallWithRetry(url, method, credentials string, result any, body any, config retryConfig) error { var lastErr error - var lastResp *http.Response - var lastBodyBytes []byte for attempt := 0; attempt <= config.MaxRetries; attempt++ { // If this is a retry attempt, wait before trying again @@ -180,36 +161,27 @@ func requestCallWithRetryRaw(url, method, credentials string, body any, config r continue // Network error, retry } - lastResp = resp - lastBodyBytes = respBodyBytes + // Process the response + err = processResponse(resp, respBodyBytes, result) + if err != nil { + lastErr = err - // Check if we should retry based on status code - if resp.StatusCode < 200 || resp.StatusCode >= 300 { + // Check if we should retry based on status code if shouldRetry(resp.StatusCode, config) { continue } } - return lastResp, lastBodyBytes, nil + // If we got here with no error, return success + return err } - return lastResp, lastBodyBytes, lastErr -} - -// RequestCallWithRetryRaw is a convenience function that uses default retry settings -func RequestCallWithRetryRaw(url, method, credentials string, body any) (*http.Response, []byte, error) { - 
return requestCallWithRetryRaw(url, method, credentials, body, defaultRetryConfig()) + return fmt.Errorf("max retries exceeded: %w", lastErr) } // RequestCallWithRetry is a convenience function that uses default retry settings -// and unmarshals the response into the result func RequestCallWithRetry(url, method, credentials string, result any, body any) error { - resp, respBodyBytes, err := requestCallWithRetryRaw(url, method, credentials, body, defaultRetryConfig()) - if err != nil { - return err - } - - return processResponse(resp, respBodyBytes, result) + return requestCallWithRetry(url, method, credentials, result, body, defaultRetryConfig()) } func Removes(s1 []string, s2 []string) []string { diff --git a/.ci/magician/vcr/tester.go b/.ci/magician/vcr/tester.go index 6f01ee352357..5b0d8975e122 100644 --- a/.ci/magician/vcr/tester.go +++ b/.ci/magician/vcr/tester.go @@ -13,13 +13,10 @@ import ( ) type Result struct { - PassedTests []string - SkippedTests []string - FailedTests []string - PassedSubtests []string - SkippedSubtests []string - FailedSubtests []string - Panics []string + PassedTests []string + SkippedTests []string + FailedTests []string + Panics []string } type Mode int @@ -69,8 +66,6 @@ const replayingTimeout = "240m" var testResultsExpression = regexp.MustCompile(`(?m:^--- (PASS|FAIL|SKIP): (TestAcc\w+))`) -var subtestResultsExpression = regexp.MustCompile(`(?m:^ --- (PASS|FAIL|SKIP): (TestAcc\w+)/(\w+))`) - var testPanicExpression = regexp.MustCompile(`^panic: .*`) var safeToLog = map[string]bool{ @@ -105,7 +100,6 @@ var safeToLog = map[string]bool{ "SA_KEY": false, "TF_ACC": true, "TF_LOG": true, - "TF_LOG_CORE": true, "TF_LOG_PATH_MASK": true, "TF_LOG_SDK_FRAMEWORK": true, "TF_SCHEMA_PANIC_ON_ERROR": true, @@ -259,7 +253,6 @@ func (vt *Tester) Run(opt RunOptions) (Result, error) { "GOOGLE_CREDENTIALS": vt.env["SA_KEY"], "GOOGLE_TEST_DIRECTORY": strings.Join(opt.TestDirs, " "), "TF_LOG": "DEBUG", - "TF_LOG_CORE": "WARN", "TF_LOG_SDK_FRAMEWORK": 
"INFO", "TF_LOG_PATH_MASK": filepath.Join(logPath, "%s.log"), "TF_ACC": "1", @@ -407,7 +400,6 @@ func (vt *Tester) runInParallel(mode Mode, version provider.Version, testDir, te "GOOGLE_CREDENTIALS": vt.env["SA_KEY"], "GOOGLE_TEST_DIRECTORY": testDir, "TF_LOG": "DEBUG", - "TF_LOG_CORE": "WARN", "TF_LOG_SDK_FRAMEWORK": "INFO", "TF_LOG_PATH_MASK": filepath.Join(logPath, "%s.log"), "TF_ACC": "1", @@ -611,39 +603,19 @@ func collectResult(output string) Result { } resultSets[submatches[1]][submatches[2]] = struct{}{} } - matches = subtestResultsExpression.FindAllStringSubmatch(output, -1) - subtestResultSets := make(map[string]map[string]struct{}, 4) - for _, submatches := range matches { - if len(submatches) != 4 { - fmt.Printf("Warning: unexpected regex match found in test output: %v", submatches) - continue - } - if _, ok := subtestResultSets[submatches[1]]; !ok { - subtestResultSets[submatches[1]] = make(map[string]struct{}) - } - subtestResultSets[submatches[1]][fmt.Sprintf("%s__%s", submatches[2], submatches[3])] = struct{}{} - } results := make(map[string][]string, 4) results["PANIC"] = testPanicExpression.FindAllString(output, -1) sort.Strings(results["PANIC"]) - subtestResults := make(map[string][]string, 3) for _, kind := range []string{"FAIL", "PASS", "SKIP"} { for test := range resultSets[kind] { results[kind] = append(results[kind], test) } sort.Strings(results[kind]) - for subtest := range subtestResultSets[kind] { - subtestResults[kind] = append(subtestResults[kind], subtest) - } - sort.Strings(subtestResults[kind]) } return Result{ - FailedTests: results["FAIL"], - PassedTests: results["PASS"], - SkippedTests: results["SKIP"], - FailedSubtests: subtestResults["FAIL"], - PassedSubtests: subtestResults["PASS"], - SkippedSubtests: subtestResults["SKIP"], - Panics: results["PANIC"], + FailedTests: results["FAIL"], + PassedTests: results["PASS"], + SkippedTests: results["SKIP"], + Panics: results["PANIC"], } } diff --git a/.ci/magician/vcr/tester_test.go 
b/.ci/magician/vcr/tester_test.go deleted file mode 100644 index 4ed29faa2c5a..000000000000 --- a/.ci/magician/vcr/tester_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package vcr - -import ( - "testing" - - "github.com/google/go-cmp/cmp" -) - -func TestCollectResults(t *testing.T) { - for _, test := range []struct { - name string - output string - expected Result - }{ - { - name: "no compound tests", - output: `--- FAIL: TestAccServiceOneResourceOne (100.00s) ---- PASS: TestAccServiceOneResourceTwo (100.00s) ---- PASS: TestAccServiceTwoResourceOne (100.00s) ---- PASS: TestAccServiceTwoResourceTwo (100.00s) -`, - expected: Result{ - PassedTests: []string{"TestAccServiceOneResourceTwo", "TestAccServiceTwoResourceOne", "TestAccServiceTwoResourceTwo"}, - FailedTests: []string{"TestAccServiceOneResourceOne"}, - }, - }, - { - name: "compound tests", - output: `--- FAIL: TestAccServiceOneResourceOne (100.00s) ---- FAIL: TestAccServiceOneResourceTwo (100.00s) - --- PASS: TestAccServiceOneResourceTwo/test_one (100.00s) - --- FAIL: TestAccServiceOneResourceTwo/test_two (100.00s) ---- PASS: TestAccServiceTwoResourceOne (100.00s) - --- PASS: TestAccServiceTwoResourceOne/test_one (100.00s) - --- PASS: TestAccServiceTwoResourceOne/test_two (100.00s) ---- PASS: TestAccServiceTwoResourceTwo (100.00s) -`, - expected: Result{ - PassedTests: []string{ - "TestAccServiceTwoResourceOne", - "TestAccServiceTwoResourceTwo", - }, - FailedTests: []string{"TestAccServiceOneResourceOne", "TestAccServiceOneResourceTwo"}, - PassedSubtests: []string{ - "TestAccServiceOneResourceTwo__test_one", - "TestAccServiceTwoResourceOne__test_one", - "TestAccServiceTwoResourceOne__test_two", - }, - FailedSubtests: []string{"TestAccServiceOneResourceTwo__test_two"}, - }, - }, - } { - if diff := cmp.Diff(test.expected, collectResult(test.output)); diff != "" { - t.Errorf("collectResult(%q) got unexpected diff (-want +got):\n%s", test.output, diff) - } - } - -} diff --git a/.ci/release-note2.tmpl 
b/.ci/release-note2.tmpl deleted file mode 100644 index e1506f545b3d..000000000000 --- a/.ci/release-note2.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -{{- define "note" -}} -{{if eq "new-resource" .Type}}**New Resource:** {{else if eq "new-datasource" .Type}}**New Data Source:** {{else if eq "new-function" .Type}}**New Function:** {{else if eq "new-ephemeral" .Type}}**New Ephemeral Resource:** {{ end }}{{.Body}} ([#{{- .Issue -}}]) -{{- end -}} \ No newline at end of file diff --git a/.github/actions/build-downstream/action.yml b/.github/actions/build-downstream/action.yml index 4c3a35d57019..731810d372c6 100644 --- a/.github/actions/build-downstream/action.yml +++ b/.github/actions/build-downstream/action.yml @@ -66,6 +66,7 @@ runs: else export VERSION=beta fi + make clean-provider make provider elif [ "$GH_REPO" == "terraform-google-conversion" ]; then UPSTREAM_OWNER=GoogleCloudPlatform diff --git a/.github/workflows/basic-pr-checks.yml b/.github/workflows/basic-pr-checks.yml deleted file mode 100644 index ac43d8bb581a..000000000000 --- a/.github/workflows/basic-pr-checks.yml +++ /dev/null @@ -1,69 +0,0 @@ -name: "pull-request" -permissions: read-all - -on: - pull_request - -jobs: - disallow-submodules: - runs-on: ubuntu-22.04 - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 - - name: Check for submodules - run: | - output=$(git submodule status --recursive 2>&1) - if [ ! 
-z $output ]; then - echo $output - echo "Submodules are not allowed" - exit 1 - else - echo "No submodules found" - fi - disallow-large-prs: - runs-on: ubuntu-22.04 - steps: - - name: Check PR size - shell: bash - run: | - # Get PR details - pr_data=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/pulls/${{github.event.pull_request.number}}") - - # Get list of files in the PR - pr_files=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/pulls/${{github.event.pull_request.number}}/files") - - # Calculate additions and deletions excluding test files - total_additions=0 - total_deletions=0 - - # Use jq to filter out test and documentation files and calculate totals - filtered_stats=$(echo "$pr_files" | jq '[ - .[] | - select( - (.filename | endswith("_test.go") | not) and - (.filename | endswith("test.go.tmpl") | not) and - (.filename | endswith(".md") | not) and - (.filename | endswith(".md.tmpl") | not) and - (.filename | endswith(".html.markdown") | not) - ) | - {additions: .additions, deletions: .deletions} - ] | - reduce .[] as $item ( - {"additions": 0, "deletions": 0}; - .additions += $item.additions | - .deletions += $item.deletions - )') - - total_additions=$(echo "$filtered_stats" | jq -r '.additions') - total_deletions=$(echo "$filtered_stats" | jq -r '.deletions') - total=$((total_additions + total_deletions)) - - echo "Excluding test and documentation files:" - echo "$total_additions lines added; $total_deletions lines deleted" - - if (( total > 500 )); then - echo "This PR changed $total lines of code (excluding test and documentation files), which is above the recommended limit of 500. Your reviewer may ask you to break it into multiple PRs." 
- exit 1 - else - echo "This PR changed $total lines of code (excluding test and documentation files), which meets the recommended limit of 500." - fi \ No newline at end of file diff --git a/.github/workflows/build-downstream.yml b/.github/workflows/build-downstream.yml index 3490e1ad9bb9..af1d30a3d8e6 100644 --- a/.github/workflows/build-downstream.yml +++ b/.github/workflows/build-downstream.yml @@ -71,6 +71,7 @@ jobs: else export VERSION=beta fi + make clean-provider make provider elif [ "$GH_REPO" == "terraform-google-conversion" ]; then UPSTREAM_OWNER=GoogleCloudPlatform diff --git a/.github/workflows/disallow-submodules.yml b/.github/workflows/disallow-submodules.yml new file mode 100644 index 000000000000..c61685931565 --- /dev/null +++ b/.github/workflows/disallow-submodules.yml @@ -0,0 +1,22 @@ +name: "Disallow submodules" +permissions: read-all + +on: + pull_request + +jobs: + disallow-submodules: + runs-on: ubuntu-22.04 + steps: + - name: Checkout repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + - name: Check for submodules + run: | + output=$(git submodule status --recursive 2>&1) + if [ ! 
-z $output ]; then + echo $output + echo "Submodules are not allowed" + exit 1 + else + echo "No submodules found" + fi diff --git a/.github/workflows/mmv1-check-templates.yml b/.github/workflows/mmv1-check-templates.yml new file mode 100644 index 000000000000..f8b1f4c052ce --- /dev/null +++ b/.github/workflows/mmv1-check-templates.yml @@ -0,0 +1,30 @@ +name: mmv1-check-templates + +permissions: read-all + +on: + pull_request: + paths: + - 'mmv1/**/*.tmpl' + +jobs: + version-guard-check: + runs-on: ubuntu-22.04 + steps: + - name: Checkout Repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + with: + path: repo + fetch-depth: 0 + - name: Merge base branch + id: pull_request + run: | + cd repo + git config user.name "modular-magician" + git config user.email "magic-modules@google.com" + git fetch origin ${{ github.base_ref }} # Fetch the base branch + git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch + - name: Check for invalid version guards + run: | + cd repo/tools/template-check + git diff --name-only --diff-filter=d origin/${{ github.base_ref }} ../../*.tmpl | sed 's=^=../../=g' | go run main.go diff --git a/.github/workflows/mmv1-lint-product-yaml.yml b/.github/workflows/mmv1-lint-product-yaml.yml new file mode 100644 index 000000000000..41bdcfd65ace --- /dev/null +++ b/.github/workflows/mmv1-lint-product-yaml.yml @@ -0,0 +1,36 @@ +name: mmv1-lint-product-yaml + +permissions: read-all + +on: + pull_request: + paths: + - 'mmv1/products/**' + +jobs: + lint-yaml: + runs-on: ubuntu-22.04 + steps: + - name: Checkout Repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + with: + path: repo + fetch-depth: 0 + - name: Check for mmv1 product file changes + id: pull_request + run: | + cd repo + git config user.name "modular-magician" + git config user.email "magic-modules@google.com" + git fetch origin ${{ github.base_ref }} # Fetch the base branch + git merge --no-ff origin/${{ 
github.base_ref }} # Merge with the base branch + yamlfiles=$(git diff --name-only origin/${{ github.base_ref }} -- mmv1/products) # Compare with the base branch + if [ ! -z "$yamlfiles" ]; then + echo "yamlfiles=repo/${yamlfiles//$'\n'/ repo/}" >> $GITHUB_OUTPUT + fi + - name: Install yamllint + if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} + run: pip install yamllint==1.32.0 pyyaml==6.0.1 pathspec==0.12.1 --no-deps + - name: Lint YAML files + if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} + run: yamllint -c repo/.yamllint ${{steps.pull_request.outputs.yamlfiles}} diff --git a/.github/workflows/override-labels.yml b/.github/workflows/override-labels.yml deleted file mode 100644 index a658bf452d53..000000000000 --- a/.github/workflows/override-labels.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: override-labels - -permissions: read-all - -on: - pull_request_target: - types: [labeled, unlabeled] - - -jobs: - override-breaking-change: - runs-on: ubuntu-22.04 - if: github.event.label.name == 'override-breaking-change' - permissions: - statuses: write - env: - STATE: "${{ github.event.action == 'labeled' && 'success' || 'failure' }}" - DESCRIPTION: "${{ github.event.action == 'labeled' && 'override-breaking-change applied' || 'override-breaking-change removed' }}" - steps: - - name: Override breaking changes label applied - shell: bash - run: | - curl -L \ - -X POST \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.head.sha }} \ - -d '{"state":"${{ env.STATE }}","description":"${{ env.DESCRIPTION }}","context":"terraform-provider-breaking-change-test"}' diff --git a/.github/workflows/reassign-reviewer.yml b/.github/workflows/reassign-reviewer.yml index 9da2f2dcbe01..5581c8519865 100644 --- a/.github/workflows/reassign-reviewer.yml +++ 
b/.github/workflows/reassign-reviewer.yml @@ -22,7 +22,7 @@ jobs: uses: actions-ecosystem/action-regex-match@d50fd2e7a37d0e617aea3d7ada663bd56862b9cc # v2.0.2 with: text: ${{ github.event.comment.body }} - regex: '.*@modular-magician (?:re)?assign[- ]review(?:er)? ?@?([a-zA-Z0-9-_]*).*' + regex: '(?:^|\n|\r)@modular-magician reassign-reviewer ?@?([a-zA-Z0-9-]+)?(?:$|\n|\r)' - name: Checkout Repository if: steps.read-comment.outputs.match != '' uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 @@ -33,6 +33,8 @@ jobs: uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 with: go-version: '^1.21' + # Disable caching for now due to issues with large provider dependency caches + cache: false - name: Build magician if: steps.read-comment.outputs.match != '' run: | diff --git a/.github/workflows/request-reviewer.yml b/.github/workflows/request-reviewer.yml index c5c3ccbb7b26..92cb937e709d 100644 --- a/.github/workflows/request-reviewer.yml +++ b/.github/workflows/request-reviewer.yml @@ -37,10 +37,4 @@ jobs: cd .ci/magician go build . - name: Request reviewer - if: ${{ github.event.issue.user.login != 'copybara-service' }} run: .ci/magician/magician request-reviewer ${{ github.event.pull_request.number || github.event.issue.number }} - - name: Request reviewer (copybara) - if: ${{ github.event.issue.user.login == 'copybara-service' }} - env: - GH_TOKEN: ${{secrets.GITHUB_TOKEN}} - run: gh pr edit ${{ github.event.pull_request.number || github.event.issue.number }} --add-reviewer "@ScottSuarez" diff --git a/.github/workflows/unit-test-magician.yml b/.github/workflows/unit-test-magician.yml index c90e855ebf5a..68680b5a75e0 100644 --- a/.github/workflows/unit-test-magician.yml +++ b/.github/workflows/unit-test-magician.yml @@ -19,7 +19,7 @@ jobs: - name: Run magician unit tests run: | cd .ci/magician - go test ./... + go test ./... 
-v env: GITHUB_TOKEN_CLASSIC: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/unit-test-mmv1.yml b/.github/workflows/unit-test-mmv1.yml deleted file mode 100644 index 5b26e7397436..000000000000 --- a/.github/workflows/unit-test-mmv1.yml +++ /dev/null @@ -1,111 +0,0 @@ -name: mmv1 - -permissions: read-all - -on: - push: - branches: - - main - - 'FEATURE-BRANCH-*' - merge_group: - types: [checks_requested] - pull_request: - -concurrency: - group: ${{ github.event_name == 'merge_group' && format('mmv1-merge-group-{0}', github.event.merge_group.head_sha) || github.event_name == 'pull_request' && format('mmv1-pr-{0}', github.event.pull_request.number) || format('mmv1-commit-{0}', github.sha) }} - cancel-in-progress: true - - -jobs: - version-guard-check: - runs-on: ubuntu-22.04 - steps: - - name: Checkout Repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 - with: - path: repo - fetch-depth: 0 - - name: Merge base branch - if: github.event_name == 'pull_request' - run: | - cd repo - git config user.name "modular-magician" - git config user.email "magic-modules@google.com" - git fetch origin ${{ github.base_ref }} # Fetch the base branch - git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch - - name: Check for invalid version guards - run: | - cd repo/tools/template-check - tmpls=$(git diff --name-only --diff-filter=d origin/${{ github.base_ref }} ../../*.tmpl | sed 's=^=../../=g') - tmpls=${tmpls//$'\n'/,} - echo $tmpls - if [[ -n "$tmpls" ]]; then - go run main.go version-guard --file-list $tmpls - fi - lint-yaml: - runs-on: ubuntu-22.04 - steps: - - name: Checkout Repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 - with: - path: repo - fetch-depth: 0 - - name: Merge base branch - if: github.event_name == 'pull_request' - run: | - cd repo - git config user.name "modular-magician" - git config user.email "magic-modules@google.com" - git fetch origin ${{ 
github.base_ref }} - git merge --no-ff origin/${{ github.base_ref }} - - name: Find YAML files to lint - id: yaml_files - run: | - cd repo - if [ "${{ github.event_name }}" == "pull_request" ]; then - # For PRs, get only changed files - yamlfiles=$(git diff --name-only origin/${{ github.base_ref }} -- mmv1/products) - if [ ! -z "$yamlfiles" ]; then - echo "yamlfiles=${yamlfiles//$'\n'/ }" >> $GITHUB_OUTPUT - fi - else - # For other events, get all YAML files - yamlfiles=$(find mmv1/products -name "*.yaml" -o -name "*.yml" | tr '\n' ' ') - if [ ! -z "$yamlfiles" ]; then - echo "yamlfiles=$yamlfiles" >> $GITHUB_OUTPUT - fi - fi - - name: Install yamllint - if: ${{ !failure() && steps.yaml_files.outputs.yamlfiles != '' }} - run: pip install yamllint==1.32.0 pyyaml==6.0.1 pathspec==0.12.1 --no-deps - - name: Lint YAML files - if: ${{ !failure() && steps.yaml_files.outputs.yamlfiles != '' }} - run: | - cd repo - yamllint -c .yamllint ${{ steps.yaml_files.outputs.yamlfiles }} - unit-tests: - runs-on: ubuntu-22.04 - steps: - - name: Checkout Repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 - with: - path: repo - fetch-depth: 0 - - name: Merge base branch - if: github.event_name == 'pull_request' - run: | - cd repo - git config user.name "modular-magician" - git config user.email "magic-modules@google.com" - git fetch origin ${{ github.base_ref }} # Fetch the base branch - git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch - - name: Set up Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version: '^1.23' - - name: Run mmv1 unit tests - run: | - cd repo - cd mmv1 - go test ./... 
- diff --git a/.github/workflows/unit-test-tools.yml b/.github/workflows/unit-test-tools.yml index 21cfe6eedb48..e63e9becc393 100644 --- a/.github/workflows/unit-test-tools.yml +++ b/.github/workflows/unit-test-tools.yml @@ -29,7 +29,7 @@ jobs: - name: Test diff-processor with TPG run: | cd tools/diff-processor - go test ./... + go test -v ./... env: SERVICES_DIR: tools/diff-processor/new/google/services @@ -42,7 +42,7 @@ jobs: - name: Test diff-processor with TPGB run: | cd tools/diff-processor - go test ./... + go test -v ./... env: SERVICES_DIR: tools/diff-processor/new/google/services @@ -64,7 +64,7 @@ jobs: - name: Test go-changelog run: | cd tools/go-changelog - go test ./... + go test -v ./... issue-labeler: runs-on: ubuntu-22.04 @@ -84,7 +84,7 @@ jobs: - name: Test issue-labeler run: | cd tools/issue-labeler - go test ./... + go test -v ./... template-check: runs-on: ubuntu-22.04 @@ -104,7 +104,7 @@ jobs: - name: Test template-check run: | cd tools/template-check - go test ./... + go test -v ./... test-reader: runs-on: ubuntu-22.04 @@ -124,4 +124,4 @@ jobs: - name: Test test-reader run: | cd tools/test-reader - go test ./... \ No newline at end of file + go test -v ./... 
\ No newline at end of file diff --git a/.gitignore b/.gitignore index 383e6dbd3c09..ab0fe65a0cd1 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,6 @@ # IDEA files .idea/* *.iml -**/.idea/* # OS generated files .DS_Store diff --git a/GNUmakefile b/GNUmakefile index 76ea7e731f17..5670b031bea0 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -53,21 +53,15 @@ endif ifeq ($(FORCE_DCL),) FORCE_DCL=latest endif - -SHOULD_SKIP_CLEAN := false # Default: do not skip -ifneq ($(SKIP_CLEAN),) - ifneq ($(SKIP_CLEAN),false) - SHOULD_SKIP_CLEAN := true - endif -endif - -terraform build provider: validate_environment clean-provider mmv1 tpgtools - @echo "Provider generation process finished for $(VERSION) in $(OUTPUT_PATH)" - +terraform build provider: + @make validate_environment; + make mmv1 + make tpgtools mmv1: - @echo "Executing mmv1 build for $(OUTPUT_PATH)"; - @cd mmv1;\ + # Chaining these with "&&" is critical so this will exit non-0 if the first + # command fails, since we're not forcing bash and errexit / pipefail here. + cd mmv1;\ if [ "$(VERSION)" = "ga" ]; then \ go run . --output $(OUTPUT_PATH) --version ga --no-docs $(mmv1_compile) \ && go run . --output $(OUTPUT_PATH) --version beta --no-code $(mmv1_compile); \ @@ -75,39 +69,15 @@ mmv1: go run . --output $(OUTPUT_PATH) --version $(VERSION) $(mmv1_compile); \ fi -tpgtools: serialize - @echo "Executing tpgtools build for $(OUTPUT_PATH)"; - @cd tpgtools;\ +tpgtools: + make serialize + cd tpgtools;\ go run . 
--output $(OUTPUT_PATH) --version $(VERSION) $(tpgtools_compile) -clean-provider: check_safe_build - @if [ -n "$(PRODUCT)" ]; then \ - printf "\n\e[1;33mWARNING:\e[0m Skipping clean-provider step because PRODUCT ('$(PRODUCT)') is set.\n"; \ - printf " Ensure your downstream repository is synchronized with the Magic Modules branch\n"; \ - printf " to avoid potential build inconsistencies.\n"; \ - printf " Downstream repository (OUTPUT_PATH): %s\n\n" "$(OUTPUT_PATH)"; \ - elif [ "$(SHOULD_SKIP_CLEAN)" = "true" ]; then \ - printf "\e[1;33mINFO:\e[0m Skipping clean-provider step because SKIP_CLEAN is set to a non-false value ('$(SKIP_CLEAN)').\n"; \ - else \ - echo "Executing clean-provider in $(OUTPUT_PATH)..."; \ - ( \ - cd $(OUTPUT_PATH) && \ - echo "---> Changing directory to $(OUTPUT_PATH)" && \ - if ! command -v git > /dev/null 2>&1; then \ - printf "\e[1;33mINFO:\e[0m Skipping git-based cleaning because git is not installed.\n"; \ - elif ! git rev-parse --is-inside-work-tree > /dev/null 2>&1; then \ - printf "\e[1;33mINFO:\e[0m Skipping git-based cleaning because $(OUTPUT_PATH) is not a git repository.\n"; \ - else \ - echo "---> Downloading Go module dependencies... (Ensures tools like gofmt can find relevant code)" && \ - go mod download && \ - echo "---> Finding tracked files to remove..." && \ - git ls-files | grep -v -E '(^\.git|^\.changelog|^\.travis\.yml$$|^\.golangci\.yml$$|^CHANGELOG\.md$$|^CHANGELOG_v.*\.md$$|^GNUmakefile$$|docscheck\.sh$$|^LICENSE$$|^CODEOWNERS$$|^README\.md$$|^\.go-version$$|^\.hashibot\.hcl$$|^go\.mod$$|^go\.sum$$|^examples)' | xargs -r git rm -f -q && \ - echo "---> Unstaging changes with git reset..." && \ - git reset -q && \ - echo "---> clean-provider actions finished. Changes have been unstaged."; \ - fi \ - ) && echo "clean-provider target finished successfully."; \ - fi +clean-provider: + cd $(OUTPUT_PATH);\ + go mod download;\ + find . 
-type f -not -wholename "./.git*" -not -wholename "./.changelog*" -not -name ".travis.yml" -not -name ".golangci.yml" -not -name "CHANGELOG.md" -not -name "CHANGELOG_v*.md" -not -name "GNUmakefile" -not -name "docscheck.sh" -not -name "LICENSE" -not -name "CODEOWNERS" -not -name "README.md" -not -wholename "./examples*" -not -name ".go-version" -not -name ".hashibot.hcl" -print0 | xargs -0 git rm > /dev/null clean-tgc: cd $(OUTPUT_PATH);\ @@ -152,25 +122,13 @@ upgrade-dcl: sed ${SED_I} "s!.*declarative-resource-client-library.*!$$MOD_LINE!" go.mod; echo "$$SUM_LINE" >> go.sum -validate_environment: check_parameters check_safe_build - -check_parameters: +validate_environment: # only print doctor script to console if there was a dependency failure detected. @./scripts/doctor 2>&1 > /dev/null || ./scripts/doctor - @[ -d "$(OUTPUT_PATH)" ] || (printf "\n\e[1;31mERROR: directory '$(OUTPUT_PATH)' does not exist - ENV variable \033[0mOUTPUT_PATH\e[1;31m should be set to a provider directory. \033[0m \n\n" && exit 1); - @[ -n "$(VERSION)" ] || (printf "\n\e[1;31mERROR: version '$(VERSION)' does not exist - ENV variable \033[0mVERSION\e[1;31m should be set to ga or beta \033[0m \n\n" && exit 1); - - -check_safe_build: - @([ -f "$(OUTPUT_PATH)/go.mod" ] && head -n 1 "$(OUTPUT_PATH)/go.mod" | grep -q 'terraform') || \ - ( \ - printf "\n\e[1;31mERROR: Validation failed for OUTPUT_PATH '$(OUTPUT_PATH)'.\n" && \ - printf " Either go.mod is missing or the module name within it does not contain 'terraform'.\n" && \ - printf " This is a safety check before cleaning/building. Halting.\033[0m\n\n" && \ - exit 1 \ - ); \ + @[ -d "$(OUTPUT_PATH)" ] || (printf " \e[1;31mdirectory '$(OUTPUT_PATH)' does not exist - ENV variable \033[0mOUTPUT_PATH\e[1;31m should be set to a provider directory. 
\033[0m \n" && exit 1); + @[ -n "$(VERSION)" ] || (printf " \e[1;31mversion '$(VERSION)' does not exist - ENV variable \033[0mVERSION\e[1;31m should be set to ga or beta \033[0m \n" && exit 1); doctor: ./scripts/doctor -.PHONY: mmv1 tpgtools test clean-provider validate_environment serialize doctor +.PHONY: mmv1 tpgtools test diff --git a/LICENSE b/LICENSE index dcb8de7551d3..ef51da2b0e8d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,381 +1,4 @@ -Files: tools/go-changelog/*, mmv1/third_party/terraform/* -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. 
"Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. 
Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. 
Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. 
However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. 
Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. 
* -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. - ---- - -Files: *, excluding tools/go-changelog/* and mmv1/third_party/terraform/* Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/docs/content/best-practices/common-resource-patterns.md b/docs/content/best-practices/common-resource-patterns.md index 26da45202b16..3956932a2a18 100644 --- a/docs/content/best-practices/common-resource-patterns.md +++ b/docs/content/best-practices/common-resource-patterns.md @@ -14,6 +14,4 @@ Implementing resources like this may require some or all of the following: 1. If there _isn't_ a create endpoint, set the [create_url]({{< ref "/reference/resource/#create_url" >}}) to point to the update endpoint. 1. If there _is_ a create endpoint, add [pre-create custom code]({{< ref "/develop/custom-code/#pre_post_injection" >}}) that implements "acquire-on-create" logic. The custom code should check whether the resource already exists with a read request, and if it does, run the update logic and return early. For example, see [mmv1/templates/terraform/pre_create/firebasehosting_site.go.tmpl](https://github.com/GoogleCloudPlatform/magic-modules/blob/dc4d9755cb9288177e0996c1c3b3fa9738ebdf89/mmv1/templates/terraform/pre_create/firebasehosting_site.go.tmpl). * Note: The main disadvantage of "acquire-on-create" logic is that users will not be presented with a diff between the resource's old and new states – because from the terraform perspective, the resource is only being created. Please upvote https://github.com/hashicorp/terraform/issues/19017 to request better support for this workflow. -1. 
If there is no delete endpoint, set [`exclude_delete: true`]({{< ref "/reference/resource/#create_url" >}}) at the top level of the resource. - -Tests for singletons can run into issues because they are modifying a shared state. To avoid the problems this can cause, ensure that the tests [create dedicated parent resources]({{< ref "/test/test#create-test-projects" >}}) instead of modifying the default test environment. If there need to be multiple test cases, make sure they either have individual parent resources, or that they run serially, like [TestAccAccessContextManager](https://github.com/hashicorp/terraform-provider-google-beta/blob/88fa0756f2ce116765edd4c1551680d9029621f6/google-beta/services/accesscontextmanager/resource_access_context_manager_access_policy_test.go#L31-L33). +1. If there is no delete endpoint, set [`exclude_delete: true`]({{< ref "/reference/resource/#create_url" >}}) at the top level of the resource. \ No newline at end of file diff --git a/docs/content/best-practices/validation.md b/docs/content/best-practices/validation.md deleted file mode 100644 index 6005577098fe..000000000000 --- a/docs/content/best-practices/validation.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: "Validation" -weight: 50 ---- - -# Validation - -There are a number of ways to add client-side validation to resources. The benefit of client-side validation is that errors can be surfaced at plan time, instead of partway through a (potentially very long) apply process, allowing for faster iteration. However, the tradeoff is that client-side validation can get out of sync with server-side validation, creating additional maintenance burden for the provider and preventing users from accessing the latest features without upgrading. 
- -Client-side validation is generally discouraged due to the low positive impact of an individual validation rule and outsized negative impact when client-side validation and API capabilities drift, requiring both provider changes and users to update. Client-side validation may be added in cases where it is extremely unlikely to change, covered below. - -The following sections cover best practices for specific types of client-side validation. - -## URL segments - -If a resource URL looks like: - -``` -projects/{project}/folders/{folder}/resource/{resource_id} -``` - -Adding validation for the last part of the path (`resource_id`) may be safe if there are specific restrictions that aren't going to change, such as following an external RFC or other spec/standard. However, if the API was ever less restrictive (or becomes less restrictive later), resources created with other tools and then imported into Terraform may be impossible to actually manage with Terraform (without deleting & recreating them) because the ID which was valid in the API violates the more restrictive validation in the provider. - -## Enum - -Enums are generally okay if they are exhaustive of all possible values for a clearly defined domain where new values are extremely unlikely. Otherwise, it is better to use a string field and add a link to the API documentation as a reference for the possible values. - -## Inter-field relationships - -[`conflicts`]({{< ref "/reference/field#conflicts" >}}), [`required_with`]({{< ref "/reference/field#required_with" >}}), [`exactly_one_of`]({{< ref "/reference/field#exactly_one_of" >}}), and [`at_least_one_of`]({{< ref "/reference/field#at_least_one_of" >}}) are often safe to add. However, if there is a chance that the API validation will relax in the future (such as two fields no longer being required together, or two fields no longer conflicting) it's better to not add the restriction in the first place. 
- -## Immutable facts - -It is safe to validate things that will definitely always be true about an API. For example, a `node_count` field will most likely always need to be non-negative. That is safe to validate. However, validating a max value for `node_count` may not be safe, because the API might increase the allowed values in the future. diff --git a/docs/content/breaking-changes/breaking-changes.md b/docs/content/breaking-changes/breaking-changes.md index 8a34fc167f56..debe0641cfe9 100644 --- a/docs/content/breaking-changes/breaking-changes.md +++ b/docs/content/breaking-changes/breaking-changes.md @@ -60,8 +60,7 @@ For more information, see * Between complex types like changing a List to a Set. * Changing the field type between primitive and complex data types is not possible. For this scenario, field renames are preferred. -* Making an optional field required -* Adding a required field to a pre-existing resource at any level of nesting, unless it is being added at the same time as an optional ancestor +* Making an optional field required or adding a new required field * Adding an "ExactlyOneOf" constraint that causes one or more previously-optional fields to be required or conflict with each other * Making a settable field read-only * For MMv1 resources, adding `output: true` to an existing field. @@ -82,9 +81,6 @@ For more information, see if the change will destroy and recreate the resource due to changing an immutable value. Default changes in the provider are comparable in impact to default changes in an API, and modifying examples and modules may achieve the intended effect with a smaller blast radius. 
-* Adding an optional field with a default value and force new to a pre-existing resource at any level of nesting, unless it is being added at the same time as an optional ancestor - * This can be allowed if there is a confirmed API-level default that matches the schema default - * Please work with your reviewer and ensure this scenario is debugged carefully to avoid a destructive permadiff * Modifying how field data is stored in state * For example, changing the case of a value returned by the API in a flattener or decorder * Removing diff suppression from a field. @@ -92,7 +88,6 @@ For more information, see * For handwritten resources, removing `DiffSuppressFunc` from a field. * Removing update support from a field. - ### Making validation more strict * Increasing the minimum number of items in an array diff --git a/docs/content/breaking-changes/make-a-breaking-change.md b/docs/content/breaking-changes/make-a-breaking-change.md index e717df96f9eb..231572f3c026 100644 --- a/docs/content/breaking-changes/make-a-breaking-change.md +++ b/docs/content/breaking-changes/make-a-breaking-change.md @@ -1,6 +1,6 @@ --- -majorVersion: "7.0.0" -upgradeGuide: "version_7_upgrade.html.markdown" +majorVersion: "6.0.0" +upgradeGuide: "version_6_upgrade.html.markdown" title: "Make a breaking change" summary: "Guidance on making a breaking changes" weight: 20 @@ -65,7 +65,7 @@ The general process for contributing a breaking change to the 1. Make the `main` branch forwards-compatible with the major release 2. Add deprecations and warnings to the `main` branch of `magic-modules` -3. Add upgrade guide entries to the `FEATURE-BRANCH-major-release-7.0.0` branch of `magic-modules` +3. Add upgrade guide entries to the `FEATURE-BRANCH-major-release-6.0.0` branch of `magic-modules` 4. Make the breaking change on `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` These are covered in more detail in the following sections. 
The upgrade guide @@ -198,9 +198,9 @@ with the following changes: merged into the major release branch every Monday. 1. Make the breaking change. 1. Add the upgrade guide entries to -[{{< param upgradeGuide >}}](https://github.com/GoogleCloudPlatform/magic-modules/blob/FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}/mmv1/third_party/terraform/website/docs/guides/{{< param upgradeGuide >}}). Entries should focus on the changes that users need to make when upgrading +[{{< param upgradeGuide >}}](https://github.com/GoogleCloudPlatform/magic-modules/blob/FEATURE-BRANCH-major-release-6.0.0/mmv1/third_party/terraform/website/docs/guides/{{< param upgradeGuide >}}). Entries should focus on the changes that users need to make when upgrading to `{{% param "majorVersion" %}}`, rather than how to write configurations -after upgrading. See [Terraform provider for Google Cloud 6.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_6_upgrade) +after upgrading. See [Terraform provider for Google Cloud 5.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_5_upgrade) and other upgrade guides for examples. 1. Remove any deprecation notices and warnings (including in documentation) not already removed by the breaking change. 1. When you create your pull request, diff --git a/docs/content/code-review/review-pr.md b/docs/content/code-review/review-pr.md index 5e52bf22212e..6eb13e961dd5 100644 --- a/docs/content/code-review/review-pr.md +++ b/docs/content/code-review/review-pr.md @@ -21,7 +21,6 @@ The following types of PRs may require additional scrutiny and/or multiple revie 1. 
Read the PR description to understand the context and ensure the PR either * is linked to a GitHub issue or an internal bug * if not, check the [issue tracker](https://github.com/hashicorp/terraform-provider-google/issues) to see whether the feature has already been requested and add the issues in the description, if any. - * "Fixes {github_issue_link}" is preferred if an external issue is available because it will [auto-close the issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/using-issues/linking-a-pull-request-to-an-issue) when the PR is merged. However, there's no need to create an external issue solely for this purpose. * establishes clear context itself via title or description. 2. If the PR adds any new resource, ensure that the resource does not already exist in the [GA provider](https://github.com/hashicorp/terraform-provider-google) or [beta provider](https://github.com/hashicorp/terraform-provider-google-beta) 1. Read through all the changes in the PR, generated code in the downstreams and the API documentation to ensure that: diff --git a/docs/content/develop/add-fields.md b/docs/content/develop/add-fields.md index f1e6bb8c571a..0bf5985f2855 100644 --- a/docs/content/develop/add-fields.md +++ b/docs/content/develop/add-fields.md @@ -19,14 +19,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. [Ensure the resource to which you want to add the fields exists in the provider]({{< ref "/develop/add-resource" >}}). 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ```bash - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . 
&& git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ``` + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . && git pull + ``` ## Add fields diff --git a/docs/content/develop/add-iam-support.md b/docs/content/develop/add-iam-support.md index 9cad46ef01cb..575e8950e6f0 100644 --- a/docs/content/develop/add-iam-support.md +++ b/docs/content/develop/add-iam-support.md @@ -14,14 +14,15 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ```bash - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ``` + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . 
&& git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . && git pull + ``` + ## Add IAM support {{< tabs "IAM" >}} diff --git a/docs/content/develop/add-resource.md b/docs/content/develop/add-resource.md index 45d00d1ebb10..52c2d1c3d24d 100644 --- a/docs/content/develop/add-resource.md +++ b/docs/content/develop/add-resource.md @@ -34,14 +34,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ```bash - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ``` + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . && git pull + ``` ## Add a resource @@ -163,8 +163,9 @@ For more information about types of resources and the generation process overall - Replace all occurrences of `github.com/hashicorp/terraform-provider-google-beta/google-beta` with `github.com/hashicorp/terraform-provider-google/google` - Remove the `Example` suffix from all test function names. - Remove the comments at the top of the file. 
- - If any of the added Go code (including any imports) is beta-only, change the file suffix to `.go.tmpl` and wrap the beta-only code in a version guard: `{{- if ne $.TargetVersionName "ga" -}}...{{- else }}...{{- end }}`. - - If the whole resource is beta-only, wrap everything except package declarations. Otherwise, individually wrap each logically-related block of code in a version guard (field, test, etc) rather than grouping adjacent version-guarded sections - it's easier to read and easier to modify as things move out of beta. + - If beta-only fields are being tested, do the following: + - Change the file suffix to `.go.tmpl` + - Wrap each beta-only test in a separate version guard: `{{- if ne $.TargetVersionName "ga" -}}...{{- else }}...{{- end }}` 5. Register the resource `handwrittenResources` in [`magic-modules/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl`](https://github.com/GoogleCloudPlatform/magic-modules/blob/main/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl) - Add a version guard for any beta-only resources. 6. Optional: Complete other handwritten tasks that require the MMv1 configuration file. diff --git a/docs/content/develop/generate-providers.md b/docs/content/develop/generate-providers.md index 75d06dbb46bd..1af1f99d7db9 100644 --- a/docs/content/develop/generate-providers.md +++ b/docs/content/develop/generate-providers.md @@ -24,8 +24,6 @@ provider changes to the `google` and `google-beta` Terraform providers. + [Adding custom resource code]({{< ref "/develop/custom-code" >}}). + [Promoting a resource to GA]({{< ref "/develop/promote-to-ga" >}}). -By default, running a full `make provider` command cleans the output directory (`OUTPUT_PATH`) before generating code to prevent sync issues. This will override and delete any changes to that directory. See the [`make` commands reference]({{< ref "/reference/make-commands" >}}) for details on advanced usage. - ## Generate a provider change 1. 
Clone the `google` and `google-beta` provider repositories with the following commands: @@ -35,14 +33,31 @@ By default, running a full `make provider` command cleans the output directory ( git clone https://github.com/hashicorp/terraform-provider-google-beta.git $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta ``` 1. Generate changes for the `google` provider: - ```bash - make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google" - ``` + ```bash + make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google" PRODUCT=[PRODUCT_NAME] + ``` + Where `[PRODUCT_NAME]` is one of the folder names in + https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/products. + + For example, if your product is `bigqueryanalyticshub`, the command would be + the following: + + ```bash + make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google" PRODUCT=bigqueryanalyticshub + ``` 1. Generate changes for the `google-beta` provider: - ```bash - make provider VERSION=beta OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google-beta" - ``` + ```bash + make provider VERSION=beta OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google-beta" PRODUCT=[PRODUCT_NAME] + ``` + + Where `[PRODUCT_NAME]` is one of the folder names in https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/products. + + For example, if your product name is `bigqueryanalyticshub`, the command would be the following: + + ```bash + make provider VERSION=beta OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform-provider-google-beta" PRODUCT=bigqueryanalyticshub + ``` 1. Confirm that the expected changes were generated: ```bash @@ -54,9 +69,12 @@ By default, running a full `make provider` command cleans the output directory ( {{< hint info >}} - **Note**: You might see additional changes in your `git diff` output beyond your own. 
This can happen if your `magic-modules` repository is out of sync with the provider repositories, causing the generator to also apply any pending updates from `magic-modules`. + **Note**: There may be additional changes present due to specifying a + `PRODUCT=` value or due to the `magic-modules` repository being out of sync + with the provider repositories. {{< /hint >}} + ## Troubleshoot ### Too many open files {#too-many-open-files} diff --git a/docs/content/develop/promote-to-ga.md b/docs/content/develop/promote-to-ga.md index 79b1f41d2858..bd5f3fd1b2e0 100644 --- a/docs/content/develop/promote-to-ga.md +++ b/docs/content/develop/promote-to-ga.md @@ -14,15 +14,15 @@ For more information about types of resources and the generation process overall ## Before you begin 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. -1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ```bash - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` +2. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. + ``` + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . 
&& git pull + ``` ## Promote fields and resources diff --git a/docs/content/document/add-documentation.md b/docs/content/document/add-documentation.md index 02462c3d14d0..3bc4c529b536 100644 --- a/docs/content/document/add-documentation.md +++ b/docs/content/document/add-documentation.md @@ -7,7 +7,7 @@ aliases: # Add documentation -Documentation is autogenerated based on the [resource]({{< ref "/develop/add-resource" >}}) and [field({{< ref "/develop/add-fields" >}}] configurations. This page describes how to add documentation to resources and fields. +Documentation is autogenerated based on the resource and field configurations. This page describes how to add documentation to resources and fields. For more information about types of resources and the generation process overall, see [How Magic Modules works]({{< ref "/" >}}). @@ -15,14 +15,14 @@ For more information about types of resources and the generation process overall 1. Complete the steps in [Set up your development environment]({{< ref "/develop/set-up-dev-environment" >}}) to set up your environment and your Google Cloud project. 1. Ensure that your `magic-modules`, `terraform-provider-google`, and `terraform-provider-google-beta` repositories are up to date. - ```bash - cd ~/magic-modules - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google - git checkout main && git clean -f . && git checkout -- . && git pull - cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta - git checkout main && git clean -f . && git checkout -- . && git pull - ``` + ``` + cd ~/magic-modules + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout main && git clean -f . && git checkout -- . && git pull + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout main && git clean -f . && git checkout -- . 
&& git pull + ``` ## Add documentation diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index ae40c0b3bd4d..dda2ff000d6f 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -27,17 +27,10 @@ is present in provider.yaml. Do not use if an ancestor field (or the overall resource) is already marked as beta-only. ### `immutable` -If true, the field is considered immutable - that is, only settable on create. If -unset or false, the field is considered to support update-in-place. - -Immutability is not inherited from field to field: subfields are still considered to -be updatable in place by default. However, if the overall resource has -[`immutable`]({{< ref "/reference/resource#immutable" >}}) set to true, all its -fields are considered immutable. Individual fields can override this for themselves -and their subfields with [`update_url`]({{< ref "/reference/field#update_url" >}}) -if they have a custom update method in the API. - -See [Best practices: Immutable fields]({{< ref "/best-practices/immutable-fields/" >}}) for more information. +If true, the field (and any subfields) are considered immutable - that is, +only settable on create. If unset or false, the field is still considered +immutable if any ancestor field (or the overall resource) is immutable, +unless `update_url` is set. Example: @@ -47,10 +40,8 @@ immutable: true ### `update_url` If set, changes to the field's value trigger a separate call to a specific -API method for updating the field's value. Even if the overall resource is marked -immutable, the field and its subfields are not considered immutable unless explicitly -marked as such. - +API method for updating the field's value. The field is not considered +immutable even if an ancestor field (or the overall resource) is immutable. Terraform field names enclosed in double curly braces are replaced with the field values from the resource at runtime. 
@@ -107,7 +98,7 @@ Example: sensitive: true ``` -### `write_only_legacy` (deprecated) +### `write_only` If true, the field is considered "write-only", which means that its value will be obscured in Terraform output as well as not be stored in state. This field is meant to replace `sensitive` as it doesn't store the value in state. See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) @@ -121,19 +112,22 @@ This field cannot be used in conjuction with `immutable` or `sensitive`. Example: ```yaml -write_only_legacy: true +write_only: true ``` -**Deprecated**: This field is deprecated and will be removed in a future release. - ### `ignore_read` If true, the provider sets the field's value in the resource state based only on the user's configuration. If false or unset, the provider sets the field's value in the resource state based on the API response. Only use this attribute if the field cannot be read from GCP due to either API or provider constraints. -`ignore_read` is current not supported inside arrays of nested objects. See [tpg#23630](https://github.com/hashicorp/terraform-provider-google/issues/23630) -for details and workarounds. +Nested fields currently +[do not support `ignore_read`](https://github.com/hashicorp/terraform-provider-google/issues/12410) +but can replicate the behavior by implementing a +[`custom_flatten`]({{< ref "/develop/custom-code#custom_flatten" >}}) +that always ignores the value returned by the API. [Example](https://github.com/GoogleCloudPlatform/magic-modules/blob/5923d4cb878396a04bed9beaf22a8478e8b1e6a5/mmv1/templates/terraform/custom_flatten/source_representation_instance_configuration_password.go.tmpl). +Any fields using a custom flatten also need to be added to `ignore_read_extra` +for any examples where the field is set. 
Example: YAML @@ -141,6 +135,14 @@ Example: YAML ignore_read: true ``` +Example: Custom flatten + +```go +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return d.Get("password") +} +``` + ### `default_value` Sets a client-side default value for the field. This should be used if the API has a default value that applies in all cases and is stable. Removing @@ -163,6 +165,7 @@ value for the field. This attribute is useful for complex or frequently-changed API-side defaults, but provides less useful information at plan time than `default_value` and causes the provider to ignore user configurations that explicitly set the field to an "empty" value. +`default_from_api` and `send_empty_value` cannot both be true on the same field. Example: @@ -177,10 +180,7 @@ strings) to the API if set explicitly in the user's configuration. If false, This attribute is useful for fields where the API would behave differently for an "empty" value vs no value for a particular field - for example, boolean fields that have an API-side default of true. - -If true simulataneously with `default_from_api`, the provider will send empty values -explicitly set in configuration. If the field is unset, the provider will -accept API values as the default as usual with `default_from_api`. +`send_empty_value` and `default_from_api` cannot both be true on the same field. Due to a [bug](https://github.com/hashicorp/terraform-provider-google/issues/13201), NestedObject fields will currently be sent as `null` if unset (rather than being @@ -276,8 +276,6 @@ Example: ``` ### `validation` -In many cases, it is better to avoid client-side validation. See [Best practices: Validation]({{< ref "/best-practices/validation" >}}) for more information. - Controls the value set for the field's [`ValidateFunc`](https://developer.hashicorp.com/terraform/plugin/sdkv2/schemas/schema-behaviors#validatefunc). 
For Enum fields, this will override the default validation (that the provided value is one of the enum [`values`](#values)). @@ -320,29 +318,6 @@ Example: Regex regex: '^[a-zA-Z][a-zA-Z0-9_]*$' ``` -### `is_set` -If true, the field is a Set rather than an Array. Set fields represent an -unordered set of unique elements. `set_hash_func` may be used to customize the -hash function used to index elements in the set, otherwise the schema default -function will be used. Adding this property to an existing field is usually a -breaking change. - -```yaml -- name: 'fieldOne' - type: Array - is_set: true -``` - -### `set_hash_func` -Specifies a function for hashing elements in a Set field. If unspecified, -`schema.HashString` will be used if the elements are strings, otherwise -`schema.HashSchema`. The hash function should be defined in -`custom_code.constants`. - -```yaml -set_hash_func: functionName -``` - ### `api_name` Specifies a name to use for communication with the API that is different than the name of the field in Terraform. In general, setting an `api_name` is not @@ -368,11 +343,10 @@ url_param_only: true ## `Enum` properties ### `enum_values` -Enum only. If the allowed values may change in the future, use a String field instead and link to API documentation -stating the current allowed values in the String field's description. -See [Best practices: Validation]({{< ref "/best-practices/validation" >}}) for more information. - -Do not include UNSPECIFIED values in this list. +Enum only. If the allowed values change frequently, use a String field instead +to allow better forwards-compatibility, and link to API documentation +stating the current allowed values in the String field's description. Do not +include UNSPECIFIED values in this list. Enums will validate that the provided field is in the allowed list unless a custom [`validation`]({{}}) is provided. @@ -426,8 +400,6 @@ item_type: Array only. 
Controls the [`ValidateFunc`](https://developer.hashicorp.com/terraform/plugin/sdkv2/schemas/schema-behaviors#validatefunc) used to validate individual items in the array. Behaves like [`validation`]({{}}). -In many cases, it is better to avoid client-side validation. See [Best practices: Validation]({{< ref "/best-practices/validation" >}}) for more information. - For arrays of enums, this will override the default validation (that the provided value is one of the enum [`values`](#values)). If you need additional validation on top of an enum, ensure that the supplied validation func also verifies the enum values are correct. diff --git a/docs/content/reference/make-commands.md b/docs/content/reference/make-commands.md index 6acdfc18b1a8..6742b44173a8 100644 --- a/docs/content/reference/make-commands.md +++ b/docs/content/reference/make-commands.md @@ -8,8 +8,7 @@ weight: 30 ### `make` / `make provider` -Generates the code for the downstream `google` and `google-beta` providers -into the `OUTPUT_PATH`, overriding and deleting any local changes. +Generates the code for the downstream `google` and `google-beta` providers. {{< hint info >}} **Note:** Generation works best if the downstream provider has a commit checked out corresponding to the latest `main` branch commit that is present in your `magic-modules` working branch. This can generally be identified based on matching commit messages. @@ -35,8 +34,7 @@ make provider VERSION=ga OUTPUT_PATH="$GOPATH/src/github.com/hashicorp/terraform - `OUTPUT_PATH`: Required. The location you are generating provider code into. - `VERSION`: Required. The version of the provider you are building into. Valid values are `ga` and `beta`. -- `PRODUCT`: Limits generations to the specified folder within `mmv1/products` or `tpgtools/api`. 
Handwritten files from `mmv1/third_party/terraform` are always generated into the downstream regardless of this setting, so you can provide a non-existent product name to generate only handwritten code. Required if `RESOURCE` is specified. **Using `PRODUCT` skips the pre-generation cleanup step. This is considered advanced usage; recommend running a full, clean build (`make provider` without `PRODUCT`) beforehand if repositories may be out of sync.** -- `SKIP_CLEAN`: If set to `true`, skips the default pre-generation cleanup of `OUTPUT_PATH` during a full provider build. Has no effect if `PRODUCT` is specified (as cleanup is already skipped). Example: `make provider VERSION=ga OUTPUT_PATH=... SKIP_CLEAN=true`. +- `PRODUCT`: Limits generations to the specified folder within `mmv1/products` or `tpgtools/api`. Handwritten files from `mmv1/third_party/terraform` are always generated into the downstream regardless of this setting, so you can provide a non-existent product name to generate only handwritten code. Required if `RESOURCE` is specified. - `RESOURCE`: Limits generation to the specified resource within a particular product. For `mmv1` resources, matches the resource's `name` field (set in its configuration file).For `tpgtools` resources, matches the terraform resource name. - `ENGINE`: Modifies `make provider` to only generate code using the specified engine. Valid values are `mmv1` or `tpgtools`. (Providing `tpgtools` will still generate any prerequisite mmv1 files required for tpgtools.) diff --git a/docs/content/reference/resource.md b/docs/content/reference/resource.md index 921af666156a..eb86839f3bc2 100644 --- a/docs/content/reference/resource.md +++ b/docs/content/reference/resource.md @@ -97,9 +97,8 @@ self_link: 'projects/{{project}}/locations/{{location}}/resourcenames/{{name}}' ### `immutable` If true, the resource and all its fields are considered immutable - that is, -only creatable, not updatable. 
Individual fields can override this for themselves and -their subfields with [`update_url`]({{< ref "/reference/field#update_url" >}}) -if they have a custom update method in the API. +only creatable, not updatable. Individual fields can override this if they +have a custom update method in the API. See [Best practices: Immutable fields]({{< ref "/best-practices/immutable-fields/" >}}) for more information. diff --git a/docs/content/test/run-tests.md b/docs/content/test/run-tests.md index cf460ce1b34b..a85d87a40cab 100644 --- a/docs/content/test/run-tests.md +++ b/docs/content/test/run-tests.md @@ -147,44 +147,6 @@ This indicates that after an apply to create or update a resource, the resource - The URL for reads was built incorrectly. The exact fix will depend on why this is happening. Run the test with the `TF_LOG=DEBUG` environment variable and check whether the read URL matches what you expect. - There is a call to unset the resource's id (`d.SetId("")`) somewhere it shouldn't be. The fix is to remove that extraneous call. This is rare. -### Error: Inconsistent dependency lock file - -Tests require all of the providers they use (except the one actually being tested) to be explicitly stated. This error generally means one of a few things: - -- If the error mentions `provider registry.terraform.io/hashicorp/google`: - - Beta-only test: This indicates that one of the `google_*` resources in the test doesn't have `provider = google-beta` set - - ```hcl - resource "google_compute_instance" "beta-instance" { - provider = google-beta - # ... - } - ``` - - GA+beta test: This indicates that the wrong setting is being used for `ProtoV5ProviderFactories` on a handwritten test case. Should be: - - ```go - acctest.VcrTest(t, resource.TestCase{ - // ... 
- ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ``` -- If the error mentions `provider registry.terraform.io/hashicorp/google-beta`: - - Beta-only test: This indicates that the wrong setting is being used for `ProtoV5ProviderFactories` on a handwritten test case. Should be: - - ```go - acctest.VcrTest(t, resource.TestCase{ - // ... - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - ``` - - GA+beta test: This indicates that one of the `google_*` resources in the test has `provider = google-beta` set. `provider = google-beta` can't be set unless the test is beta-only. -- If the error mentions some other provider: The test relies on an external provider, such as `time`, and that is not explicitly declared - - For MMv1 example-based tests, use [`examples.external_providers`](https://googlecloudplatform.github.io/magic-modules/reference/resource/#examples). - - For Handwritten tests, use TestCase.ExternalProviders: - ```go - acctest.VcrTest(t, resource.TestCase{ - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - // ... - } - ``` - ## Optional: Test with different `terraform` versions Tests will use whatever version of the `terraform` binary is found on your `PATH`. 
If you are testing a change that you know only impacts certain `terraform` versions, follow these steps: diff --git a/docs/content/test/test.md b/docs/content/test/test.md index 9fa7856a3c5f..d08b12236846 100644 --- a/docs/content/test/test.md +++ b/docs/content/test/test.md @@ -143,8 +143,6 @@ This section assumes you've used the [Add a resource]({{< ref "/develop/add-reso - If beta-only fields are being tested, do the following: - Change the file suffix to `.go.tmpl` - Wrap each beta-only test in a separate version guard: `{{- if ne $.TargetVersionName "ga" -}}...{{- else }}...{{- end }}` - - In each beta-only test, ensure that the TestCase sets `ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t)` - - In each beta-only test, ensure that all Terraform resources in all configs have `provider = google-beta` set {{< /tab >}} {{< /tabs >}} @@ -277,7 +275,7 @@ An update test is an **acceptance test** that creates the target resource and th {{< /tab >}} {{< /tabs >}} -## Bootstrap API resources {#bootstrapping} +## Bootstrapping API resources {#bootstrapping} Most acceptance tests run in a the default org and default test project, which means that they can conflict for quota, resource namespaces, and control over shared resources. You can work around these limitations with "bootstrapped" resources. @@ -445,79 +443,6 @@ func TestAccProductResource_update(t *testing.T) { {{< /tab >}} {{< /tabs >}} -## Create test projects -If [bootstrapping]({{< ref "#bootstrapping" >}}) doesn't work or isn't an option for some reason, you can also work around project quota issues or test project-global resources by creating a new test project. You will also need to enable any necessary APIs and wait for their enablement to propagate. 
- -```go -import ( - "testing" - - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest" - "github.com/hashicorp/terraform-provider-google-beta/google-beta/envvar" -) -func TestAccProductResourceName_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "org_id": envvar.GetTestOrgFromEnv(t), - } - acctest.VcrTest(t, resource.TestCase{ - // ... - // Add ExternalProviders so you can use `time_sleep` - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - testAccProductResourceName_update1(context), - }, - // ... - }, - }) -} - -func testAccProductResourceName_update1(context map[string]interface{}) string { - return accest.Nprintf(` -// Set up a test project -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -// Enable APIs in a deterministic order to avoid inconsistent VCR recordings -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.servicenetworking] -} - -// wait for API enablement -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.compute] -} - -resource "google_product_resource" "example" { - // ... 
- depends_on = [time_sleep.wait_120_seconds] -} - -`, context) -} -``` - ## Skip tests in VCR replaying mode {#skip-vcr} Acceptance tests are run in VCR replaying mode on PRs (using pre-recorded HTTP requests and responses) to reduce the time it takes to present results to contributors. However, not all resources or tests are possible to run in replaying mode. Incompatible tests should be skipped during VCR replaying mode. They will still run in our nightly test suite. diff --git a/mmv1/api/product.go b/mmv1/api/product.go index 4d6140c72e0c..0a42267653a1 100644 --- a/mmv1/api/product.go +++ b/mmv1/api/product.go @@ -14,7 +14,6 @@ package api import ( - "fmt" "log" "reflect" "regexp" @@ -58,9 +57,6 @@ type Product struct { // base URL. Specific to defining the resource as a CAI asset. CaiBaseUrl string - // CaiResourceType of resources that already have an AssetType constant defined in the product. - ResourcesWithCaiAssetType map[string]struct{} - // A function reference designed for the rare case where you // need to use retries in operation calls. Used for the service api // as it enables itself (self referential) and can result in occasional @@ -72,9 +68,6 @@ type Product struct { LegacyName string `yaml:"legacy_name,omitempty"` ClientName string `yaml:"client_name,omitempty"` - - // The compiler to generate the downstream files, for example "terraformgoogleconversion-codegen". 
- Compiler string `yaml:"-"` } func (p *Product) UnmarshalYAML(unmarshal func(any) error) error { @@ -140,10 +133,6 @@ func (p *Product) SetDisplayName() { } } -func (p *Product) SetCompiler(t string) { - p.Compiler = fmt.Sprintf("%s-codegen", strings.ToLower(t)) -} - // ==================== // Version-related methods // ==================== diff --git a/mmv1/api/product/version.go b/mmv1/api/product/version.go index b36dfd461d5a..d94c0a41e8e5 100644 --- a/mmv1/api/product/version.go +++ b/mmv1/api/product/version.go @@ -26,10 +26,9 @@ var ORDER = []string{"ga", "beta", "alpha", "private"} // a superset of beta, and beta a superset of GA. Each version will have a // different version url. type Version struct { - CaiBaseUrl string `yaml:"cai_base_url,omitempty"` - CaiLegacyBaseUrl string `yaml:"cai_legacy_base_url,omitempty"` - BaseUrl string `yaml:"base_url"` - Name string + CaiBaseUrl string `yaml:"cai_base_url,omitempty"` + BaseUrl string `yaml:"base_url"` + Name string } func (v *Version) Validate(pName string) { diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 5468905cddae..42b2315c2d14 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -13,23 +13,18 @@ package api import ( - "bytes" "fmt" "log" "maps" - "path/filepath" "regexp" - "slices" "sort" "strings" - "text/template" - - "github.com/golang/glog" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/resource" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/utils" "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" + "golang.org/x/exp/slices" ) const RELATIVE_MAGICIAN_LOCATION = "mmv1/" @@ -226,8 +221,9 @@ type Resource struct { // If true, resource is not importable ExcludeImport bool `yaml:"exclude_import,omitempty"` - // If true, resource should be autogenerated as a data source - Datasource *resource.Datasource `yaml:"datasource,omitempty"` + // If true, exclude resource from Terraform Validator + 
// (i.e. terraform-provider-conversion) + ExcludeTgc bool `yaml:"exclude_tgc,omitempty"` // If true, skip sweeper generation for this resource ExcludeSweeper bool `yaml:"exclude_sweeper,omitempty"` @@ -308,9 +304,6 @@ type Resource struct { // control if a resource is continuously generated from public OpenAPI docs AutogenStatus string `yaml:"autogen_status"` - // If true, this resource generates with the new plugin framework resource template - FrameworkResource bool `yaml:"plugin_framework,omitempty"` - // The three groups of []*Type fields are expected to be strictly ordered within a yaml file // in the sequence of Virtual Fields -> Parameters -> Properties @@ -358,39 +351,6 @@ type Resource struct { ImportPath string `yaml:"-"` SourceYamlFile string `yaml:"-"` - - // ==================== - // TGC - // ==================== - TGCResource `yaml:",inline"` -} - -type TGCResource struct { - // If true, exclude resource from Terraform Validator - // (i.e. terraform-provider-conversion) - ExcludeTgc bool `yaml:"exclude_tgc,omitempty"` - - // If true, include resource in the new package of TGC (terraform-provider-conversion) - IncludeInTGCNext bool `yaml:"include_in_tgc_next_DO_NOT_USE,omitempty"` - - // Name of the hcl resource block used in TGC - TgcHclBlockName string `yaml:"tgc_hcl_block_name,omitempty"` - - // The resource kind in CAI. - // If this is not set, then :name is used instead. - // For example: compute.googleapis.com/Address has Address for CaiResourceKind, - // and compute.googleapis.com/GlobalAddress has GlobalAddress for CaiResourceKind. - // But they have the same api resource type: address - CaiResourceKind string `yaml:"cai_resource_kind,omitempty"` - - // If true, the Terraform custom encoder is not applied during tfplan2cai - TGCIgnoreTerraformEncoder bool `yaml:"tgc_ignore_terraform_encoder,omitempty"` - - // [Optional] The parameter that uniquely identifies the resource. 
- // Generally, it's safe to leave empty, in which case it defaults to `name`. - // Other values are normally useful in cases where an object has a parent - // and is identified by some non-name value, such as an ip+port pair. - CaiIdentity string `yaml:"cai_identity,omitempty"` } func (r *Resource) UnmarshalYAML(unmarshal func(any) error) error { @@ -423,11 +383,7 @@ func (r *Resource) SetDefault(product *Product) { r.ApiName = r.Name } if r.CollectionUrlKey == "" { - key := r.Name - if r.ApiResourceTypeKind != "" { - key = r.ApiResourceTypeKind - } - r.CollectionUrlKey = google.Camelize(google.Plural(key), "lower") + r.CollectionUrlKey = google.Camelize(google.Plural(r.Name), "lower") } if r.IdFormat == "" { r.IdFormat = r.SelfLinkUri() @@ -617,7 +573,7 @@ func (r Resource) SensitiveProps() []*Type { func (r Resource) WriteOnlyProps() []*Type { props := r.AllNestedProperties(r.RootProperties()) return google.Select(props, func(p *Type) bool { - return p.WriteOnlyLegacy + return p.WriteOnly }) } @@ -1203,42 +1159,31 @@ func ImportIdFormats(importFormat, identity []string, baseUrl string) []string { return uniq } -// IgnoreReadProperties returns a sorted slice of property names (snake_case) that should be ignored when reading. -// This is useful for downstream code that needs to iterate over these properties. -func (r Resource) IgnoreReadProperties(e resource.Examples) []string { +func (r Resource) IgnoreReadPropertiesToString(e resource.Examples) string { var props []string for _, tp := range r.AllUserProperties() { if tp.UrlParamOnly || tp.IsA("ResourceRef") { - props = append(props, google.Underscore(tp.Name)) + props = append(props, fmt.Sprintf("\"%s\"", google.Underscore(tp.Name))) } } - props = append(props, e.IgnoreReadExtra...) - props = append(props, r.IgnoreReadLabelsFields(r.PropertiesWithExcluded())...) - props = append(props, ignoreReadFields(r.AllUserProperties())...) 
+ for _, tp := range e.IgnoreReadExtra { + props = append(props, fmt.Sprintf("\"%s\"", tp)) + } + for _, tp := range r.IgnoreReadLabelsFields(r.PropertiesWithExcluded()) { + props = append(props, fmt.Sprintf("\"%s\"", tp)) + } + for _, tp := range ignoreReadFields(r.AllUserProperties()) { + props = append(props, fmt.Sprintf("\"%s\"", tp)) + } slices.Sort(props) - return props -} -// IgnoreReadPropertiesToString returns the ignore read properties as a Go-syntax string slice. -// This is a wrapper around IgnoreReadProperties for backwards compatibility. -func (r Resource) IgnoreReadPropertiesToString(e resource.Examples) string { - props := r.IgnoreReadProperties(e) if len(props) > 0 { - return fmt.Sprintf("[]string{%s}", strings.Join(quoteStrings(props), ", ")) + return fmt.Sprintf("[]string{%s}", strings.Join(props, ", ")) } return "" } -// quoteStrings returns a new slice with each string quoted. -func quoteStrings(strs []string) []string { - quoted := make([]string, len(strs)) - for i, s := range strs { - quoted[i] = fmt.Sprintf("\"%s\"", s) - } - return quoted -} - func ignoreReadFields(props []*Type) []string { var fields []string for _, tp := range props { @@ -1267,30 +1212,20 @@ func (r Resource) GetIdFormat() string { } // Returns true if the Type is in the ID format and false otherwise. 
-func (r Resource) InPostCreateComputed(prop Type) bool { - fields := map[string]struct{}{} - for _, f := range r.ExtractIdentifiers(r.GetIdFormat()) { - fields[f] = struct{}{} - } - for _, f := range r.ExtractIdentifiers(r.SelfLinkUri()) { - fields[f] = struct{}{} - } - _, ok := fields[google.Underscore(prop.Name)] - return ok +func (r Resource) InIdFormat(prop Type) bool { + fields := r.ExtractIdentifiers(r.GetIdFormat()) + return slices.Contains(fields, google.Underscore(prop.Name)) } // Returns true if at least one of the fields in the ID format is computed -func (r Resource) HasPostCreateComputedFields() bool { - fields := map[string]struct{}{} +func (r Resource) HasComputedIdFormatFields() bool { + idFormatFields := map[string]struct{}{} for _, f := range r.ExtractIdentifiers(r.GetIdFormat()) { - fields[f] = struct{}{} - } - for _, f := range r.ExtractIdentifiers(r.SelfLinkUri()) { - fields[f] = struct{}{} + idFormatFields[f] = struct{}{} } for _, p := range r.GettableProperties() { // Skip fields not in the id format - if _, ok := fields[google.Underscore(p.Name)]; !ok { + if _, ok := idFormatFields[google.Underscore(p.Name)]; !ok { continue } if (p.Output || p.DefaultFromApi) && !p.IgnoreRead { @@ -1460,27 +1395,13 @@ func (r Resource) IamSelfLinkIdentifiers() []string { return r.ExtractIdentifiers(selfLink) } -// Returns the resource properties that are idenfifires in Iam resource when generating the docs. -// The "project" and "organization" properties are excluded, as they are handled seperated in the docs. 
-func (r Resource) IamResourceProperties() []*Type { - urlProperties := make([]*Type, 0) - for _, param := range r.IamResourceParams() { - if param == "project" || param == "organization" { - continue - } +// Returns the resource properties that are idenfifires in the selflink url +func (r Resource) IamSelfLinkProperties() []*Type { + params := r.IamSelfLinkIdentifiers() - found := false - for _, p := range r.AllUserProperties() { - if param == google.Underscore(p.Name) { - urlProperties = append(urlProperties, p) - found = true - break - } - } - if !found { - urlProperties = append(urlProperties, &Type{Name: param}) - } - } + urlProperties := google.Select(r.AllUserProperties(), func(p *Type) bool { + return slices.Contains(params, p.Name) + }) return urlProperties } @@ -1669,45 +1590,13 @@ func (r Resource) FormatDocDescription(desc string, indent bool) string { } func (r Resource) CustomTemplate(templatePath string, appendNewline bool) string { - output := ExecuteTemplate(&r, templatePath, appendNewline) + output := resource.ExecuteTemplate(&r, templatePath, appendNewline) if !appendNewline { output = strings.TrimSuffix(output, "\n") } return output } -func ExecuteTemplate(e any, templatePath string, appendNewline bool) string { - templates := []string{ - templatePath, - "templates/terraform/expand_resource_ref.tmpl", - "templates/terraform/custom_flatten/bigquery_table_ref.go.tmpl", - "templates/terraform/flatten_property_method.go.tmpl", - "templates/terraform/expand_property_method.go.tmpl", - "templates/terraform/update_mask.go.tmpl", - "templates/terraform/nested_query.go.tmpl", - "templates/terraform/unordered_list_customize_diff.go.tmpl", - } - templateFileName := filepath.Base(templatePath) - - tmpl, err := template.New(templateFileName).Funcs(google.TemplateFunctions).ParseFiles(templates...) 
- if err != nil { - glog.Exit(err) - } - - contents := bytes.Buffer{} - if err = tmpl.ExecuteTemplate(&contents, templateFileName, e); err != nil { - glog.Exit(err) - } - - rs := contents.String() - - if !strings.HasSuffix(rs, "\n") && appendNewline { - rs = fmt.Sprintf("%s\n", rs) - } - - return rs -} - // Returns the key of the list of resources in the List API response // Used to get the list of resources to sweep func (r Resource) ResourceListKey() string { @@ -1864,20 +1753,6 @@ func (r Resource) CaiProductBaseUrl() string { return baseUrl } -// Gets the CAI product legacy base url. -// For example, https://www.googleapis.com/compute/v1/ for compute -func (r Resource) CaiProductLegacyBaseUrl() string { - version := r.ProductMetadata.VersionObjOrClosest(r.TargetVersionName) - baseUrl := version.CaiLegacyBaseUrl - if baseUrl == "" { - baseUrl = version.CaiBaseUrl - } - if baseUrl == "" { - baseUrl = version.BaseUrl - } - return baseUrl -} - // Returns the Cai product backend name from the version base url // base_url: https://accessapproval.googleapis.com/v1/ -> accessapproval func (r Resource) CaiProductBackendName(caiProductBaseUrl string) string { @@ -1885,43 +1760,18 @@ func (r Resource) CaiProductBackendName(caiProductBaseUrl string) string { return strings.ToLower(backendUrl) } -// Returns the asset type for this resource. -func (r Resource) CaiAssetType() string { - baseURL := r.CaiProductBaseUrl() - productBackendName := r.CaiProductBackendName(baseURL) - return fmt.Sprintf("%s.googleapis.com/%s", productBackendName, r.CaiResourceName()) -} - -// DefineAssetTypeForResourceInProduct marks the AssetType constant for this resource as defined. -// It returns true if this is the first time it's been called for this resource, -// and false otherwise, preventing duplicate definitions. 
-func (r Resource) DefineAssetTypeForResourceInProduct() bool { - if r.ProductMetadata.ResourcesWithCaiAssetType == nil { - r.ProductMetadata.ResourcesWithCaiAssetType = make(map[string]struct{}, 1) - } - if _, alreadyDefined := r.ProductMetadata.ResourcesWithCaiAssetType[r.CaiResourceType()]; alreadyDefined { - return false - } - r.ProductMetadata.ResourcesWithCaiAssetType[r.CaiResourceType()] = struct{}{} - return true -} - // Gets the Cai asset name template, which could include version // For example: //monitoring.googleapis.com/v3/projects/{{project}}/services/{{service_id}} func (r Resource) rawCaiAssetNameTemplate(productBackendName string) string { caiBaseUrl := "" - caiId := "name" - if r.CaiIdentity != "" { - caiId = r.CaiIdentity - } if r.CaiBaseUrl != "" { - caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.CaiBaseUrl, caiId) + caiBaseUrl = fmt.Sprintf("%s/{{name}}", r.CaiBaseUrl) } if caiBaseUrl == "" { caiBaseUrl = r.SelfLink } if caiBaseUrl == "" { - caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.BaseUrl, caiId) + caiBaseUrl = fmt.Sprintf("%s/{{name}}", r.BaseUrl) } return fmt.Sprintf("//%s.googleapis.com/%s", productBackendName, caiBaseUrl) } @@ -2023,74 +1873,6 @@ func urlContainsOnlyAllowedKeys(templateURL string, allowedKeys []string) bool { return true } -func (r *Resource) ShouldGenerateSingularDataSource() bool { - - if r.Datasource == nil { - return false - } - - return r.Datasource.Generate -} - -func (r *Resource) ShouldGenerateSingularDataSourceTests() bool { - if r.Datasource == nil { - return false - } - return !r.Datasource.ExcludeTest -} - -func (r Resource) ShouldDatasourceSetLabels() bool { - for _, p := range r.Properties { - if p.Name == "labels" && p.Type == "KeyValueLabels" { - return true - } - } - return false -} - -func (r Resource) ShouldDatasourceSetAnnotations() bool { - for _, p := range r.Properties { - if p.Name == "annotations" && p.Type == "KeyValueAnnotations" { - return true - } - } - return false -} - -// DatasourceOptionalFields 
returns a list of fields from the resource's URI -// that should be marked as "Required". -func (r Resource) DatasourceRequiredFields() []string { - requiredFields := []string{} - uriParts := strings.Split(r.IdFormat, "/") - - for _, part := range uriParts { - if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { - field := strings.TrimSuffix(strings.TrimPrefix(part, "{{"), "}}") - if field != "region" && field != "project" && field != "zone" { - requiredFields = append(requiredFields, field) - } - } - } - return requiredFields -} - -// DatasourceOptionalFields returns a list of fields from the resource's URI -// that should be marked as "Optional". -func (r Resource) DatasourceOptionalFields() []string { - optionalFields := []string{} - uriParts := strings.Split(r.IdFormat, "/") - - for _, part := range uriParts { - if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { - field := strings.TrimSuffix(strings.TrimPrefix(part, "{{"), "}}") - if field == "region" || field == "project" || field == "zone" { - optionalFields = append(optionalFields, field) - } - } - } - return optionalFields -} - func (r Resource) ShouldGenerateSweepers() bool { if !r.ExcludeSweeper && !utils.IsEmpty(r.Sweeper) { return true @@ -2133,75 +1915,3 @@ func (r Resource) CodeHeader(templatePath string) string { func (r Resource) MarkdownHeader(templatePath string) string { return strings.Replace(r.CodeHeader(templatePath), "//", "#", -1) } - -// TGC Methods -// ==================== -// Lists fields that test.BidirectionalConversion should ignore -func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string { - props := []string{ - "depends_on", - "count", - "for_each", - "provider", - "lifecycle", - } - for _, tp := range r.VirtualFields { - props = append(props, google.Underscore(tp.Name)) - } - for _, tp := range r.AllNestedProperties(r.RootProperties()) { - if tp.UrlParamOnly { - props = append(props, google.Underscore(tp.Name)) - } else if 
tp.IsMissingInCai { - props = append(props, tp.MetadataLineage()) - } - } - props = append(props, e.TGCTestIgnoreExtra...) - - slices.Sort(props) - return props -} - -// Filters out computed properties during cai2hcl -func (r Resource) ReadPropertiesForTgc() []*Type { - return google.Reject(r.AllUserProperties(), func(v *Type) bool { - return v.Output || v.UrlParamOnly - }) -} - -// OutputFieldSetStr returns a Go-syntax string representation of a set -// containing all the output properties for a resource. -// The property names are converted to snake_case. -// This is useful for generating code that requires a map literal of field names. -func (r Resource) OutputFieldSetStr() string { - fieldNames := make(map[string]struct{}) - for _, tp := range r.AllUserProperties() { - if tp.Output { - fieldNames[google.Underscore(tp.Name)] = struct{}{} - } - } - return fmt.Sprintf("%#v", fieldNames) -} - -// For example, the CAI resource type with product of "google_compute_autoscaler" is "ComputeAutoscalerAssetType". -// The CAI resource type with product of "google_compute_region_autoscaler" is also "ComputeAutoscalerAssetType". -func (r Resource) CaiResourceType() string { - return fmt.Sprintf("%s%s", r.ProductMetadata.Name, r.CaiResourceName()) -} - -// The API resource type of the resource. Normally, it is the resource name. -// Rarely, it is the API "resource type kind" or CAI "resource kind" -// For example, the CAI resource type of "google_compute_autoscaler" is "Autoscaler". -// The CAI resource type of "google_compute_region_autoscaler" is also "Autoscaler". 
-func (r Resource) CaiResourceName() string { - if r.CaiResourceKind != "" { - return r.CaiResourceKind - } - if r.ApiResourceTypeKind != "" { - return r.ApiResourceTypeKind - } - return r.Name -} - -func (r Resource) IsTgcCompiler() bool { - return r.Compiler == "terraformgoogleconversionnext-codegen" -} diff --git a/mmv1/api/resource/custom_code.go b/mmv1/api/resource/custom_code.go index 50e4425f618a..a7281b918136 100644 --- a/mmv1/api/resource/custom_code.go +++ b/mmv1/api/resource/custom_code.go @@ -139,11 +139,4 @@ type CustomCode struct { TestCheckDestroy string `yaml:"test_check_destroy"` ValidateRawResourceConfigFuncs string `yaml:"raw_resource_config_validation"` - - // ==================== - // TGC Encoders & Decoders - // ==================== - TgcEncoder string `yaml:"tgc_encoder"` - - TgcDecoder string `yaml:"tgc_decoder"` } diff --git a/mmv1/api/resource/datasource.go b/mmv1/api/resource/datasource.go deleted file mode 100644 index 2fe4f4755d6f..000000000000 --- a/mmv1/api/resource/datasource.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2024 Google Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package resource - -type Datasource struct { - // boolean to determine whether the datasource file should be generated - Generate bool `yaml:"generate"` - // boolean to determine whether tests should be generated for a datasource - ExcludeTest bool `yaml:"exclude_test"` -} diff --git a/mmv1/api/resource/examples.go b/mmv1/api/resource/examples.go index 2040a19e535f..b2d267363e8d 100644 --- a/mmv1/api/resource/examples.go +++ b/mmv1/api/resource/examples.go @@ -18,7 +18,6 @@ import ( "fmt" "log" "net/url" - "os" "path/filepath" "regexp" "slices" @@ -175,23 +174,6 @@ type Examples struct { DocumentationHCLText string `yaml:"-"` TestHCLText string `yaml:"-"` OicsHCLText string `yaml:"-"` - - // ==================== - // TGC - // ==================== - // Extra properties to ignore test. - // These properties are present in Terraform resources schema, but not in CAI assets. - // Virtual Fields and url parameters are already ignored by default and do not need to be duplicated here. - TGCTestIgnoreExtra []string `yaml:"tgc_test_ignore_extra,omitempty"` - // The properties ignored in CAI assets. It is rarely used and only used - // when the nested field has sent_empty_value: true. - // But its parent field is C + O and not specified in raw_config. - // Example: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec']. - // "RESOURCE" means that the property is for resource data in CAI asset. - TGCTestIgnoreInAsset []string `yaml:"tgc_test_ignore_in_asset,omitempty"` - // The reason to skip a test. For example, a link to a ticket explaining the issue that needs to be resolved before - // unskipping the test. If this is not empty, the test will be skipped. 
- TGCSkipTest string `yaml:"tgc_skip_test,omitempty"` } // Set default value for fields @@ -219,22 +201,6 @@ func (e *Examples) Validate(rName string) { e.ValidateExternalProviders() } -func validateRegexForContents(r *regexp.Regexp, contents string, configPath string, objName string, vars map[string]string) { - matches := r.FindAllStringSubmatch(contents, -1) - for _, v := range matches { - found := false - for k, _ := range vars { - if k == v[1] { - found = true - break - } - } - if !found { - log.Fatalf("Failed to find %s environment variable defined in YAML file when validating the file %s. Please define this in %s", v[1], configPath, objName) - } - } -} - func (e *Examples) ValidateExternalProviders() { // Official providers supported by HashiCorp // https://registry.terraform.io/search/providers?namespace=hashicorp&tier=official @@ -283,7 +249,7 @@ func (e *Examples) SetHCLText() { docTestEnvVars[key] = docs_defaults[e.TestEnvVars[key]] } e.TestEnvVars = docTestEnvVars - e.DocumentationHCLText = e.ExecuteTemplate() + e.DocumentationHCLText = ExecuteTemplate(e, e.ConfigPath, true) e.DocumentationHCLText = regexp.MustCompile(`\n\n$`).ReplaceAllString(e.DocumentationHCLText, "\n") // Remove region tags @@ -324,7 +290,7 @@ func (e *Examples) SetHCLText() { e.Vars = testVars e.TestEnvVars = testTestEnvVars - e.TestHCLText = e.ExecuteTemplate() + e.TestHCLText = ExecuteTemplate(e, e.ConfigPath, true) e.TestHCLText = regexp.MustCompile(`\n\n$`).ReplaceAllString(e.TestHCLText, "\n") // Remove region tags e.TestHCLText = re1.ReplaceAllString(e.TestHCLText, "") @@ -336,23 +302,20 @@ func (e *Examples) SetHCLText() { e.TestEnvVars = originalTestEnvVars } -func (e *Examples) ExecuteTemplate() string { - templateContent, err := os.ReadFile(e.ConfigPath) - if err != nil { - glog.Exit(err) +func ExecuteTemplate(e any, templatePath string, appendNewline bool) string { + templates := []string{ + templatePath, + "templates/terraform/expand_resource_ref.tmpl", + 
"templates/terraform/custom_flatten/bigquery_table_ref.go.tmpl", + "templates/terraform/flatten_property_method.go.tmpl", + "templates/terraform/expand_property_method.go.tmpl", + "templates/terraform/update_mask.go.tmpl", + "templates/terraform/nested_query.go.tmpl", + "templates/terraform/unordered_list_customize_diff.go.tmpl", } + templateFileName := filepath.Base(templatePath) - fileContentString := string(templateContent) - - // Check that any variables in Vars or TestEnvVars used in the example are defined via YAML - envVarRegex := regexp.MustCompile(`{{index \$\.TestEnvVars "([a-zA-Z_]*)"}}`) - validateRegexForContents(envVarRegex, fileContentString, e.ConfigPath, "test_env_vars", e.TestEnvVars) - varRegex := regexp.MustCompile(`{{index \$\.Vars "([a-zA-Z_]*)"}}`) - validateRegexForContents(varRegex, fileContentString, e.ConfigPath, "vars", e.Vars) - - templateFileName := filepath.Base(e.ConfigPath) - - tmpl, err := template.New(templateFileName).Funcs(google.TemplateFunctions).Parse(fileContentString) + tmpl, err := template.New(templateFileName).Funcs(google.TemplateFunctions).ParseFiles(templates...) 
if err != nil { glog.Exit(err) } @@ -364,7 +327,7 @@ func (e *Examples) ExecuteTemplate() string { rs := contents.String() - if !strings.HasSuffix(rs, "\n") { + if !strings.HasSuffix(rs, "\n") && appendNewline { rs = fmt.Sprintf("%s\n", rs) } @@ -438,7 +401,7 @@ func (e *Examples) SetOiCSHCLText() { } e.Vars = testVars - e.OicsHCLText = e.ExecuteTemplate() + e.OicsHCLText = ExecuteTemplate(e, e.ConfigPath, true) e.OicsHCLText = regexp.MustCompile(`\n\n$`).ReplaceAllString(e.OicsHCLText, "\n") // Remove region tags diff --git a/mmv1/api/resource/iam_policy.go b/mmv1/api/resource/iam_policy.go index fb6a7ab34705..37973ee462ae 100644 --- a/mmv1/api/resource/iam_policy.go +++ b/mmv1/api/resource/iam_policy.go @@ -114,9 +114,6 @@ type IamPolicy struct { // [Optional] Check to see if zone value should be replaced with GOOGLE_ZONE in iam tests // Defaults to true SubstituteZoneValue bool `yaml:"substitute_zone_value"` - - // Add a deprecation message for a resource that's been deprecated in the API. 
- DeprecationMessage string `yaml:"deprecation_message,omitempty"` } func (p *IamPolicy) UnmarshalYAML(unmarshal func(any) error) error { diff --git a/mmv1/api/resource_test.go b/mmv1/api/resource_test.go index ad7dd327b288..6f88fc0ac35b 100644 --- a/mmv1/api/resource_test.go +++ b/mmv1/api/resource_test.go @@ -358,7 +358,7 @@ func TestMagicianLocation(t *testing.T) { } } -func TestHasPostCreateComputedFields(t *testing.T) { +func TestHasComputedIdFormatFields(t *testing.T) { cases := []struct { name, description string resource Resource @@ -476,29 +476,15 @@ func TestHasPostCreateComputedFields(t *testing.T) { }, want: true, }, - { - name: "includes fields in self link that aren't in id format", - resource: Resource{ - IdFormat: "projects/{{project}}/resource/{{resource_id}}", - SelfLink: "{{name}}", - Properties: []*Type{ - { - Name: "name", - Output: true, - }, - }, - }, - want: true, - }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { t.Parallel() - got := tc.resource.HasPostCreateComputedFields() + got := tc.resource.HasComputedIdFormatFields() if got != tc.want { - t.Errorf("HasPostCreateComputedFields(%q) returned unexpected value. got %t; want %t.", tc.name, got, tc.want) + t.Errorf("HasComputedIdFormatFields(%q) returned unexpected value. 
got %t; want %t.", tc.name, got, tc.want) } }) } diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 341b31d44ee7..573730146017 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -171,9 +171,7 @@ type Type struct { Sensitive bool `yaml:"sensitive,omitempty"` // Adds `Sensitive: true` to the schema - // TODO: remove this field after all references are migrated - // see: https://github.com/GoogleCloudPlatform/magic-modules/pull/14933#pullrequestreview-3166578379 - WriteOnlyLegacy bool `yaml:"write_only_legacy,omitempty"` // Adds `WriteOnlyLegacy: true` to the schema + WriteOnly bool `yaml:"write_only,omitempty"` // Adds `WriteOnly: true` to the schema // Does not set this value to the returned API value. Useful for fields // like secrets where the returned API value is not helpful. @@ -295,38 +293,6 @@ type Type struct { // The prefix used as part of the property expand/flatten function name // flatten{{$.GetPrefix}}{{$.TitlelizeProperty}} Prefix string `yaml:"prefix,omitempty"` - - // The field is not present in CAI asset - IsMissingInCai bool `yaml:"is_missing_in_cai,omitempty"` - - // A custom expander replaces the default expander for an attribute. - // It is called as part of tfplan2cai conversion if - // object.input is false. It can return an object of any type, - // so the function header *is* part of the custom code template. - // As with flatten, `property` and `prefix` are available. - CustomTgcExpand string `yaml:"custom_tgc_expand,omitempty"` - - // A custom flattener replaces the default flattener for an attribute. - // It is called as part of cai2hcl conversion. It can return an object of any type, - // so the function header *is* a part of the custom code template. To help with - // creating the function header, `property` and `prefix` are available, - // just as they are in the standard flattener template. 
- CustomTgcFlatten string `yaml:"custom_tgc_flatten,omitempty"` - - // If true, the empty value of this attribute in CAI asset is included. - IncludeEmptyValueInCai bool `yaml:"include_empty_value_in_cai,omitempty"` - - // If the property is type of bool and has `defaul_from_api: true`, - // include empty value in CAI asset by default during tfplan2cai conversion. - // Use `exclude_false_in_cai` to override the default behavior - // when the default value on API side is true. - // - // If a property is missing in CAI asset, use `is_missing_in_cai: true` - // and `exclude_false_in_cai: true` is not needed - ExcludeFalseInCai bool `yaml:"exclude_false_in_cai,omitempty"` - - // If true, the custom flatten function is not applied during cai2hcl - TGCIgnoreTerraformCustomFlatten bool `yaml:"tgc_ignore_terraform_custom_flatten,omitempty"` } const MAX_NAME = 20 @@ -397,11 +363,11 @@ func (t *Type) Validate(rName string) { log.Fatalf("'default_value' and 'default_from_api' cannot be both set in resource %s", rName) } - if t.WriteOnlyLegacy && (t.DefaultFromApi || t.Output) { + if t.WriteOnly && (t.DefaultFromApi || t.Output) { log.Fatalf("Property %s cannot be write_only and default_from_api or output at the same time in resource %s", t.Name, rName) } - if t.WriteOnlyLegacy && t.Sensitive { + if t.WriteOnly && t.Sensitive { log.Fatalf("Property %s cannot be write_only and sensitive at the same time in resource %s", t.Name, rName) } @@ -448,7 +414,7 @@ func (t Type) Lineage() string { // This format is intended for resource metadata, to be used for connecting a Terraform // type with a corresponding API type. 
func (t Type) MetadataLineage() string { - if t.ParentMetadata == nil || t.ParentMetadata.FlattenObject { + if t.ParentMetadata == nil { return google.Underscore(t.Name) } @@ -513,10 +479,6 @@ func (t Type) TitlelizeProperty() string { return google.Camelize(t.Name, "upper") } -func (t Type) CamelizeProperty() string { - return google.Camelize(t.Name, "lower") -} - // If the Prefix field is already set, returns the value. // Otherwise, set the Prefix field and returns the value. func (t *Type) GetPrefix() string { @@ -553,15 +515,6 @@ func (t Type) ResourceType() string { return path[len(path)-1] } -func (t Type) FWResourceType() string { - r := t.ResourceRef() - if r == nil { - return "" - } - path := strings.Split(r.BaseUrl, "/") - return path[len(path)-1] -} - // TODO rewrite: validation // func (t *Type) check_default_value_property() { // return if @default_value.nil? @@ -752,7 +705,7 @@ func (t Type) WriteOnlyProperties() []*Type { } case t.IsA("NestedObject"): props = google.Select(t.UserProperties(), func(p *Type) bool { - return p.WriteOnlyLegacy + return p.WriteOnly }) case t.IsA("Map"): props = google.Reject(t.ValueType.WriteOnlyProperties(), func(p *Type) bool { @@ -832,45 +785,6 @@ func (t Type) TFType(s string) string { return "schema.TypeString" } -func (t Type) GetFWType() string { - switch t.Type { - case "Boolean": - return "Bool" - case "Double": - return "Float64" - case "Integer": - return "Int64" - case "String": - return "String" - case "Time": - return "String" - case "Enum": - return "String" - case "ResourceRef": - return "String" - case "NestedObject": - return "Nested" - case "Array": - return "List" - case "KeyValuePairs": - return "Map" - case "KeyValueLabels": - return "Map" - case "KeyValueTerraformLabels": - return "Map" - case "KeyValueEffectiveLabels": - return "Map" - case "KeyValueAnnotations": - return "Map" - case "Map": - return "Map" - case "Fingerprint": - return "String" - } - - return "String" -} - // TODO rewrite: 
validation // // Represents an enum, and store is valid values // class Enum < Primitive @@ -922,20 +836,6 @@ func (t Type) ResourceRef() *Resource { return resources[0] } -// Checks if the referenced resource is in the same product or not -func (t Type) IsResourceRefFound() bool { - if !t.IsA("ResourceRef") { - return false - } - - product := t.ResourceMetadata.ProductMetadata - resources := google.Select(product.Objects, func(obj *Resource) bool { - return obj.Name == t.Resource - }) - - return len(resources) != 0 -} - // TODO rewrite: validation // func (t *Type) check_resource_ref_property_exists // return unless defined?(resource_ref.all_user_properties) @@ -978,9 +878,6 @@ func (t Type) UserProperties() []*Type { } return google.Reject(t.Properties, func(p *Type) bool { - if t.ResourceMetadata.IsTgcCompiler() { - return p.Exclude || p.Output - } return p.Exclude }) } @@ -1181,7 +1078,7 @@ func (t Type) NamespaceProperty() string { } func (t Type) CustomTemplate(templatePath string, appendNewline bool) string { - return ExecuteTemplate(&t, templatePath, appendNewline) + return resource.ExecuteTemplate(&t, templatePath, appendNewline) } func (t *Type) GetIdFormat() string { @@ -1226,45 +1123,13 @@ func (t *Type) IsForceNew() bool { return t.Immutable } - // WriteOnlyLegacy fields are never immutable - if t.WriteOnlyLegacy { - return false - } - - // Output fields (except effective labels) can't be immutable - if t.Output && !t.IsA("KeyValueEffectiveLabels") { - return false - } - - // Explicitly-marked fields are always immutable - if t.Immutable { - return true - } - - // At this point the field can only be immutable if the resource is immutable. - if !t.ResourceMetadata.Immutable { - return false - } - - // If this field has an update_url set, it's not immutable. - if t.UpdateUrl != "" { - return false - } - - // If this is a top-level field, it inherits immutability from the resource. 
parent := t.Parent() - if parent == nil { - return true - } - - // If the parent field _isn't_ immutable, that's inherited by this field. - if !parent.IsForceNew() { - return false - } - - // Otherwise, the field is immutable unless it's a KeyValueLabels field - // and the parent has FlattenObject set. - return !(parent.FlattenObject && t.IsA("KeyValueLabels")) + return !t.WriteOnly && (!t.Output || t.IsA("KeyValueEffectiveLabels")) && + (t.Immutable || + (t.ResourceMetadata.Immutable && t.UpdateUrl == "" && + (parent == nil || + (parent.IsForceNew() && + !(parent.FlattenObject && t.IsA("KeyValueLabels")))))) } // Returns true if the type does not correspond to an API type @@ -1291,7 +1156,7 @@ func (t *Type) ProviderOnly() bool { // fields still need to be included, ie: // flattenedField > newParent > renameMe should be passed to this function as // flattened_field.0.new_parent.0.im_renamed -// TODO: Change format of input for +// TODO(emilymye): Change format of input for // exactly_one_of/at_least_one_of/etc to use camelcase, MM properities and // convert to snake in this method func (t *Type) GetPropertySchemaPath(schemaPath string) string { @@ -1340,31 +1205,3 @@ func (t Type) GetPropertySchemaPathList(propertyList []string) []string { } return list } - -func (t Type) IsJsonField() bool { - if t.CustomFlatten == "templates/terraform/custom_flatten/json_schema.tmpl" { - return true - } - if t.CustomExpand == "templates/terraform/custom_expand/json_schema.tmpl" || t.CustomExpand == "templates/terraform/custom_expand/json_value.tmpl" { - return true - } - return false -} - -// Checks if the empty value should be set in CAI assets during tfplan2cai conversion -func (t Type) TGCSendEmptyValue() bool { - if t.IncludeEmptyValueInCai { - return true - } - - // Automatically check if false value should be set in CAI assets - if t.IsA("Boolean") { - return t.Required || (t.DefaultFromApi && !t.IsMissingInCai && !t.ExcludeFalseInCai) - } - - return false -} - -func (t 
Type) ShouldIgnoreCustomFlatten() bool { - return t.ResourceMetadata.IsTgcCompiler() && (t.IgnoreRead || t.TGCIgnoreTerraformCustomFlatten) -} diff --git a/mmv1/google/string_utils.go b/mmv1/google/string_utils.go index 13fb90f1a469..338f9b9f4d8a 100644 --- a/mmv1/google/string_utils.go +++ b/mmv1/google/string_utils.go @@ -83,8 +83,7 @@ func Plural(source string) string { } // mesh -> meshes - // messageBus -> messageBuses - if strings.HasSuffix(source, "esh") || strings.HasSuffix(source, "s") { + if strings.HasSuffix(source, "esh") { return fmt.Sprintf("%ses", source) } diff --git a/mmv1/google/string_utils_test.go b/mmv1/google/string_utils_test.go index 153bc8191fa3..b2f570ab2137 100644 --- a/mmv1/google/string_utils_test.go +++ b/mmv1/google/string_utils_test.go @@ -101,11 +101,6 @@ func TestStringPlural(t *testing.T) { term: "gateway", expected: "gateways", }, - { - description: "Plural camelcase string ending with s", - term: "messageBus", - expected: "messageBuses", - }, } for _, tc := range cases { diff --git a/mmv1/main.go b/mmv1/main.go index ef62e7ba5f88..b8465d04c7d0 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -26,30 +26,32 @@ var wg sync.WaitGroup // TODO rewrite: additional flags // Example usage: --output $GOPATH/src/github.com/terraform-providers/terraform-provider-google-beta -var outputPathFlag = flag.String("output", "", "path to output generated files to") +var outputPath = flag.String("output", "", "path to output generated files to") // Example usage: --version beta -var versionFlag = flag.String("version", "", "optional version name. If specified, this version is preferred for resource generation when applicable") +var version = flag.String("version", "", "optional version name. 
If specified, this version is preferred for resource generation when applicable") -var overrideDirectoryFlag = flag.String("overrides", "", "directory containing yaml overrides") +var overrideDirectory = flag.String("overrides", "", "directory containing yaml overrides") -var productFlag = flag.String("product", "", "optional product name. If specified, the resources under the specific product will be generated. Otherwise, resources under all products will be generated.") +var product = flag.String("product", "", "optional product name. If specified, the resources under the specific product will be generated. Otherwise, resources under all products will be generated.") -var resourceFlag = flag.String("resource", "", "optional resource name. Limits generation to the specified resource within a particular product.") +var resourceToGenerate = flag.String("resource", "", "optional resource name. Limits generation to the specified resource within a particular product.") var doNotGenerateCode = flag.Bool("no-code", false, "do not generate code") var doNotGenerateDocs = flag.Bool("no-docs", false, "do not generate docs") -var providerFlag = flag.String("provider", "", "optional provider name. If specified, a non-default provider will be used.") +var forceProvider = flag.String("provider", "", "optional provider name. If specified, a non-default provider will be used.") var openapiGenerate = flag.Bool("openapi-generate", false, "Generate MMv1 YAML from openapi directory (Experimental)") -var showImportDiffsFlag = flag.Bool("show-import-diffs", false, "write go import diffs to stdout") +// Example usage: --yaml +var yamlMode = flag.Bool("yaml", false, "copy text over from ruby yaml to go yaml") + +var showImportDiffs = flag.Bool("show-import-diffs", false, "write go import diffs to stdout") func main() { - // Handle all flags in main. Other functions must not access flag values directly. 
flag.Parse() if *openapiGenerate { @@ -58,25 +60,24 @@ func main() { return } - if *outputPathFlag == "" { + if outputPath == nil || *outputPath == "" { log.Printf("No output path specified, exiting") return } - GenerateProducts(*productFlag, *resourceFlag, *providerFlag, *versionFlag, *outputPathFlag, *overrideDirectoryFlag, !*doNotGenerateCode, !*doNotGenerateDocs, *showImportDiffsFlag) -} - -func GenerateProducts(product, resource, providerName, version, outputPath, overrideDirectory string, generateCode, generateDocs, showImportDiffs bool) { - if version == "" { + if version == nil || *version == "" { log.Printf("No version specified, assuming ga") - version = "ga" + *version = "ga" } + + var generateCode = !*doNotGenerateCode + var generateDocs = !*doNotGenerateDocs var productsToGenerate []string var allProducts = false - if product == "" { + if product == nil || *product == "" { allProducts = true } else { - var productToGenerate = fmt.Sprintf("products/%s", product) + var productToGenerate = fmt.Sprintf("products/%s", *product) productsToGenerate = []string{productToGenerate} } @@ -91,26 +92,26 @@ func GenerateProducts(product, resource, providerName, version, outputPath, over allProductFiles = append(allProductFiles, fmt.Sprintf("products/%s", filepath.Base(dir))) } - if overrideDirectory != "" { - log.Printf("Using override directory %s", overrideDirectory) + if *overrideDirectory != "" { + log.Printf("Using override directory %s", *overrideDirectory) // Normalize override dir to a path that is relative to the magic-modules directory // This is needed for templates that concatenate pwd + override dir + path - if filepath.IsAbs(overrideDirectory) { + if filepath.IsAbs(*overrideDirectory) { wd, err := os.Getwd() if err != nil { panic(err) } - overrideDirectory, err = filepath.Rel(wd, overrideDirectory) - log.Printf("Override directory normalized to relative path %s", overrideDirectory) + *overrideDirectory, err = filepath.Rel(wd, *overrideDirectory) + 
log.Printf("Override directory normalized to relative path %s", *overrideDirectory) } - overrideFiles, err := filepath.Glob(fmt.Sprintf("%s/products/**/product.yaml", overrideDirectory)) + overrideFiles, err := filepath.Glob(fmt.Sprintf("%s/products/**/product.yaml", *overrideDirectory)) if err != nil { panic(err) } for _, filePath := range overrideFiles { - product, err := filepath.Rel(overrideDirectory, filePath) + product, err := filepath.Rel(*overrideDirectory, filePath) if err != nil { panic(err) } @@ -131,44 +132,67 @@ func GenerateProducts(product, resource, providerName, version, outputPath, over } startTime := time.Now() - if providerName == "" { - providerName = "default (terraform)" + providerName := "default (terraform)" + if *forceProvider != "" { + providerName = *forceProvider } - log.Printf("Generating MM output to '%s'", outputPath) - log.Printf("Building %s version", version) + log.Printf("Generating MM output to '%s'", *outputPath) + log.Printf("Building %s version", *version) log.Printf("Building %s provider", providerName) + // Building compute takes a long time and can't be parallelized within the product + // so lets build it first + sort.Slice(allProductFiles, func(i int, j int) bool { + if allProductFiles[i] == "products/compute" { + return true + } + return false + }) + + var providerToGenerate provider.Provider + + productFileChannel := make(chan string, len(allProductFiles)) productsForVersionChannel := make(chan *api.Product, len(allProductFiles)) - for _, productFile := range allProductFiles { + for _, pf := range allProductFiles { + productFileChannel <- pf + } + + for i := 0; i < len(allProductFiles); i++ { wg.Add(1) - go GenerateProduct(version, providerName, productFile, outputPath, productsForVersionChannel, startTime, productsToGenerate, resource, overrideDirectory, generateCode, generateDocs) + go GenerateProduct(productFileChannel, providerToGenerate, productsForVersionChannel, startTime, productsToGenerate, 
*resourceToGenerate, *overrideDirectory, generateCode, generateDocs) } wg.Wait() + close(productFileChannel) close(productsForVersionChannel) var productsForVersion []*api.Product + for p := range productsForVersionChannel { productsForVersion = append(productsForVersion, p) } + slices.SortFunc(productsForVersion, func(p1, p2 *api.Product) int { return strings.Compare(strings.ToLower(p1.Name), strings.ToLower(p2.Name)) }) // In order to only copy/compile files once per provider this must be called outside - // of the products loop. Create an MMv1 provider with an arbitrary product (the first loaded). - providerToGenerate := newProvider(providerName, version, productsForVersion[0], startTime) - providerToGenerate.CopyCommonFiles(outputPath, generateCode, generateDocs) + // of the products loop. This will get called with the provider from the final iteration + // of the loop + providerToGenerate = setProvider(*forceProvider, *version, productsForVersion[0], startTime) + providerToGenerate.CopyCommonFiles(*outputPath, generateCode, generateDocs) if generateCode { - providerToGenerate.CompileCommonFiles(outputPath, productsForVersion, "") + providerToGenerate.CompileCommonFiles(*outputPath, productsForVersion, "") } - provider.FixImports(outputPath, showImportDiffs) + provider.FixImports(*outputPath, *showImportDiffs) } -func GenerateProduct(version, providerName, productName, outputPath string, productsForVersionChannel chan *api.Product, startTime time.Time, productsToGenerate []string, resourceToGenerate, overrideDirectory string, generateCode, generateDocs bool) { +func GenerateProduct(productChannel chan string, providerToGenerate provider.Provider, productsForVersionChannel chan *api.Product, startTime time.Time, productsToGenerate []string, resourceToGenerate, overrideDirectory string, generateCode, generateDocs bool) { + defer wg.Done() + productName := <-productChannel productYamlPath := path.Join(productName, "product.yaml") @@ -205,8 +229,8 @@ func 
GenerateProduct(version, providerName, productName, outputPath string, prod var resources []*api.Resource = make([]*api.Resource, 0) - if !productApi.ExistsAtVersionOrLower(version) { - log.Printf("%s does not have a '%s' version, skipping", productName, version) + if !productApi.ExistsAtVersionOrLower(*version) { + log.Printf("%s does not have a '%s' version, skipping", productName, *version) return } @@ -234,7 +258,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod api.Compile(resourceYamlPath, resource, overrideDirectory) resource.SourceYamlFile = resourceYamlPath - resource.TargetVersionName = version + resource.TargetVersionName = *version resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() @@ -267,7 +291,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod api.Compile(overrideYamlPath, resource, overrideDirectory) } - resource.TargetVersionName = version + resource.TargetVersionName = *version resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() @@ -284,7 +308,8 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod productApi.Objects = resources productApi.Validate() - providerToGenerate := newProvider(providerName, version, productApi, startTime) + providerToGenerate = setProvider(*forceProvider, *version, productApi, startTime) + productsForVersionChannel <- productApi if !slices.Contains(productsToGenerate, productName) { @@ -293,12 +318,12 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod } log.Printf("%s: Generating files", productName) - - providerToGenerate.Generate(outputPath, productName, resourceToGenerate, generateCode, generateDocs) + providerToGenerate.Generate(*outputPath, productName, resourceToGenerate, generateCode, generateDocs) 
} -func newProvider(providerName, version string, productApi *api.Product, startTime time.Time) provider.Provider { - switch providerName { +// Sets provider via flag +func setProvider(forceProvider, version string, productApi *api.Product, startTime time.Time) provider.Provider { + switch forceProvider { case "tgc": return provider.NewTerraformGoogleConversion(productApi, version, startTime) case "tgc_cai2hcl": diff --git a/mmv1/openapi_generate/parser.go b/mmv1/openapi_generate/parser.go index f9ae4b845cb7..2ff5339da761 100644 --- a/mmv1/openapi_generate/parser.go +++ b/mmv1/openapi_generate/parser.go @@ -19,7 +19,6 @@ import ( "context" "encoding/base64" "fmt" - "maps" "os" "path" "path/filepath" @@ -163,7 +162,7 @@ func buildProduct(filePath, output string, root *openapi3.T, header []byte) stri apiVersion := &product.Version{} apiVersion.BaseUrl = fmt.Sprintf("%s/%s/", server, version) - // TODO figure out how to tell the API version + // TODO(slevenick) figure out how to tell the API version apiVersion.Name = "ga" apiProduct.Versions = []*product.Version{apiVersion} @@ -358,9 +357,6 @@ func writeObject(name string, obj *openapi3.SchemaRef, objType openapi3.Types, u if len(obj.Value.Enum) > 0 { var enums []string for _, enum := range obj.Value.Enum { - if strings.HasSuffix(fmt.Sprintf("%v", enum), "_UNSPECIFIED") { - continue - } enums = append(enums, fmt.Sprintf("%v", enum)) } additionalDescription = fmt.Sprintf("\n Possible values:\n %s", strings.Join(enums, "\n")) @@ -443,8 +439,7 @@ func writeObject(name string, obj *openapi3.SchemaRef, objType openapi3.Types, u func buildProperties(props openapi3.Schemas, required []string) []*api.Type { properties := []*api.Type{} - for _, k := range slices.Sorted(maps.Keys(props)) { - prop := props[k] + for k, prop := range props { propObj := writeObject(k, prop, propType(prop), false) if slices.Contains(required, k) { propObj.Required = true diff --git a/mmv1/products/accesscontextmanager/AccessPolicy.yaml 
b/mmv1/products/accesscontextmanager/AccessPolicy.yaml index 5544e8ccabd9..96e5f22c6a4e 100644 --- a/mmv1/products/accesscontextmanager/AccessPolicy.yaml +++ b/mmv1/products/accesscontextmanager/AccessPolicy.yaml @@ -90,7 +90,6 @@ parameters: description: | Folder or project on which this policy is applicable. Format: 'folders/{{folder_id}}' or 'projects/{{project_number}}' - immutable: true item_type: type: String max_size: 1 diff --git a/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml b/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml index 680d5a717fd4..e8b1cfd103f9 100644 --- a/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml +++ b/mmv1/products/accesscontextmanager/AuthorizedOrgsDesc.yaml @@ -61,8 +61,6 @@ examples: - name: 'access_context_manager_authorized_orgs_desc_basic' primary_resource_id: 'authorized-orgs-desc' exclude_test: true - test_env_vars: - org_id: 'ORG_ID' parameters: - name: 'parent' type: String diff --git a/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml b/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml index 7fcf1bbf8718..18138a9a3ca7 100644 --- a/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml +++ b/mmv1/products/accesscontextmanager/GcpUserAccessBinding.yaml @@ -131,16 +131,12 @@ properties: - name: restrictedClientApplication type: NestedObject description: | - Optional. The application that is subject to this binding's scope. Only one of clientId or name should be specified. + Optional. The application that is subject to this binding's scope. properties: - name: clientId type: String description: | The OAuth client ID of the application. - - name: name - type: String - description: | - The name of the application. 
Example: "Cloud Console" - name: 'activeSettings' type: NestedObject description: | diff --git a/mmv1/products/alloydb/Backup.yaml b/mmv1/products/alloydb/Backup.yaml index 825525c71828..621ea1b436fa 100644 --- a/mmv1/products/alloydb/Backup.yaml +++ b/mmv1/products/alloydb/Backup.yaml @@ -40,7 +40,6 @@ async: resource_inside_response: false custom_code: encoder: 'templates/terraform/encoders/alloydb_backup.tmpl' -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'alloydb_backup_basic' primary_resource_id: 'default' @@ -61,7 +60,7 @@ examples: alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' ignore_read_extra: - 'reconciling' - 'update_time' @@ -76,7 +75,6 @@ examples: ignore_read_extra: - 'reconciling' - 'update_time' - exclude_test: true - name: 'alloydb_backup_full_test' primary_resource_id: 'default' vars: @@ -85,7 +83,7 @@ examples: alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' ignore_read_extra: - 'reconciling' - 'update_time' diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index 4f2586a0b4e1..cce1fdb55d35 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -61,40 +61,27 @@ custom_code: pre_delete: 'templates/terraform/pre_delete/alloydb_cluster.go.tmpl' # Skipping the sweeper because we need to force-delete clusters. 
exclude_sweeper: true -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'alloydb_cluster_basic' primary_resource_id: 'default' vars: alloydb_cluster_name: 'alloydb-cluster' - ignore_read_extra: - - 'deletion_protection' - name: 'alloydb_cluster_before_upgrade' primary_resource_id: 'default' vars: alloydb_cluster_name: 'alloydb-cluster' alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' - test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' - ignore_read_extra: - - 'deletion_protection' - name: 'alloydb_cluster_after_upgrade' primary_resource_id: 'default' vars: alloydb_cluster_name: 'alloydb-cluster' alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' - test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' - ignore_read_extra: - - 'deletion_protection' - name: 'alloydb_cluster_full' primary_resource_id: 'full' vars: alloydb_cluster_name: 'alloydb-cluster-full' - ignore_read_extra: - - 'deletion_protection' - name: 'alloydb_cluster_restore' primary_resource_id: 'source' vars: @@ -107,7 +94,6 @@ examples: test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-instance-basic")' ignore_read_extra: - - 'deletion_protection' - 'reconciling' - 'update_time' exclude_test: true @@ -117,8 +103,6 @@ examples: alloydb_primary_cluster_name: 'alloydb-primary-cluster' alloydb_primary_instance_name: 'alloydb-primary-instance' alloydb_secondary_cluster_name: 'alloydb-secondary-cluster' - ignore_read_extra: - - 'deletion_protection' exclude_test: true - name: 'alloydb_secondary_cluster_basic_test' primary_resource_id: 'secondary' @@ -128,9 +112,7 @@ examples: alloydb_secondary_cluster_name: 'alloydb-secondary-cluster' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' - ignore_read_extra: - - 'deletion_protection' + 'network_name': 
'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' exclude_docs: true virtual_fields: - name: 'deletion_policy' @@ -141,14 +123,6 @@ virtual_fields: Possible values: DEFAULT, FORCE type: String default_value: "DEFAULT" - - name: 'deletion_protection' - description: | - Whether Terraform will be prevented from destroying the cluster. - When the field is set to true or unset in Terraform state, a `terraform apply` - or `terraform destroy` that would delete the cluster will fail. - When the field is set to false, deleting the cluster is allowed. - type: Boolean - default_value: true - name: 'skip_await_major_version_upgrade' type: Boolean default_value: true @@ -329,14 +303,12 @@ properties: type: String description: | The database username. - is_missing_in_cai: true - name: 'password' type: String description: | The initial password for the user. required: true sensitive: true - is_missing_in_cai: true - name: 'restoreBackupSource' type: NestedObject description: | @@ -471,17 +443,14 @@ properties: Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. - name: 'minutes' type: Integer - is_missing_in_cai: true description: | Minutes of hour of day. Currently, only the value 0 is supported. - name: 'seconds' type: Integer - is_missing_in_cai: true description: | Seconds of minutes of the time. Currently, only the value 0 is supported. - name: 'nanos' type: Integer - is_missing_in_cai: true description: | Fractions of seconds in nanoseconds. Currently, only the value 0 is supported. 
- name: 'timeBasedRetention' diff --git a/mmv1/products/alloydb/Instance.yaml b/mmv1/products/alloydb/Instance.yaml index afc7ebf98c5f..4c4b3e09b8e4 100644 --- a/mmv1/products/alloydb/Instance.yaml +++ b/mmv1/products/alloydb/Instance.yaml @@ -52,7 +52,6 @@ custom_code: custom_import: 'templates/terraform/custom_import/alloydb_instance.go.tmpl' # Skipping the sweeper because instances will be deleted during cluster sweeps exclude_sweeper: true -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'alloydb_instance_basic' primary_resource_id: 'default' @@ -83,7 +82,7 @@ examples: alloydb_instance_name: 'alloydb-instance' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' ignore_read_extra: - 'reconciling' - 'update_time' @@ -97,7 +96,7 @@ examples: alloydb_secondary_instance_name: 'alloydb-secondary-instance' network_name: 'alloydb-secondary-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' ignore_read_extra: - 'reconciling' - 'update_time' @@ -201,22 +200,6 @@ properties: - 'AVAILABILITY_TYPE_UNSPECIFIED' - 'ZONAL' - 'REGIONAL' - - name: 'activationPolicy' - type: Enum - description: | - 'Specifies whether an instance needs to spin up. Once the instance is - active, the activation policy can be updated to the `NEVER` to stop the - instance. Likewise, the activation policy can be updated to `ALWAYS` to - start the instance. - There are restrictions around when an instance can/cannot be activated (for - example, a read pool instance should be stopped before stopping primary - etc.). Please refer to the API documentation for more details. 
- Possible values are: `ACTIVATION_POLICY_UNSPECIFIED`, `ALWAYS`, `NEVER`.' - default_from_api: true - enum_values: - - 'ACTIVATION_POLICY_UNSPECIFIED' - - 'ALWAYS' - - 'NEVER' - name: 'instanceType' type: Enum description: | @@ -249,11 +232,9 @@ properties: - name: 'recordApplicationTags' type: Boolean description: 'Record application tags for an instance. This flag is turned "on" by default.' - include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'recordClientAddress' type: Boolean description: 'Record client address for an instance. Client address is PII information. This flag is turned "on" by default.' - include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'queryPlansPerMinute' type: Integer description: 'Number of query execution plans captured by Insights per minute for all queries combined. The default value is 5. Any integer between 0 and 20 is considered valid.' @@ -266,11 +247,9 @@ properties: - name: 'enabled' type: Boolean description: 'Observability feature status for an instance.' - include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'preserveComments' type: Boolean description: 'Preserve comments in the query string.' - include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'trackWaitEvents' type: Boolean description: 'Record wait events during query execution for an instance.' @@ -283,14 +262,12 @@ properties: - name: 'recordApplicationTags' type: Boolean description: 'Record application tags for an instance. This flag is turned "on" by default.' - include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'queryPlansPerMinute' type: Integer description: 'Number of query execution plans captured by Insights per minute for all queries combined. The default value is 5. Any integer between 0 and 200 is considered valid.' - name: 'trackActiveQueries' type: Boolean description: 'Track actively running queries. 
If not set, default value is "off".' - include_empty_value_in_cai: true # Default value is false in CAI asset - name: 'assistiveExperiencesEnabled' type: Boolean description: 'Whether assistive experiences are enabled for this AlloyDB instance.' @@ -384,42 +361,6 @@ properties: The network attachment resource created in the consumer project to which the PSC interface will be linked. This is of the format: "projects/${CONSUMER_PROJECT}/regions/${REGION}/networkAttachments/${NETWORK_ATTACHMENT_NAME}". The network attachment must be in the same region as the instance. - - name: 'pscAutoConnections' - type: Array - description: | - Configurations for setting up PSC service automation. - item_type: - type: NestedObject - properties: - - name: 'consumerProject' - type: String - description: | - The consumer project to which the PSC service automation endpoint will - be created. The API expects the consumer project to be the project ID( - and not the project number). - - name: 'consumerNetwork' - type: String - description: | - The consumer network for the PSC service automation, example: - "projects/vpc-host-project/global/networks/default". - The consumer network might be hosted a different project than the - consumer project. The API expects the consumer project specified to be - the project ID (and not the project number) - - name: 'ipAddress' - type: String - description: | - The IP address of the PSC service automation endpoint. - output: true - - name: 'status' - type: String - description: | - The status of the PSC service automation connection. - output: true - - name: 'consumerNetworkStatus' - type: String - description: | - The status of the service connection policy. - output: true - name: 'networkConfig' type: NestedObject default_from_api: true @@ -450,13 +391,6 @@ properties: type: Boolean description: | Enabling outbound public ip for the instance. 
- - name: 'allocatedIpRangeOverride' - type: String - immutable: true - description: | - Name of the allocated IP range for the private IP AlloyDB instance, for example: "google-managed-services-default". - If set, the instance IPs will be created from this allocated range and will override the IP range used by the parent cluster. - The range name must comply with RFC 1035. Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?. - name: 'publicIpAddress' type: String description: | diff --git a/mmv1/products/alloydb/User.yaml b/mmv1/products/alloydb/User.yaml index 8efb6d14a6fb..df3ba9a3dcd3 100644 --- a/mmv1/products/alloydb/User.yaml +++ b/mmv1/products/alloydb/User.yaml @@ -58,7 +58,7 @@ examples: alloydb_user_pass: 'user_secret' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' ignore_read_extra: - 'password' exclude_docs: true @@ -80,7 +80,7 @@ examples: alloydb_user_name: 'user2@foo.com' network_name: 'alloydb-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1")' exclude_docs: true parameters: - name: 'cluster' diff --git a/mmv1/products/apigee/AddonsConfig.yaml b/mmv1/products/apigee/AddonsConfig.yaml index df67662ba025..7a23d261e204 100644 --- a/mmv1/products/apigee/AddonsConfig.yaml +++ b/mmv1/products/apigee/AddonsConfig.yaml @@ -67,7 +67,7 @@ properties: properties: - name: 'advancedApiOpsConfig' type: NestedObject - description: Configuration for the Advanced API Ops add-on. + description: Configuration for the Monetization add-on. properties: - name: 'enabled' type: Boolean @@ -76,12 +76,12 @@ properties: enabled. 
- name: 'integrationConfig' type: NestedObject - description: Configuration for the Integration add-on. + description: Configuration for the Monetization add-on. properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Integration add-on is + Flag that specifies whether the Advanced API Ops add-on is enabled. - name: 'monetizationConfig' type: NestedObject @@ -90,20 +90,22 @@ properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Monetization add-on is enabled. + Flag that specifies whether the Advanced API Ops add-on is + enabled. - name: 'apiSecurityConfig' type: NestedObject - description: Configuration for the API Security add-on. + description: Configuration for the Monetization add-on. properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the API security add-on is enabled. + Flag that specifies whether the Advanced API Ops add-on is + enabled. - name: 'expiresAt' type: String description: - Time at which the API Security add-on expires in in milliseconds since epoch. - If unspecified, the add-on will never expire. + Flag that specifies whether the Advanced API Ops add-on is + enabled. output: true - name: 'connectorsPlatformConfig' type: NestedObject @@ -112,10 +114,11 @@ properties: - name: 'enabled' type: Boolean description: - Flag that specifies whether the Connectors Platform add-on is enabled. + Flag that specifies whether the Advanced API Ops add-on is + enabled. - name: 'expiresAt' type: String description: - Time at which the Connectors Platform add-on expires in milliseconds since epoch. - If unspecified, the add-on will never expire. + Flag that specifies whether the Advanced API Ops add-on is + enabled. 
output: true diff --git a/mmv1/products/apigee/ApiProduct.yaml b/mmv1/products/apigee/ApiProduct.yaml deleted file mode 100644 index 9adbeef3ec14..000000000000 --- a/mmv1/products/apigee/ApiProduct.yaml +++ /dev/null @@ -1,423 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: "ApiProduct" -description: | - An `ApiProduct` in Apigee. -references: - guides: - "Creating an API product": "https://cloud.google.com/apigee/docs/api-platform/publish/what-api-product" - api: "https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.apiproducts#ApiProduct" -docs: -base_url: "{{org_id}}/apiproducts" -self_link: "{{org_id}}/apiproducts/{{name}}" -import_format: - - "{{org_id}}/apiproducts/{{name}}" - - "{{org_id}}/{{name}}" -custom_code: - custom_import: "templates/terraform/custom_import/apigee_api_product.go.tmpl" -examples: - - name: "apigee_api_product_basic" - vars: - instance_name: "my-instance" - product_name: "my-product" - exclude_test: true - - name: "apigee_api_product_basic_test" - primary_resource_id: "apigee_api_product" - test_env_vars: - org_id: "ORG_ID" - billing_account: "BILLING_ACCT" - exclude_docs: true - external_providers: ["time"] - - name: "apigee_api_product_with_legacy_operation" - vars: - instance_name: "my-instance" - product_name: "my-product" - exclude_test: true - - name: "apigee_api_product_with_legacy_operation_test" - primary_resource_id: "apigee_api_product" - test_env_vars: 
- org_id: "ORG_ID" - billing_account: "BILLING_ACCT" - exclude_docs: true - external_providers: ["time"] - - name: "apigee_api_product_with_attributes" - vars: - instance_name: "my-instance" - product_name: "my-product" - exclude_test: true - - name: "apigee_api_product_with_attributes_test" - primary_resource_id: "apigee_api_product" - test_env_vars: - org_id: "ORG_ID" - billing_account: "BILLING_ACCT" - exclude_docs: true - external_providers: ["time"] -parameters: - - name: "orgId" - type: String - description: | - The Apigee Organization associated with the Apigee API product, - in the format `organizations/{{org_name}}`. - url_param_only: true - required: true - immutable: true - -properties: - - name: "name" - type: String - description: | - Internal name of the API product. - required: true - immutable: true - validation: - regex: '^[a-z][a-z0-9._\-$ %]*$' - - - name: "displayName" - type: String - description: | - Name displayed in the UI or developer portal to developers registering for API access. - required: true - - - name: "description" - type: String - description: | - Description of the API product. Include key information about the API product that is not captured by other fields. - - - name: "approvalType" - type: Enum - description: | - Flag that specifies how API keys are approved to access the APIs defined by the API product. - Valid values are `auto` or `manual`. - enum_values: - - "auto" - - "manual" - - - name: "attributes" - type: Array - is_set: true - description: | - Array of attributes that may be used to extend the default API product profile with customer-specific metadata. You can specify a maximum of 18 attributes. - Use this property to specify the access level of the API product as either public, private, or internal. - item_type: - type: NestedObject - properties: - - name: "name" - type: String - description: | - Key of the attribute. - - name: "value" - type: String - description: | - Value of the attribute. 
- - - name: "apiResources" - type: Array - is_set: true - description: | - Comma-separated list of API resources to be bundled in the API product. By default, the resource paths are mapped from the proxy.pathsuffix variable. - The proxy path suffix is defined as the URI fragment following the ProxyEndpoint base path. For example, if the apiResources element is defined to be /forecastrss and the base path defined for the API proxy is /weather, then only requests to /weather/forecastrss are permitted by the API product. - item_type: - type: String - - - name: "environments" - type: Array - is_set: true - description: | - Comma-separated list of environment names to which the API product is bound. Requests to environments that are not listed are rejected. - By specifying one or more environments, you can bind the resources listed in the API product to a specific environment, preventing developers from accessing those resources through API proxies deployed in another environment. - item_type: - type: String - - name: "proxies" - type: Array - is_set: true - description: | - Comma-separated list of API proxy names to which this API product is bound. By specifying API proxies, you can associate resources in the API product with specific API proxies, preventing developers from accessing those resources through other API proxies. - Apigee rejects requests to API proxies that are not listed. - item_type: - type: String - - - name: "scopes" - type: Array - description: | - Comma-separated list of OAuth scopes that are validated at runtime. Apigee validates that the scopes in any access token presented match the scopes defined in the OAuth policy associated with the API product. - item_type: - type: String - - - name: "quota" - type: String - description: | - Number of request messages permitted per app by this API product for the specified quotaInterval and quotaTimeUnit. 
- For example, a quota of 50, for a quotaInterval of 12 and a quotaTimeUnit of hours means 50 requests are allowed every 12 hours. - - - name: "quotaInterval" - type: String - description: | - Time interval over which the number of request messages is calculated. - - - name: "quotaTimeUnit" - type: String - description: | - Time unit defined for the quotaInterval. Valid values include second, minute, hour, day, month or year. - - - name: "createdAt" - type: String - description: | - Response only. Creation time of this environment as milliseconds since epoch. - output: true - - - name: "lastModifiedAt" - type: String - description: | - Response only. Modified time of this environment as milliseconds since epoch. - output: true - - - name: "operationGroup" - type: NestedObject - description: | - Configuration used to group Apigee proxies or remote services with resources, method types, and quotas. The resource refers to the resource URI (excluding the base path). With this grouping, the API product creator is able to fine-tune and give precise control over which REST methods have access to specific resources and how many calls can be made (using the quota setting). - Note: The apiResources setting cannot be specified for both the API product and operation group; otherwise the call will fail. - properties: - - name: "operationConfigs" - type: Array - is_set: true - description: | - Required. List of operation configurations for either Apigee API proxies or other remote services that are associated with this API product. - item_type: - type: NestedObject - properties: - - name: "apiSource" - type: String - description: | - Required. Name of the API proxy or remote service with which the resources, methods, and quota are associated. - - name: "operations" - type: Array - description: | - List of resource/method pairs for the API proxy or remote service to which quota will applied. - Note: Currently, you can specify only a single resource/method pair. 
The call will fail if more than one resource/method pair is provided. - item_type: - type: NestedObject - properties: - - name: "resource" - type: String - description: | - Required. REST resource path associated with the API proxy or remote service. - - name: "methods" - type: Array - is_set: true - description: | - Methods refers to the REST verbs, when none specified, all verb types are allowed. - item_type: - type: String - - name: "quota" - type: NestedObject - description: | - Quota parameters to be enforced for the resources, methods, and API source combination. If none are specified, quota enforcement will not be done. - properties: - - name: "limit" - type: String - description: | - Required. Upper limit allowed for the time interval and time unit specified. Requests exceeding this limit will be rejected. - - name: "interval" - type: String - description: | - Required. Time interval over which the number of request messages is calculated. - - name: "timeUnit" - type: String - description: | - Time unit defined for the interval. Valid values include second, minute, hour, day, month or year. If limit and interval are valid, the default value is hour; otherwise, the default is null. - - name: "attributes" - type: Array - is_set: true - description: | - Custom attributes associated with the operation. - item_type: - type: NestedObject - properties: - - name: "name" - type: String - description: | - Key of the attribute. - - name: "value" - type: String - description: | - Value of the attribute. - - name: "operationConfigType" - type: Enum - description: | - Flag that specifes whether the configuration is for Apigee API proxy or a remote service. Valid values include proxy or remoteservice. Defaults to proxy. Set to proxy when Apigee API proxies are associated with the API product. Set to remoteservice when non-Apigee proxies like Istio-Envoy are associated with the API product. 
- enum_values: - - "proxy" - - "remoteservice" - - - name: "graphqlOperationGroup" - type: NestedObject - description: | - Configuration used to group Apigee proxies or remote services with graphQL operation name, graphQL operation type and quotas. This grouping allows us to precisely set quota for a particular combination of graphQL name and operation type for a particular proxy request. If graphQL name is not set, this would imply quota will be applied on all graphQL requests matching the operation type. - properties: - - name: "operationConfigs" - type: Array - is_set: true - description: | - List of graphQL operation configuration details associated with Apigee API proxies or remote services. Remote services are non-Apigee proxies, such as Istio-Envoy. - item_type: - type: NestedObject - properties: - - name: "apiSource" - type: String - description: | - Required. Name of the API proxy endpoint or remote service with which the GraphQL operation and quota are associated. - - name: "operations" - type: Array - is_set: true - description: | - Required. List of GraphQL name/operation type pairs for the proxy or remote service to which quota will be applied. If only operation types are specified, the quota will be applied to all GraphQL requests irrespective of the GraphQL name. - - Note: Currently, you can specify only a single GraphQLOperation. Specifying more than one will cause the operation to fail. - item_type: - type: NestedObject - properties: - - name: "operationTypes" - type: Array - is_set: true - description: | - Required. GraphQL operation types. Valid values include query or mutation. - Note: Apigee does not currently support subscription types. - item_type: - type: String - - name: "operation" - type: String - description: | - GraphQL operation name. The name and operation type will be used to apply quotas. If no name is specified, the quota will be applied to all GraphQL operations irrespective of their operation names in the payload. 
- - name: "quota" - type: NestedObject - description: | - Quota parameters to be enforced for the resources, methods, and API source combination. If none are specified, quota enforcement will not be done. - properties: - - name: "limit" - type: String - description: | - Required. Upper limit allowed for the time interval and time unit specified. Requests exceeding this limit will be rejected. - - name: "interval" - type: String - description: | - Required. Time interval over which the number of request messages is calculated. - - name: "timeUnit" - type: String - description: | - Time unit defined for the interval. Valid values include second, minute, hour, day, month or year. If limit and interval are valid, the default value is hour; otherwise, the default is null. - - name: "attributes" - type: Array - is_set: true - description: | - Custom attributes associated with the operation. - item_type: - type: NestedObject - properties: - - name: "name" - type: String - description: | - Key of the attribute. - - name: "value" - type: String - description: | - Value of the attribute. - - name: "operationConfigType" - type: Enum - description: | - Flag that specifes whether the configuration is for Apigee API proxy or a remote service. Valid values include proxy or remoteservice. Defaults to proxy. Set to proxy when Apigee API proxies are associated with the API product. Set to remoteservice when non-Apigee proxies like Istio-Envoy are associated with the API product. - enum_values: - - "proxy" - - "remoteservice" - - - name: "grpcOperationGroup" - type: NestedObject - description: | - Optional. Configuration used to group Apigee proxies with gRPC services and method names. This grouping allows us to set quota for a particular proxy with the gRPC service name and method. If a method name is not set, this implies quota and authorization are applied to all gRPC methods implemented by that proxy for that particular gRPC service. 
- properties: - - name: "operationConfigs" - type: Array - is_set: true - description: | - Required. List of operation configurations for either Apigee API proxies that are associated with this API product. - item_type: - type: NestedObject - properties: - - name: "apiSource" - type: String - description: | - Required. Name of the API proxy with which the gRPC operation and quota are associated. - - name: "methods" - type: Array - is_set: true - description: | - List of unqualified gRPC method names for the proxy to which quota will be applied. If this field is empty, the Quota will apply to all operations on the gRPC service defined on the proxy. - - Example: Given a proxy that is configured to serve com.petstore.PetService, the methods com.petstore.PetService.ListPets and com.petstore.PetService.GetPet would be specified here as simply ["ListPets", "GetPet"]. - - Note: Currently, you can specify only a single GraphQLOperation. Specifying more than one will cause the operation to fail. - item_type: - type: String - - name: "quota" - type: NestedObject - description: | - Quota parameters to be enforced for the resources, methods, and API source combination. If none are specified, quota enforcement will not be done. - properties: - - name: "limit" - type: String - description: | - Required. Upper limit allowed for the time interval and time unit specified. Requests exceeding this limit will be rejected. - - name: "interval" - type: String - description: | - Required. Time interval over which the number of request messages is calculated. - - name: "timeUnit" - type: String - description: | - Time unit defined for the interval. Valid values include second, minute, hour, day, month or year. If limit and interval are valid, the default value is hour; otherwise, the default is null. - - name: "attributes" - type: Array - is_set: true - description: | - Custom attributes associated with the operation. 
- item_type: - type: NestedObject - properties: - - name: "name" - type: String - description: | - Key of the attribute. - - name: "value" - type: String - description: | - Value of the attribute. - - name: "service" - type: String - description: | - Required. gRPC Service name associated to be associated with the API proxy, on which quota rules can be applied upon. - - - name: "quotaCounterScope" - type: Enum - description: | - Scope of the quota decides how the quota counter gets applied and evaluate for quota violation. If the Scope is set as PROXY, then all the operations defined for the APIproduct that are associated with the same proxy will share the same quota counter set at the APIproduct level, making it a global counter at a proxy level. If the Scope is set as OPERATION, then each operations get the counter set at the API product dedicated, making it a local counter. Note that, the QuotaCounterScope applies only when an operation does not have dedicated quota set for itself. - enum_values: - - "QUOTA_COUNTER_SCOPE_UNSPECIFIED" - - "PROXY" - - "OPERATION" - - - name: "space" - type: String - immutable: true - description: | - Optional. The resource ID of the parent Space. If not set, the parent resource will be the Organization. 
diff --git a/mmv1/products/apigee/EnvgroupAttachment.yaml b/mmv1/products/apigee/EnvgroupAttachment.yaml index ac75d8abae38..3d659175fd38 100644 --- a/mmv1/products/apigee/EnvgroupAttachment.yaml +++ b/mmv1/products/apigee/EnvgroupAttachment.yaml @@ -52,9 +52,6 @@ examples: project_id: 'my-project' envgroup_name: 'my-envgroup' environment_name: 'my-environment' - test_env_vars: - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' exclude_test: true - name: 'apigee_environment_group_attachment_basic_test' primary_resource_id: 'apigee_environment_group_attachment' diff --git a/mmv1/products/apigee/Environment.yaml b/mmv1/products/apigee/Environment.yaml index 37e8de50d76f..e9d6564627e5 100644 --- a/mmv1/products/apigee/Environment.yaml +++ b/mmv1/products/apigee/Environment.yaml @@ -96,16 +96,6 @@ examples: # Resource creation race skip_vcr: true external_providers: ["time"] - - name: 'apigee_environment_client_ip_resolution_config_test' - primary_resource_id: 'apigee_environment' - primary_resource_name: 'fmt.Sprintf("organizations/tf-test%s", context["random_suffix"]), fmt.Sprintf("tf-test%s", context["random_suffix"])' - test_env_vars: - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - exclude_docs: true - # Resource creation race - skip_vcr: true - external_providers: ["time"] parameters: - name: 'orgId' type: String @@ -223,25 +213,3 @@ properties: type: String description: | The property value. - - name: 'clientIpResolutionConfig' - type: NestedObject - description: | - The algorithm to resolve IP. This will affect Analytics, API Security, and other features that use the client ip. To remove a client ip resolution config, update the field to an empty value. Example: '{ "clientIpResolutionConfig" = {} }' For more information, see: https://cloud.google.com/apigee/docs/api-platform/system-administration/client-ip-resolution - properties: - - name: 'headerIndexAlgorithm' - type: NestedObject - description: | - Resolves the client ip based on a custom header. 
- exactly_one_of: - - client_ip_resolution_config.0.header_index_algorithm - properties: - - name: ipHeaderName - type: String - description: | - The name of the header to extract the client ip from. We are currently only supporting the X-Forwarded-For header. - required: true - - name: ipHeaderIndex - type: Integer - description: | - The index of the ip in the header. Positive indices 0, 1, 2, 3 chooses indices from the left (first ips). Negative indices -1, -2, -3 chooses indices from the right (last ips). - required: true diff --git a/mmv1/products/apigee/Instance.yaml b/mmv1/products/apigee/Instance.yaml index 219d6223d8f9..bc671891a21c 100644 --- a/mmv1/products/apigee/Instance.yaml +++ b/mmv1/products/apigee/Instance.yaml @@ -47,7 +47,6 @@ custom_code: error_retry_predicates: - 'transport_tpg.IsApigeeRetryableError' exclude_sweeper: true -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apigee_instance_basic' vars: @@ -205,24 +204,3 @@ properties: the format: projects/*/regions/*/serviceAttachments/* Apigee customers can privately forward traffic to this service attachment using the PSC endpoints. output: true - - name: 'accessLoggingConfig' - type: NestedObject - immutable: true - description: | - Access logging configuration enables the access logging feature at the instance. - Apigee customers can enable access logging to ship the access logs to their own project's cloud logging. - properties: - - name: 'enabled' - type: Boolean - required: true - immutable: true - description: | - Boolean flag that specifies whether the customer access log feature is enabled. - - name: 'filter' - type: String - immutable: true - description: | - Ship the access log entries that match the statusCode defined in the filter. - The statusCode is the only expected/supported filter field. (Ex: statusCode) - The filter will parse it to the Common Expression Language semantics for expression - evaluation to build the filter condition. 
(Ex: "filter": statusCode >= 200 && statusCode < 300 ) diff --git a/mmv1/products/apigee/InstanceAttachment.yaml b/mmv1/products/apigee/InstanceAttachment.yaml index 942b765976ea..1ae86f456e68 100644 --- a/mmv1/products/apigee/InstanceAttachment.yaml +++ b/mmv1/products/apigee/InstanceAttachment.yaml @@ -51,9 +51,6 @@ examples: project_id: 'my-project' instance_name: 'my-instance-name' environment_name: 'my-environment-name' - test_env_vars: - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' exclude_test: true # This is a more verbose version of the above that creates all # the resources needed for the acceptance test. diff --git a/mmv1/products/apigee/SecurityAction.yaml b/mmv1/products/apigee/SecurityAction.yaml deleted file mode 100644 index c4bb992c1713..000000000000 --- a/mmv1/products/apigee/SecurityAction.yaml +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'SecurityAction' -description: | - A SecurityAction is rule that can be enforced at an environment level. - The result is one of: - A denied API call - An explicitly allowed API call - - A flagged API call (HTTP headers added before the target receives it) - At least one condition is required to create a SecurityAction. 
-references: - guides: - 'Creating security actions': 'https://cloud.google.com/apigee/docs/api-security/security-actions-api#create-security-actions' - api: 'https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.environments.securityActions/create' -docs: -base_url: 'organizations/{{org_id}}/environments/{{env_id}}/securityActions' -self_link: 'organizations/{{org_id}}/environments/{{env_id}}/securityActions/{{security_action_id}}' -create_url: 'organizations/{{org_id}}/environments/{{env_id}}/securityActions?securityActionId={{security_action_id}}' -immutable: true -import_format: - - 'organizations/{{org_id}}/environments/{{env_id}}/securityActions/{{security_action_id}}' -examples: - - name: 'apigee_security_action_basic' - vars: - network_name: 'my-network' - global_address_name: 'my-address' - environment_name: 'my-environment' - security_action_id: 'my-security-action' - primary_resource_id: 'apigee_security_action' - test_env_vars: - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - exclude_test: true -parameters: - - name: 'orgId' - type: String - description: | - The organization that this security action applies to. - url_param_only: true - required: true - immutable: true - - name: 'envId' - type: String - description: | - The Apigee environment that this security action applies to. - required: true - immutable: true - url_param_only: true - - name: 'securityActionId' - type: String - description: | - The ID to use for the SecurityAction, which will become the final component of the action's resource name. - This value should be 0-61 characters, and valid format is (^a-z?$). - required: true - immutable: true - url_param_only: true -properties: - - name: 'description' - type: String - description: | - An optional user provided description of the SecurityAction. - - name: 'state' - type: Enum - description: | - Only an ENABLED SecurityAction is enforced. 
An ENABLED SecurityAction past its expiration time will not be enforced. - required: true - enum_values: - - 'ENABLED' - - 'DISABLED' - - name: 'createTime' - type: String - description: | - The create time for this SecurityAction. - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. - Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - output: true - - name: 'updateTime' - type: String - description: | - The update time for this SecurityAction. This reflects when this SecurityAction changed states. - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. - Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - output: true - - name: 'apiProxies' - type: Array - description: | - If unset, this would apply to all proxies in the environment. - If set, this action is enforced only if at least one proxy in the repeated - list is deployed at the time of enforcement. If set, several restrictions are enforced on SecurityActions. - There can be at most 100 enabled actions with proxies set in an env. - Several other restrictions apply on conditions and are detailed later. - item_type: - type: String - - name: 'conditionConfig' - type: NestedObject - required: true - description: | - A valid SecurityAction must contain at least one condition. - properties: - - name: 'ipAddressRanges' - type: Array - description: | - A list of IP addresses. This could be either IPv4 or IPv6. Limited to 100 per action. - item_type: - type: String - - name: 'botReasons' - type: Array - description: | - A list of Bot Reasons. 
Current options: Flooder, Brute Guessor, Static Content Scraper, - OAuth Abuser, Robot Abuser, TorListRule, Advanced Anomaly Detection, Advanced API Scraper, - Search Engine Crawlers, Public Clouds, Public Cloud AWS, Public Cloud Azure, and Public Cloud Google. - item_type: - type: String - - name: 'httpMethods' - type: Array - description: | - Act only on particular HTTP methods. E.g. A read-only API can block POST/PUT/DELETE methods. - Accepted values are: GET, HEAD, POST, PUT, DELETE, CONNECT, OPTIONS, TRACE and PATCH. - item_type: - type: String - - name: 'apiKeys' - type: Array - description: | - A list of API keys. Limit 1000 per action. - item_type: - type: String - - name: 'accessTokens' - type: Array - description: | - A list of accessTokens. Limit 1000 per action. - item_type: - type: String - - name: 'apiProducts' - type: Array - description: | - A list of API Products. Limit 1000 per action. - item_type: - type: String - - name: 'developerApps' - type: Array - description: | - A list of developer apps. Limit 1000 per action. - item_type: - type: String - - name: 'developers' - type: Array - description: | - A list of developers. Limit 1000 per action. - item_type: - type: String - - name: 'userAgents' - type: Array - description: | - A list of user agents to deny. We look for exact matches. Limit 50 per action. - item_type: - type: String - - name: 'regionCodes' - type: Array - description: | - A list of countries/region codes to act on, e.g. US. This follows https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2. - item_type: - type: String - - name: 'asns' - type: Array - description: | - A list of ASN numbers to act on, e.g. 23. https://en.wikipedia.org/wiki/Autonomous_system_(Internet) - This uses int64 instead of uint32 because of https://linter.aip.dev/141/forbidden-types. - item_type: - type: String - - name: 'allow' - type: NestedObject - description: | - Allow a request through if it matches this SecurityAction. 
- exactly_one_of: - - 'allow' - - 'deny' - - 'flag' - # empty object with no properties, see: https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.environments.securityActions#Allow - allow_empty_object: true - send_empty_value: true - properties: [] - - name: 'deny' - type: NestedObject - description: | - Deny a request through if it matches this SecurityAction. - exactly_one_of: - - 'allow' - - 'deny' - - 'flag' - properties: - - name: 'responseCode' - type: Integer - description: | - The HTTP response code if the Action = DENY. - - name: 'flag' - type: NestedObject - description: | - Flag a request through if it matches this SecurityAction. - exactly_one_of: - - 'allow' - - 'deny' - - 'flag' - properties: - - name: 'headers' - type: Array - description: | - A list of HTTP headers to be sent to the target in case of a FLAG SecurityAction. - Limit 5 headers per SecurityAction. - At least one is mandatory. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - The header name to be sent to the target. - - name: 'value' - type: String - description: | - The header value to be sent to the target. - - name: 'expireTime' - type: String - description: | - The expiration for this SecurityAction. - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 - fractional digits. Offsets other than "Z" are also accepted. - Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - conflicts: - - 'ttl' - - name: 'ttl' - type: String - description: | - The TTL for this SecurityAction. - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". 
- conflicts: - - 'expireTime' diff --git a/mmv1/products/apigee/SecurityMonitoringCondition.yaml b/mmv1/products/apigee/SecurityMonitoringCondition.yaml deleted file mode 100644 index 6960f7be563f..000000000000 --- a/mmv1/products/apigee/SecurityMonitoringCondition.yaml +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'SecurityMonitoringCondition' -description: | - Security monitoring condition for risk assessment version 2 in Apigee. 
-references: - guides: - 'Creating a security monitoring condition': 'https://cloud.google.com/apigee/docs/api-security/security-scores#monitoring-conditions' - api: 'https://cloud.google.com/apigee/docs/reference/apis/apigee/rest/v1/organizations.securityMonitoringConditions/create' -docs: -base_url: '{{org_id}}/securityMonitoringConditions' -self_link: '{{org_id}}/securityMonitoringConditions/{{condition_id}}' -create_url: '{{org_id}}/securityMonitoringConditions?security_monitoring_condition_id={{condition_id}}' -update_verb: 'PATCH' -delete_url: "{{org_id}}/securityMonitoringConditions/{{condition_id}}" -immutable: false -import_format: - - '{{org_id}}/securityMonitoringConditions/{{condition_id}}' - - '{{org_id}}/{{condition_id}}' -custom_code: - custom_import: "templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl" -examples: - - name: 'apigee_security_monitoring_condition_basic' - vars: - environment_name: 'my-environment' - security_monitoring_condition_id: 'my-condition' - exclude_test: true - - name: 'apigee_security_monitoring_condition_basic_test' - primary_resource_id: 'security_monitoring_condition' - test_env_vars: - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - exclude_docs: true - skip_vcr: true - external_providers: ["time"] -parameters: - - name: 'orgId' - type: String - description: | - The Apigee Organization associated with the Apigee Security Monitoring Condition, - in the format `organizations/{{org_name}}`. - url_param_only: true - required: true - immutable: true - - name: 'conditionId' - type: String - description: | - Resource ID of the security monitoring condition. - immutable: true - url_param_only: true - required: true -properties: - - name: 'name' - type: String - description: | - Name of the security monitoring condition resource, - in the format `organizations/{{org_name}}/securityMonitoringConditions/{{condition_id}}`. 
- output: true - - name: 'profile' - type: String - description: | - ID of security profile of the security monitoring condition. - required: true - - name: 'scope' - type: String - description: | - ID of security profile of the security monitoring condition. - required: true - - name: 'includeAllResources' - type: NestedObject - exactly_one_of: - - include_all_resources - # TODO: hashicorp/terraform-provider-google#22581 add this block back + test once deployment is supported - # - include - properties: [] - send_empty_value: true - allow_empty_object: true - # TODO: hashicorp/terraform-provider-google#22581 add this block back + test once deployment is supported - # - name: 'include' - # type: NestedObject - # properties: - # - name: 'resources' - # type: Array - # required: true - # item_type: - # type: NestedObject - # properties: - # - name: 'type' - # type: Enum - # description: 'Type of this resource' - # enum_values: - # - 'API_PROXY' - # required: true - # - name: 'name' - # type: String - # description: 'Name of this resource' - # required: true - # exactly_one_of: - # - include_all_resources - # - include - - name: 'createTime' - type: String - description: | - The timestamp at which this profile was created. - output: true - - name: 'updateTime' - type: String - description: | - The timestamp at which this profile was most recently updated. - output: true - - name: 'totalMonitoredResources' - type: Integer - description: | - Total number of monitored resources within this condition. - output: true - - name: 'totalDeployedResources' - type: Integer - description: | - Total number of deployed resources within scope. 
- output: true diff --git a/mmv1/products/apigee/TargetServer.yaml b/mmv1/products/apigee/TargetServer.yaml index 7b5f0d779136..3e16dc8eeead 100644 --- a/mmv1/products/apigee/TargetServer.yaml +++ b/mmv1/products/apigee/TargetServer.yaml @@ -142,10 +142,6 @@ properties: type: Boolean description: | Indicates whether the cert should be matched against as a wildcard cert. - - name: 'enforce' - type: Boolean - description: | - If true, TLS is strictly enforced. - name: 'protocol' type: Enum diff --git a/mmv1/products/apihub/Curation.yaml b/mmv1/products/apihub/Curation.yaml deleted file mode 100644 index be9e5c5ca9e2..000000000000 --- a/mmv1/products/apihub/Curation.yaml +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: Curation -description: Description -base_url: projects/{{project}}/locations/{{location}}/curations -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/curations/{{curation_id}} -create_url: projects/{{project}}/locations/{{location}}/curations?curationId={{curation_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/{{location}}/curations/{{curation_id}} -import_format: - - projects/{{project}}/locations/{{location}}/curations/{{curation_id}} -autogen_async: true -autogen_status: Q3VyYXRpb24= -examples: - - name: apihub_curation_basic - primary_resource_id: apihub_curation_basic - vars: - curation_id: 'test' - # API hub instance needs to be created before this, and end to end creation of that resource using Terraform is not yet supported. - exclude_test: true - external_providers: ["time"] -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: curationId - type: String - description: |- - The ID to use for the curation resource, which will become the final - component of the curations's resource name. This field is optional. - - * If provided, the same will be used. The service will throw an error if - the specified ID is already used by another curation resource in the API - hub. - * If not provided, a system generated ID will be used. - - This value should be 4-500 characters, and valid characters - are /a-z[0-9]-_/. - immutable: true - url_param_only: true - required: true -properties: - - name: name - type: String - description: |- - Identifier. The name of the curation. - - Format: - `projects/{project}/locations/{location}/curations/{curation}` - output: true - - name: displayName - type: String - description: The display name of the curation. 
- required: true - - name: endpoint - type: NestedObject - description: |- - The endpoint to be triggered for curation. - The endpoint will be invoked with a request payload containing - ApiMetadata. - Response should contain curated data in the form of - ApiMetadata. - required: true - immutable: true - properties: - - name: applicationIntegrationEndpointDetails - type: NestedObject - description: |- - The details of the Application Integration endpoint to be triggered for - curation. - required: true - immutable: true - properties: - - name: triggerId - type: String - description: The API trigger ID of the Application Integration workflow. - required: true - immutable: true - - name: uri - type: String - description: |- - The endpoint URI should be a valid REST URI for triggering an Application - Integration. - Format: - `https://integrations.googleapis.com/v1/{name=projects/*/locations/*/integrations/*}:execute` - or - `https://{location}-integrations.googleapis.com/v1/{name=projects/*/locations/*/integrations/*}:execute` - required: true - immutable: true - - name: lastExecutionState - type: String - description: |- - The last execution state of the curation. - Possible values: - LAST_EXECUTION_STATE_UNSPECIFIED - SUCCEEDED - FAILED - output: true - - name: lastExecutionErrorMessage - type: String - description: |- - Error message describing the failure, if any, during the last execution of - the curation. - output: true - - name: description - type: String - description: The description of the curation. - - name: pluginInstanceActions - type: Array - description: |- - The plugin instances and associated actions that are using the curation. - Note: A particular curation could be used by multiple plugin instances or - multiple actions in a plugin instance. - output: true - item_type: - type: NestedObject - properties: - - name: pluginInstance - type: String - description: |- - Plugin instance that is using the curation. 
- Format is - `projects/{project}/locations/{locati on}/plugins/{plugin}/instances/{instance}` - output: true - - name: actionId - type: String - description: |- - The action ID that is using the curation. - This should map to one of the action IDs specified - in action configs in the plugin. - output: true - - name: lastExecutionErrorCode - type: String - description: |- - The error code of the last execution of the curation. The error code is - populated only when the last execution state is failed. - Possible values: - ERROR_CODE_UNSPECIFIED - INTERNAL_ERROR - UNAUTHORIZED - output: true - - name: createTime - type: String - description: The time at which the curation was created. - output: true - - name: updateTime - type: String - description: The time at which the curation was last updated. - output: true diff --git a/mmv1/products/apihub/Plugin.yaml b/mmv1/products/apihub/Plugin.yaml deleted file mode 100644 index e6b7e9bc1245..000000000000 --- a/mmv1/products/apihub/Plugin.yaml +++ /dev/null @@ -1,261 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Plugin -description: A plugin resource in the API hub. 
-base_url: projects/{{project}}/locations/{{location}}/plugins -immutable: true -self_link: projects/{{project}}/locations/{{location}}/plugins/{{plugin_id}} -create_url: projects/{{project}}/locations/{{location}}/plugins?pluginId={{plugin_id}} -id_format: projects/{{project}}/locations/{{location}}/plugins/{{plugin_id}} -import_format: - - projects/{{project}}/locations/{{location}}/plugins/{{plugin_id}} -examples: - - name: apihub_plugin_full - primary_resource_id: apihub_plugin_full - vars: - plugin_id: 'plugin-full' - # API hub instance needs to be created before this, and end to end creation of that resource using Terraform is not yet supported. - exclude_test: true - external_providers: ["time"] -autogen_async: true -autogen_status: UGx1Z2lu -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: pluginId - type: String - description: |- - The ID to use for the Plugin resource, which will become the final - component of the Plugin's resource name. This field is optional. - - * If provided, the same will be used. The service will throw an error if - the specified id is already used by another Plugin resource in the API hub - instance. - * If not provided, a system generated id will be used. - - This value should be 4-63 characters, overall resource name which will be - of format - `projects/{project}/locations/{location}/plugins/{plugin}`, - its length is limited to 1000 characters and valid characters are - /a-z[0-9]-_/. - immutable: true - url_param_only: true - required: true -properties: - - name: description - type: String - description: |- - The plugin description. Max length is 2000 characters (Unicode code - points). - - name: state - type: String - description: |- - Represents the state of the plugin. 
- Note this field will not be set for plugins developed via plugin - framework as the state will be managed at plugin instance level. - Possible values: - STATE_UNSPECIFIED - ENABLED - DISABLED - output: true - - name: ownershipType - type: String - description: |- - The type of the plugin, indicating whether it is 'SYSTEM_OWNED' or - 'USER_OWNED'. - Possible values: - OWNERSHIP_TYPE_UNSPECIFIED - SYSTEM_OWNED - USER_OWNED - output: true - - name: actionsConfig - type: Array - description: The configuration of actions supported by the plugin. - item_type: - type: NestedObject - properties: - - name: id - type: String - description: The id of the action. - required: true - - name: displayName - type: String - description: The display name of the action. - required: true - - name: description - type: String - description: The description of the operation performed by the action. - required: true - - name: triggerMode - type: String - description: |- - The trigger mode supported by the action. - Possible values: - TRIGGER_MODE_UNSPECIFIED - API_HUB_ON_DEMAND_TRIGGER - API_HUB_SCHEDULE_TRIGGER - NON_API_HUB_MANAGED - required: true - - name: documentation - type: NestedObject - description: Documentation details. - properties: - - name: externalUri - type: String - description: The uri of the externally hosted documentation. - - name: pluginCategory - type: String - description: |2- - - Possible values: - PLUGIN_CATEGORY_UNSPECIFIED - API_GATEWAY - API_PRODUCER - - name: configTemplate - type: NestedObject - description: ConfigTemplate represents the configuration template for a plugin. - properties: - - name: authConfigTemplate - type: NestedObject - description: AuthConfigTemplate represents the authentication template for a plugin. - properties: - - name: supportedAuthTypes - type: Array - description: The list of authentication types supported by the plugin. 
- required: true - item_type: - type: String - - name: serviceAccount - type: NestedObject - description: Config for Google service account authentication. - properties: - - name: serviceAccount - type: String - description: |- - The service account to be used for authenticating request. - - The `iam.serviceAccounts.getAccessToken` permission should be granted on - this service account to the impersonator service account. - required: true - - name: additionalConfigTemplate - type: Array - description: |- - The list of additional configuration variables for the plugin's - configuration. - item_type: - type: NestedObject - properties: - - name: required - type: Boolean - description: |- - Flag represents that this `ConfigVariable` must be provided for a - PluginInstance. - - name: enumOptions - type: Array - description: Enum options. To be populated if `ValueType` is `ENUM`. - item_type: - type: NestedObject - properties: - - name: id - type: String - description: Id of the option. - required: true - - name: displayName - type: String - description: Display name of the option. - required: true - - name: description - type: String - description: Description of the option. - - name: multiSelectOptions - type: Array - description: Multi select options. To be populated if `ValueType` is `MULTI_SELECT`. - item_type: - type: NestedObject - properties: - - name: id - type: String - description: Id of the option. - required: true - - name: displayName - type: String - description: Display name of the option. - required: true - - name: description - type: String - description: Description of the option. - - name: id - type: String - description: ID of the config variable. Must be unique within the configuration. - required: true - - name: valueType - type: String - description: |- - Type of the parameter: string, int, bool etc. 
- Possible values: - VALUE_TYPE_UNSPECIFIED - STRING - INT - BOOL - SECRET - ENUM - MULTI_SELECT - MULTI_STRING - MULTI_INT - required: true - - name: description - type: String - description: Description. - - name: validationRegex - type: String - description: |- - Regular expression in RE2 syntax used for validating the `value` of a - `ConfigVariable`. - - name: name - type: String - description: |- - Identifier. The name of the plugin. - Format: `projects/{project}/locations/{location}/plugins/{plugin}` - output: true - - name: displayName - type: String - description: |- - The display name of the plugin. Max length is 50 characters (Unicode code - points). - required: true - - name: hostingService - type: NestedObject - description: |- - The information related to the service implemented by the plugin - developer, used to invoke the plugin's functionality. - properties: - - name: serviceUri - type: String - description: |- - The URI of the service implemented by the plugin developer, used to - invoke the plugin's functionality. This information is only required for - user defined plugins. - - name: createTime - type: String - description: Timestamp indicating when the plugin was created. - output: true - - name: updateTime - type: String - description: Timestamp indicating when the plugin was last updated. - output: true diff --git a/mmv1/products/apihub/PluginInstance.yaml b/mmv1/products/apihub/PluginInstance.yaml deleted file mode 100644 index 0f1c823ca9f0..000000000000 --- a/mmv1/products/apihub/PluginInstance.yaml +++ /dev/null @@ -1,332 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: PluginInstance -description: Description -base_url: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances -immutable: true -self_link: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}} -create_url: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances?pluginInstanceId={{plugin_instance_id}} -id_format: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}} -import_format: - - projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}} -examples: - - name: apihub_plugin_instance_basic - primary_resource_id: apihub_plugin_instance_basic - vars: - curation_id: 'test' - # API hub instance needs to be created before this, and end to end creation of that resource using Terraform is not yet supported. - exclude_test: true - external_providers: ["time"] -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: '{{op_id}}' - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: UGx1Z2luSW5zdGFuY2U= -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. 
- immutable: true - url_param_only: true - required: true - - name: plugin - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: pluginInstanceId - type: String - description: |- - The ID to use for the plugin instance, which will become the final - component of the plugin instance's resource name. This field is optional. - - * If provided, the same will be used. The service will throw an error if - the specified id is already used by another plugin instance in the plugin - resource. - * If not provided, a system generated id will be used. - - This value should be 4-63 characters, and valid characters - are /a-z[0-9]-_/. - immutable: true - url_param_only: true - required: true -properties: - - name: name - type: String - description: |- - Identifier. The unique name of the plugin instance resource. - Format: - `projects/{project}/locations/{location}/plugins/{plugin}/instances/{instance}` - output: true - - name: disable - type: Boolean - default_value: false - update_url: projects/{{project}}/locations/{{location}}/plugins/{{plugin}}/instances/{{plugin_instance_id}}:enableAction - description: The display name for this plugin instance. Max length is 255 characters. - - name: authConfig - type: NestedObject - description: AuthConfig represents the authentication information. - properties: - - name: googleServiceAccountConfig - type: NestedObject - description: Config for Google service account authentication. - properties: - - name: serviceAccount - type: String - description: |- - The service account to be used for authenticating request. - - The `iam.serviceAccounts.getAccessToken` permission should be granted on - this service account to the impersonator service account. 
- required: true - - name: userPasswordConfig - type: NestedObject - description: Parameters to support Username and Password Authentication. - properties: - - name: username - type: String - description: Username. - required: true - - name: password - type: NestedObject - description: Secret provides a reference to entries in Secret Manager. - required: true - properties: - - name: secretVersion - type: String - description: |- - The resource name of the secret version in the format, - format as: `projects/*/secrets/*/versions/*`. - required: true - - name: apiKeyConfig - type: NestedObject - description: Config for authentication with API key. - properties: - - name: name - type: String - description: |- - The parameter name of the API key. - E.g. If the API request is "https://example.com/act?api_key=", - "api_key" would be the parameter name. - required: true - - name: apiKey - type: NestedObject - description: Secret provides a reference to entries in Secret Manager. - required: true - properties: - - name: secretVersion - type: String - description: |- - The resource name of the secret version in the format, - format as: `projects/*/secrets/*/versions/*`. - required: true - - name: httpElementLocation - type: String - description: |- - The location of the API key. - The default value is QUERY. - Possible values: - HTTP_ELEMENT_LOCATION_UNSPECIFIED - QUERY - HEADER - PATH - BODY - COOKIE - required: true - - name: oauth2ClientCredentialsConfig - type: NestedObject - description: |- - Parameters to support Oauth 2.0 client credentials grant authentication. - See https://tools.ietf.org/html/rfc6749#section-1.3.4 for more details. - properties: - - name: clientSecret - type: NestedObject - description: Secret provides a reference to entries in Secret Manager. - required: true - properties: - - name: secretVersion - type: String - description: |- - The resource name of the secret version in the format, - format as: `projects/*/secrets/*/versions/*`. 
- required: true - - name: clientId - type: String - description: The client identifier. - required: true - - name: authType - type: String - description: |2- - - Possible values: - AUTH_TYPE_UNSPECIFIED - NO_AUTH - GOOGLE_SERVICE_ACCOUNT - USER_PASSWORD - API_KEY - OAUTH2_CLIENT_CREDENTIALS - required: true - - name: state - type: String - description: |- - The current state of the plugin instance (e.g., enabled, disabled, - provisioning). - Possible values: - STATE_UNSPECIFIED - CREATING - ACTIVE - APPLYING_CONFIG - ERROR - FAILED - DELETING - output: true - - name: createTime - type: String - description: Timestamp indicating when the plugin instance was created. - output: true - - name: updateTime - type: String - description: Timestamp indicating when the plugin instance was last updated. - output: true - - name: displayName - type: String - description: The display name for this plugin instance. Max length is 255 characters. - required: true - - name: errorMessage - type: String - description: |- - Error message describing the failure, if any, during Create, Delete or - ApplyConfig operation corresponding to the plugin instance.This field will - only be populated if the plugin instance is in the ERROR or FAILED state. - output: true - - name: actions - type: Array - description: The action status for the plugin instance. - required: true - default_from_api: true - item_type: - type: NestedObject - properties: - - name: hubInstanceAction - type: NestedObject - output: true - description: The execution status for the plugin instance. - properties: - - name: currentExecutionState - type: String - description: |- - The current state of the execution. - Possible values: - CURRENT_EXECUTION_STATE_UNSPECIFIED - RUNNING - NOT_RUNNING - output: true - - name: lastExecution - type: NestedObject - description: The result of the last execution of the plugin instance. 
- output: true - properties: - - name: result - type: String - description: |- - The result of the last execution of the plugin instance. - Possible values: - RESULT_UNSPECIFIED - SUCCEEDED - FAILED - output: true - - name: errorMessage - type: String - description: |- - Error message describing the failure, if any, during the last - execution. - output: true - - name: startTime - type: String - description: The last execution start time of the plugin instance. - output: true - - name: endTime - type: String - description: The last execution end time of the plugin instance. - output: true - - name: actionId - type: String - description: |- - This should map to one of the action id specified - in actions_config in the plugin. - required: true - - name: state - type: String - description: |- - The current state of the plugin action in the plugin instance. - Possible values: - STATE_UNSPECIFIED - ENABLED - DISABLED - ENABLING - DISABLING - ERROR - output: true - - name: scheduleCronExpression - type: String - default_from_api: true - description: |- - The schedule for this plugin instance action. This can only be set if the - plugin supports API_HUB_SCHEDULE_TRIGGER mode for this action. - - name: curationConfig - type: NestedObject - default_from_api: true - description: The curation information for this plugin instance. - properties: - - name: customCuration - type: NestedObject - description: Custom curation information for this plugin instance. - properties: - - name: curation - type: String - description: |- - The unique name of the curation resource. 
This will be the name of the - curation resource in the format: - `projects/{project}/locations/{location}/curations/{curation}` - required: true - - name: curationType - type: String - default_from_api: true - description: |2- - - Possible values: - CURATION_TYPE_UNSPECIFIED - DEFAULT_CURATION_FOR_API_METADATA - CUSTOM_CURATION_FOR_API_METADATA - required: true - - name: scheduleTimeZone - type: String - default_from_api: true - description: |- - The time zone for the schedule cron expression. If not provided, UTC will - be used. diff --git a/mmv1/products/appengine/FlexibleAppVersion.yaml b/mmv1/products/appengine/FlexibleAppVersion.yaml index 28c28cf7b31a..07f1cacf6a04 100644 --- a/mmv1/products/appengine/FlexibleAppVersion.yaml +++ b/mmv1/products/appengine/FlexibleAppVersion.yaml @@ -322,7 +322,7 @@ properties: - 'REDIRECT_HTTP_RESPONSE_CODE_307' - name: 'script' type: NestedObject - # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set description: | Executes a script to handle the requests that match this URL pattern. Only the auto value is supported for Node.js in the App Engine standard environment, for example "script:" "auto". @@ -334,7 +334,7 @@ properties: required: true - name: 'staticFiles' type: NestedObject - # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set description: | Files served directly to the user for a given URL, such as images, CSS stylesheets, or JavaScript source files. Static file handlers describe which files in the application directory are static files, and which URLs serve them. 
diff --git a/mmv1/products/appengine/StandardAppVersion.yaml b/mmv1/products/appengine/StandardAppVersion.yaml index d3d740b1e6c9..c4a8a296c0e8 100644 --- a/mmv1/products/appengine/StandardAppVersion.yaml +++ b/mmv1/products/appengine/StandardAppVersion.yaml @@ -183,7 +183,7 @@ properties: - 'REDIRECT_HTTP_RESPONSE_CODE_307' - name: 'script' type: NestedObject - # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set description: | Executes a script to handle the requests that match this URL pattern. Only the auto value is supported for Node.js in the App Engine standard environment, for example "script:" "auto". @@ -195,7 +195,7 @@ properties: required: true - name: 'staticFiles' type: NestedObject - # TODO: Exactly one of script, staticFiles, or apiEndpoint must be set + # TODO (mbang): Exactly one of script, staticFiles, or apiEndpoint must be set description: | Files served directly to the user for a given URL, such as images, CSS stylesheets, or JavaScript source files. Static file handlers describe which files in the application directory are static files, and which URLs serve them. 
properties: diff --git a/mmv1/products/apphub/Application.yaml b/mmv1/products/apphub/Application.yaml index 8132bf8d0e5e..34f66b9ba088 100644 --- a/mmv1/products/apphub/Application.yaml +++ b/mmv1/products/apphub/Application.yaml @@ -41,7 +41,6 @@ custom_code: constants: 'templates/terraform/constants/apphub_application.go.tmpl' custom_diff: - 'apphubApplicationCustomizeDiff' -include_in_tgc_next_DO_NOT_USE: true sweeper: url_substitutions: - region: "us-central1" diff --git a/mmv1/products/apphub/Service.yaml b/mmv1/products/apphub/Service.yaml index d026e536e820..f62ff42e1611 100644 --- a/mmv1/products/apphub/Service.yaml +++ b/mmv1/products/apphub/Service.yaml @@ -40,7 +40,6 @@ async: result: resource_inside_response: true custom_code: -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apphub_service_basic' primary_resource_id: 'example' diff --git a/mmv1/products/apphub/Workload.yaml b/mmv1/products/apphub/Workload.yaml index a538ceb880f6..27c9ab21c094 100644 --- a/mmv1/products/apphub/Workload.yaml +++ b/mmv1/products/apphub/Workload.yaml @@ -40,7 +40,6 @@ async: result: resource_inside_response: true custom_code: -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apphub_workload_basic' primary_resource_id: 'example' diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index bb007704faeb..bf5e4e0e7acf 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -42,7 +42,7 @@ async: iam_policy: method_name_separator: ':' allowed_iam_role: 'roles/artifactregistry.reader' - # TODO: Change to repository_id in 4.0 + # TODO (camthornton): Change to repository_id in 4.0 parent_resource_attribute: 'repository' base_url: 'projects/{{project}}/locations/{{location}}/repositories/{{name}}' example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' @@ -53,6 +53,7 @@ iam_policy: custom_code: constants: 
'templates/terraform/constants/artifact_registry_repository.go.tmpl' encoder: 'templates/terraform/encoders/location_from_region.go.tmpl' + pre_create: 'templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl' sweeper: url_substitutions: - region: "us-central1" @@ -98,7 +99,7 @@ examples: - name: 'artifact_registry_repository_remote_apt' primary_resource_id: 'my-repo' vars: - repository_id: 'debian-stable' + repository_id: 'debian-buster' desc: 'example remote apt repository' - name: 'artifact_registry_repository_remote_yum' primary_resource_id: 'my-repo' @@ -270,11 +271,6 @@ properties: longer than 63 characters. Label keys must begin with a lowercase letter and may only contain lowercase letters, numeric characters, underscores, and dashes. - - name: 'registryUri' - type: String - output: true - description: | - The repository endpoint, for example: us-docker.pkg.dev/my-proj/my-repo. - name: 'kmsKeyName' type: String description: |- @@ -393,7 +389,7 @@ properties: type: NestedObject description: |- Policy condition for matching versions. - # TODO: exactly_one_of: condition, mostRecentVersions + # TODO (jrsb): exactly_one_of: condition, mostRecentVersions properties: - name: 'tagState' type: Enum @@ -439,7 +435,7 @@ properties: description: |- Policy condition for retaining a minimum number of versions. May only be specified with a Keep action. - # TODO: exactly_one_of: condition, mostRecentVersions + # TODO (jrsb): exactly_one_of: condition, mostRecentVersions properties: - name: 'packageNamePrefixes' type: Array @@ -487,7 +483,7 @@ properties: - name: 'repositoryBase' type: Enum description: |- - A common public repository base for Apt, e.g. `"debian/dists/stable"` + A common public repository base for Apt, e.g. 
`"debian/dists/buster"` required: true immutable: true enum_values: @@ -521,6 +517,9 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.docker_repository.0.custom_repository + custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' + # Eventually lets delete default_value and custom_flatten in a major release + default_value: "DOCKER_HUB" enum_values: - 'DOCKER_HUB' - name: 'customRepository' @@ -557,6 +556,9 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.maven_repository.0.custom_repository + custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' + # Eventually lets delete default_value and custom_flatten in a major release + default_value: "MAVEN_CENTRAL" enum_values: - 'MAVEN_CENTRAL' - name: 'customRepository' @@ -593,6 +595,9 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.npm_repository.0.custom_repository + custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' + # Eventually lets delete default_value and custom_flatten in a major release + default_value: "NPMJS" enum_values: - 'NPMJS' - name: 'customRepository' @@ -629,6 +634,9 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.python_repository.0.custom_repository + custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' + # Eventually lets delete default_value and custom_flatten in a major release + default_value: "PYPI" enum_values: - 'PYPI' - name: 'customRepository' diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index 42ff4e86a235..c045e0c46b90 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -15,8 +15,7 @@ name: 'BackupPlan' base_url: projects/{{project}}/locations/{{location}}/backupPlans create_url: projects/{{project}}/locations/{{location}}/backupPlans/?backup_plan_id={{backup_plan_id}} self_link: 
projects/{{project}}/locations/{{location}}/backupPlans/{{backup_plan_id}} -update_verb: 'PATCH' -update_mask: true +immutable: true description: A backup plan defines when and how to back up a resource, including the backup's schedule, retention, and location. import_format: - 'projects/{{project}}/locations/{{location}}/backupPlans/{{backup_plan_id}}' @@ -27,11 +26,7 @@ references: autogen_async: true timeouts: insert_minutes: 60 - update_minutes: 60 delete_minutes: 60 -custom_code: - tgc_decoder: 'templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl' -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'backup_dr_backup_plan_simple' primary_resource_id: 'my-backup-plan-1' @@ -40,34 +35,17 @@ examples: backup_plan_id: 'backup-plan-simple-test' test_env_vars: project: :PROJECT_NAME - - name: 'backup_dr_backup_plan_for_disk_resource' - primary_resource_id: 'my-disk-backup-plan-1' - min_version: 'beta' - vars: - backup_vault_id: 'backup-vault-disk-test' - backup_plan_id: 'backup-plan-disk-test' - test_env_vars: - project: :PROJECT_NAME - - name: 'backup_dr_backup_plan_for_csql_resource' - primary_resource_id: 'my-csql-backup-plan-1' - vars: - backup_vault_id: 'backup-vault-csql-test' - backup_plan_id: 'backup-plan-csql-test' - test_env_vars: - project: :PROJECT_NAME parameters: - name: 'location' type: String required: true url_param_only: true - immutable: true description: | The location for the backup plan - name: 'backup_plan_id' type: String required: true url_param_only: true - immutable: true description: |- The ID of the backup plan properties: @@ -87,25 +65,16 @@ properties: description: | Backup vault where the backups gets stored using this Backup plan. required: true - immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: 'backupVaultServiceAccount' type: String description: | The Google Cloud Platform Service Account to be used by the BackupVault for taking backups. 
output: true - - name: 'supportedResourceTypes' - type: Array - description: | - The list of all resource types to which the `BackupPlan` can be applied. - item_type: - type: String - output: true - name: 'resourceType' type: String description: | - The resource type to which the `BackupPlan` will be applied. - Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk", "sqladmin.googleapis.com/Instance" and "storage.googleapis.com/Bucket". + The resource type to which the `BackupPlan` will be applied. Examples include, "compute.googleapis.com/Instance" and "storage.googleapis.com/Bucket". required: true - name: 'createTime' type: String @@ -245,8 +214,3 @@ properties: description: | The hour of the day (1-24) when the window ends, for example, if the value of end hour of the day is 10, that means the backup window end time is 10:00. The end hour of the day should be greater than the start - - name: 'logRetentionDays' - type: Integer - immutable: true - description: | - This is only applicable for CloudSql resource. Days for which logs will be stored. This value should be greater than or equal to minimum enforced log retention duration of the backup vault. 
diff --git a/mmv1/products/backupdr/BackupPlanAssociation.yaml b/mmv1/products/backupdr/BackupPlanAssociation.yaml index edb94dd3cfdd..36909dab6d15 100644 --- a/mmv1/products/backupdr/BackupPlanAssociation.yaml +++ b/mmv1/products/backupdr/BackupPlanAssociation.yaml @@ -15,8 +15,7 @@ name: 'BackupPlanAssociation' base_url: projects/{{project}}/locations/{{location}}/backupPlanAssociations create_url: projects/{{project}}/locations/{{location}}/backupPlanAssociations/?backup_plan_association_id={{backup_plan_association_id}} self_link: projects/{{project}}/locations/{{location}}/backupPlanAssociations/{{backup_plan_association_id}} -update_verb: 'PATCH' -update_mask: true +immutable: true delete_url: projects/{{project}}/locations/{{location}}/backupPlanAssociations/{{backup_plan_association_id}} description: A Backup and DR BackupPlanAssociation. import_format: @@ -28,7 +27,6 @@ references: autogen_async: true timeouts: insert_minutes: 60 - update_minutes: 20 delete_minutes: 60 examples: - name: 'backup_dr_bpa' @@ -70,16 +68,12 @@ properties: type: String description: | The BP with which resource needs to be created - Note: - - A Backup Plan configured for 'compute.googleapis.com/Instance', can only protect instance type resources. - - A Backup Plan configured for 'compute.googleapis.com/Disk' can be used to protect both standard Disks and Regional Disks resources. required: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: 'resourceType' type: String description: | - The resource type of workload on which backupplan is applied. 
- Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk", and "compute.googleapis.com/RegionDisk" + The resource type of workload on which backupplan is applied required: true - name: 'createTime' type: String diff --git a/mmv1/products/backupdr/BackupVault.yaml b/mmv1/products/backupdr/BackupVault.yaml index 0caeb7dc6f73..52f933b7f99a 100644 --- a/mmv1/products/backupdr/BackupVault.yaml +++ b/mmv1/products/backupdr/BackupVault.yaml @@ -39,7 +39,6 @@ async: resource_inside_response: true custom_code: pre_delete: 'templates/terraform/pre_delete/backup_dr_backup_vault.go.tmpl' -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'backup_dr_backup_vault_full' primary_resource_id: 'backup-vault-test' @@ -114,7 +113,6 @@ properties: - name: 'labels' type: KeyValueLabels description: "Optional. Resource labels to represent user provided metadata. " - is_missing_in_cai: true - name: 'createTime' type: String description: 'Output only. The time when the instance was created. ' @@ -169,7 +167,6 @@ properties: type: KeyValueAnnotations description: "Optional. User annotations. See https://google.aip.dev/128#annotations\nStores small amounts of arbitrary data. " - is_missing_in_cai: true - name: 'accessRestriction' type: Enum description: | @@ -182,13 +179,3 @@ properties: - 'WITHIN_ORGANIZATION' - 'UNRESTRICTED' - 'WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA' - - name: 'backupRetentionInheritance' - type: Enum - ignore_read: true - is_missing_in_cai: true - description: | - How a backup's enforced retention end time is inherited. Default value is `INHERIT_VAULT_RETENTION` if not provided during creation. 
- enum_values: - - 'BACKUP_RETENTION_INHERITANCE_UNSPECIFIED' - - 'INHERIT_VAULT_RETENTION' - - 'MATCH_BACKUP_EXPIRE_TIME' diff --git a/mmv1/products/backupdr/ServiceConfig.yaml b/mmv1/products/backupdr/ServiceConfig.yaml deleted file mode 100644 index e641cd27ac2d..000000000000 --- a/mmv1/products/backupdr/ServiceConfig.yaml +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'ServiceConfig' -description: | - Initializes a Project-level default Backupdr config. It creates default Backupvault and default Backup Plan in same project for customers to protect instances. 
-references: - guides: - api: 'https://cloud.google.com/backup-disaster-recovery/docs/reference/rest/v1/projects.locations.serviceConfig' - -base_url: 'projects/{{project}}/locations/{{location}}/serviceConfig' - -create_url: 'projects/{{project}}/locations/{{location}}/serviceConfig:initialize' - -exclude_read: true -exclude_delete: true -exclude_import: true -exclude_sweeper: true - -immutable: true - -async: - actions: ['create'] - operation: - base_url: '{{op_id}}' - -examples: - - name: "backup_dr_service_config" - primary_resource_id: "my-service-config" - exclude_import_test: true - vars: - resource_type: 'compute.googleapis.com/Instance' - test_env_vars: - project: 'PROJECT_NAME' - -parameters: - - name: 'location' - type: String - required: true - url_param_only: true - description: | - The location in which the Service config is to be initialized. - -properties: - - name: 'resourceType' - type: String - required: true - description: The resource type to which the default service config will be applied. 
diff --git a/mmv1/products/beyondcorp/AppConnection.yaml b/mmv1/products/beyondcorp/AppConnection.yaml index bc495bc95b0d..b18a67eae6ac 100644 --- a/mmv1/products/beyondcorp/AppConnection.yaml +++ b/mmv1/products/beyondcorp/AppConnection.yaml @@ -45,7 +45,6 @@ async: result: resource_inside_response: true custom_code: -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'beyondcorp_app_connection_basic' primary_resource_id: 'app_connection' diff --git a/mmv1/products/beyondcorp/AppConnector.yaml b/mmv1/products/beyondcorp/AppConnector.yaml index d8fd86d53cc8..e6d813017782 100644 --- a/mmv1/products/beyondcorp/AppConnector.yaml +++ b/mmv1/products/beyondcorp/AppConnector.yaml @@ -42,7 +42,6 @@ async: result: resource_inside_response: true custom_code: -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'beyondcorp_app_connector_basic' primary_resource_id: 'app_connector' diff --git a/mmv1/products/beyondcorp/AppGateway.yaml b/mmv1/products/beyondcorp/AppGateway.yaml index 808be9e4f2a6..9a39bc7b9318 100644 --- a/mmv1/products/beyondcorp/AppGateway.yaml +++ b/mmv1/products/beyondcorp/AppGateway.yaml @@ -29,9 +29,9 @@ create_url: 'projects/{{project}}/locations/{{region}}/appGateways?app_gateway_i # This resources is not updatable immutable: true timeouts: - insert_minutes: 40 - update_minutes: 40 - delete_minutes: 40 + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 autogen_async: true async: actions: ['create', 'delete', 'update'] @@ -39,13 +39,12 @@ async: operation: base_url: '{{op_id}}' timeouts: - insert_minutes: 40 - update_minutes: 40 - delete_minutes: 40 + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 result: resource_inside_response: true custom_code: -include_in_tgc_next_DO_NOT_USE: true schema_version: 1 state_upgraders: true examples: diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml index 5b1992fe6894..891835f21fa6 100644 --- a/mmv1/products/beyondcorp/Application.yaml +++ 
b/mmv1/products/beyondcorp/Application.yaml @@ -13,7 +13,6 @@ --- name: Application -deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' description: Specifies application endpoint(s) to protect behind a Security Gateway. base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications update_mask: true @@ -31,15 +30,14 @@ iam_policy: import_format: - 'projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}}' - '{{application_id}}' - deprecation_message: '`google_beyondcorp_application` and associated IAM resources are deprecated. Use `google_beyondcorp_security_gateway_application` instead.' examples: - - name: beyondcorp_application_basic + - name: beyondcorp_security_gateway_application_basic primary_resource_id: example primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' vars: security_gateway_name: default application_name: google - - name: beyondcorp_application_vpc + - name: beyondcorp_security_gateway_application_vpc primary_resource_id: example primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' vars: diff --git a/mmv1/products/beyondcorp/SecurityGateway.yaml b/mmv1/products/beyondcorp/SecurityGateway.yaml index 3a772561b28f..487b4608bf32 100644 --- a/mmv1/products/beyondcorp/SecurityGateway.yaml +++ b/mmv1/products/beyondcorp/SecurityGateway.yaml @@ -54,8 +54,6 @@ async: resource_inside_response: true include_project: false autogen_status: U2VjdXJpdHlHYXRld2F5 -custom_code: - constants: 'templates/terraform/constants/beyondcorp_security_gateway.go.tmpl' parameters: - name: location type: String @@ -106,17 +104,15 @@ properties: as a key. key_name: region key_description: The region to deploy the hub in. 
- set_hash_func: 'beyondcorpSecurityGatewayHubsHash' value_type: name: Hub type: NestedObject properties: - - name: internetGateway + - name: internet_gateway type: NestedObject description: Internet Gateway configuration. - default_from_api: true properties: - - name: assignedIps + - name: assigned_ips type: Array description: Output only. List of IP addresses assigned to the Cloud NAT. output: true @@ -139,8 +135,3 @@ properties: type: String description: Identifier. Name of the resource. output: true - - name: delegatingServiceAccount - type: String - description: |- - Service account used for operations that involve resources in consumer projects. - output: true diff --git a/mmv1/products/beyondcorp/SecurityGatewayApplication.yaml b/mmv1/products/beyondcorp/SecurityGatewayApplication.yaml deleted file mode 100644 index 35cc5a00ee43..000000000000 --- a/mmv1/products/beyondcorp/SecurityGatewayApplication.yaml +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: SecurityGatewayApplication -description: Specifies application endpoint(s) to protect behind a Security Gateway. 
-base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications -update_mask: true -self_link: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}} -create_url: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications?applicationId={{application_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}} -import_format: - - projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}} -iam_policy: - method_name_separator: ':' - iam_conditions_request_type: 'QUERY_PARAM_NESTED' - allowed_iam_role: 'roles/beyondcorp.securityGatewayUser' - parent_resource_attribute: 'application_id' - import_format: - - 'projects/{{project}}/locations/global/securityGateways/{{security_gateway_id}}/applications/{{application_id}}' - - '{{application_id}}' -examples: - - name: beyondcorp_security_gateway_application_basic - primary_resource_id: example - primary_resource_name: 'fmt.Sprintf("tf-test-default-sg%s", context["random_suffix"]), fmt.Sprintf("tf-test-google-sga%s", context["random_suffix"])' - vars: - security_gateway_name: default-sg - application_name: google-sga - - name: beyondcorp_security_gateway_application_vpc - primary_resource_id: example - primary_resource_name: 'fmt.Sprintf("tf-test-default-sg%s", context["random_suffix"]), fmt.Sprintf("tf-test-google-sga%s", context["random_suffix"])' - vars: - security_gateway_name: default-sg - application_name: my-vm-service2 -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: '{{op_id}}' - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: QXBwbGljYXRpb24= -parameters: - - name: securityGatewayId 
- type: String - description: ID of the Security Gateway resource this belongs to. - immutable: true - url_param_only: true - required: true - - name: applicationId - type: String - description: |- - User-settable Application resource ID. - * Must start with a letter. - * Must contain between 4-63 characters from `/a-z-/`. - * Must end with a number or letter. - immutable: true - url_param_only: true - required: true -properties: - - name: createTime - type: String - description: Output only. Timestamp when the resource was created. - output: true - - name: displayName - type: String - description: |- - Optional. An arbitrary user-provided name for the Application resource. - Cannot exceed 64 characters. - - name: endpointMatchers - type: Array - description: |- - Required. Endpoint matchers associated with an application. - A combination of hostname and ports as endpoint matcher is used to match - the application. - Match conditions for OR logic. - An array of match conditions to allow for multiple matching criteria. - The rule is considered a match if one the conditions are met. - The conditions can be one of the following combination - (Hostname), (Hostname & Ports) - - EXAMPLES: - Hostname - ("*.abc.com"), ("xyz.abc.com") - Hostname and Ports - ("abc.com" and "22"), ("abc.com" and "22,33") etc - required: true - item_type: - type: NestedObject - properties: - - name: hostname - type: String - description: Required. Hostname of the application. - required: true - - name: ports - type: Array - description: Optional. Ports of the application. - item_type: - type: Integer - - name: upstreams - type: Array - description: Optional. List of which upstream resource(s) to forward traffic to. - item_type: - type: NestedObject - properties: - - name: egressPolicy - type: NestedObject - description: Optional. Routing policy information. - properties: - - name: regions - type: Array - description: Required. List of regions where the application sends traffic to. 
- required: true - item_type: - type: String - - name: network - type: NestedObject - description: Network to forward traffic to. - properties: - - name: name - type: string - description: |- - Required. Network name is of the format: - `projects/{project}/global/networks/{network}` - required: true - - name: name - type: String - description: Identifier. Name of the resource. - output: true - - name: updateTime - type: String - description: Output only. Timestamp when the resource was last modified. - output: true diff --git a/mmv1/products/bigquery/Dataset.yaml b/mmv1/products/bigquery/Dataset.yaml index 15a545f81a6b..55904db87680 100644 --- a/mmv1/products/bigquery/Dataset.yaml +++ b/mmv1/products/bigquery/Dataset.yaml @@ -37,7 +37,6 @@ timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 -include_in_tgc_next_DO_NOT_USE: true custom_code: constants: 'templates/terraform/constants/bigquery_dataset.go.tmpl' pre_read: 'templates/terraform/pre_read/bigquery_dataset.go.tmpl' @@ -128,8 +127,6 @@ properties: 'An array of objects that define dataset access for one or more entities.' is_set: true default_from_api: true - is_missing_in_cai: true - set_hash_func: 'resourceBigqueryDatasetAccessHash' item_type: type: NestedObject properties: @@ -401,11 +398,6 @@ properties: By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references. default_from_api: true - # This property has default value `true` in the real CAI asset. - # The bool property with `default_from_api: true` will have false value in converted CAI asset - # by default during tfplan2cai. - # Use exclude_false_in_cai: true to override the default behavior during tfplan2cai conversion. 
- exclude_false_in_cai: true - name: 'defaultCollation' type: String description: | diff --git a/mmv1/products/bigquery/Job.yaml b/mmv1/products/bigquery/Job.yaml index 3894e8155bfc..b389e34b97df 100644 --- a/mmv1/products/bigquery/Job.yaml +++ b/mmv1/products/bigquery/Job.yaml @@ -239,13 +239,13 @@ properties: properties: - name: 'resourceUri' type: String - # TODO: exactly_one_of: resourceUri, inlineCode + # TODO (mbang): exactly_one_of: resourceUri, inlineCode description: 'A code resource to load from a Google Cloud Storage URI (gs://bucket/path).' - name: 'inlineCode' type: String - # TODO: exactly_one_of: resourceUri, inlineCode + # TODO (mbang): exactly_one_of: resourceUri, inlineCode description: | An inline resource that contains code for a user-defined function (UDF). Providing a inline code resource is equivalent to providing a URI for a file containing the same code. @@ -406,30 +406,6 @@ properties: description: | Whether to run the query as continuous or a regular query. min_version: beta - - name: 'connectionProperties' - type: Array - description: | - Connection properties to customize query behavior. Under JDBC, these correspond - directly to connection properties passed to the DriverManager. Under ODBC, these - correspond to properties in the connection string. - item_type: - type: NestedObject - properties: - - name: 'key' - type: String - description: | - The key of the property to set. Currently supported connection properties: - * `dataset_project_id`: represents the default project for datasets that are used in the query - * `time_zone`: represents the default timezone used to run the query - * `session_id`: associates the query with a given session - * `query_label`: associates the query with a given job label - * `service_account`: indicates the service account to use to run a continuous query - required: true - - name: 'value' - type: String - description: | - The value of the property to set. 
- required: true - name: 'load' type: NestedObject description: 'Configures a load job.' diff --git a/mmv1/products/bigquery/Routine.yaml b/mmv1/products/bigquery/Routine.yaml index bd144e9df1e8..759b27b169e6 100644 --- a/mmv1/products/bigquery/Routine.yaml +++ b/mmv1/products/bigquery/Routine.yaml @@ -233,12 +233,6 @@ properties: description: If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask enum_values: - 'DATA_MASKING' - - name: 'securityMode' - type: Enum - description: Optional. The security mode of the routine, if defined. If not defined, the security mode is automatically determined from the routine's configuration. - enum_values: - - 'DEFINER' - - 'INVOKER' - name: 'sparkOptions' type: NestedObject description: | diff --git a/mmv1/products/bigquery/RowAccessPolicy.yaml b/mmv1/products/bigquery/RowAccessPolicy.yaml deleted file mode 100644 index 4cbb43179211..000000000000 --- a/mmv1/products/bigquery/RowAccessPolicy.yaml +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: RowAccessPolicy -description: Represents access on a subset of rows on the specified table, defined by its filter predicate. Access to the subset of rows is controlled by its IAM policy. 
-base_url: projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies -self_link: projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies/{{policy_id}} -id_format: projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies/{{policy_id}} -import_format: - - projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}/rowAccessPolicies/{{policy_id}} -custom_code: - pre_delete: 'templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl' -examples: - - name: 'bigquery_row_access_policy_basic' - primary_resource_id: 'example' - vars: - dataset_id: 'dataset_id' - table_id: 'table_id' - policy_id: 'policy_id' -parameters: -properties: - - name: rowAccessPolicyReference - type: NestedObject - description: Id path of a row access policy. - required: true - custom_expand: 'templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl' - flatten_object: true - properties: - - name: datasetId - type: String - description: The ID of the dataset containing this row access policy. - required: true - immutable: true - - name: tableId - type: String - description: The ID of the table containing this row access policy. - required: true - immutable: true - - name: policyId - type: String - description: |- - The ID of the row access policy. The ID must contain only - letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - length is 256 characters. - required: true - immutable: true - - name: filterPredicate - type: String - description: |- - A SQL boolean expression that represents the rows defined by this row - access policy, similar to the boolean expression in a WHERE clause of a - SELECT query on a table. - References to other tables, routines, and temporary functions are not - supported. 
- - Examples: region="EU" - date_field = CAST('2019-9-27' as DATE) - nullable_field is not NULL - numeric_field BETWEEN 1.0 AND 5.0 - required: true - - name: creationTime - type: String - description: |- - The time when this row access policy was created, in milliseconds since - the epoch. - output: true - - name: lastModifiedTime - type: String - description: |- - The time when this row access policy was last modified, in milliseconds - since the epoch. - output: true - - name: grantees - type: Array - description: |- - Input only. The optional list of iam_member users or groups that specifies the initial - members that the row-level access policy should be created with. - - grantees types: - - "user:alice@example.com": An email address that represents a specific - Google account. - - "serviceAccount:my-other-app@appspot.gserviceaccount.com": An email - address that represents a service account. - - "group:admins@example.com": An email address that represents a Google - group. - - "domain:example.com":The Google Workspace domain (primary) that - represents all the users of that domain. - - "allAuthenticatedUsers": A special identifier that represents all service - accounts and all users on the internet who have authenticated with a Google - Account. This identifier includes accounts that aren't connected to a - Google Workspace or Cloud Identity domain, such as personal Gmail accounts. - Users who aren't authenticated, such as anonymous visitors, aren't - included. - - "allUsers":A special identifier that represents anyone who is on - the internet, including authenticated and unauthenticated users. Because - BigQuery requires authentication before a user can access the service, - allUsers includes only authenticated users. 
- item_type: - type: String - ignore_read: true diff --git a/mmv1/products/bigquery/Table.yaml b/mmv1/products/bigquery/Table.yaml index 8327fc0e524c..402ee2e93e1b 100644 --- a/mmv1/products/bigquery/Table.yaml +++ b/mmv1/products/bigquery/Table.yaml @@ -45,7 +45,7 @@ examples: dataset_id: 'dataset_id' table_id: 'table_id' parameters: - # TODO: Remove once we have support for placing + # TODO(alexstephen): Remove once we have support for placing # nested object fields in URL - name: 'dataset' type: String @@ -638,11 +638,3 @@ virtual_fields: View sets the optional parameter "view": Specifies the view that determines which table information is returned. By default, basic table information and storage statistics (STORAGE_STATS) are returned. Possible values: TABLE_METADATA_VIEW_UNSPECIFIED, BASIC, STORAGE_STATS, FULL - - name: 'ignore_auto_generated_schema' - type: Boolean - description: | - If set to true, Terraform will prevent implicitly added columns in schema from showing diff. - - name: 'generated_schema_columns' - type: String - description: | - (Output-only) A list of autogenerated schema fields. 
diff --git a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml index 001a662ed643..03915fb20529 100644 --- a/mmv1/products/bigqueryanalyticshub/DataExchange.yaml +++ b/mmv1/products/bigqueryanalyticshub/DataExchange.yaml @@ -59,20 +59,6 @@ examples: vars: data_exchange_id: 'dcr_data_exchange' desc: 'example dcr data exchange' - - name: 'bigquery_analyticshub_data_exchange_log_linked_dataset_query_user' - primary_resource_id: 'data_exchange' - primary_resource_name: 'fmt.Sprintf("tf_test_log_email_data_exchange%s", context["random_suffix"])' - region_override: 'US' - vars: - data_exchange_id: 'tf_test_log_email_data_exchange' - description: 'Example for log email test for data exchange' - - name: 'bigquery_analyticshub_public_data_exchange' - primary_resource_id: 'data_exchange' - primary_resource_name: 'fmt.Sprintf("tf_test_log_email_data_exchange%s", context["random_suffix"])' - region_override: 'US' - vars: - data_exchange_id: 'public_data_exchange' - desc: 'Example for public data exchange' parameters: properties: - name: 'name' @@ -153,14 +139,3 @@ properties: - 'sharing_environment_config.0.dcr_exchange_config' properties: [] - - name: 'discoveryType' - type: Enum - enum_values: - - 'DISCOVERY_TYPE_PRIVATE' - - 'DISCOVERY_TYPE_PUBLIC' - default_from_api: true - description: Type of discovery on the discovery page for all the listings under this exchange. Cannot be set for a Data Clean Room. Updating this field also updates (overwrites) the discoveryType field for all the listings under this exchange. - - name: 'logLinkedDatasetQueryUserEmail' - type: Boolean - description: - If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. Once enabled, this setting cannot be turned off. 
diff --git a/mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml b/mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml deleted file mode 100644 index 1e6743d18bef..000000000000 --- a/mmv1/products/bigqueryanalyticshub/DataExchangeSubscription.yaml +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'DataExchangeSubscription' -min_version: beta -api_resource_type_kind: Subscription -description: A Bigquery Analytics Hub Data Exchange subscription -references: - guides: - 'Official Documentation': 'https://cloud.google.com/bigquery/docs/analytics-hub-introduction' - api: 'https://cloud.google.com/bigquery/docs/reference/analytics-hub/rest/v1/projects.locations.subscriptions' -docs: - note: |- - When importing the resource with `terraform import`, provide the destination/subscriber's project and location - in the format projects/{{subscriber_project}}/locations/{{subscriber_location}}/subscriptions/{{subscription_id}} -base_url: 'projects/{{project}}/locations/{{location}}/subscriptions' -self_link: 'projects/{{project}}/locations/{{location}}/subscriptions/{{subscription_id}}' -create_url: 'projects/{{data_exchange_project}}/locations/{{data_exchange_location}}/dataExchanges/{{data_exchange_id}}:subscribe' -import_format: - - 'projects/{{project}}/locations/{{location}}/subscriptions/{{subscription_id}}' -custom_code: - decoder: 
'templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' - encoder: 'templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' - post_read: 'templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' - custom_update: 'templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl' -sweeper: - url_substitutions: - - region: "us" -examples: - - name: 'bigquery_analyticshub_dataexchange_subscription_basic' - primary_resource_id: 'subscription' - primary_resource_name: 'fmt.Sprintf("tf_test_subscription_%s", context["random_suffix"])' - region_override: 'us' - ignore_read_extra: - - 'last_modify_time' - - 'state' - - 'linked_dataset_map' - - 'linked_resources' - vars: - data_exchange_id: 'my_test_dataexchange' - listing_dataset_id: 'listing_src_dataset' - listing_table_id: 'listing_src_table' - listing_id: 'my_test_listing' - subscription_id: 'my_subscription_id' - subscriber_contact_email: 'testuser@example.com' - # Variables for the Destination Dataset created by the Subscription - destination_dataset_id: 'subscribed_dest_dataset' - destination_dataset_friendly_name: 'Subscribed Destination Dataset' -virtual_fields: - - name: 'refresh_policy' - type: Enum - description: |- - Controls when the subscription is automatically refreshed by the provider. - * `ON_READ`: Default value if not specified. The subscription will be refreshed every time Terraform performs a read operation (e.g., `terraform plan`, `terraform apply`, `terraform refresh`). This ensures the state is always up-to-date. - * `ON_STALE`: The subscription will only be refreshed when its reported `state` (an output-only field from the API) is `STATE_STALE` during a Terraform read operation. - * `NEVER`: The provider will not automatically refresh the subscription. 
- default_value: 'ON_READ' - enum_values: - - 'ON_READ' - - 'ON_STALE' - - 'NEVER' -parameters: - - name: 'dataExchangeId' - type: String - immutable: true - description: |- - The ID of the data exchange. Must contain only Unicode letters, numbers (0-9), underscores (_). Should not use characters that require URL-escaping, or characters outside of ASCII, spaces. - url_param_only: true - required: true - - name: 'dataExchangeProject' - type: String - immutable: true - description: |- - The ID of the Google Cloud project where the Data Exchange is located. - url_param_only: true - required: true - diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - - name: 'dataExchangeLocation' - type: String - immutable: true - description: |- - The name of the location of the Data Exchange. - url_param_only: true - required: true - - name: 'location' - type: String - immutable: true - description: | - The geographic location where the Subscription (and its linked dataset) should reside. - This is the subscriber's desired location for the created resources. - See https://cloud.google.com/bigquery/docs/locations for supported locations. - url_param_only: true - required: true - custom_flatten: 'templates/terraform/custom_flatten/bigquery_dataset_location.go.tmpl' - diff_suppress_func: 'tpgresource.CaseDiffSuppress' - - name: 'subscriptionId' - type: String - immutable: true - description: |- - Name of the subscription to create. - required: true - custom_flatten: 'templates/terraform/custom_flatten/id_from_name.tmpl' - - name: 'subscriberContact' - type: String - immutable: true - description: |- - Email of the subscriber. - - name: 'destinationDataset' - type: NestedObject - immutable: true - description: - BigQuery destination dataset to create for the subscriber. - ignore_read: true - properties: - - name: 'location' - type: String - immutable: true - description: | - The geographic location where the dataset should reside. 
- See https://cloud.google.com/bigquery/docs/locations for supported locations. - required: true - custom_flatten: 'templates/terraform/custom_flatten/bigquery_dataset_location.go.tmpl' - diff_suppress_func: 'tpgresource.CaseDiffSuppress' - - name: 'datasetReference' - type: NestedObject - immutable: true - required: true - description: A reference that identifies the destination dataset. - properties: - - name: 'datasetId' - type: String - immutable: true - description: A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. - required: true - - name: 'projectId' - type: String - immutable: true - description: The ID of the project containing this dataset. - required: true - diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - - name: 'friendlyName' - type: String - immutable: true - description: A descriptive name for the dataset. - - name: 'description' - type: String - immutable: true - description: A user-friendly description of the dataset. - - name: 'labels' - type: KeyValuePairs - immutable: true - description: | - The labels associated with this dataset. You can use these to - organize and group your datasets. -properties: - - name: 'name' - type: String - description: |- - The resource name of the subscription. e.g. "projects/myproject/locations/us/subscriptions/123" - output: true - - name: 'creationTime' - type: Time - description: |- - Timestamp when the subscription was created. - output: true - - name: 'lastModifyTime' - type: Time - description: |- - Timestamp when the subscription was last modified. - output: true - - name: 'organizationId' - type: String - description: |- - Organization of the project this subscription belongs to. - output: true - - name: 'organizationDisplayName' - type: String - description: |- - Display name of the project of this subscription. 
- output: true - - name: 'state' - type: String - description: |- - Current state of the subscription. - output: true - - name: 'resourceType' - type: String - description: |- - Listing shared asset type. - output: true - - name: 'linkedDatasetMap' - output: true - type: Map - description: |- - Output only. Map of listing resource names to associated linked resource, - e.g. projects/123/locations/us/dataExchanges/456/listings/789 -> projects/123/datasets/my_dataset - For Data Exchange subscriptions, this map may contain multiple entries if the Data Exchange has multiple listings. - key_name: resource_name - key_description: The associated linked resource - value_type: - name: linked_resource - type: NestedObject - properties: - - name: 'listing' - type: string - description: Output only. Listing for which linked resource is created. - output: true - - name: 'linkedDataset' - type: string - description: Output only. Name of the linked dataset, e.g. projects/subscriberproject/datasets/linkedDataset - output: true - exactly_one_of: - - 'linkedDataset' - - 'linkedPubsubSubscription' - - name: 'linkedPubsubSubscription' - type: string - description: Output only. Name of the Pub/Sub subscription, e.g. projects/subscriberproject/subscriptions/subscriptions/sub_id - output: true - exactly_one_of: - - 'linkedDataset' - - 'linkedPubsubSubscription' - - name: 'linkedResources' - type: Array - description: | - Output only. Linked resources created in the subscription. Only contains values if state = STATE_ACTIVE. - output: true - item_type: - type: NestedObject - properties: - - name: 'listing' - type: string - description: Output only. Listing for which linked resource is created. - output: true - - name: 'linkedDataset' - type: string - description: Output only. Name of the linked dataset, e.g. projects/subscriberproject/datasets/linkedDataset - output: true - - name: 'dataExchange' - type: String - description: |- - Output only. Resource name of the source Data Exchange. 
e.g. projects/123/locations/us/dataExchanges/456 - output: true - - name: 'logLinkedDatasetQueryUserEmail' - type: Boolean - description: 'Output only. By default, false. If true, the Subscriber agreed to the email sharing mandate that is enabled for DataExchange/Listing.' - output: true diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index 8eb1f631e2d3..a65d420ca9bf 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -40,8 +40,6 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/dataExchanges/{{data_exchange_id}}/listings/{{listing_id}}' - '{{listing_id}}' custom_code: - pre_update: 'templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl' # Skipping the sweeper due to the non-standard base_url exclude_sweeper: true examples: @@ -69,58 +67,6 @@ examples: data_exchange_id: 'dcr_data_exchange' listing_id: 'dcr_listing' desc: 'example dcr data exchange' - - name: 'bigquery_analyticshub_listing_log_linked_dataset_query_user' - primary_resource_id: 'listing' - primary_resource_name: 'fmt.Sprintf("tf_test_log_email_de%s", context["random_suffix"]),fmt.Sprintf("tf_test_log_email_listing%s", context["random_suffix"])' - region_override: 'US' - vars: - data_exchange_id: 'tf_test_log_email_de' - listing_id: 'tf_test_log_email_listing' - dataset_id: 'tf_test_log_email_ds' - description: 'Example for log email test' - - name: 'bigquery_analyticshub_listing_pubsub' - primary_resource_id: 'listing' - primary_resource_name: 'fmt.Sprintf("tf_test_pubsub_de%s", context["random_suffix"]),fmt.Sprintf("tf_test_listing%s", context["random_suffix"])' - region_override: 'US' - vars: - data_exchange_id: 'tf_test_pubsub_data_exchange' - listing_id: 'tf_test_pubsub_listing' - pubsub_topic_name: 'test_pubsub' - description: 'Example for pubsub topic source' 
- - name: 'bigquery_analyticshub_listing_dcr_routine' - primary_resource_id: 'listing' - primary_resource_name: 'fmt.Sprintf("tf_test_pubsub_de%s", context["random_suffix"]),fmt.Sprintf("tf_test_listing%s", context["random_suffix"])' - region_override: 'US' - min_version: beta - vars: - data_exchange_id: 'tf_test_data_exchange' - listing_id: 'tf_test_listing_routine' - dataset_id: 'tf_test_dataset' - routine_id: 'tf_test_routine' - desc: 'Example for listing with routine' - - name: 'bigquery_analyticshub_public_listing' - primary_resource_id: 'listing' - primary_resource_name: 'fmt.Sprintf("tf_test_my_data_exchange%s", context["random_suffix"]), fmt.Sprintf("tf_test_my_listing%s", context["random_suffix"])' - region_override: 'US' - vars: - data_exchange_id: 'my_data_exchange' - listing_id: 'my_listing' - desc: 'example public listing' - - name: 'bigquery_analyticshub_listing_marketplace' - primary_resource_id: 'listing' - primary_resource_name: 'fmt.Sprintf("tf_test_my_data_exchange%s", context["random_suffix"]), fmt.Sprintf("tf_test_my_listing%s", context["random_suffix"])' - region_override: 'us' - vars: - data_exchange_id: 'my_data_exchange' - listing_id: 'my_listing' - desc: 'example data exchange' - ignore_read_extra: - - 'delete_commercial' -virtual_fields: - - name: 'delete_commercial' - type: Boolean - description: |- - If the listing is commercial then this field must be set to true, otherwise a failure is thrown. This acts as a safety guard to avoid deleting commercial listings accidentally. parameters: properties: - name: 'name' @@ -207,10 +153,8 @@ properties: - name: 'bigqueryDataset' type: NestedObject description: Shared dataset i.e. BigQuery dataset source. 
+ required: true immutable: true - exactly_one_of: - - 'pubsubTopic' - - 'bigqueryDataset' properties: - name: 'dataset' type: String @@ -233,41 +177,6 @@ properties: Format: For table: projects/{projectId}/datasets/{datasetId}/tables/{tableId} Example:"projects/test_project/datasets/test_dataset/tables/test_table" immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - exactly_one_of: - - 'table' - - 'routine' - - name: 'routine' - min_version: beta - type: String - description: | - Format: For routine: projects/{projectId}/datasets/{datasetId}/routines/{routineId} Example:"projects/test_project/datasets/test_dataset/routines/test_routine" - immutable: true - diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - exactly_one_of: - - 'table' - - 'routine' - - name: 'pubsubTopic' - type: NestedObject - description: Pub/Sub topic source. - exactly_one_of: - - 'pubsubTopic' - - 'bigqueryDataset' - properties: - - name: 'topic' - type: String - description: |- - Resource name of the Pub/Sub topic source for this listing. e.g. projects/myproject/topics/topicId - required: true - immutable: true - diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - - name: 'dataAffinityRegions' - type: Array - is_set: true - description: |- - Region hint on where the data might be published. Data affinity regions are modifiable. - See https://cloud.google.com/about/locations for full listing of possible Cloud regions. - item_type: - type: String - name: 'restrictedExportConfig' type: NestedObject description: If set, restricted export configuration will be propagated and enforced on the linked dataset. @@ -285,45 +194,3 @@ properties: type: Boolean description: If true, restrict export of query result derived from restricted linked dataset table. 
- - name: 'logLinkedDatasetQueryUserEmail' - type: Boolean - description: - If true, subscriber email logging is enabled and all queries on the linked dataset will log the email address of the querying user. Once enabled, this setting cannot be turned off. - - name: 'state' - type: String - description: |- - Current state of the listing. - output: true - - name: 'discoveryType' - type: Enum - enum_values: - - 'DISCOVERY_TYPE_PRIVATE' - - 'DISCOVERY_TYPE_PUBLIC' - default_from_api: true - description: Specifies the type of discovery on the discovery page. Cannot be set for a restricted listing. Note that this does not control the visibility of the exchange/listing which is defined by IAM permission. - - name: 'allowOnlyMetadataSharing' - type: Boolean - immutable: true - description: If true, the listing is only available to get the resource metadata. Listing is non subscribable. - - name: 'commercialInfo' - type: NestedObject - description: | - Commercial info contains the information about the commercial data products associated with the listing. - output: true - properties: - - name: 'cloudMarketplace' - type: NestedObject - description: Details of the Marketplace Data Product associated with the Listing. - output: true - properties: - - name: 'service' - type: String - description: | - Resource name of the commercial service associated with the Marketplace Data Product. e.g. example.com - output: true - - name: 'commercialState' - type: String - description: | - Commercial state of the Marketplace Data Product. 
- Possible values: COMMERCIAL_STATE_UNSPECIFIED, ONBOARDING, ACTIVE - output: true diff --git a/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml b/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml index f0d7aa963e6c..ccbdc6bc6e07 100644 --- a/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml +++ b/mmv1/products/bigqueryanalyticshub/ListingSubscription.yaml @@ -194,23 +194,3 @@ properties: type: string description: Output only. Name of the linked dataset, e.g. projects/subscriberproject/datasets/linkedDataset output: true - - name: 'logLinkedDatasetQueryUserEmail' - type: Boolean - description: 'Output only. By default, false. If true, the Subscriber agreed to the email sharing mandate that is enabled for Listing.' - output: true - - name: 'commercialInfo' - type: NestedObject - description: | - Commercial info metadata for this subscription. This is set if this is a commercial subscription i.e. if this subscription was created from subscribing to a commercial listing. - output: true - properties: - - name: 'cloudMarketplace' - type: NestedObject - description: Cloud Marketplace commercial metadata for this subscription. - output: true - properties: - - name: 'order' - type: String - description: | - Resource name of the Marketplace Order. 
- output: true diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index 91b3c82333b1..50628f0c56cb 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -40,6 +40,7 @@ custom_code: decoder: 'templates/terraform/decoders/bigquery_data_transfer.go.tmpl' pre_update: 'templates/terraform/pre_update/bigquerydatatransfer_config.tmpl' custom_import: 'templates/terraform/custom_import/bigquery_data_transfer_self_link_as_name_set_location.go.tmpl' + raw_resource_config_validation: 'templates/terraform/validation/bigquery_data_transfer_config.go.tmpl' custom_diff: - 'sensitiveParamCustomizeDiff' - 'paramsCustomizeDiff' @@ -59,7 +60,6 @@ examples: dataset_id: 'example_dataset' key_name: 'example-key' keyring_name: 'example-keyring' - display_name: 'display-name' exclude_test: true - name: 'bigquerydatatransfer_config_salesforce' primary_resource_id: 'salesforce_config' @@ -235,26 +235,17 @@ properties: The Secret Access Key of the AWS account transferring data from. sensitive: true at_least_one_of: - - 'sensitive_params.0.secret_access_key' - - 'sensitive_params.0.secret_access_key_wo' + - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secretAccessKeyWo' conflicts: - - 'sensitive_params.0.secret_access_key_wo' + - 'sensitive_params.0.secretAccessKeyWo' - name: 'secretAccessKeyWo' # Wo is convention for write-only properties type: String description: | The Secret Access Key of the AWS account transferring data from. 
- write_only_legacy: true + write_only: true at_least_one_of: - - 'sensitive_params.0.secret_access_key_wo' - - 'sensitive_params.0.secret_access_key' + - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secretAccessKey' conflicts: - - 'sensitive_params.0.secret_access_key' - required_with: - - 'sensitive_params.0.secret_access_key_wo_version' - - name: 'secretAccessKeyWoVersion' - type: Integer - url_param_only: true - required_with: - - 'sensitive_params.0.secret_access_key_wo' - description: | - The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + - 'sensitive_params.0.secretAccessKey' diff --git a/mmv1/products/bigqueryreservation/Reservation.yaml b/mmv1/products/bigqueryreservation/Reservation.yaml index fa6a7158d557..d6b973adba4e 100644 --- a/mmv1/products/bigqueryreservation/Reservation.yaml +++ b/mmv1/products/bigqueryreservation/Reservation.yaml @@ -156,100 +156,3 @@ properties: replicated to the secondary. output: true output: true - - name: 'scalingMode' - type: Enum - min_version: beta - description: | - The scaling mode for the reservation. If the field is present but maxSlots is not present, - requests will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. - - Enum values: - - `SCALING_MODE_UNSPECIFIED`: Default value of ScalingMode. - - `AUTOSCALE_ONLY`: The reservation will scale up only using slots from autoscaling. It will - not use any idle slots even if there may be some available. The upper limit that autoscaling - can scale up to will be maxSlots - baseline. For example, if maxSlots is 1000, baseline is 200 - and customer sets ScalingMode to AUTOSCALE_ONLY, then autoscalerg will scale up to 800 slots - and no idle slots will be used. Please note, in this mode, the ignoreIdleSlots field must be - set to true. 
Otherwise the request will be rejected with error code - google.rpc.Code.INVALID_ARGUMENT. - - `IDLE_SLOTS_ONLY`: The reservation will scale up using only idle slots contributed by other - reservations or from unassigned commitments. If no idle slots are available it will not scale - up further. If the idle slots which it is using are reclaimed by the contributing reservation(s) - it may be forced to scale down. The max idle slots the reservation can be maxSlots - baseline - capacity. For example, if maxSlots is 1000, baseline is 200 and customer sets ScalingMode to - IDLE_SLOTS_ONLY, 1. if there are 1000 idle slots available in other reservations, the - reservation will scale up to 1000 slots with 200 baseline and 800 idle slots. 2. if there are - 500 idle slots available in other reservations, the reservation will scale up to 700 slots with - 200 baseline and 300 idle slots. Please note, in this mode, the reservation might not be able to - scale up to maxSlots. Please note, in this mode, the ignoreIdleSlots field must be set to false. - Otherwise the request will be rejected with error code google.rpc.Code.INVALID_ARGUMENT - - `ALL_SLOTS`: The reservation will scale up using all slots available to it. It will use idle slots - contributed by other reservations or from unassigned commitments first. If no idle slots are - available it will scale up using autoscaling. For example, if maxSlots is 1000, baseline is 200 - and customer sets ScalingMode to ALL_SLOTS, 1. if there are 800 idle slots available in other - reservations, the reservation will scale up to 1000 slots with 200 baseline and 800 idle slots. 2. - if there are 500 idle slots available in other reservations, the reservation will scale up to 1000 - slots with 200 baseline, 500 idle slots and 300 autoscaling slots. 3. if there are no idle slots - available in other reservations, it will scale up to 1000 slots with 200 baseline and 800 - autoscaling slots. 
Please note, in this mode, the ignoreIdleSlots field must be set to false. - Otherwise the request will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. - enum_values: - - 'SCALING_MODE_UNSPECIFIED' - - 'AUTOSCALE_ONLY' - - 'IDLE_SLOTS_ONLY' - - 'ALL_SLOTS' - required_with: - - 'maxSlots' - conflicts: - - 'autoscale' - - name: 'maxSlots' - type: Integer - min_version: beta - description: | - The overall max slots for the reservation, covering slotCapacity (baseline), idle slots - (if ignoreIdleSlots is false) and scaled slots. If present, the reservation won't use - more than the specified number of slots, even if there is demand and supply (from idle - slots). NOTE: capping a reservation's idle slot usage is best effort and its usage may - exceed the maxSlots value. However, in terms of autoscale.current_slots (which accounts - for the additional added slots), it will never exceed the maxSlots - baseline. - - This field must be set together with the scalingMode enum value, otherwise the request - will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. - - If the maxSlots and scalingMode are set, the autoscale or autoscale.max_slots field - must be unset. Otherwise the request will be rejected with error code - google.rpc.Code.INVALID_ARGUMENT. However, the autoscale field may still be in the - output. The autopscale.max_slots will always show as 0 and the autoscaler.current_slots - will represent the current slots from autoscaler excluding idle slots. For example, - if the maxSlots is 1000 and scalingMode is AUTOSCALE_ONLY, then in the output, the - autoscaler.max_slots will be 0 and the autoscaler.current_slots may be any value - between 0 and 1000. - - If the maxSlots is 1000, scalingMode is ALL_SLOTS, the baseline is 100 and idle slots - usage is 200, then in the output, the autoscaler.max_slots will be 0 and the - autoscaler.current_slots will not be higher than 700. 
- - If the maxSlots is 1000, scalingMode is IDLE_SLOTS_ONLY, then in the output, the - autoscaler field will be null. - - If the maxSlots and scalingMode are set, then the ignoreIdleSlots field must be - aligned with the scalingMode enum value.(See details in ScalingMode comments). - Otherwise the request will be rejected with error code google.rpc.Code.INVALID_ARGUMENT. - - Please note, the maxSlots is for user to manage the part of slots greater than the - baseline. Therefore, we don't allow users to set maxSlots smaller or equal to the - baseline as it will not be meaningful. If the field is present and - slotCapacity>=maxSlots, requests will be rejected with error code - google.rpc.Code.INVALID_ARGUMENT. - - Please note that if maxSlots is set to 0, we will treat it as unset. Customers can set - maxSlots to 0 and set scalingMode to SCALING_MODE_UNSPECIFIED to disable the maxSlots - feature. - required_with: - - 'scalingMode' - conflicts: - - 'autoscale' diff --git a/mmv1/products/bigtable/LogicalView.yaml b/mmv1/products/bigtable/LogicalView.yaml index cef599560909..be4e6c7b1dbd 100644 --- a/mmv1/products/bigtable/LogicalView.yaml +++ b/mmv1/products/bigtable/LogicalView.yaml @@ -42,11 +42,6 @@ examples: instance_name: 'bt-instance' table_name: 'bt-table' logical_view_name: 'bt-logical-view' - deletion_protection: 'true' - test_vars_overrides: - 'deletion_protection': 'false' - oics_vars_overrides: - 'deletion_protection': 'false' # bigtable instance does not use the shared HTTP client, this test creates an instance skip_vcr: true parameters: @@ -76,7 +71,3 @@ properties: description: 'The logical view''s select query.' required: true - - name: 'deletionProtection' - type: Boolean - description: - 'Set to true to make the logical view protected against deletion.' 
diff --git a/mmv1/products/bigtable/SchemaBundle.yaml b/mmv1/products/bigtable/SchemaBundle.yaml deleted file mode 100644 index 129906179825..000000000000 --- a/mmv1/products/bigtable/SchemaBundle.yaml +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'SchemaBundle' -kind: 'bigtable#schemaBundle' -description: | - A schema bundle object that can be referenced in SQL queries. -references: - guides: - api: 'https://cloud.google.com/bigtable/docs/reference/admin/rest/v2/projects.instances.tables.schemaBundles' -docs: -id_format: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' -base_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles?schemaBundleId={{schema_bundle_id}}' -self_link: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' -create_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles?schemaBundleId={{schema_bundle_id}}' -update_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}?ignoreWarnings={{ignore_warnings}}' -update_verb: 'PATCH' -update_mask: true -delete_url: 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' -import_format: - - 'projects/{{project}}/instances/{{instance}}/tables/{{table}}/schemaBundles/{{schema_bundle_id}}' -timeouts: - 
insert_minutes: 10 - update_minutes: 10 - delete_minutes: 10 -exclude_sweeper: true -examples: - - name: 'bigtable_schema_bundle' - primary_resource_id: 'schema_bundle' - vars: - instance_name: 'bt-instance' - table_name: 'bt-table' - schema_bundle_name: 'bt-schema-bundle' - ignore_read_extra: - - 'ignore_warnings' - # bigtable instance does not use the shared HTTP client, this test creates an instance - skip_vcr: true -parameters: - - name: 'schemaBundleId' - type: String - description: - 'The unique name of the schema bundle in the form - `[_a-zA-Z0-9][-_.a-zA-Z0-9]*`.' - url_param_only: true - required: true - immutable: true - - name: 'instance' - type: String - description: 'The name of the instance to create the schema bundle within.' - url_param_only: true - immutable: true - diff_suppress_func: 'tpgresource.CompareResourceNames' - - name: 'table' - type: String - description: 'The name of the table to create the schema bundle within.' - url_param_only: true - immutable: true - diff_suppress_func: 'tpgresource.CompareResourceNames' - - name: 'ignoreWarnings' - type: Boolean - description: - 'If true, allow backwards incompatible changes.' - url_param_only: true - default_value: false -properties: - - name: 'name' - type: String - description: - 'The unique name of the requested schema bundle. Values are of the form - `projects//instances//tables/
/schemaBundles/`.' - output: true - - name: 'protoSchema' - type: NestedObject - description: | - File descriptor set, generated by protoc. - To generate, use protoc with imports and source info included. For an example test.proto file, the following command would put the value in a new file named out.pb. - - $ protoc --include_imports --include_source_info test.proto -o out.pb - required: true - properties: - - name: 'protoDescriptors' - type: String - description: | - Base64 encoded content of the file. - required: true - validation: - function: 'verify.ValidateBase64String' diff --git a/mmv1/products/billingbudget/Budget.yaml b/mmv1/products/billingbudget/Budget.yaml index 8fa30a93ce35..0cdc678ffbd8 100644 --- a/mmv1/products/billingbudget/Budget.yaml +++ b/mmv1/products/billingbudget/Budget.yaml @@ -213,6 +213,9 @@ properties: Optional. If creditTypesTreatment is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types to be subtracted from gross cost to determine the spend for threshold calculations. See a list of acceptable credit type values. If creditTypesTreatment is not INCLUDE_SPECIFIED_CREDITS, this field must be empty. + + **Note:** If the field has a value in the config and needs to be removed, the field has to be an empty array in the config. + default_from_api: true at_least_one_of: - 'budget_filter.0.projects' - 'budget_filter.0.resource_ancestors' @@ -233,6 +236,9 @@ properties: the parent account, usage from the parent account will be included. If the field is omitted, the report will include usage from the parent account and all subaccounts, if they exist. + + **Note:** If the field has a value in the config and needs to be removed, the field has to be an empty array in the config. 
+ default_from_api: true at_least_one_of: - 'budget_filter.0.projects' - 'budget_filter.0.resource_ancestors' @@ -406,8 +412,7 @@ properties: - 'amount.0.specified_amount' - 'amount.0.last_period_amount' custom_flatten: 'templates/terraform/custom_flatten/object_to_bool.go.tmpl' - # THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS - custom_expand: 'templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl' + custom_expand: 'templates/terraform/custom_expand/bool_to_object.go.tmpl' - name: 'thresholdRules' type: Array description: | diff --git a/mmv1/products/binaryauthorization/Attestor.yaml b/mmv1/products/binaryauthorization/Attestor.yaml index ab271659cc74..b5bc05f30bdb 100644 --- a/mmv1/products/binaryauthorization/Attestor.yaml +++ b/mmv1/products/binaryauthorization/Attestor.yaml @@ -34,7 +34,6 @@ iam_policy: example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' custom_code: constants: 'templates/terraform/constants/binaryauthorization_attestor.go.tmpl' -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'binary_authorization_attestor_basic' primary_resource_id: 'attestor' @@ -115,7 +114,7 @@ properties: default_from_api: true - name: 'asciiArmoredPgpPublicKey' type: String - # TODO: Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set + # TODO (mbang): Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set description: | ASCII-armored representation of a PGP public key, as the entire output by the command @@ -128,7 +127,7 @@ properties: be overwritten by the API-calculated ID. - name: 'pkixPublicKey' type: NestedObject - # TODO: Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set + # TODO (mbang): Exactly one of asciiArmoredPgpPublicKey or pkixPublicKey must be set description: | A raw PKIX SubjectPublicKeyInfo format public key. 
diff --git a/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml b/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml index 472c5ae0d6c0..43afc298cec6 100644 --- a/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml +++ b/mmv1/products/blockchainnodeengine/BlockchainNodes.yaml @@ -30,7 +30,6 @@ timeouts: insert_minutes: 45 update_minutes: 20 delete_minutes: 35 -include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: actions: ['create', 'delete', 'update'] diff --git a/mmv1/products/certificatemanager/Certificate.yaml b/mmv1/products/certificatemanager/Certificate.yaml index 8e298db14953..2ba2854f40c3 100644 --- a/mmv1/products/certificatemanager/Certificate.yaml +++ b/mmv1/products/certificatemanager/Certificate.yaml @@ -28,7 +28,6 @@ timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 -include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: actions: ['create', 'delete', 'update'] @@ -39,8 +38,6 @@ async: resource_inside_response: false custom_code: constants: 'templates/terraform/constants/cert_manager.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl' - tgc_decoder: 'templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl' schema_version: 1 state_upgraders: true sweeper: @@ -53,7 +50,7 @@ sweeper: - region: "us-west1" dependencies: - "google_compute_region_target_https_proxy" - - "google_network_security_backend_authentication_config" + # - "google_network_security_backend_authentication_config" (beta only) - "google_network_services_gateway" - "google_compute_target_https_proxy" - "google_compute_network" @@ -177,7 +174,6 @@ properties: Leaf certificate comes first, followed by intermediate ones if any. immutable: true sensitive: true - is_missing_in_cai: true exactly_one_of: - 'self_managed.0.certificate_pem' - 'self_managed.0.pem_certificate' @@ -188,7 +184,6 @@ properties: The private key of the leaf certificate in PEM-encoded form. 
immutable: true sensitive: true - is_missing_in_cai: true exactly_one_of: - 'self_managed.0.private_key_pem' - 'self_managed.0.pem_private_key' @@ -209,7 +204,6 @@ properties: The private key of the leaf certificate in PEM-encoded form. immutable: true sensitive: true - is_missing_in_cai: true exactly_one_of: - 'self_managed.0.private_key_pem' - 'self_managed.0.pem_private_key' diff --git a/mmv1/products/certificatemanager/CertificateMapEntry.yaml b/mmv1/products/certificatemanager/CertificateMapEntry.yaml index c7b58e3b37ba..a8b23152a365 100644 --- a/mmv1/products/certificatemanager/CertificateMapEntry.yaml +++ b/mmv1/products/certificatemanager/CertificateMapEntry.yaml @@ -59,7 +59,7 @@ parameters: - name: 'map' type: ResourceRef description: | - A map entry that is inputted into the certificate map + A map entry that is inputted into the cetrificate map url_param_only: true required: true immutable: true diff --git a/mmv1/products/cloudasset/FolderFeed.yaml b/mmv1/products/cloudasset/FolderFeed.yaml index 36a8de774956..f1c41eaba949 100644 --- a/mmv1/products/cloudasset/FolderFeed.yaml +++ b/mmv1/products/cloudasset/FolderFeed.yaml @@ -38,12 +38,7 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' - tgc_decoder: 'templates/tgc_next/decoders/cloud_asset_feed.go.tmpl' supports_indirect_user_project_override: true -include_in_tgc_next_DO_NOT_USE: true -cai_base_url: 'folders/{{folder}}/feeds' -cai_identity: 'feed_id' -tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_folder_feed' primary_resource_id: 'folder_feed' diff --git a/mmv1/products/cloudasset/OrganizationFeed.yaml b/mmv1/products/cloudasset/OrganizationFeed.yaml index 0c2082471491..f43276faa1b0 100644 --- a/mmv1/products/cloudasset/OrganizationFeed.yaml +++ b/mmv1/products/cloudasset/OrganizationFeed.yaml @@ 
-38,12 +38,7 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' - tgc_decoder: 'templates/tgc_next/decoders/cloud_asset_feed.go.tmpl' supports_indirect_user_project_override: true -include_in_tgc_next_DO_NOT_USE: true -cai_base_url: 'organizations/{{org_id}}/feeds' -cai_identity: 'feed_id' -tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_organization_feed' primary_resource_id: 'organization_feed' diff --git a/mmv1/products/cloudasset/ProjectFeed.yaml b/mmv1/products/cloudasset/ProjectFeed.yaml index 4132e44ff685..525bbca61a56 100644 --- a/mmv1/products/cloudasset/ProjectFeed.yaml +++ b/mmv1/products/cloudasset/ProjectFeed.yaml @@ -38,10 +38,6 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' -include_in_tgc_next_DO_NOT_USE: true -cai_base_url: 'projects/{{project}}/feeds' -cai_identity: 'feed_id' -tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_project_feed' primary_resource_id: 'project_feed' diff --git a/mmv1/products/cloudbuild/Trigger.yaml b/mmv1/products/cloudbuild/Trigger.yaml index b85d87dd3436..db31f044d3f6 100644 --- a/mmv1/products/cloudbuild/Trigger.yaml +++ b/mmv1/products/cloudbuild/Trigger.yaml @@ -30,7 +30,7 @@ id_format: 'projects/{{project}}/locations/{{location}}/triggers/{{trigger_id}}' base_url: 'projects/{{project}}/locations/{{location}}/triggers' self_link: 'projects/{{project}}/locations/{{location}}/triggers/{{trigger_id}}' update_verb: 'PATCH' -# import by default only works with old-style self links ending in a name + # import by default only works with old-style self links ending in a name import_format: - 
'projects/{{project}}/triggers/{{trigger_id}}' - 'projects/{{project}}/locations/{{location}}/triggers/{{trigger_id}}' @@ -82,8 +82,6 @@ examples: cloudbuild_trigger_name: 'manual-trigger' - name: 'cloudbuild_trigger_manual_github_enterprise' primary_resource_id: 'manual-ghe-trigger' - vars: - cloudbuild_trigger_name: 'my-trigger' exclude_test: true - name: 'cloudbuild_trigger_manual_bitbucket_server' primary_resource_id: 'manual-bitbucket-trigger' @@ -134,12 +132,6 @@ examples: 'installation_id': '31300675' 'pat_secret': '"projects/gcb-terraform-creds/secrets/github-pat/versions/latest"' 'repo_uri': '"https://github.com/gcb-repos-robot/tf-demo.git"' - - name: 'cloudbuild_trigger_developer_connect_pull' - primary_resource_id: 'developer-connect-trigger-pull' - - name: 'cloudbuild_trigger_developer_connect_push' - primary_resource_id: 'developer-connect-trigger-push' - - name: 'cloudbuild_trigger_developer_connect_push_branch' - primary_resource_id: 'dc-trigger-regular-push-branch' parameters: - name: 'location' type: String @@ -280,7 +272,6 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'repository' type: String @@ -364,7 +355,6 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'uri' type: String @@ -449,7 +439,6 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'projectId' type: String @@ -515,7 +504,6 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'owner' type: String @@ -595,7 +583,6 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'repoSlug' type: String @@ -680,7 +667,6 @@ properties: - 'webhook_config' - 
'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'subscription' type: String @@ -717,7 +703,6 @@ properties: - 'webhook_config' - 'source_to_build' - 'repository_event_config' - - 'developer_connect_event_config' properties: - name: 'secret' type: String @@ -1348,70 +1333,3 @@ properties: Paths must be absolute and cannot conflict with other volume paths on the same build step or with certain reserved volume paths. - - name: 'developerConnectEventConfig' - type: NestedObject - description: | - Configuration for triggers that respond to Developer Connect events. - exactly_one_of: - - 'pullRequest' - - 'push' - properties: - - name: 'gitRepositoryLink' - type: String - description: | - The Developer Connect Git repository link, formatted as `projects/*/locations/*/connections/*/gitRepositoryLink/*`. - required: true - - name: 'gitRepositoryLinkType' - type: Enum - description: | - The type of DeveloperConnect GitRepositoryLink. - output: true - enum_values: - - 'GIT_REPOSITORY_LINK_TYPE_UNSPECIFIED' - - 'GITHUB' - - 'GITHUB_ENTERPRISE' - - 'GITLAB' - - 'GITLAB_ENTERPRISE' - - 'BITBUCKET_DATA_CENTER' - - 'BITBUCKET_CLOUD' - - name: 'pullRequest' - type: NestedObject - description: | - Filter to match changes in pull requests. - properties: - - name: 'branch' - type: String - description: | - Regex of branches to match. - - name: 'commentControl' - type: Enum - description: | - Configure builds to run whether a repository owner or collaborator need to comment `/gcbrun`. - enum_values: - - 'COMMENTS_DISABLED' - - 'COMMENTS_ENABLED' - - 'COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY' - - name: 'invertRegex' - type: Boolean - description: | - If true, branches that do NOT match the git_ref will trigger a build. - - name: 'push' - type: NestedObject - description: | - Filter to match changes in refs like branches and tags. 
- exactly_one_of: - - 'branch' - - 'tag' - properties: - - name: 'branch' - type: String - description: | - Regex of branches to match. - - name: 'tag' - type: String - description: | - Regex of tags to match. - - name: 'invertRegex' - type: Boolean - description: | - If true, only trigger a build if the revision regex does NOT match the git_ref regex. diff --git a/mmv1/products/clouddeploy/DeployPolicy.yaml b/mmv1/products/clouddeploy/DeployPolicy.yaml index 56dc1bf0633a..5668011e78d3 100644 --- a/mmv1/products/clouddeploy/DeployPolicy.yaml +++ b/mmv1/products/clouddeploy/DeployPolicy.yaml @@ -72,7 +72,7 @@ properties: output: true - name: "description" type: String - description: "Description of the `DeployPolicy`. Max length is 255 characters." + description: "Optional. Description of the `DeployPolicy`. Max length is 255 characters." - name: "createTime" type: String description: "Output only. Time at which the DeployPolicy was created." @@ -83,21 +83,21 @@ properties: output: true - name: "annotations" type: KeyValueAnnotations - description: "User annotations. These attributes can only be set and used by the user, and not by Cloud Deploy. Annotations must meet the following constraints: * Annotations are key/value pairs. * Valid annotation keys have two segments: an optional prefix and name, separated by a slash (`/`). * The name segment is required and must be 63 characters or less, beginning and ending with an alphanumeric character (`[a-z0-9A-Z]`) with dashes (`-`), underscores (`_`), dots (`.`), and alphanumerics between. * The prefix is optional. If specified, the prefix must be a DNS subdomain: a series of DNS labels separated by dots(`.`), not longer than 253 characters in total, followed by a slash (`/`). See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set for more details." + description: "Optional. User annotations. These attributes can only be set and used by the user, and not by Cloud Deploy. 
Annotations must meet the following constraints: * Annotations are key/value pairs. * Valid annotation keys have two segments: an optional prefix and name, separated by a slash (`/`). * The name segment is required and must be 63 characters or less, beginning and ending with an alphanumeric character (`[a-z0-9A-Z]`) with dashes (`-`), underscores (`_`), dots (`.`), and alphanumerics between. * The prefix is optional. If specified, the prefix must be a DNS subdomain: a series of DNS labels separated by dots(`.`), not longer than 253 characters in total, followed by a slash (`/`). See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/#syntax-and-character-set for more details." - name: "labels" type: KeyValueLabels - description: "Labels are attributes that can be set and used by both the user and by Cloud Deploy. Labels must meet the following constraints: * Keys and values can contain only lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8 encoding, and international characters are allowed. * Keys must start with a lowercase letter or international character. * Each resource is limited to a maximum of 64 labels. Both keys and values are additionally constrained to be <= 63 characters." + description: "Optional. Labels are attributes that can be set and used by both the user and by Cloud Deploy. Labels must meet the following constraints: * Keys and values can contain only lowercase letters, numeric characters, underscores, and dashes. * All characters must use UTF-8 encoding, and international characters are allowed. * Keys must start with a lowercase letter or international character. * Each resource is limited to a maximum of 64 labels. Both keys and values are additionally constrained to be <= 63 characters." - name: "etag" type: String - description: "The weak etag of the `DeployPolicy` resource. 
This checksum is computed by the server based on the value of other fields, and may be sent on update and delete requests to ensure the client has an up-to-date value before proceeding." + description: "Optional. The weak etag of the `DeployPolicy` resource. This checksum is computed by the server based on the value of other fields, and may be sent on update and delete requests to ensure the client has an up-to-date value before proceeding." output: true - name: "suspended" type: Boolean - description: "When suspended, the policy will not prevent actions from occurring, even if the action violates the policy." + description: "Optional. When suspended, the policy will not prevent actions from occurring, even if the action violates the policy." send_empty_value: true - name: "selectors" type: Array - description: "Selected resources to which the policy will be applied. At least one selector is required. If one selector matches the resource the policy applies. For example, if there are two selectors and the action being attempted matches one of them, the policy will apply to that action." + description: "Required. Selected resources to which the policy will be applied. At least one selector is required. If one selector matches the resource the policy applies. For example, if there are two selectors and the action being attempted matches one of them, the policy will apply to that action." required: true item_type: type: NestedObject @@ -120,7 +120,7 @@ properties: - name: "id" type: String description: |- - ID of the DeliveryPipeline. The value of this field could be one of the following: + Optional. ID of the DeliveryPipeline. The value of this field could be one of the following: - The last segment of a pipeline name - "*", all delivery pipelines in a location - name: "labels" @@ -129,22 +129,22 @@ properties: default_from_api: true - name: "rules" type: Array - description: "Rules to apply. At least one rule must be present." + description: "Required. 
Rules to apply. At least one rule must be present." required: true item_type: type: NestedObject properties: - name: "rolloutRestriction" type: NestedObject - description: "Rollout restrictions." + description: "Optional. Rollout restrictions." properties: - name: "id" type: String - description: "ID of the rule. This id must be unique in the `DeployPolicy` resource to which this rule belongs. The format is `a-z{0,62}`." + description: "Required. ID of the rule. This id must be unique in the `DeployPolicy` resource to which this rule belongs. The format is `a-z{0,62}`." required: true - name: "invokers" type: Array - description: "What invoked the action. If left empty, all invoker types will be restricted." + description: "Optional. What invoked the action. If left empty, all invoker types will be restricted." item_type: type: Enum enum_values: @@ -152,7 +152,7 @@ properties: - "DEPLOY_AUTOMATION" - name: "actions" type: Array - description: "Rollout actions to be restricted as part of the policy. If left empty, all actions will be restricted." + description: "Optional. Rollout actions to be restricted as part of the policy. If left empty, all actions will be restricted." item_type: type: Enum enum_values: @@ -166,21 +166,21 @@ properties: - "TERMINATE_JOBRUN" - name: "timeWindows" type: NestedObject - description: "Time window within which actions are restricted." + description: "Required. Time window within which actions are restricted." properties: - name: "timeZone" type: String - description: "The time zone in IANA format IANA Time Zone Database (e.g. America/New_York)." + description: "Required. The time zone in IANA format IANA Time Zone Database (e.g. America/New_York)." required: true - name: oneTimeWindows type: Array - description: "One-time windows within which actions are restricted." + description: "Optional. One-time windows within which actions are restricted." 
item_type: type: NestedObject properties: - name: "startDate" type: NestedObject - description: "Start date." + description: "Required. Start date." required: true properties: - name: "year" @@ -194,7 +194,7 @@ properties: description: "Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 to specify a year by itself or a year and month where the day isn't significant." - name: "endDate" type: NestedObject - description: "End date." + description: "Required. End date." required: true properties: - name: "year" @@ -208,9 +208,7 @@ properties: description: "Day of a month. Must be from 1 to 31 and valid for the year and month." - name: "startTime" type: NestedObject - send_empty_value: true - allow_empty_object: true - description: "Start time (inclusive). Use 00:00 for the beginning of the day." + description: "Required. Start time (inclusive). Use 00:00 for the beginning of the day." required: true properties: - name: "hours" @@ -227,7 +225,7 @@ properties: description: "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999." - name: "endTime" type: NestedObject - description: "End time (exclusive). You may use 24:00 for the end of the day." + description: "Required. End time (exclusive). You may use 24:00 for the end of the day." required: true properties: - name: "hours" @@ -244,13 +242,13 @@ properties: description: "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999." - name: weeklyWindows type: Array - description: "Recurring weekly windows within which actions are restricted." + description: "Optional. Recurring weekly windows within which actions are restricted." item_type: type: NestedObject properties: - name: "daysOfWeek" type: Array - description: "Days of week. If left empty, all days of the week will be included." + description: "Optional. Days of week. If left empty, all days of the week will be included." 
item_type: type: Enum enum_values: @@ -263,9 +261,7 @@ properties: - "SUNDAY" - name: "startTime" type: NestedObject - description: "Start time (inclusive). Use 00:00 for the beginning of the day. If you specify startTime you must also specify endTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." - send_empty_value: true - allow_empty_object: true + description: "Optional. Start time (inclusive). Use 00:00 for the beginning of the day. If you specify startTime you must also specify endTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." properties: - name: "hours" type: Integer @@ -281,7 +277,7 @@ properties: description: "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999." - name: "endTime" type: NestedObject - description: "End time (exclusive). Use 24:00 to indicate midnight. If you specify endTime you must also specify startTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." + description: "Optional. End time (exclusive). Use 24:00 to indicate midnight. If you specify endTime you must also specify startTime. If left empty, this will block for the entire day for the days specified in daysOfWeek." 
properties: - name: "hours" type: Integer diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index b7381c6cfda7..08a51465040d 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -32,7 +32,6 @@ timeouts: insert_minutes: 60 update_minutes: 60 delete_minutes: 60 -include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: actions: ['create', 'delete', 'update'] @@ -265,7 +264,7 @@ examples: project: 'my-project-name' # this example file will cause IAM conflicts between tests if used to make a test exclude_test: true - - name: 'cloudfunctions2_automatic_base_image_update' + - name: 'cloudfunctions2_abiu' primary_resource_id: 'function' min_version: 'beta' vars: @@ -284,7 +283,7 @@ examples: ignore_read_extra: - 'build_config.0.source.0.storage_source.0.object' - 'build_config.0.source.0.storage_source.0.bucket' - - name: 'cloudfunctions2_on_deploy_base_image_update' + - name: 'cloudfunctions2_abiu_on_deploy' primary_resource_id: 'function' min_version: 'beta' vars: @@ -503,7 +502,7 @@ properties: type: String description: | Name of the service associated with a Function. - output: true + default_from_api: true - name: 'timeoutSeconds' type: Integer description: | @@ -678,7 +677,6 @@ properties: default_from_api: true - name: 'eventType' type: String - required: true description: 'Required. The type of event to observe.' 
- name: 'eventFilters' type: Array diff --git a/mmv1/products/cloudidentity/GroupMembership.yaml b/mmv1/products/cloudidentity/GroupMembership.yaml index 1790f8d8869a..9b0376a33642 100644 --- a/mmv1/products/cloudidentity/GroupMembership.yaml +++ b/mmv1/products/cloudidentity/GroupMembership.yaml @@ -37,7 +37,6 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: - pre_create: templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_update: 'templates/terraform/custom_update/cloud_identity_group_membership.go.tmpl' post_import: 'templates/terraform/post_import/cloud_identity_group_membership.go.tmpl' @@ -63,12 +62,6 @@ examples: identity_user: 'IDENTITY_USER' # Has a handwritten test due to CloudIdentityGroup-related tests needing to run synchronously exclude_test: true -virtual_fields: - - name: 'create_ignore_already_exists' - description: | - If set to true, skip group member creation if a membership with the same name already exists. Defaults to false. - type: Boolean - default_value: false parameters: - name: 'group' type: ResourceRef diff --git a/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml b/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml index 362472b02523..481fefd92bce 100644 --- a/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml +++ b/mmv1/products/cloudquotas/QuotaAdjusterSettings.yaml @@ -67,25 +67,15 @@ properties: type: String description: | The resource container that determines if the quota adjuster is set for this project. + Expect this field to be empty currently. output: true - name: 'effectiveEnablement' type: Enum description: | Based on the effective container`s setting above, determines Whether this resource container has the quota adjuster enabled. + Expect this field to be empty currently. 
output: true enum_values: - 'DEFAULT' - 'ENABLED' - 'DISABLED' - - name: 'inherited' - type: Boolean - description: | - Indicates whether the setting is inherited or explicitly specified. - output: true - - name: 'inheritedFrom' - type: String - description: | - The resource container from which the setting is inherited. This refers to the nearest ancestor with enablement set (either ENABLED or DISABLED). - The value can be `organizations/{organization_id}`, `folders/{folder_id}`, or can be `default` if no ancestor exists with enablement set. - The value will be empty when `enablement` is specified on this resource container. - output: true diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index 6e9415daed92..e54ab3db759e 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -29,9 +29,6 @@ base_url: 'apis/serving.knative.dev/v1/namespaces/{{project}}/services' cai_base_url: 'projects/{{project}}/locations/{{location}}/services' import_format: - 'locations/{{location}}/namespaces/{{project}}/services/{{name}}' -datasource: - generate: true - exclude_test: true timeouts: insert_minutes: 20 update_minutes: 20 @@ -1109,6 +1106,3 @@ properties: for the Service. For example, `"run.googleapis.com/ingress" = "all"`. - `run.googleapis.com/launch-stage` sets the [launch stage](https://cloud.google.com/run/docs/troubleshooting#launch-stage-validation) when a preview feature is used. For example, `"run.googleapis.com/launch-stage": "BETA"` - - `run.googleapis.com/minScale` sets the [minimum number of container instances](https://cloud.google.com/sdk/gcloud/reference/run/deploy#--min) of the Service. - - `run.googleapis.com/scalingMode` sets the type of scaling mode for the service. The supported values for scaling mode are "manual" and "automatic". If not provided, it defaults to "automatic". 
- - `run.googleapis.com/manualInstanceCount` sets the total instance count for the service in manual scaling mode. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving. diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index 4e4b200d4cfe..035c6a12c4b5 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -118,20 +118,6 @@ examples: cloud_run_job_name: 'cloudrun-job' ignore_read_extra: - 'deletion_protection' - - name: 'cloudrunv2_job_multicontainer' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-job%s", context["random_suffix"])' - vars: - cloud_run_job_name: 'cloudrun-job' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_job_gpu' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-job%s", context["random_suffix"])' - vars: - cloud_run_job_name: 'cloudrun-job' - ignore_read_extra: - - 'deletion_protection' virtual_fields: - name: 'deletion_protection' description: | @@ -404,7 +390,7 @@ properties: - name: 'limits' type: KeyValuePairs description: |- - Only memory, CPU, and nvidia.com/gpu are supported. Use key `cpu` for CPU limit, `memory` for memory limit, `nvidia.com/gpu` for gpu limit. Note: The only supported values for CPU are '1', '2', '4', and '8'. Setting 4 CPU requires at least 2Gi of memory. The values of the map is string form of the 'quantity' k8s type: https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go + Only memory and CPU are supported. Use key `cpu` for CPU limit and `memory` for memory limit. Note: The only supported values for CPU are '1', '2', '4', and '8'. Setting 4 CPU requires at least 2Gi of memory. 
The values of the map is string form of the 'quantity' k8s type: https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go default_from_api: true - name: 'ports' type: Array @@ -444,114 +430,6 @@ properties: type: String description: |- Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. - - name: 'dependsOn' - type: Array - description: |- - Names of the containers that must start before this container. - item_type: - type: String - - name: 'startupProbe' - type: NestedObject - description: |- - Startup probe of application within the container. - All other probes are disabled if a startup probe is provided, until it - succeeds. Container will not be added to service endpoints if the probe fails. - default_from_api: true - properties: - - name: 'initialDelaySeconds' - type: Integer - description: |- - Number of seconds after the container has started before the probe is - initiated. - Defaults to 0 seconds. Minimum value is 0. Maximum value is 240. - default_value: 0 - - name: 'timeoutSeconds' - type: Integer - description: |- - Number of seconds after which the probe times out. - Defaults to 1 second. Minimum value is 1. Maximum value is 3600. - Must be smaller than periodSeconds. - default_value: 1 - - name: 'periodSeconds' - type: Integer - description: |- - How often (in seconds) to perform the probe. - Default to 10 seconds. Minimum value is 1. Maximum value is 240. - default_value: 10 - - name: 'failureThreshold' - type: Integer - description: |- - Minimum consecutive failures for the probe to be considered failed after - having succeeded. Defaults to 3. Minimum value is 1. - default_value: 3 - - name: 'tcpSocket' - type: NestedObject - description: |- - TcpSocket specifies an action involving a TCP port. 
- send_empty_value: true - allow_empty_object: true - properties: - - name: 'port' - type: Integer - description: |- - Port number to access on the container. Number must be in the range 1 to 65535. - If not specified, defaults to the same value as container.ports[0].containerPort. - default_from_api: true - - name: 'httpGet' - type: NestedObject - description: |- - HttpGet specifies the http request to perform. - send_empty_value: true - allow_empty_object: true - properties: - - name: 'path' - type: String - description: |- - Path to access on the HTTP server. If set, it should not be empty string. - default_value: "/" - - name: 'port' - type: Integer - description: |- - Port number to access on the container. Number must be in the range 1 to 65535. - If not specified, defaults to the same value as container.ports[0].containerPort. - default_from_api: true - - name: 'httpHeaders' - type: Array - description: |- - Custom headers to set in the request. HTTP allows repeated headers. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: |- - The header field name. - required: true - - name: 'value' - type: String - description: |- - The header field value. - send_empty_value: true - default_value: "" - - name: 'grpc' - type: NestedObject - description: |- - GRPC specifies an action involving a GRPC port. - send_empty_value: true - allow_empty_object: true - properties: - - name: 'port' - type: Integer - description: |- - Port number to access on the container. Number must be in the range 1 to 65535. - If not specified, defaults to the same value as container.ports[0].containerPort. - default_from_api: true - - name: 'service' - type: String - description: |- - The name of the service to place in the gRPC HealthCheckRequest - (see https://github.com/grpc/grpc/blob/master/doc/health-checking.md). - If this is not specified, the default behavior is defined by gRPC. 
- name: 'volumes' type: Array description: |- @@ -772,18 +650,6 @@ properties: Number of retries allowed per Task, before marking this Task failed. Defaults to 3. Minimum value is 0. send_empty_value: true default_value: 3 - - name: 'nodeSelector' - type: NestedObject - description: Node Selector describes the hardware requirements of the resources. - properties: - - name: 'accelerator' - type: String - description: - The GPU to attach to an instance. See https://cloud.google.com/run/docs/configuring/jobs/gpu for configuring GPU. - required: true - - name: 'gpuZonalRedundancyDisabled' - type: Boolean - description: True if GPU zonal redundancy is disabled on this execution. - name: 'observedGeneration' type: String description: | diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 9482b22054af..5f62e9c8b3db 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -340,21 +340,6 @@ properties: type: Integer description: | Minimum number of instances for the service, to be divided among all revisions receiving traffic. - - name: 'maxInstanceCount' - type: Integer - description: | - Combined maximum number of instances for all revisions receiving traffic. - - name: 'scalingMode' - type: Enum - description: | - The [scaling mode](https://cloud.google.com/run/docs/reference/rest/v2/projects.locations.services#scalingmode) for the service. - enum_values: - - 'AUTOMATIC' - - 'MANUAL' - - name: 'manualInstanceCount' - type: Integer - description: | - Total instance count for the service in manual scaling mode. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving. - name: 'defaultUriDisabled' type: Boolean description: |- @@ -1221,22 +1206,6 @@ properties: type: String description: |- Service account to be used for building the container. 
The format of this field is `projects/{projectId}/serviceAccounts/{serviceAccountEmail}`. - - name: 'multiRegionSettings' - type: NestedObject - description: |- - Settings for creating a Multi-Region Service. Make sure to use region = 'global' when using them. For more information, visit https://cloud.google.com/run/docs/multiple-regions#deploy - properties: - - name: 'regions' - type: Array - item_type: - type: String - description: |- - The list of regions to deploy the multi-region Service. - - name: 'multiRegionId' - type: String - description: |- - System-generated unique id for the multi-region Service. - output: true - name: 'reconciling' type: Boolean description: | diff --git a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml deleted file mode 100644 index 4f6958c4aa1e..000000000000 --- a/mmv1/products/cloudrunv2/WorkerPool.yaml +++ /dev/null @@ -1,813 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WorkerPool' -description: | - WorkerPool acts as a top-level container that manages a set of configurations and revision templates which implement a pull-based workload. WorkerPool exists to provide a singular abstraction which can be access controlled, reasoned about, and which encapsulates software lifecycle decisions such as rollout policy and team resource ownership. 
-references: - guides: - 'Official Documentation': 'https://cloud.google.com/run/docs/' - api: 'https://cloud.google.com/run/docs/reference/rest/v2/projects.locations.workerPools' -docs: -id_format: 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' -base_url: 'projects/{{project}}/locations/{{location}}/workerPools' -self_link: 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' -create_url: 'projects/{{project}}/locations/{{location}}/workerPools?workerPoolId={{name}}' -update_verb: 'PATCH' -import_format: - - 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -autogen_async: true -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -iam_policy: - method_name_separator: ':' - parent_resource_attribute: 'name' - base_url: 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' - example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' - import_format: - - 'projects/{{project}}/locations/{{location}}/workerPools/{{name}}' - - '{{name}}' -custom_code: - pre_delete: 'templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl' -taint_resource_on_failed_create: true -examples: - - name: 'cloudrunv2_worker_pool_basic' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_limits' - primary_resource_id: 'default' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_sql' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' - vars: - 
cloud_run_worker_pool_name: 'cloudrun-worker-pool' - secret_id: 'secret-1' - cloud_run_sql_name: 'cloudrun-sql' - deletion_protection: 'true' - test_vars_overrides: - 'deletion_protection': 'false' - oics_vars_overrides: - 'deletion_protection': 'false' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_directvpc' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_gpu' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_secret' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-wp%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - secret_id: 'secret-1' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_multicontainer' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' - - name: 'cloudrunv2_worker_pool_mount_gcs' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool-%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' - # Currently failing - skip_vcr: true - - name: 'cloudrunv2_worker_pool_mount_nfs' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool-%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - 
ignore_read_extra: - - 'deletion_protection' - # Currently failing - skip_vcr: true - - name: 'cloudrunv2_worker_pool_custom_audiences' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-cloudrun-worker-pool-%s", context["random_suffix"])' - vars: - cloud_run_worker_pool_name: 'cloudrun-worker-pool' - ignore_read_extra: - - 'deletion_protection' -virtual_fields: - - name: 'deletion_protection' - description: | - Whether Terraform will be prevented from destroying the service. Defaults to true. - When a`terraform destroy` or `terraform apply` would delete the service, - the command will fail if this field is not set to false in Terraform state. - When the field is set to true or unset in Terraform state, a `terraform apply` - or `terraform destroy` that would delete the WorkerPool will fail. - When the field is set to false, deleting the WorkerPool is allowed. - type: Boolean - default_value: true -parameters: - - name: 'location' - type: String - description: The location of the cloud run worker pool - url_param_only: true - required: true - immutable: true -properties: - - name: 'name' - type: String - description: | - Name of the WorkerPool. - url_param_only: true - required: true - immutable: true - diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - custom_expand: 'templates/terraform/custom_expand/resource_from_self_link.go.tmpl' - - name: 'description' - type: String - description: | - User-provided description of the WorkerPool. This field currently has a 512-character limit. - - name: 'uid' - type: String - description: | - Server assigned unique identifier for the trigger. The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted. - output: true - - name: 'generation' - type: String - description: | - A number that monotonically increases every time the user modifies the desired state. 
Please note that unlike v1, this is an int64 value. As with most Google APIs, its JSON representation will be a string instead of an integer. - output: true - - name: 'labels' - type: KeyValueLabels - description: |- - Unstructured key value map that can be used to organize and categorize objects. User-provided labels are shared with Google's billing system, so they can be used to filter, or break down billing charges by team, component, - environment, state, etc. For more information, visit https://cloud.google.com/resource-manager/docs/creating-managing-labels or https://cloud.google.com/run/docs/configuring/labels. - - Cloud Run API v2 does not support labels with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected. - All system labels in v1 now have a corresponding field in v2 WorkerPool. - - name: 'annotations' - type: KeyValueAnnotations - description: |- - Unstructured key value map that may be set by external tools to store and arbitrary metadata. They are not queryable and should be preserved when modifying objects. - - Cloud Run API v2 does not support annotations with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected in new resources. - All system annotations in v1 now have a corresponding field in v2 WorkerPool. - - This field follows Kubernetes annotations' namespacing, limits, and rules. - - name: 'createTime' - type: Time - description: |- - The creation time. - output: true - - name: 'updateTime' - type: Time - description: |- - The last-modified time. - output: true - - name: 'deleteTime' - type: Time - description: |- - The deletion time. - output: true - - name: 'expireTime' - type: Time - description: |- - For a deleted resource, the time after which it will be permanently deleted. 
- output: true - - name: 'creator' - type: String - description: |- - Email address of the authenticated creator. - output: true - - name: 'lastModifier' - type: String - description: |- - Email address of the last authenticated modifier. - output: true - - name: 'client' - type: String - description: | - Arbitrary identifier for the API client. - - name: 'clientVersion' - type: String - description: | - Arbitrary version identifier for the API client. - - name: 'launchStage' - type: Enum - description: | - The launch stage as defined by [Google Cloud Platform Launch Stages](https://cloud.google.com/products#product-launch-stages). Cloud Run supports ALPHA, BETA, and GA. - If no value is specified, GA is assumed. Set the launch stage to a preview stage on input to allow use of preview features in that stage. On read (or output), describes whether the resource uses preview features. - - For example, if ALPHA is provided as input, but only BETA and GA-level features are used, this field will be BETA on output. - default_from_api: true - enum_values: - - 'UNIMPLEMENTED' - - 'PRELAUNCH' - - 'EARLY_ACCESS' - - 'ALPHA' - - 'BETA' - - 'GA' - - 'DEPRECATED' - - name: 'binaryAuthorization' - type: NestedObject - description: | - Settings for the Binary Authorization feature. - properties: - - name: 'breakglassJustification' - type: String - description: | - If present, indicates to use Breakglass using this justification. If useDefault is False, then it must be empty. For more information on breakglass, see https://cloud.google.com/binary-authorization/docs/using-breakglass - - name: 'useDefault' - type: Boolean - description: | - If True, indicates to use the default project's binary authorization policy. If False, binary authorization will be disabled. - conflicts: - - policy - - name: 'policy' - type: String - description: | - The path to a binary authorization policy. 
Format: projects/{project}/platforms/cloudRun/{policy-name} - conflicts: - - use_default - - name: 'customAudiences' - type: Array - description: | - One or more custom audiences that you want this worker pool to support. Specify each custom audience as the full URL in a string. The custom audiences are encoded in the token and used to authenticate requests. - For more information, see https://cloud.google.com/run/docs/configuring/custom-audiences. - item_type: - type: String - - name: 'scaling' - type: NestedObject - description: | - Scaling settings that apply to the worker pool. - default_from_api: true - properties: - - name: 'scalingMode' - type: Enum - description: | - The scaling mode for the worker pool. It defaults to MANUAL. - enum_values: - - 'AUTOMATIC' - - 'MANUAL' - - name: 'minInstanceCount' - type: Integer - description: | - The minimum count of instances distributed among revisions based on the specified instance split percentages. - - name: 'maxInstanceCount' - type: Integer - description: | - The maximum count of instances distributed among revisions based on the specified instance split percentages. - - name: 'manualInstanceCount' - type: Integer - description: | - The total number of instances in manual scaling mode. - send_empty_value: true - - name: 'template' - type: NestedObject - description: | - The template used to create revisions for this WorkerPool. - required: true - properties: - - name: 'revision' - type: String - description: |- - The unique name for the revision. If this field is omitted, it will be automatically generated based on the WorkerPool name. - - name: 'labels' - type: KeyValuePairs - description: |- - Unstructured key value map that can be used to organize and categorize objects. User-provided labels are shared with Google's billing system, so they can be used to filter, or break down billing charges by team, component, environment, state, etc. 
- For more information, visit https://cloud.google.com/resource-manager/docs/creating-managing-labels or https://cloud.google.com/run/docs/configuring/labels. - - Cloud Run API v2 does not support labels with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected. - All system labels in v1 now have a corresponding field in v2 WorkerPoolRevisionTemplate. - - name: 'annotations' - type: KeyValuePairs - description: |- - Unstructured key value map that may be set by external tools to store and arbitrary metadata. They are not queryable and should be preserved when modifying objects. - - Cloud Run API v2 does not support annotations with `run.googleapis.com`, `cloud.googleapis.com`, `serving.knative.dev`, or `autoscaling.knative.dev` namespaces, and they will be rejected. - All system annotations in v1 now have a corresponding field in v2 WorkerPoolRevisionTemplate. - - This field follows Kubernetes annotations' namespacing, limits, and rules. - - name: 'vpcAccess' - type: NestedObject - description: |- - VPC Access configuration to use for this Revision. For more information, visit https://cloud.google.com/run/docs/configuring/connecting-vpc. - properties: - - name: 'egress' - type: Enum - description: |- - Traffic VPC egress settings. - default_from_api: true - enum_values: - - 'ALL_TRAFFIC' - - 'PRIVATE_RANGES_ONLY' - - name: 'networkInterfaces' - type: Array - description: |- - Direct VPC egress settings. Currently only single network interface is supported. - item_type: - type: NestedObject - properties: - - name: 'network' - type: String - description: |- - The VPC network that the Cloud Run resource will be able to send traffic to. At least one of network or subnetwork must be specified. If both - network and subnetwork are specified, the given VPC subnetwork must belong to the given VPC network. If network is not specified, it will be - looked up from the subnetwork. 
- default_from_api: true - - name: 'subnetwork' - type: String - description: |- - The VPC subnetwork that the Cloud Run resource will get IPs from. At least one of network or subnetwork must be specified. If both - network and subnetwork are specified, the given VPC subnetwork must belong to the given VPC network. If subnetwork is not specified, the - subnetwork with the same name with the network will be used. - default_from_api: true - - name: 'tags' - type: Array - description: |- - Network tags applied to this Cloud Run WorkerPool. - item_type: - type: String - - name: 'serviceAccount' - type: String - description: |- - Email address of the IAM service account associated with the revision of the WorkerPool. The service account represents the identity of the running revision, and determines what permissions the revision has. If not provided, the revision will use the project's default service account. - default_from_api: true - - name: 'containers' - type: Array - description: |- - Holds the containers that define the unit of execution for this WorkerPool. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: |- - Name of the container specified as a DNS_LABEL. - - name: 'image' - type: String - description: |- - URL of the Container image in Google Container Registry or Google Artifact Registry. More info: https://kubernetes.io/docs/concepts/containers/images - required: true - - name: 'command' - type: Array - description: |- - Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment. If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not. 
More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell - item_type: - type: String - - name: 'args' - type: Array - description: |- - Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable references are not supported in Cloud Run. - item_type: - type: String - - name: 'env' - type: Array - description: |- - List of environment variables to set in the container. - is_set: true - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: |- - Name of the environment variable. Must be a C_IDENTIFIER, and may not exceed 32768 characters. - required: true - - name: 'value' - type: String - # env is a set. - # The env.value has value "" in Terraform state, but it has value nil in Terraform plan, - # which causes the diffs for unchanged env. default_value: "" is to suppress the diffs. - default_value: "" - description: |- - Literal value of the environment variable. Defaults to "" and the maximum allowed length is 32768 characters. Variable references are not supported in Cloud Run. - # exactly_one_of: - # - template.0.containers.0.env.0.value - # - template.0.containers.0.env.0.valueSource - - name: 'valueSource' - type: NestedObject - description: |- - Source for the environment variable's value. - # exactly_one_of: - # - template.0.containers.0.env.0.value - # - template.0.containers.0.env.0.valueSource - properties: - - name: 'secretKeyRef' - type: NestedObject - description: |- - Selects a secret and a specific version from Cloud Secret Manager. - properties: - - name: 'secret' - type: String - description: |- - The name of the secret in Cloud Secret Manager. Format: {secretName} if the secret is in the same project. projects/{project}/secrets/{secretName} if the secret is in a different project. - required: true - - name: 'version' - type: String - description: |- - The Cloud Secret Manager secret version. 
Can be 'latest' for the latest value or an integer for a specific version. - - name: 'resources' - type: NestedObject - description: |- - Compute Resource requirements by this container. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources - default_from_api: true - properties: - - name: 'limits' - type: KeyValuePairs - description: |- - Only memory, CPU, and nvidia.com/gpu are supported. Use key `cpu` for CPU limit, `memory` for memory limit, `nvidia.com/gpu` for gpu limit. Note: The only supported values for CPU are '1', '2', '4', and '8'. Setting 4 CPU requires at least 2Gi of memory. The values of the map is string form of the 'quantity' k8s type: https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go - default_from_api: true - - name: 'volumeMounts' - type: Array - description: |- - Volume to mount into the container's filesystem. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: |- - This must match the Name of a Volume. - required: true - - name: 'mountPath' - type: String - description: |- - Path within the container at which the volume should be mounted. Must not contain ':'. For Cloud SQL volumes, it can be left empty, or must otherwise be /cloudsql. All instances defined in the Volume will be available as /cloudsql/[instance]. For more information on Cloud SQL volumes, visit https://cloud.google.com/sql/docs/mysql/connect-run - required: true - - name: 'workingDir' - type: String - description: |- - Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. - - name: 'volumes' - type: Array - description: |- - A list of Volumes to make available to containers. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: |- - Volume's name. 
- required: true - - name: 'secret' - type: NestedObject - description: |- - Secret represents a secret that should populate this volume. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret - # exactly_one_of: - # - template.0.volumes.0.secret - # - template.0.volumes.0.cloudSqlInstance - # - template.0.volumes.0.emptyDir - properties: - - name: 'secret' - type: String - description: |- - The name of the secret in Cloud Secret Manager. Format: {secret} if the secret is in the same project. projects/{project}/secrets/{secret} if the secret is in a different project. - required: true - - name: 'defaultMode' - type: Integer - description: |- - Integer representation of mode bits to use on created files by default. Must be a value between 0000 and 0777 (octal), defaulting to 0444. Directories within the path are not affected by this setting. - - name: 'items' - type: Array - description: |- - If unspecified, the volume will expose a file whose name is the secret, relative to VolumeMount.mount_path. If specified, the key will be used as the version to fetch from Cloud Secret Manager and the path will be the name of the file exposed in the volume. When items are defined, they must specify a path and a version. - item_type: - type: NestedObject - properties: - - name: 'path' - type: String - description: |- - The relative path of the secret in the container. - required: true - - name: 'version' - type: String - description: |- - The Cloud Secret Manager secret version. Can be 'latest' for the latest value or an integer for a specific version - - name: 'mode' - type: Integer - description: |- - Integer octal mode bits to use on this file, must be a value between 01 and 0777 (octal). If 0 or not set, the Volume's default mode will be used. - - name: 'cloudSqlInstance' - type: NestedObject - description: |- - For Cloud SQL volumes, contains the specific instances that should be mounted. 
Visit https://cloud.google.com/sql/docs/mysql/connect-run for more information on how to connect Cloud SQL and Cloud Run. - # exactly_one_of: - # - template.0.volumes.0.secret - # - template.0.volumes.0.cloudSqlInstance - # - template.0.volumes.0.emptyDir - # - template.0.volumes.0.gcs - properties: - - name: 'instances' - type: Array - description: |- - The Cloud SQL instance connection names, as can be found in https://console.cloud.google.com/sql/instances. Visit https://cloud.google.com/sql/docs/mysql/connect-run for more information on how to connect Cloud SQL and Cloud Run. Format: {project}:{location}:{instance} - is_set: true - item_type: - type: String - - name: 'emptyDir' - type: NestedObject - description: |- - Ephemeral storage used as a shared volume. - # exactly_one_of: - # - template.0.volumes.0.secret - # - template.0.volumes.0.cloudSqlInstance - # - template.0.volumes.0.emptyDir - # - template.0.volumes.0.gcs - properties: - - name: 'medium' - type: Enum - description: |- - The different types of medium supported for EmptyDir. - default_value: "MEMORY" - enum_values: - - 'MEMORY' - - name: 'sizeLimit' - type: String - description: |- - Limit on the storage usable by this EmptyDir volume. The size limit is also applicable for memory medium. The maximum usage on memory medium EmptyDir would be the minimum value between the SizeLimit specified here and the sum of memory limits of all containers in a pod. This field's values are of the 'Quantity' k8s type: https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/quantity/. The default is nil which means that the limit is undefined. More info: https://kubernetes.io/docs/concepts/storage/volumes/#emptydir. - - name: 'gcs' - type: NestedObject - description: |- - Cloud Storage bucket mounted as a volume using GCSFuse. This feature is only supported in the gen2 execution environment. 
- # exactly_one_of: - # - template.0.volumes.0.secret - # - template.0.volumes.0.cloudSqlInstance - # - template.0.volumes.0.emptyDir - # - template.0.volumes.0.gcs - properties: - - name: 'bucket' - type: String - description: GCS Bucket name - required: true - - name: 'readOnly' - type: Boolean - description: If true, mount the GCS bucket as read-only - required: false - - name: 'mountOptions' - min_version: 'beta' - type: Array - description: | - A list of flags to pass to the gcsfuse command for configuring this volume. - Flags should be passed without leading dashes. - item_type: - type: String - - name: 'nfs' - type: NestedObject - description: Represents an NFS mount. - properties: - - name: 'server' - type: String - description: Hostname or IP address of the NFS server - required: true - - name: 'path' - type: String - description: Path that is exported by the NFS server. - required: true - - name: 'readOnly' - type: Boolean - description: If true, mount the NFS volume as read only - required: false - - name: 'encryptionKey' - type: String - description: |- - A reference to a customer managed encryption key (CMEK) to use to encrypt this container image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek - - name: 'encryptionKeyRevocationAction' - type: Enum - description: |- - The action to take if the encryption key is revoked. - enum_values: - - 'PREVENT_NEW' - - 'SHUTDOWN' - - name: 'encryptionKeyShutdownDuration' - type: String - description: |- - If encryptionKeyRevocationAction is SHUTDOWN, the duration before shutting down all instances. The minimum increment is 1 hour. - - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". - - name: 'nodeSelector' - type: NestedObject - description: Node Selector describes the hardware requirements of the resources. - properties: - - name: 'accelerator' - type: String - description: - The GPU to attach to an instance. 
See https://cloud.google.com/run/docs/configuring/services/gpu for configuring GPU. - required: true - - name: 'gpuZonalRedundancyDisabled' - type: Boolean - description: True if GPU zonal redundancy is disabled on this revision. - - name: 'instanceSplits' - type: Array - description: |- - Specifies how to distribute instances over a collection of Revisions belonging to the WorkerPool. If instance split is empty or not provided, defaults to 100% instances assigned to the latest Ready Revision. - default_from_api: true - item_type: - type: NestedObject - properties: - - name: 'type' - type: Enum - description: | - The allocation type for this instance split. - enum_values: - - 'INSTANCE_SPLIT_ALLOCATION_TYPE_LATEST' - - 'INSTANCE_SPLIT_ALLOCATION_TYPE_REVISION' - - name: 'revision' - type: String - description: | - Revision to which to assign this portion of instances, if split allocation is by revision. - - name: 'percent' - type: Integer - description: | - Specifies percent of the instance split to this Revision. This defaults to zero if unspecified. - default_from_api: true - - name: 'observedGeneration' - type: String - description: | - The generation of this WorkerPool currently serving traffic. See comments in reconciling for additional information on reconciliation process in Cloud Run. Please note that unlike v1, this is an int64 value. As with most Google APIs, its JSON representation will be a string instead of an integer. - output: true - - name: 'terminalCondition' - type: NestedObject - description: | - The Condition of this WorkerPool, containing its readiness status, and detailed error information in case it did not reach a serving state. See comments in reconciling for additional information on reconciliation process in Cloud Run. - output: true - properties: - - name: 'type' - type: String - description: |- - type is used to communicate the status of the reconciliation process. 
See also: https://github.com/knative/serving/blob/main/docs/spec/errors.md#error-conditions-and-reporting Types common to all resources include: * "Ready": True when the Resource is ready. - output: true - - name: 'state' - type: String - description: |- - State of the condition. - output: true - - name: 'message' - type: String - description: |- - Human readable message indicating details about the current status. - output: true - - name: 'lastTransitionTime' - type: Time - description: |- - Last time the condition transitioned from one status to another. - output: true - - name: 'severity' - type: String - description: |- - How to interpret failures of this condition, one of Error, Warning, Info - output: true - - name: 'reason' - type: String - description: |- - A common (workerPool-level) reason for this condition. - output: true - - name: 'revisionReason' - type: String - description: |- - A reason for the revision condition. - output: true - - name: 'executionReason' - type: String - description: |- - A reason for the execution condition. - output: true - - name: 'conditions' - type: Array - description: |- - The Conditions of all other associated sub-resources. They contain additional diagnostics information in case the WorkerPool does not reach its Serving state. See comments in reconciling for additional information on reconciliation process in Cloud Run. - output: true - item_type: - type: NestedObject - properties: - - name: 'type' - type: String - description: |- - type is used to communicate the status of the reconciliation process. See also: https://github.com/knative/serving/blob/main/docs/spec/errors.md#error-conditions-and-reporting Types common to all resources include: * "Ready": True when the Resource is ready. - output: true - - name: 'state' - type: String - description: |- - State of the condition. - output: true - - name: 'message' - type: String - description: |- - Human readable message indicating details about the current status. 
- output: true - - name: 'lastTransitionTime' - type: Time - description: |- - Last time the condition transitioned from one status to another. - - A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". - output: true - - name: 'severity' - type: String - description: |- - How to interpret failures of this condition, one of Error, Warning, Info - output: true - - name: 'reason' - type: String - description: |- - A common (workerPool-level) reason for this condition. - output: true - - name: 'revisionReason' - type: String - description: |- - A reason for the revision condition. - output: true - - name: 'executionReason' - type: String - description: |- - A reason for the execution condition. - output: true - - name: 'latestReadyRevision' - type: String - description: | - Name of the latest revision that is serving traffic. See comments in reconciling for additional information on reconciliation process in Cloud Run. - output: true - - name: 'latestCreatedRevision' - type: String - description: | - Name of the last created revision. See comments in reconciling for additional information on reconciliation process in Cloud Run. - output: true - - name: 'instanceSplitStatuses' - type: Array - description: |- - Detailed status information for corresponding instance splits. See comments in reconciling for additional information on reconciliation process in Cloud Run. - output: true - item_type: - type: NestedObject - properties: - - name: 'type' - type: String - description: |- - The allocation type for this instance split. - output: true - - name: 'revision' - type: String - description: |- - Revision to which this instance split is assigned. - output: true - - name: 'percent' - type: Integer - description: |- - Specifies percent of the instance split to this Revision. 
- output: true - - name: 'reconciling' - type: Boolean - description: | - Returns true if the WorkerPool is currently being acted upon by the system to bring it into the desired state. - - When a new WorkerPool is created, or an existing one is updated, Cloud Run will asynchronously perform all necessary steps to bring the WorkerPool to the desired serving state. This process is called reconciliation. While reconciliation is in process, observedGeneration, latest_ready_revison, trafficStatuses, and uri will have transient values that might mismatch the intended state: Once reconciliation is over (and this field is false), there are two possible outcomes: reconciliation succeeded and the serving state matches the WorkerPool, or there was an error, and reconciliation failed. This state can be found in terminalCondition.state. - - If reconciliation succeeded, the following fields will match: traffic and trafficStatuses, observedGeneration and generation, latestReadyRevision and latestCreatedRevision. - - If reconciliation failed, trafficStatuses, observedGeneration, and latestReadyRevision will have the state of the last serving revision, or empty for newly created WorkerPools. Additional information on the failure can be found in terminalCondition and conditions. - output: true - - name: 'etag' - type: String - description: | - A system-generated fingerprint for this version of the resource. May be used to detect modification conflict during updates. 
- output: true diff --git a/mmv1/products/cloudtasks/Queue.yaml b/mmv1/products/cloudtasks/Queue.yaml index d1f4cff5abab..0170f4eabbe1 100644 --- a/mmv1/products/cloudtasks/Queue.yaml +++ b/mmv1/products/cloudtasks/Queue.yaml @@ -34,8 +34,6 @@ iam_policy: - '{{name}}' custom_code: constants: 'templates/terraform/constants/cloud_tasks_retry_config_custom_diff.go.tmpl' - post_create: 'templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl' - post_update: 'templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl' examples: - name: 'queue_basic' primary_resource_id: 'default' @@ -58,18 +56,6 @@ examples: primary_resource_id: 'http_target_oauth' vars: name: 'cloud-tasks-queue-http-target-oauth' -virtual_fields: - - name: 'desired_state' - type: Enum - description: | - The desired state of the queue. Use this to pause and resume the queue. - - * RUNNING: The queue is running. Tasks can be dispatched. - * PAUSED: The queue is paused. Tasks are not dispatched but can be added to the queue. - default_value: 'RUNNING' - enum_values: - - 'RUNNING' - - 'PAUSED' parameters: - name: 'location' type: String @@ -81,7 +67,6 @@ properties: - name: 'name' type: String description: The queue name. - required: true immutable: true custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' custom_expand: 'templates/terraform/custom_expand/qualify_queue_name.go.tmpl' @@ -221,15 +206,6 @@ properties: This field may contain any value between 0.0 and 1.0, inclusive. 0.0 is the default and means that no operations are logged. required: true - - name: 'state' - type: Enum - description: | - The current state of the queue. - output: true - enum_values: - - 'RUNNING' - - 'PAUSED' - - 'DISABLED' - name: 'httpTarget' type: NestedObject description: Modifies HTTP target for HTTP tasks. 
diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index e4008c309467..5fc646f1ad47 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -32,8 +32,6 @@ async: type: 'OpAsync' operation: full_url: 'https://{{location}}-aiplatform.googleapis.com/v1/{{op_id}}' - result: - resource_inside_response: true iam_policy: parent_resource_attribute: 'runtime_template' method_name_separator: ':' @@ -42,6 +40,8 @@ iam_policy: import_format: - 'projects/{{project}}/locations/{{location}}/notebookRuntimeTemplates/{{runtime_template}}' - '{{runtime_template}}' +custom_code: + post_create: 'templates/terraform/post_create/colab_runtime_template.go.tmpl' examples: - name: 'colab_runtime_template_basic' primary_resource_id: 'runtime-template' @@ -158,24 +158,20 @@ properties: type: NestedObject default_from_api: true description: 'EUC configuration of the NotebookRuntimeTemplate.' - diff_suppress_func: 'tpgresource.EmptyOrUnsetBlockDiffSuppress' properties: - name: 'eucDisabled' type: Boolean default_from_api: true description: 'Disable end user credential access for the runtime.' - diff_suppress_func: 'tpgresource.EmptyOrFalseSuppressBoolean' - name: shieldedVmConfig type: NestedObject default_from_api: true description: 'Runtime Shielded VM spec.' - diff_suppress_func: 'tpgresource.EmptyOrUnsetBlockDiffSuppress' properties: - name: 'enableSecureBoot' type: Boolean default_from_api: true description: 'Enables secure boot for the runtime.' - diff_suppress_func: 'tpgresource.EmptyOrFalseSuppressBoolean' - name: 'networkTags' type: Array item_type: @@ -190,7 +186,6 @@ properties: description: 'The Cloud KMS encryption key (customer-managed encryption key) used to protect the runtime.' - name: softwareConfig type: NestedObject - default_from_api: true description: 'The notebook software configuration of the notebook runtime.' 
properties: - name: 'env' @@ -209,3 +204,20 @@ properties: If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not.' + - name: 'postStartupScriptConfig' + type: NestedObject + description: 'Post startup script config.' + properties: + - name: 'postStartupScript' + type: String + description: 'Post startup script to run after runtime is started.' + - name: 'postStartupScriptUrl' + type: String + description: 'Post startup script url to download. Example: https://bucket/script.sh.' + - name: 'postStartupScriptBehavior' + type: Enum + description: 'Post startup script behavior that defines download and execution behavior.' + enum_values: + - 'RUN_ONCE' + - 'RUN_EVERY_START' + - 'DOWNLOAD_AND_RUN_EVERY_START' diff --git a/mmv1/products/colab/Schedule.yaml b/mmv1/products/colab/Schedule.yaml index 9aa46e282b57..7bdd1b89322f 100644 --- a/mmv1/products/colab/Schedule.yaml +++ b/mmv1/products/colab/Schedule.yaml @@ -62,7 +62,6 @@ examples: ignore_read_extra: - desired_state - name: 'colab_schedule_full' - min_version: "beta" primary_resource_id: 'schedule' bootstrap_iam: - member: "serviceAccount:service-{project_number}@gcp-sa-dataform.iam.gserviceaccount.com" @@ -76,7 +75,6 @@ examples: dataform_repository: 'dataform-repository' start_time: '2014-10-02T15:01:23Z' end_time: '2014-10-10T15:01:23Z' - key_name: 'my-key' test_env_vars: project_id: 'PROJECT_NAME' location: 'REGION' diff --git a/mmv1/products/composer/UserWorkloadsConfigMap.yaml b/mmv1/products/composer/UserWorkloadsConfigMap.yaml index 19d0a5564be0..4efc7c9ec7fe 100644 --- a/mmv1/products/composer/UserWorkloadsConfigMap.yaml +++ b/mmv1/products/composer/UserWorkloadsConfigMap.yaml @@ -32,7 +32,6 @@ examples: - name: 'composer_user_workloads_config_map_basic' primary_resource_id: 'config_map' vars: - 
service_account_name: 'test-sa' environment_name: 'test-environment' config_map_name: 'test-config-map' parameters: diff --git a/mmv1/products/compute/Address.yaml b/mmv1/products/compute/Address.yaml index f25c8cf09da9..37b751efcea7 100644 --- a/mmv1/products/compute/Address.yaml +++ b/mmv1/products/compute/Address.yaml @@ -49,7 +49,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: post_create: 'templates/terraform/post_create/labels.tmpl' sweeper: @@ -80,7 +79,7 @@ examples: address_name: 'my-internal-address' # It is almost identical to internal_with_gce_endpoint exclude_docs: true - # TODO: Remove this example when instance is supported + # TODO(rileykarson): Remove this example when instance is supported - name: 'instance_with_ip' primary_resource_id: 'static' vars: diff --git a/mmv1/products/compute/Autoscaler.yaml b/mmv1/products/compute/Autoscaler.yaml index 39a7aded9fa8..c96c55e04624 100644 --- a/mmv1/products/compute/Autoscaler.yaml +++ b/mmv1/products/compute/Autoscaler.yaml @@ -42,7 +42,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: sweeper: url_substitutions: diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index b8c3b6e7aeb8..4f0601c6898c 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -41,7 +41,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true iam_policy: parent_resource_attribute: 'name' example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' @@ -60,7 +59,6 @@ examples: vars: backend_bucket_name: 'image-backend-bucket' bucket_name: 'image-store-bucket' - tgc_test_ignore_in_asset: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec'] - name: 'backend_bucket_full' primary_resource_id: 'image_backend_full' vars: @@ 
-72,7 +70,6 @@ examples: vars: backend_bucket_name: 'image-backend-bucket' bucket_name: 'image-store-bucket' - tgc_test_ignore_in_asset: ['RESOURCE.cdnPolicy.signedUrlCacheMaxAgeSec'] - name: 'backend_bucket_query_string_whitelist' primary_resource_id: 'image_backend' vars: @@ -106,16 +103,6 @@ examples: backend_bucket_name: 'image-backend-bucket' bucket_name: 'image-store-bucket' exclude_docs: true - - name: 'backend_bucket_global_ilb' - primary_resource_id: 'global-ilb-backend' - test_env_vars: - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - vars: - backend_bucket_name: 'global-ilb-backend-bucket' - bucket_name: 'global-ilb-bucket' - exclude_docs: true - skip_vcr: true parameters: properties: - name: 'bucketName' @@ -274,10 +261,7 @@ properties: client when the resource is created. - name: 'enableCdn' type: Boolean - description: | - If true, enable Cloud CDN for this BackendBucket. - Note: This cannot be set to true when loadBalancingScheme is set to INTERNAL_MANAGED. - include_empty_value_in_cai: true + description: 'If true, enable Cloud CDN for this BackendBucket.' - name: 'name' type: String description: | @@ -292,29 +276,3 @@ properties: immutable: true validation: regex: '^(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)$' - - name: 'loadBalancingScheme' - is_missing_in_cai: true - type: Enum - description: | - The value can only be INTERNAL_MANAGED for cross-region internal layer 7 load balancer. - If loadBalancingScheme is not specified, the backend bucket can be used by classic global external load balancers, or global application external load balancers, or both. - Important: CDN cannot be enabled (enableCdn cannot be set to true) when loadBalancingScheme is set to INTERNAL_MANAGED. 
- enum_values: - - 'INTERNAL_MANAGED' - send_empty_value: true - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the backend bucket. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. - api_name: resourceManagerTags - ignore_read: true - immutable: true diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 351b20feb43d..cf5c22b576cb 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -43,7 +43,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true iam_policy: allowed_iam_role: 'roles/compute.admin' parent_resource_attribute: 'name' @@ -56,7 +55,6 @@ custom_code: decoder: 'templates/terraform/decoders/backend_service.go.tmpl' post_create: 'templates/terraform/post_create/compute_backend_service_security_policy.go.tmpl' post_update: 'templates/terraform/post_create/compute_backend_service_security_policy.go.tmpl' - tgc_decoder: 'templates/tgc_next/decoders/compute_backend_service.go.tmpl' schema_version: 1 examples: - name: 'backend_service_basic' @@ -98,18 +96,12 @@ examples: vars: backend_service_name: 'backend-service' health_check_name: 'health-check' - tgc_test_ignore_in_asset: - - 'RESOURCE.enableCDN' # It has false value in CAI asset - name: 'backend_service_traffic_director_ring_hash' primary_resource_id: 'default' min_version: 'beta' vars: backend_service_name: 'backend-service' health_check_name: 'health-check' - tgc_test_ignore_extra: - - outlier_detection.enforcing_consecutive_gateway_failure # Ignore the 0 value in 
config - tgc_test_ignore_in_asset: - - 'RESOURCE.enableCDN' # It has false value in CAI asset - name: 'backend_service_stateful_session_affinity' primary_resource_id: 'default' min_version: 'beta' @@ -140,6 +132,7 @@ examples: network_name: 'network' - name: 'backend_service_tls_settings' primary_resource_id: 'default' + min_version: 'beta' vars: backend_service_name: 'backend-service' health_check_name: 'health-check' @@ -149,13 +142,6 @@ examples: vars: url_map_name: 'url_map' exclude_test: true - - name: 'backend_service_dynamic_forwarding' - primary_resource_id: 'default' - min_version: 'beta' - vars: - backend_service_name: 'backend-service' - tgc_test_ignore_in_asset: - - 'RESOURCE.enableCDN' # It has false value in CAI asset parameters: properties: - name: 'affinityCookieTtlSec' @@ -167,7 +153,6 @@ properties: maximum allowed value for TTL is one day. When the load balancing scheme is INTERNAL, this field is not used. - include_empty_value_in_cai: true - name: 'backend' type: Array description: | @@ -618,7 +603,6 @@ properties: - 'cdn_policy.0.cache_key_policy.0.include_named_cookies' item_type: type: String - custom_tgc_expand: 'templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl' - name: 'queryStringWhitelist' type: Array description: | @@ -630,7 +614,6 @@ properties: delimiters. is_set: true send_empty_value: true - custom_tgc_expand: 'templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl' at_least_one_of: - 'cdn_policy.0.cache_key_policy.0.include_host' - 'cdn_policy.0.cache_key_policy.0.include_protocol' @@ -689,7 +672,6 @@ properties: - 'cdn_policy.0.cache_key_policy' - 'cdn_policy.0.signed_url_cache_max_age_sec' default_value: 3600 - custom_tgc_flatten: 'templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl' - name: 'defaultTtl' type: Integer description: | @@ -808,7 +790,6 @@ properties: type: Boolean description: | If true, enable Cloud CDN for this BackendService. 
- include_empty_value_in_cai: true - name: 'healthChecks' type: Array description: | @@ -858,7 +839,6 @@ properties: ignore_read: true sensitive: true send_empty_value: true - is_missing_in_cai: true - name: 'oauth2ClientSecretSha256' type: String description: | @@ -880,6 +860,7 @@ properties: external load balancing. A backend service created for one type of load balancing cannot be used with the other. For more information, refer to [Choosing a load balancer](https://cloud.google.com/load-balancing/docs/backend-service). + immutable: true default_value: "EXTERNAL" # If you're modifying this value, it probably means Global ILB is now # an option. If that's the case, all of the documentation is based on @@ -889,38 +870,6 @@ properties: - 'INTERNAL_SELF_MANAGED' - 'INTERNAL_MANAGED' - 'EXTERNAL_MANAGED' - - name: 'externalManagedMigrationState' - type: Enum - description: | - Specifies the canary migration state. Possible values are PREPARE, TEST_BY_PERCENTAGE, and - TEST_ALL_TRAFFIC. - - To begin the migration from EXTERNAL to EXTERNAL_MANAGED, the state must be changed to - PREPARE. The state must be changed to TEST_ALL_TRAFFIC before the loadBalancingScheme can be - changed to EXTERNAL_MANAGED. Optionally, the TEST_BY_PERCENTAGE state can be used to migrate - traffic by percentage using externalManagedMigrationTestingPercentage. - - Rolling back a migration requires the states to be set in reverse order. So changing the - scheme from EXTERNAL_MANAGED to EXTERNAL requires the state to be set to TEST_ALL_TRAFFIC at - the same time. Optionally, the TEST_BY_PERCENTAGE state can be used to migrate some traffic - back to EXTERNAL or PREPARE can be used to migrate all traffic back to EXTERNAL. - enum_values: - - 'PREPARE' - - 'TEST_BY_PERCENTAGE' - - 'TEST_ALL_TRAFFIC' - - name: 'externalManagedMigrationTestingPercentage' - type: Double - description: | - Determines the fraction of requests that should be processed by the Global external - Application Load Balancer. 
- - The value of this field must be in the range [0, 100]. - - Session affinity options will slightly affect this routing behavior, for more details, - see: Session Affinity. - - This value can only be set if the loadBalancingScheme in the backend service is set to - EXTERNAL (when using the Classic ALB) and the migration state is TEST_BY_PERCENTAGE. - name: 'localityLbPolicy' type: Enum description: | @@ -974,7 +923,7 @@ properties: locality_lb_policy is applicable to either: - * A regional backend service with the service_protocol set to HTTP, HTTPS, HTTP2 or H2C, + * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and loadBalancingScheme set to INTERNAL_MANAGED. * A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. * A regional backend service with loadBalancingScheme set to EXTERNAL (External Network @@ -1377,10 +1326,11 @@ properties: type: Enum description: | The protocol this BackendService uses to communicate with backends. - The default is HTTP. Possible values are HTTP, HTTPS, HTTP2, H2C, TCP, SSL, UDP - or GRPC. Refer to the documentation for the load balancers or for Traffic Director - for more information. Must be set to GRPC when the backend service is referenced - by a URL map that is bound to target gRPC proxy. + The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer + types and may result in errors if used with the GA API. **NOTE**: With protocol “UNSPECIFIED”, + the backend service can be used by Layer 4 Internal Load Balancing or Network Load Balancing + with TCP/UDP/L3_DEFAULT Forwarding Rule protocol. 
+ # TODO: make a ResourceRef to Security Policy default_from_api: true enum_values: - 'HTTP' @@ -1388,12 +1338,9 @@ properties: - 'HTTP2' - 'TCP' - 'SSL' - - 'UDP' - 'GRPC' - 'UNSPECIFIED' - - 'H2C' - name: 'securityPolicy' - # TODO: make a ResourceRef to Security Policy type: String description: | The security policy associated with this backend service. @@ -1407,7 +1354,7 @@ properties: type: NestedObject description: | The security settings that apply to this backend service. This field is applicable to either - a regional backend service with the service_protocol set to HTTP, HTTPS, HTTP2 or H2C, and + a regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and load_balancing_scheme set to INTERNAL_MANAGED; or a global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. properties: @@ -1568,7 +1515,6 @@ properties: This field can only be specified if logging is enabled for this backend service and "logConfig.optionalMode" was set to CUSTOM. Contains a list of optional fields you want to include in the logs. For example: serverInstance, serverGkeDetails.cluster, serverGkeDetails.pod.podNamespace - For example: orca_load_report, tls.protocol item_type: type: String - name: 'serviceLbPolicy' @@ -1580,6 +1526,7 @@ properties: type: NestedObject description: | Configuration for Backend Authenticated TLS and mTLS. May only be specified when the backend protocol is SSL, HTTPS or HTTP2. + min_version: beta properties: - name: 'sni' type: String @@ -1638,69 +1585,3 @@ properties: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'networkPassThroughLbTrafficPolicy' - type: NestedObject - description: | - Configures traffic steering properties of internal passthrough Network Load Balancers. 
- min_version: beta - properties: - - name: 'zonalAffinity' - type: NestedObject - description: | - When configured, new connections are load balanced across healthy backend endpoints in the local zone. - properties: - - name: 'spillover' - type: Enum - description: | - This field indicates whether zonal affinity is enabled or not. - enum_values: - - 'ZONAL_AFFINITY_DISABLED' - - 'ZONAL_AFFINITY_SPILL_CROSS_ZONE' - - 'ZONAL_AFFINITY_STAY_WITHIN_ZONE' - default_value: 'ZONAL_AFFINITY_DISABLED' - min_version: beta - - name: 'spilloverRatio' - type: Double - description: | - The value of the field must be in [0, 1]. When the ratio of the count of healthy backend endpoints in a zone - to the count of backend endpoints in that same zone is equal to or above this threshold, the load balancer - distributes new connections to all healthy endpoints in the local zone only. When the ratio of the count - of healthy backend endpoints in a zone to the count of backend endpoints in that same zone is below this - threshold, the load balancer distributes all new connections to all healthy endpoints across all zones. - min_version: beta - - name: 'dynamicForwarding' - type: NestedObject - description: | - Dynamic forwarding configuration. This field is used to configure the backend service with dynamic forwarding - feature which together with Service Extension allows customized and complex routing logic. - min_version: beta - is_missing_in_cai: true - properties: - - name: 'ipPortSelection' - type: NestedObject - description: | - IP:PORT based dynamic forwarding configuration. - min_version: beta - properties: - - name: 'enabled' - type: Boolean - min_version: beta - description: | - A boolean flag enabling IP:PORT based dynamic forwarding. 
- immutable: true - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the backend service. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. - api_name: resourceManagerTags - ignore_read: true - immutable: true diff --git a/mmv1/products/compute/CrossSiteNetwork.yaml b/mmv1/products/compute/CrossSiteNetwork.yaml index 771893952651..3251f609287b 100644 --- a/mmv1/products/compute/CrossSiteNetwork.yaml +++ b/mmv1/products/compute/CrossSiteNetwork.yaml @@ -13,7 +13,6 @@ --- name: 'CrossSiteNetwork' -# kind: 'compute/crossSiteNetwork' description: | Represents a cross-site-network resource. A CrossSiteNetwork is used to establish L2 connectivity between groups of Interconnects. min_version: beta @@ -47,18 +46,6 @@ examples: project: 'PROJECT_NAME' parameters: properties: - - name: 'name' - type: String - description: | - Name of the resource. Provided by the client when the resource is created. The name must be - 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters - long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first - character must be a lowercase letter, and all following characters must be a dash, - lowercase letter, or digit, except the last character, which cannot be a dash. 
- required: true - immutable: true - validation: - regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - name: 'name' type: String description: | diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index d4e18c02ea62..29435dbb62f7 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -50,7 +50,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true iam_policy: parent_resource_attribute: 'name' base_url: 'projects/{{project}}/zones/{{zone}}/disks/{{name}}' @@ -64,7 +63,6 @@ custom_code: update_encoder: 'templates/terraform/update_encoder/hyper_disk.go.tmpl' decoder: 'templates/terraform/decoders/disk.tmpl' pre_delete: 'templates/terraform/pre_delete/detach_disk.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/compute_disk.go.tmpl' custom_diff: - 'customdiff.ForceNewIfChange("size", IsDiskShrinkage)' - 'hyperDiskIopsUpdateDiffSuppress' @@ -75,7 +73,7 @@ examples: vars: disk_name: 'test-disk' - name: 'disk_async' - primary_resource_id: 'secondary' + primary_resource_id: 'primary' primary_resource_name: 'fmt.Sprintf("tf-test-test-disk%s", context["random_suffix"])' vars: disk_name: 'async-test-disk' @@ -130,7 +128,7 @@ properties: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true - # TODO Change to ResourceRef once KMS is in Magic Modules + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeySelfLink' type: String description: | @@ -240,7 +238,7 @@ properties: description: | Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. 
- # TODO Change to ResourceRef once KMS is in Magic Modules + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeySelfLink' type: String description: | @@ -496,8 +494,10 @@ properties: - name: 'architecture' ignore_read: true type: String - description: | - The architecture of the disk. Values include `X86_64`, `ARM64`. + The architecture of the disk. + enum_values: + - 'X86_64' + - 'ARM64' - name: 'params' type: NestedObject ignore_read: true @@ -557,12 +557,11 @@ properties: - name: 'accessMode' type: String description: | - The access mode of the disk. + The accessMode of the disk. For example: - * READ_WRITE_SINGLE: The default AccessMode, means the disk can be attached to single instance in RW mode. - * READ_WRITE_MANY: The AccessMode means the disk can be attached to multiple instances in RW mode. - * READ_ONLY_SINGLE: The AccessMode means the disk can be attached to multiple instances in RO mode. - The AccessMode is only valid for Hyperdisk disk types. + * READ_WRITE_SINGLE + * READ_WRITE_MANY + * READ_ONLY_SINGLE required: false immutable: false default_from_api: true diff --git a/mmv1/products/compute/DiskType.yaml b/mmv1/products/compute/DiskType.yaml index 964b7df6923a..a9dfe0d19ab9 100644 --- a/mmv1/products/compute/DiskType.yaml +++ b/mmv1/products/compute/DiskType.yaml @@ -14,7 +14,7 @@ --- name: 'DiskType' kind: 'compute#diskType' -# TODO: Search all documentation for references of using URL (like +# TODO(nelsonjr): Search all documentation for references of using URL (like # the description below) and replace with the proper reference to the # corresponding type. description: | @@ -22,7 +22,7 @@ description: | of disk to use, such as a pd-ssd, pd-balanced or pd-standard. To reference a disk type, use the disk type's full or partial URL. exclude: true -# TODO: Temporarily make DiskType virtual so no tests gets +# TODO(nelsonjr): Temporarily make DiskType virtual so no tests gets # triggered for create. 
Implement support for read only objects, and delete # the virtual tag # | readonly: true diff --git a/mmv1/products/compute/ExternalVpnGateway.yaml b/mmv1/products/compute/ExternalVpnGateway.yaml index 37b2a89a4210..373bc1d5e912 100644 --- a/mmv1/products/compute/ExternalVpnGateway.yaml +++ b/mmv1/products/compute/ExternalVpnGateway.yaml @@ -35,7 +35,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'external_vpn_gateway' diff --git a/mmv1/products/compute/Firewall.yaml b/mmv1/products/compute/Firewall.yaml index 30ce8906f8e9..0fe92be2c8d0 100644 --- a/mmv1/products/compute/Firewall.yaml +++ b/mmv1/products/compute/Firewall.yaml @@ -50,7 +50,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: extra_schema_entry: 'templates/terraform/extra_schema_entry/firewall.tmpl' constants: 'templates/terraform/constants/firewall.tmpl' @@ -73,7 +72,7 @@ examples: project: 'PROJECT_NAME' parameters: properties: - # TODO: [nice to have] Make the format here simpler to use, in + # TODO(nelsonjr): [nice to have] Make the format here simpler to use, in # the form of # 22/tcp, [12345-23456]/tcp. It requires a conversion # function to the # final JSON format expected by the API for this # proposal to work. @@ -332,20 +331,3 @@ properties: - target_service_accounts item_type: type: String - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the firewall. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. The field is ignored when empty. 
- The field is immutable and causes resource replacement when mutated. This field is only - set at create time and modifying this field after creation will trigger recreation. - To apply tags to an existing resource, see the google_tags_tag_binding resource. - ignore_read: true diff --git a/mmv1/products/compute/FirewallPolicy.yaml b/mmv1/products/compute/FirewallPolicy.yaml index 5c4d516d6bbe..0eb7f969b700 100644 --- a/mmv1/products/compute/FirewallPolicy.yaml +++ b/mmv1/products/compute/FirewallPolicy.yaml @@ -41,8 +41,6 @@ custom_code: post_update: 'templates/terraform/constants/compute_firewall_policy_operation.go.tmpl' custom_diff: - 'tpgresource.DefaultProviderProject' -include_in_tgc_next_DO_NOT_USE: true -tgc_hcl_block_name: 'shortName' examples: - name: 'firewall_policy' primary_resource_id: 'default' diff --git a/mmv1/products/compute/FirewallPolicyRule.yaml b/mmv1/products/compute/FirewallPolicyRule.yaml index 230625d28de7..4395443f3f18 100644 --- a/mmv1/products/compute/FirewallPolicyRule.yaml +++ b/mmv1/products/compute/FirewallPolicyRule.yaml @@ -50,8 +50,6 @@ examples: address_group: 'address-group' folder: 'folder' fw_policy: 'fw-policy' - tag_key: 'tag-key' - tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' service_acct: 'SERVICE_ACCT' @@ -61,20 +59,9 @@ examples: folder: 'folder' fw_policy: 'fw-policy' network: 'network' - tag_key: 'tag-key' - tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' min_version: beta - - name: 'firewall_policy_rule_secure_tags' - primary_resource_id: 'primary' - vars: - folder: 'folder' - fw_policy: 'fw-policy' - tag_key: 'tag-key' - tag_value: 'tag-value' - test_env_vars: - org_id: 'ORG_ID' parameters: - name: 'firewallPolicy' type: ResourceRef @@ -234,27 +221,6 @@ properties: Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. 
item_type: type: String - - name: 'srcSecureTags' - type: Array - send_empty_value: true - description: | - List of secure tag values, which should be matched at the source of the traffic. For INGRESS rule, if all the srcSecureTag are INEFFECTIVE, and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the secure tag, created with TagManager's TagValue API. - diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - - name: 'state' - type: Enum - description: | - State of the secure tag, either EFFECTIVE or INEFFECTIVE. A secure tag is INEFFECTIVE when it is deleted or its network is deleted. - output: true - enum_values: - - 'EFFECTIVE' - - 'INEFFECTIVE' - name: 'action' type: String description: 'The Action to perform when the client connection triggers the rule. Valid actions are "allow", "deny", "goto_next" and "apply_security_profile_group".' @@ -308,32 +274,8 @@ properties: A list of service accounts indicating the sets of instances that are applied with this rule. item_type: type: String - - name: 'targetSecureTags' - type: Array - send_empty_value: true - description: | - A list of secure tags that controls which instances the firewall rule applies to. - If targetSecureTag are specified, then the firewall rule applies only to instances in the VPC network that have one of those EFFECTIVE secure tags, if all the targetSecureTag are in INEFFECTIVE state, then this rule will be ignored. - targetSecureTag may not be set at the same time as targetServiceAccounts. If neither targetServiceAccounts nor targetSecureTag are specified, the firewall rule applies to all instances on the specified network. Maximum number of target secure tags allowed is 256. 
- item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the secure tag, created with TagManager's TagValue API. - diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - - name: 'state' - type: Enum - description: | - State of the secure tag, either EFFECTIVE or INEFFECTIVE. A secure tag is INEFFECTIVE when it is deleted or its network is deleted. - output: true - enum_values: - - 'EFFECTIVE' - - 'INEFFECTIVE' - name: 'disabled' type: Boolean - send_empty_value: true description: | Denotes whether the firewall policy rule is disabled. When set to true, the firewall policy rule is not enforced and traffic behaves as if it did not exist. diff --git a/mmv1/products/compute/FirewallPolicyWithRules.yaml b/mmv1/products/compute/FirewallPolicyWithRules.yaml index 693d15691d41..ec241e0ae038 100644 --- a/mmv1/products/compute/FirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/FirewallPolicyWithRules.yaml @@ -17,6 +17,7 @@ api_resource_type_kind: FirewallPolicy description: | The Compute FirewallPolicy with rules resource. It declaratively manges all rules in the firewall policy. +min_version: 'beta' docs: id_format: 'locations/global/firewallPolicies/{{policy_id}}' base_url: 'locations/global/firewallPolicies?parentId={{parent}}' @@ -46,8 +47,6 @@ examples: network: 'network' security_profile: 'sp' security_profile_group: 'spg' - tag_key: 'tag-key' - tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' parameters: @@ -56,30 +55,36 @@ parameters: description: | The parent of this FirewallPolicy in the Cloud Resource Hierarchy. Format: organizations/{organization_id} or folders/{folder_id} + min_version: 'beta' required: true immutable: true properties: - name: 'creationTimestamp' type: String description: Creation timestamp in RFC3339 text format. + min_version: 'beta' output: true - name: 'shortName' type: String description: A textual name of the security policy. 
+ min_version: 'beta' required: true immutable: true - name: 'policyId' type: String description: The unique identifier for the resource. This identifier is defined by the server. api_name: id + min_version: 'beta' output: true - name: 'description' type: String description: An optional description of this resource. + min_version: 'beta' - name: 'rule' type: Array description: A list of firewall policy rules. api_name: rules + min_version: 'beta' required: true item_type: type: NestedObject @@ -88,23 +93,27 @@ properties: type: String description: | A description of the rule. + min_version: 'beta' - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. + min_version: 'beta' - name: 'priority' type: Integer description: | An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + min_version: 'beta' required: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + min_version: 'beta' required: true properties: - name: 'srcIpRanges' @@ -112,6 +121,7 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. + min_version: 'beta' item_type: type: String - name: 'destIpRanges' @@ -119,6 +129,7 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. + min_version: 'beta' item_type: type: String - name: 'srcAddressGroups' @@ -126,6 +137,7 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. 
+ min_version: 'beta' item_type: type: String - name: 'destAddressGroups' @@ -133,6 +145,7 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. + min_version: 'beta' item_type: type: String - name: 'srcFqdns' @@ -140,6 +153,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. + min_version: 'beta' item_type: type: String - name: 'destFqdns' @@ -147,6 +161,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. + min_version: 'beta' item_type: type: String - name: 'srcNetworkScope' @@ -183,6 +198,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. + min_version: 'beta' item_type: type: String - name: 'destRegionCodes' @@ -192,6 +208,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. + min_version: 'beta' item_type: type: String - name: 'srcThreatIntelligences' @@ -199,6 +216,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. + min_version: 'beta' item_type: type: String - name: 'destThreatIntelligences' @@ -206,40 +224,15 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. + min_version: 'beta' item_type: type: String - - name: 'srcSecureTag' - type: Array - description: | - List of secure tag values, which should be matched at the source - of the traffic. 
- For INGRESS rule, if all the srcSecureTag are INEFFECTIVE, - and there is no srcIpRange, this rule will be ignored. - Maximum number of source tag values allowed is 256. - api_name: srcSecureTags - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the secure tag, created with TagManager's TagValue API. - @pattern tagValues/[0-9]+ - - name: 'state' - type: Enum - description: | - [Output Only] State of the secure tag, either `EFFECTIVE` or - `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted - or its network is deleted. - output: true - enum_values: - - 'EFFECTIVE' - - 'INEFFECTIVE' - name: 'layer4Config' type: Array description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs + min_version: 'beta' required: true item_type: type: NestedObject @@ -252,6 +245,7 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. + min_version: 'beta' required: true - name: 'ports' type: Array @@ -262,51 +256,21 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. + min_version: 'beta' item_type: type: String - - name: 'targetSecureTag' - type: Array - description: | - A list of secure tags that controls which instances the firewall rule - applies to. If targetSecureTag are specified, then the - firewall rule applies only to instances in the VPC network that have one - of those EFFECTIVE secure tags, if all the target_secure_tag are in - INEFFECTIVE state, then this rule will be ignored. - targetSecureTag may not be set at the same time as - targetServiceAccounts. - If neither targetServiceAccounts nor - targetSecureTag are specified, the firewall rule applies - to all instances on the specified network. - Maximum number of target secure tags allowed is 256. 
- api_name: targetSecureTags - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the secure tag, created with TagManager's TagValue API. - @pattern tagValues/[0-9]+ - - name: 'state' - type: Enum - description: | - [Output Only] State of the secure tag, either `EFFECTIVE` or - `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted - or its network is deleted. - output: true - enum_values: - - 'EFFECTIVE' - - 'INEFFECTIVE' - name: 'action' type: String description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". + min_version: 'beta' required: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. + min_version: 'beta' enum_values: - 'INGRESS' - 'EGRESS' @@ -316,12 +280,14 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. + min_version: 'beta' send_empty_value: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. + min_version: 'beta' item_type: type: String - name: 'securityProfileGroup' @@ -331,11 +297,13 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. + min_version: 'beta' - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. 
+ min_version: 'beta' - name: 'targetResources' type: Array description: | @@ -343,8 +311,9 @@ properties: This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule. + min_version: 'beta' item_type: - type: ResourceRef + type: String - name: 'disabled' type: Boolean description: | @@ -352,9 +321,11 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + min_version: 'beta' - name: 'predefinedRules' type: Array description: A list of pre-define firewall policy rules. + min_version: 'beta' output: true item_type: type: NestedObject @@ -363,12 +334,14 @@ properties: type: String description: | A description of the rule. + min_version: 'beta' output: true - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. + min_version: 'beta' output: true - name: 'priority' type: Integer @@ -376,12 +349,14 @@ properties: An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + min_version: 'beta' output: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + min_version: 'beta' output: true properties: - name: 'srcIpRanges' @@ -389,6 +364,7 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. + min_version: 'beta' output: true item_type: type: String @@ -397,6 +373,7 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. 
+ min_version: 'beta' output: true item_type: type: String @@ -405,6 +382,7 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. + min_version: 'beta' output: true item_type: type: String @@ -413,6 +391,7 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. + min_version: 'beta' output: true item_type: type: String @@ -421,6 +400,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. + min_version: 'beta' output: true item_type: type: String @@ -429,6 +409,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. + min_version: 'beta' output: true item_type: type: String @@ -439,6 +420,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. + min_version: 'beta' output: true item_type: type: String @@ -449,6 +431,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. + min_version: 'beta' output: true item_type: type: String @@ -457,6 +440,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. + min_version: 'beta' output: true item_type: type: String @@ -465,6 +449,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. 
+ min_version: 'beta' output: true item_type: type: String @@ -473,6 +458,7 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs + min_version: 'beta' output: true item_type: type: NestedObject @@ -485,6 +471,7 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. + min_version: 'beta' output: true - name: 'ports' type: Array @@ -495,83 +482,22 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. + min_version: 'beta' output: true item_type: type: String - - name: 'srcSecureTag' - type: Array - description: | - List of secure tag values, which should be matched at the source - of the traffic. - For INGRESS rule, if all the srcSecureTag are INEFFECTIVE, - and there is no srcIpRange, this rule will be ignored. - Maximum number of source tag values allowed is 256. - api_name: srcSecureTags - output: true - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the secure tag, created with TagManager's TagValue API. - @pattern tagValues/[0-9]+ - output: true - - name: 'state' - type: Enum - description: | - [Output Only] State of the secure tag, either `EFFECTIVE` or - `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted - or its network is deleted. - output: true - enum_values: - - 'EFFECTIVE' - - 'INEFFECTIVE' - - name: 'targetSecureTag' - type: Array - description: | - A list of secure tags that controls which instances the firewall rule - applies to. If targetSecureTag are specified, then the - firewall rule applies only to instances in the VPC network that have one - of those EFFECTIVE secure tags, if all the target_secure_tag are in - INEFFECTIVE state, then this rule will be ignored. - targetSecureTag may not be set at the same time as - targetServiceAccounts. 
- If neither targetServiceAccounts nor - targetSecureTag are specified, the firewall rule applies - to all instances on the specified network. - Maximum number of target secure tags allowed is 256. - api_name: targetSecureTags - output: true - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the secure tag, created with TagManager's TagValue API. - @pattern tagValues/[0-9]+ - output: true - - name: 'state' - type: Enum - description: | - [Output Only] State of the secure tag, either `EFFECTIVE` or - `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted - or its network is deleted. - output: true - enum_values: - - 'EFFECTIVE' - - 'INEFFECTIVE' - name: 'action' type: String description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". + min_version: 'beta' output: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. + min_version: 'beta' output: true enum_values: - 'INGRESS' @@ -582,12 +508,14 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. + min_version: 'beta' output: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. + min_version: 'beta' output: true item_type: type: String @@ -598,12 +526,14 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. + min_version: 'beta' output: true - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. 
It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. + min_version: 'beta' output: true - name: 'targetResources' type: Array @@ -612,9 +542,10 @@ properties: This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule. + min_version: 'beta' output: true item_type: - type: ResourceRef + type: String - name: 'disabled' type: Boolean description: | @@ -622,20 +553,25 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + min_version: 'beta' output: true - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. + min_version: 'beta' output: true - name: 'selfLink' type: String description: Server-defined URL for the resource. + min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: Server-defined URL for this resource with the resource id. + min_version: 'beta' output: true - name: 'ruleTupleCount' type: Integer description: Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. + min_version: 'beta' output: true diff --git a/mmv1/products/compute/ForwardingRule.yaml b/mmv1/products/compute/ForwardingRule.yaml index ceaab26c4bf4..114d2a667bcb 100644 --- a/mmv1/products/compute/ForwardingRule.yaml +++ b/mmv1/products/compute/ForwardingRule.yaml @@ -370,10 +370,6 @@ properties: description: | Specifies the forwarding rule type. - Note that an empty string value (`""`) is also supported for some use - cases, for example PSC (private service connection) regional forwarding - rules. - For more information about forwarding rules, refer to [Forwarding rule concepts](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts). 
default_value: "EXTERNAL" @@ -413,7 +409,7 @@ properties: For Private Service Connect forwarding rules that forward traffic to Google APIs, a network must be provided. default_from_api: true - # TODO: When implementing new types enable converting the + # TODO(nelsonjr): When implementing new types enable converting the # manifest input from a single value to a range of form NN-NN. The API # accepts a single value, e.g. '80', but the API stores and returns # '80-80'. This causes idempotency false positive. @@ -506,6 +502,9 @@ properties: The forwarded traffic must be of a type appropriate to the target object. * For load balancers, see the "Target" column in [Port specifications](https://cloud.google.com/load-balancing/docs/forwarding-rule-concepts#ip_address_specifications). + * For Private Service Connect forwarding rules that forward traffic to Google APIs, provide the name of a supported Google API bundle: + * `vpc-sc` - [ APIs that support VPC Service Controls](https://cloud.google.com/vpc-service-controls/docs/supported-products). + * `all-apis` - [All supported Google APIs](https://cloud.google.com/vpc/docs/private-service-connect#supported-apis). For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service attachment. update_url: 'projects/{{project}}/regions/{{region}}/forwardingRules/{{name}}/setTarget' diff --git a/mmv1/products/compute/FutureReservation.yaml b/mmv1/products/compute/FutureReservation.yaml deleted file mode 100644 index c05e20f89e99..000000000000 --- a/mmv1/products/compute/FutureReservation.yaml +++ /dev/null @@ -1,608 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'FutureReservation' -description: | - Represents a future reservation resource in Compute Engine. Future reservations allow users - to reserve capacity for a specified time window, ensuring that resources are available - when needed. - - Reservations apply only to Compute Engine, Cloud Dataproc, and Google - Kubernetes Engine VM usage.Reservations do not apply to `f1-micro` or - `g1-small` machine types, preemptible VMs, sole tenant nodes, or other - services not listed above - like Cloud SQL and Dataflow. -references: - guides: - 'Future Reservations Guide': 'https://cloud.google.com/compute/docs/instances/future-reservations-overview' - api: https://cloud.google.com/compute/docs/reference/rest/v1/futureReservations -docs: -base_url: 'projects/{{project}}/zones/{{zone}}/futureReservations' -has_self_link: true -update_url: 'projects/{{project}}/zones/{{zone}}/futureReservations/{{name}}' -update_verb: 'PATCH' -update_mask: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false -collection_url_key: 'items' -custom_code: - update_encoder: 'templates/terraform/update_encoder/future_reservation.go.tmpl' -min_version: beta -examples: - - name: 'future_reservation_basic' - primary_resource_id: 'gce_future_reservation' - vars: - future_reservation_name: 'gce-future-reservation' - test_env_vars: - project: 'PROJECT_NAME' - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - - 
name: 'future_reservation_aggregate_reservation' - primary_resource_id: 'gce_future_reservation' - vars: - future_reservation_name: 'gce-future-reservation-aggregate-reservation' - test_env_vars: - project: 'PROJECT_NAME' - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - - name: 'shared_future_reservation' - primary_resource_id: 'gce_future_reservation' - vars: - future_reservation_name: 'gce-shared-future-reservation' - test_env_vars: - project: 'PROJECT_NAME' - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - exclude_docs: true - skip_vcr: true -parameters: - - name: 'name' - type: String - description: | - Name of the resource. Provided by the client when the resource is - created. The name must be 1-63 characters long, and comply with - RFC1035. Specifically, the name must be 1-63 characters long and match - the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the - first character must be a lowercase letter, and all following - characters must be a dash, lowercase letter, or digit, except the las - character, which cannot be a dash. - required: true - immutable: true -properties: - - name: 'zone' - type: String - description: | - URL of the Zone where this future reservation resides. - output: true - - name: 'creationTimestamp' - type: Time - description: | - The creation timestamp for this future reservation in RFC3339 text format. - output: true - - name: 'selfLinkWithId' - type: String - description: | - Server-defined URL for this resource with the resource id. - output: true - - name: 'description' - type: String - description: | - An optional description of this resource. - - name: 'timeWindow' - type: NestedObject - description: | - Time window for this Future Reservation. - required: true - properties: - - name: 'startTime' - type: String - description: | - Start time of the future reservation in RFC3339 format. 
- required: true - - name: 'endTime' - type: String - description: | - End time of the future reservation in RFC3339 format. - - name: 'duration' - type: NestedObject - description: | - Duration of the future reservation - properties: - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. - - name: 'shareSettings' - type: NestedObject - description: | - Settings for sharing the future reservation - properties: - - name: 'shareType' - type: Enum - description: | - Type of sharing for this future reservation. - immutable: true - default_from_api: true - enum_values: - - 'LOCAL' - - 'SPECIFIC_PROJECTS' - - name: 'projects' - type: Array - description: | - list of Project names to specify consumer projects for this shared-reservation. This is only valid when shareType's value is SPECIFIC_PROJECTS. - item_type: - type: String - - name: 'projectMap' - type: Map - description: | - A map of project id and project config. This is only valid when shareType's value is SPECIFIC_PROJECTS. - key_name: 'id' - key_description: | - The project id/number which is deleting or adding to the project list. - value_type: - name: 'projectConfig' - type: NestedObject - properties: - - name: 'projectId' - type: String - description: | - The project ID, should be same as the key of this project config in the parent map. - - name: 'namePrefix' - type: String - description: | - Name prefix for the reservations to be created at the time of delivery. The name prefix must comply with RFC1035. Maximum allowed length for name prefix is 20. Automatically created reservations name format will be -date-####. 
- - name: 'status' - type: NestedObject - description: | - [Output only] Status of the Future Reservation - output: true - properties: - - name: 'procurementStatus' - type: Enum - description: | - Current state of this Future Reservation - enum_values: - - 'APPROVED' - - 'CANCELLED' - - 'COMMITTED' - - 'DECLINED' - - 'DRAFTING' - - 'FAILED' - - 'FAILED_PARTIALLY_FULFILLED' - - 'FULFILLED' - - 'PENDING_AMENDMENT_APPROVAL' - - 'PENDING_APPROVAL' - - 'PROCURING' - - 'PROVISIONING' - - name: 'lockTime' - type: String - description: | - Time when Future Reservation would become LOCKED, after which no modifications to Future Reservation will be allowed. Applicable only after the Future Reservation is in the APPROVED state. The lockTime is an RFC3339 string. The procurementStatus will transition to PROCURING state at this time. - - name: 'autoCreatedReservations' - type: Array - description: | - Fully qualified urls of the automatically created reservations at startTime. - item_type: - type: String - - name: 'fulfilledCount' - type: String - description: | - This count indicates the fulfilled capacity so far. This is set during "PROVISIONING" state. This count also includes capacity delivered as part of existing matching reservations. - - name: 'specificSkuProperties' - type: NestedObject - description: | - Instance properties related to the Future Reservation. - properties: - - name: 'sourceInstanceTemplateId' - type: String - description: | - ID of the instance template used to populate the Future Reservation properties. - - name: 'amendmentStatus' - type: Enum - description: | - The current status of the requested amendment. - - name: 'lastKnownGoodState' - type: NestedObject - description: | - This field represents the future reservation before an amendment was requested. If the amendment is declined, the Future Reservation will be reverted to the last known good state. 
The last known good state is not set when updating a future reservation whose Procurement Status is DRAFTING. - properties: - - name: 'futureReservationSpecs' - type: NestedObject - description: | - The previous instance-related properties of the Future Reservation. - properties: - - name: 'specificSkuProperties' - type: NestedObject - description: | - The previous instance related properties of the Future Reservation. - properties: - - name: 'instanceProperties' - type: NestedObject - description: | - Properties of the SKU instances being reserved. - properties: - - name: 'machineType' - type: String - description: | - Specifies type of machine (name only) which has fixed number of vCPUs and fixed amount of memory. This also includes specifying custom machine type following custom-NUMBER_OF_CPUS-AMOUNT_OF_MEMORY pattern. - - name: 'guestAccelerators' - type: Array - description: | - Specifies accelerator type and count. - item_type: - type: NestedObject - properties: - - name: 'acceleratorType' - type: String - description: | - Full or partial URL of the accelerator type resource to attach to this instance. - - name: 'acceleratorCount' - type: Integer - description: | - The number of the guest accelerator cards exposed to this instance. - - name: 'minCpuPlatform' - type: String - description: | - Minimum CPU platform for the reservation. - - name: 'localSsds' - type: Array - description: | - Specifies amount of local ssd to reserve with each instance. The type of disk is local-ssd. - item_type: - type: NestedObject - properties: - - name: 'diskSizeGb' - type: String - description: | - Specifies the size of the disk in base-2 GB. - - name: 'interface' - type: Enum - description: | - Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. 
- enum_values: - - 'SCSI' - - 'NVME' - - name: 'maintenanceFreezeDurationHours' - type: Integer - description: | - Specifies the number of hours after reservation creation where instances using the reservation won't be scheduled for maintenance. - - name: 'locationHint' - type: String - description: | - An opaque location hint used to place the allocation close to other resources. This field is for use by internal tools that use the public API. - - name: 'maintenanceInterval' - type: Enum - description: | - Specifies the frequency of planned maintenance events. The accepted values are: PERIODIC. - enum_values: - - 'PERIODIC' - - name: 'totalCount' - type: String - description: | - Total number of instances for which capacity assurance is requested at a future time period. - - name: 'sourceInstanceTemplate' - type: String - description: | - The instance template that will be used to populate the ReservedInstanceProperties of the future reservation - - name: 'timeWindow' - type: NestedObject - description: | - [Output Only] The previous time window of the Future Reservation. - properties: - - name: 'startTime' - type: String - description: | - Start time of the Future Reservation. The startTime is an RFC3339 string. - - name: 'endTime' - type: String - description: | - End time of the Future Reservation in RFC3339 format. - - name: 'duration' - type: NestedObject - description: | - Specifies the duration of the reservation. - properties: - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'shareSettings' - type: NestedObject - description: | - The previous share settings of the Future Reservation. 
- properties: - - name: 'shareType' - type: Enum - description: | - Type of sharing for this shared-reservation - enum_values: - - 'LOCAL' - - 'ORGANIZATION' - - 'SPECIFIC_PROJECTS' - - name: 'projects' - type: Array - description: | - A futureReservations.list of Project names to specify consumer projects for this shared-reservation. This is only valid when shareType's value is SPECIFIC_PROJECTS. - item_type: - type: String - - name: 'projectMap' - type: Map - key_name: project - key_description: | - The project ID, used as the key in this map. - description: | - A map of project id and project config. This is only valid when shareType's value is SPECIFIC_PROJECTS. - value_type: - name: 'projectConfig' - type: NestedObject - properties: - - name: 'projectId' - type: String - description: | - The project ID, should be same as the key of this project config in the parent map. - - name: 'procurementStatus' - type: Enum - description: | - The status of the last known good state for the Future Reservation - - name: 'namePrefix' - type: String - description: | - The name prefix of the Future Reservation before an amendment was requested. - - name: 'description' - type: String - description: | - The description of the FutureReservation before an amendment was requested. - - name: 'lockTime' - type: String - description: | - The lock time of the FutureReservation before an amendment was requested. - - name: 'existingMatchingUsageInfo' - type: NestedObject - description: | - Represents the matching usage for the future reservation before an amendment was requested. - properties: - - name: 'count' - type: String - description: | - Count representing minimum(FR totalCount, matching_reserved_capacity+matching_unreserved_instances). - - name: 'timeStamp' - type: String - description: | - Timestamp when the matching usage was calculated. 
- - name: 'planningStatus' - type: Enum - description: | - Planning state before being submitted for evaluation - enum_values: - - 'DRAFT' - - 'SUBMITTED' - default_from_api: true - - name: 'autoDeleteAutoCreatedReservations' - type: Boolean - ignore_read: true - description: | - Setting for enabling or disabling automatic deletion for auto-created reservation. If set to true, auto-created reservations will be deleted at Future Reservation's end time (default) or at user's defined timestamp if any of the [autoCreatedReservationsDeleteTime, autoCreatedReservationsDuration] values is specified. For keeping auto-created reservation indefinitely, this value should be set to false. - - name: 'specificReservationRequired' - type: Boolean - description: | - Indicates whether the auto-created reservation can be consumed by VMs with affinity for "any" reservation. If the field is set, then only VMs that target the reservation by name can consume from the delivered reservation. - - name: 'reservationName' - type: String - description: | - Name of reservations where the capacity is provisioned at the time of delivery of future reservations. If the reservation with the given name does not exist already, it is created automatically at the time of Approval with INACTIVE state till specified start-time. Either provide the reservationName or a namePrefix. - - name: 'deploymentType' - type: Enum - description: | - Type of the deployment requested as part of future reservation. - enum_values: - - 'DENSE' - - 'FLEXIBLE' - - name: 'reservationMode' - type: Enum - description: | - The reservation mode which determines reservation-termination behavior and expected pricing. - enum_values: - - 'CALENDAR' - - 'DEFAULT' - - name: 'commitmentInfo' - type: NestedObject - description: | - If not present, then FR will not deliver a new commitment or update an existing commitment. 
- properties: - - name: 'commitmentPlan' - type: Enum - description: | - Indicates if a Commitment needs to be created as part of FR delivery. If this field is not present, then no commitment needs to be created. - enum_values: - - 'INVALID' - - 'THIRTY_SIX_MONTH' - - 'TWELVE_MONTH' - - name: 'commitmentName' - type: String - description: | - name of the commitment where capacity is being delivered to. - - name: 'previousCommitmentTerms' - type: Enum - description: | - Only applicable if FR is delivering to the same reservation. If set, all parent commitments will be extended to match the end date of the plan for this commitment. - enum_values: - - 'EXTEND' - - name: 'schedulingType' - type: Enum - description: | - Maintenance information for this reservation - enum_values: - - 'GROUPED' - - 'INDEPENDENT' - - name: 'specificSkuProperties' - type: NestedObject - description: | - Future Reservation configuration to indicate instance properties and total count. - properties: - - name: 'instanceProperties' - type: NestedObject - description: | - Properties of the SKU instances being reserved. - properties: - - name: 'machineType' - type: String - description: | - Specifies type of machine (name only) which has fixed number of vCPUs and fixed amount of memory. This also includes specifying custom machine type following custom-NUMBER_OF_CPUS-AMOUNT_OF_MEMORY pattern. - - name: 'guestAccelerators' - type: Array - description: | - Specifies accelerator type and count. - item_type: - type: NestedObject - properties: - - name: 'acceleratorType' - type: String - description: | - Full or partial URL of the accelerator type resource to attach to this instance. - - name: 'acceleratorCount' - type: Integer - description: | - The number of the guest accelerator cards exposed to this instance. - - name: 'minCpuPlatform' - type: String - description: | - Minimum cpu platform the reservation. 
- - name: 'localSsds' - type: Array - description: | - Specifies amount of local ssd to reserve with each instance. The type of disk is local-ssd. - item_type: - type: NestedObject - properties: - - name: 'diskSizeGb' - type: String - description: | - Specifies the size of the disk in base-2 GB. - - name: 'interface' - type: Enum - description: | - Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. - enum_values: - - 'SCSI' - - 'NVME' - - name: 'maintenanceFreezeDurationHours' - type: Integer - description: | - Specifies the number of hours after reservation creation where instances using the reservation won't be scheduled for maintenance. - - name: 'locationHint' - type: String - description: | - An opaque location hint used to place the allocation close to other resources. This field is for use by internal tools that use the public API. - - name: 'maintenanceInterval' - type: Enum - description: | - Specifies the frequency of planned maintenance events. The accepted values are: PERIODIC - enum_values: - - 'PERIODIC' - - name: 'totalCount' - type: String - description: | - Total number of instances for which capacity assurance is requested at a future time period. - - name: 'sourceInstanceTemplate' - type: String - description: | - The instance template that will be used to populate the ReservedInstanceProperties of the future reservation - - name: 'autoCreatedReservationsDeleteTime' - type: String - ignore_read: true - description: | - Future timestamp when the FR auto-created reservations will be deleted by Compute Engine. - - name: 'autoCreatedReservationsDuration' - type: NestedObject - description: | - Specifies the duration of auto-created reservations. It represents relative time to future reservation startTime when auto-created reservations will be automatically deleted by Compute Engine. Duration time unit is represented as a count of seconds and fractions of seconds at nanosecond resolution. 
- properties: - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'aggregateReservation' - type: NestedObject - description: | - Aggregate reservation details for the future reservation. - immutable: true - properties: - - name: 'vmFamily' - type: Enum - description: | - The VM family that all instances scheduled against this reservation must belong to. - immutable: true - enum_values: - - 'VM_FAMILY_CLOUD_TPU_DEVICE_CT3' - - 'VM_FAMILY_CLOUD_TPU_LITE_DEVICE_CT5L' - - 'VM_FAMILY_CLOUD_TPU_LITE_POD_SLICE_CT5LP' - - 'VM_FAMILY_CLOUD_TPU_LITE_POD_SLICE_CT6E' - - 'VM_FAMILY_CLOUD_TPU_POD_SLICE_CT3P' - - 'VM_FAMILY_CLOUD_TPU_POD_SLICE_CT4P' - - 'VM_FAMILY_CLOUD_TPU_POD_SLICE_CT5P' - - name: reservedResources - type: Array - description: | - futureReservations.list of reserved resources (CPUs, memory, accelerators). - required: true - immutable: true - item_type: - type: NestedObject - properties: - - name: 'accelerator' - type: NestedObject - description: | - Properties of accelerator resources in this reservation. - immutable: true - properties: - - name: 'acceleratorCount' - type: Integer - description: | - Number of accelerators of specified type. - immutable: true - - name: 'acceleratorType' - type: String - description: | - Full or partial URL to accelerator type. e.g. "projects/{PROJECT}/zones/{ZONE}/acceleratorTypes/ct4l" - immutable: true - - name: 'workloadType' - type: Enum - description: | - The workload type of the instances that will target this reservation. 
- immutable: true - enum_values: - - 'BATCH' - - 'SERVING' - - 'UNSPECIFIED' diff --git a/mmv1/products/compute/GlobalAddress.yaml b/mmv1/products/compute/GlobalAddress.yaml index 33ed7872f069..0f130cd27b86 100644 --- a/mmv1/products/compute/GlobalAddress.yaml +++ b/mmv1/products/compute/GlobalAddress.yaml @@ -14,7 +14,6 @@ --- name: 'GlobalAddress' api_resource_type_kind: Address -cai_resource_kind: GlobalAddress kind: 'compute#address' description: | Represents a Global Address resource. Global addresses are used for @@ -39,7 +38,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: pre_create: 'templates/terraform/pre_create/compute_global_address.go.tmpl' post_create: 'templates/terraform/post_create/labels.tmpl' diff --git a/mmv1/products/compute/GlobalForwardingRule.yaml b/mmv1/products/compute/GlobalForwardingRule.yaml index dcf27c10af18..c110523f0929 100644 --- a/mmv1/products/compute/GlobalForwardingRule.yaml +++ b/mmv1/products/compute/GlobalForwardingRule.yaml @@ -318,8 +318,6 @@ properties: - 'EXTERNAL_MANAGED' - 'INTERNAL_MANAGED' - 'INTERNAL_SELF_MANAGED' - update_url: 'projects/{{project}}/global/forwardingRules/{{name}}' - update_verb: 'PATCH' - name: 'metadataFilters' type: Array description: | @@ -413,7 +411,7 @@ properties: For Private Service Connect forwarding rules that forward traffic to Google APIs, a network must be provided. default_from_api: true - # TODO: When implementing new types enable converting the + # TODO(nelsonjr): When implementing new types enable converting the # manifest input from a single value to a range of form NN-NN. The API # accepts a single value, e.g. '80', but the API stores and returns # '80-80'. This causes idempotency false positive. 
@@ -502,40 +500,6 @@ properties: enum_values: - 'PREMIUM' - 'STANDARD' - - name: 'externalManagedBackendBucketMigrationState' - type: Enum - description: | - Specifies the canary migration state for the backend buckets attached to this forwarding rule. - Possible values are PREPARE, TEST_BY_PERCENTAGE, and TEST_ALL_TRAFFIC. - - To begin the migration from EXTERNAL to EXTERNAL_MANAGED, the state must be changed to - PREPARE. The state must be changed to TEST_ALL_TRAFFIC before the loadBalancingScheme can be - changed to EXTERNAL_MANAGED. Optionally, the TEST_BY_PERCENTAGE state can be used to migrate - traffic to backend buckets attached to this forwarding rule by percentage using - externalManagedBackendBucketMigrationTestingPercentage. - - Rolling back a migration requires the states to be set in reverse order. So changing the - scheme from EXTERNAL_MANAGED to EXTERNAL requires the state to be set to TEST_ALL_TRAFFIC at - the same time. Optionally, the TEST_BY_PERCENTAGE state can be used to migrate some traffic - back to EXTERNAL or PREPARE can be used to migrate all traffic back to EXTERNAL. - enum_values: - - 'PREPARE' - - 'TEST_BY_PERCENTAGE' - - 'TEST_ALL_TRAFFIC' - update_url: 'projects/{{project}}/global/forwardingRules/{{name}}' - update_verb: 'PATCH' - - name: 'externalManagedBackendBucketMigrationTestingPercentage' - type: Double - description: | - Determines the fraction of requests to backend buckets that should be processed by the Global - external Application Load Balancer. - - The value of this field must be in the range [0, 100]. - - This value can only be set if the loadBalancingScheme in the forwarding rule is set to - EXTERNAL (when using the Classic ALB) and the migration state is TEST_BY_PERCENTAGE. 
- update_url: 'projects/{{project}}/global/forwardingRules/{{name}}' - update_verb: 'PATCH' - name: 'serviceDirectoryRegistrations' type: Array description: | diff --git a/mmv1/products/compute/HealthCheck.yaml b/mmv1/products/compute/HealthCheck.yaml index 5e7092ec02c9..f99e6cb340d6 100644 --- a/mmv1/products/compute/HealthCheck.yaml +++ b/mmv1/products/compute/HealthCheck.yaml @@ -51,7 +51,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: constants: 'templates/terraform/constants/health_check.tmpl' encoder: 'templates/terraform/encoders/health_check_type.tmpl' @@ -109,18 +108,6 @@ examples: primary_resource_id: 'grpc-health-check' vars: health_check_name: 'grpc-health-check' - - name: 'health_check_grpc_with_tls' - primary_resource_id: 'grpc-with-tls-health-check' - min_version: 'beta' - vars: - health_check_name: 'grpc-with-tls-health-check' - tgc_skip_test: 'grpcTlsHealthCheck is not in CAI asset, but is required in this test.' - - name: 'health_check_grpc_with_tls_full' - primary_resource_id: 'grpc-with-tls-health-check' - min_version: 'beta' - vars: - health_check_name: 'grpc-with-tls-health-check' - tgc_skip_test: 'grpcTlsHealthCheck is not in CAI asset, but is required in this test.' 
- name: 'health_check_with_logging' primary_resource_id: 'health-check-with-logging' min_version: 'beta' @@ -205,7 +192,6 @@ properties: type: String min_size: 3 max_size: 3 - is_missing_in_cai: true - name: 'unhealthyThreshold' type: Integer description: | @@ -232,7 +218,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -358,7 +343,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -484,7 +468,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -591,7 +574,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -698,7 +680,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -824,7 +805,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'port' @@ -887,65 +867,6 @@ properties: - 'grpc_health_check.0.port_name' - 'grpc_health_check.0.port_specification' - 'grpc_health_check.0.grpc_service_name' - - name: 'grpcTlsHealthCheck' - min_version: beta - type: NestedObject - exactly_one_of: - - 'http_health_check' - - 'https_health_check' - - 'http2_health_check' - - 'tcp_health_check' - - 'ssl_health_check' - - 'grpc_health_check' - - 'grpc_tls_health_check' - diff_suppress_func: 'portDiffSuppress' - is_missing_in_cai: true - properties: - - name: 'port' - type: Integer - description: | - The port number for the health check request. 
- Must be specified if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535. - at_least_one_of: - - 'grpc_tls_health_check.0.port' - - 'grpc_tls_health_check.0.port_specification' - - 'grpc_tls_health_check.0.grpc_service_name' - - name: 'portSpecification' - type: Enum - description: | - Specifies how port is selected for health checking, can be one of the - following values: - - * `USE_FIXED_PORT`: The port number in `port` is used for health checking. - - * `USE_NAMED_PORT`: Not supported for GRPC with TLS health checking. - - * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each - network endpoint is used for health checking. For other backends, the - port or named port specified in the Backend Service is used for health - checking. - - If not specified, gRPC with TLS health check follows behavior specified in the `port` field. - at_least_one_of: - - 'grpc_tls_health_check.0.port' - - 'grpc_tls_health_check.0.port_specification' - - 'grpc_tls_health_check.0.grpc_service_name' - enum_values: - - 'USE_FIXED_PORT' - - 'USE_NAMED_PORT' - - 'USE_SERVING_PORT' - - name: 'grpcServiceName' - type: String - description: | - The gRPC service name for the health check. - The value of grpcServiceName has the following meanings by convention: - - Empty serviceName means the overall status of all services at the backend. - - Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service. - The grpcServiceName can only be ASCII. 
- at_least_one_of: - - 'grpc_tls_health_check.0.port' - - 'grpc_tls_health_check.0.port_specification' - - 'grpc_tls_health_check.0.grpc_service_name' - name: 'logConfig' type: NestedObject description: | diff --git a/mmv1/products/compute/Image.yaml b/mmv1/products/compute/Image.yaml index 6571ab51ff4b..ef66b209418f 100644 --- a/mmv1/products/compute/Image.yaml +++ b/mmv1/products/compute/Image.yaml @@ -56,7 +56,6 @@ iam_policy: parent_resource_attribute: 'image' iam_conditions_request_type: 'QUERY_PARAM' example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' -include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'image_basic' @@ -106,7 +105,7 @@ properties: type: Integer description: | Size of the image when restored onto a persistent disk (in GB). - # TODO: Build family support. + # TODO(alexstephen): Build family support. # Families use a different API default_from_api: true - name: 'family' @@ -241,7 +240,7 @@ properties: This is provided by the client when the disk image is created. api_name: sha1Checksum diff_suppress_func: 'tpgresource.Base64DiffSuppress' - # TODO: Figure out cross-module ResourceRefs + # TODO(alexstephen): Figure out cross-module ResourceRefs - name: 'source' type: String description: | diff --git a/mmv1/products/compute/Instance.yaml b/mmv1/products/compute/Instance.yaml index 9e3bcca3b078..fc8fd5e82cf6 100644 --- a/mmv1/products/compute/Instance.yaml +++ b/mmv1/products/compute/Instance.yaml @@ -247,7 +247,7 @@ properties: - 'SCSI' - 'NVME' # Ignoring kind - It's a constant and we don't need it. - # TODO: Place in licenses - it's a Array of + # TODO(alexstephen): Place in licenses - it's a Array of # ResourceRefs - name: 'mode' type: Enum @@ -298,7 +298,7 @@ properties: description: | The number of the guest accelerator cards exposed to this instance. - # TODO: Change to ResourceRef once AcceleratorType is + # TODO(alexstephen): Change to ResourceRef once AcceleratorType is # created. 
- name: 'acceleratorType' type: String @@ -334,7 +334,7 @@ properties: Labels to apply to this instance. A list of key->value pairs. update_url: 'projects/{{project}}/zones/{{zone}}/instances/{{name}}/setLabels' update_verb: 'POST' - # TODO: Implement updating metadata *after* resource is created. + # TODO(nelsonjr): Implement updating metadata *after* resource is created. # Expose instance 'metadata' as a simple name/value pair hash. However the API # defines metadata as a NestedObject with the following layout: @@ -365,7 +365,7 @@ properties: description: 'A reference to a machine type which defines VM kind.' update_url: 'projects/{{project}}/zones/{{zone}}/instances/{{name}}/setMachineType' update_verb: 'POST' - # TODO: Add metadata + # TODO(alexstephen): Add metadata custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'MachineType' imports: 'selfLink' @@ -670,7 +670,7 @@ properties: by the setTags method. Each tag within the list must comply with RFC1035. properties: - # TODO Investigate bytes type + # TODO(alexstephen) Investigate bytes type - name: 'fingerprint' type: String description: | diff --git a/mmv1/products/compute/InstanceGroupManager.yaml b/mmv1/products/compute/InstanceGroupManager.yaml index 4720d32165c5..fe416170ccec 100644 --- a/mmv1/products/compute/InstanceGroupManager.yaml +++ b/mmv1/products/compute/InstanceGroupManager.yaml @@ -174,7 +174,7 @@ properties: The name of the managed instance group. The name must be 1-63 characters long, and comply with RFC1035. 
required: true - # TODO: Make namedPorts a NameValue(name[string], port[integer]) + # TODO(nelsonjr): Make namedPorts a NameValue(name[string], port[integer]) - name: 'namedPorts' type: Array description: diff --git a/mmv1/products/compute/InstanceSettings.yaml b/mmv1/products/compute/InstanceSettings.yaml index 1818f9aca839..1ce2c6746646 100644 --- a/mmv1/products/compute/InstanceSettings.yaml +++ b/mmv1/products/compute/InstanceSettings.yaml @@ -41,8 +41,6 @@ async: result: resource_inside_response: false custom_code: - pre_create: 'templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl' - pre_update: 'templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl' custom_delete: 'templates/terraform/custom_delete/clear_instance_settings.go.tmpl' test_check_destroy: 'templates/terraform/custom_check_destroy/skip_delete_during_test.go.tmpl' examples: @@ -63,6 +61,7 @@ properties: The fingerprint used for optimistic locking of this resource. Used internally during updates. output: true + custom_expand: 'templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl' - name: 'metadata' type: NestedObject description: | diff --git a/mmv1/products/compute/Interconnect.yaml b/mmv1/products/compute/Interconnect.yaml index 0943605e9a7d..f3d1b9ae06b9 100644 --- a/mmv1/products/compute/Interconnect.yaml +++ b/mmv1/products/compute/Interconnect.yaml @@ -401,7 +401,6 @@ properties: interconnects.list of features requested for this Interconnect connection enum_values: - 'MACSEC' - - 'CROSS_SITE_NETWORK' - 'IF_MACSEC' - name: 'availableFeatures' type: Array @@ -411,115 +410,5 @@ properties: ports. If not present then the Interconnect connection is provisioned on non-MACsec capable ports and MACsec isn't supported and enabling MACsec fails). 
output: true - item_type: - type: Enum - enum_values: - - 'IF_CROSS_SITE_NETWORK' - - 'IF_MACSEC' - - 'MACSEC' - - name: 'wireGroups' - type: Array - min_version: beta - description: | - A list of the URLs of all CrossSiteNetwork WireGroups configured to use this Interconnect. The Interconnect cannot be deleted if this list is non-empty. - output: true item_type: type: String - - name: 'interconnectGroups' - type: Array - description: | - URLs of InterconnectGroups that include this Interconnect. - Order is arbitrary and items are unique. - output: true - is_set: true - item_type: - type: String - - name: 'aaiEnabled' - type: Boolean - description: | - Enable or disable the Application Aware Interconnect(AAI) feature on this interconnect. - min_version: beta - - name: 'applicationAwareInterconnect' - type: NestedObject - description: | - Configuration that enables Media Access Control security (MACsec) on the Cloud - Interconnect connection between Google and your on-premises router. - min_version: beta - properties: - - name: 'profileDescription' - type: String - description: | - A description for the AAI profile on this interconnect. - min_version: beta - - name: 'strictPriorityPolicy' - type: NestedObject - allow_empty_object: true - description: | - Specify configuration for StrictPriorityPolicy. - properties: [] - min_version: beta - - name: 'bandwidthPercentagePolicy' - type: NestedObject - description: | - Bandwidth Percentage policy allows you to have granular control over how your Interconnect - bandwidth is utilized among your workloads mapping to different traffic classes. - min_version: beta - properties: - - name: bandwidthPercentage - type: Array - description: | - Specify bandwidth percentages for various traffic classes for queuing - type Bandwidth Percent. 
- api_name: bandwidthPercentages - min_version: beta - item_type: - type: NestedObject - properties: - - name: trafficClass - type: Enum - description: | - Enum representing the various traffic classes offered by AAI. - default_value: "TC_UNSPECIFIED" - enum_values: - - 'TC_UNSPECIFIED' - - 'TC1' - - 'TC2' - - 'TC3' - - 'TC4' - - 'TC5' - - 'TC6' - min_version: beta - - name: percentage - type: Integer - description: | - Bandwidth percentage for a specific traffic class. - min_version: beta - - name: shapeAveragePercentage - type: Array - description: | - Optional field to specify a list of shape average percentages to be - applied in conjunction with StrictPriorityPolicy or BandwidthPercentagePolicy - min_version: beta - api_name: shapeAveragePercentages - item_type: - type: NestedObject - properties: - - name: trafficClass - type: Enum - description: | - Enum representing the various traffic classes offered by AAI. - default_value: "TC_UNSPECIFIED" - enum_values: - - 'TC_UNSPECIFIED' - - 'TC1' - - 'TC2' - - 'TC3' - - 'TC4' - - 'TC5' - - 'TC6' - min_version: beta - - name: percentage - type: Integer - description: | - Bandwidth percentage for a specific traffic class. 
- min_version: beta diff --git a/mmv1/products/compute/InterconnectAttachment.yaml b/mmv1/products/compute/InterconnectAttachment.yaml index 6de7abf63d20..5c0404efeb12 100644 --- a/mmv1/products/compute/InterconnectAttachment.yaml +++ b/mmv1/products/compute/InterconnectAttachment.yaml @@ -63,13 +63,6 @@ examples: address_name: 'test-address' router_name: 'test-router' network_name: 'test-network' - - name: 'compute_interconnect_attachment_custom_ranges' - primary_resource_id: 'custom-ranges-interconnect-attachment' - vars: - interconnect_attachment_name: 'test-custom-ranges-interconnect-attachment' - router_name: 'test-router' - network_name: 'test-network' - min_version: beta parameters: - name: 'region' type: ResourceRef @@ -115,8 +108,8 @@ properties: - name: 'mtu' type: String description: | - Maximum Transmission Unit (MTU), in bytes, of packets passing through this interconnect attachment. - Valid values are 1440, 1460, 1500, and 8896. If not specified, the value will default to 1440. + Maximum Transmission Unit (MTU), in bytes, of packets passing through + this interconnect attachment. Currently, only 1440 and 1500 are allowed. If not specified, the value will default to 1440. default_from_api: true custom_flatten: 'templates/terraform/custom_flatten/float64_to_int_to_string.go.tmpl' - name: 'bandwidth' @@ -361,36 +354,3 @@ properties: You must always provide an up-to-date fingerprint hash in order to update or change labels, otherwise the request will fail with error 412 conditionNotMet. output: true - - name: 'candidateCloudRouterIpAddress' - type: String - description: | - Single IPv4 address + prefix length to be configured on the cloud router interface for this - interconnect attachment. Example: 203.0.113.1/29 - immutable: true - min_version: beta - - name: 'candidateCustomerRouterIpAddress' - type: String - description: | - Single IPv4 address + prefix length to be configured on the customer router interface for this - interconnect attachment. 
Example: 203.0.113.2/29 - immutable: true - min_version: beta - - name: 'candidateCloudRouterIpv6Address' - type: String - description: | - Single IPv6 address + prefix length to be configured on the cloud router interface for this - interconnect attachment. Example: 2001:db8::1/125 - immutable: true - min_version: beta - - name: 'candidateCustomerRouterIpv6Address' - type: String - description: | - Single IPv6 address + prefix length to be configured on the customer router interface for this - interconnect attachment. Example: 2001:db8::2/125 - immutable: true - min_version: beta - - name: 'attachmentGroup' - type: String - description: | - URL of the AttachmentGroup that includes this Attachment. - output: true diff --git a/mmv1/products/compute/InterconnectAttachmentGroup.yaml b/mmv1/products/compute/InterconnectAttachmentGroup.yaml deleted file mode 100644 index a059be45471b..000000000000 --- a/mmv1/products/compute/InterconnectAttachmentGroup.yaml +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'InterconnectAttachmentGroup' -kind: 'compute#InterconnectAttachmentGroup' -description: | - An interconnect attachment group resource allows customers to create, - analyze, and expand highly available deployments. 
-references: - guides: - 'Create a Dedicated Interconnect': 'https://cloud.google.com/network-connectivity/docs/interconnect/concepts/dedicated-overview' - api: 'https://cloud.google.com/compute/docs/reference/rest/v1/interconnects' -docs: -base_url: 'projects/{{project}}/global/interconnectAttachmentGroups' -self_link: 'projects/{{project}}/global/interconnectAttachmentGroups/{{name}}' -update_verb: 'PATCH' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false -examples: - - name: 'interconnect_attachment_group_basic' - primary_resource_id: 'example-interconnect-attachment-group' - vars: - interconnect_attachment_group_name: 'example-interconnect-attachment-group' - deletion_protection: 'true' - test_vars_overrides: - 'deletion_protection': 'false' -parameters: -properties: - - name: 'description' - type: String - description: | - An optional description of this resource. Provide this property when you create the resource. - - name: 'creationTimestamp' - type: Time - description: | - Creation timestamp in RFC3339 text format. - output: true - - name: 'name' - type: String - description: | - Name of the resource. Provided by the client when the resource is created. The name must be - 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters - long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first - character must be a lowercase letter, and all following characters must be a dash, - lowercase letter, or digit, except the last character, which cannot be a dash. - required: true - immutable: true - validation: - regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - - name: 'attachments' - type: Map - description: | - Attachments in the AttachmentGroup. Keys are arbitrary user-specified - strings. 
Users are encouraged, but not required, to use their preferred - format for resource links as keys. - Note that there are add-members and remove-members methods in gcloud. - The size of this map is limited by an "Attachments per group" quota. - key_name: 'name' - key_description: | - Resource name - value_type: - name: mapObjectName - type: NestedObject - properties: - - name: 'attachment' - type: String - - name: 'interconnectGroup' - type: String - description: | - The URL of an InterconnectGroup that groups these Attachments' - Interconnects. Customers do not need to set this unless directed by - Google Support. - - name: 'intent' - type: NestedObject - description: | - The user's intent for this group. This is the only required field besides - the name that must be specified on group creation. - properties: - - name: 'availabilitySla' - type: Enum - description: | - Which SLA the user intends this group to support. - enum_values: - - 'PRODUCTION_NON_CRITICAL' - - 'PRODUCTION_CRITICAL' - - 'NO_SLA' - - 'AVAILABILITY_SLA_UNSPECIFIED' - required: true - - name: 'logicalStructure' - type: NestedObject - description: | - An analysis of the logical layout of Attachments in this - group. Every Attachment in the group is shown once in this structure. - output: true - properties: - - name: 'regions' - type: Array - description: | - The regions Attachments in this group are in. - output: true - item_type: - type: NestedObject - properties: - - name: 'region' - type: String - description: | - The name of a region, like "us-central1". - output: true - - name: 'metros' - type: Array - description: | - The metros of Attachments in this group in this region. - output: true - item_type: - type: NestedObject - properties: - - name: 'metro' - type: String - description: | - The name of the metro, as a three-letter lowercase - string like "iad". This is the first component of the location of an - Interconnect. 
- output: true - - name: 'facilities' - type: Array - description: | - The facilities used for this group's Attachments' - Interconnects. - output: true - item_type: - type: NestedObject - properties: - - name: 'facility' - type: String - description: | - The name of a facility, like "iad-1234". - output: true - - name: 'zones' - type: Array - description: | - The zones that Interconnects in this facility are - present in. - output: true - item_type: - type: NestedObject - properties: - - name: 'zone' - type: String - description: | - The zones that Attachments in this group are present - in, in the given facilities. This is inherited from their - Interconnects. - output: true - - name: 'attachment' - type: Array - description: | - URLs of Attachments in the given zone, to the given - region, on Interconnects in the given facility and metro. Every - Attachment in the AG has such an entry. - output: true - item_type: - type: String - - name: 'configured' - type: NestedObject - description: | - The redundancy this group is configured to support. The way a - user queries what SLA their Attachment gets is by looking at this field of - the Attachment's AttachmentGroup. - output: true - properties: - - name: 'availabilitySla' - type: NestedObject - description: | - Which SLA this group is configured to support, and why this - group does or does not meet that SLA's requirements. - output: true - properties: - - name: 'effectiveSla' - type: Enum - description: | - Which SLA this group supports. Options are the same as the - intent. - enum_values: - - 'PRODUCTION_CRITICAL' - - 'PRODUCTION_NON_CRITICAL' - - 'NO_SLA' - - 'EFFECTIVE_SLA_UNSPECIFIED' - output: true - - name: 'intendedSlaBlockers' - type: Array - description: | - Reasons why configuration.availabilitySLA.sla differs from - intent.availabilitySLA. This list is empty if and only if those are the - same. 
- output: true - item_type: - type: NestedObject - properties: - - name: 'blockerType' - type: Enum - description: | - The category of an unmet SLA requirement. - enum_values: - - 'BLOCKER_TYPE_UNSPECIFIED' - - 'NO_ATTACHMENTS' - - 'INCOMPATIBLE_REGIONS' - - 'INCOMPATIBLE_METROS' - - 'NO_ATTACHMENTS_IN_METRO_AND_ZONE' - - 'MISSING_GLOBAL_ROUTING' - - 'OTHER' - output: true - - name: 'explanation' - type: String - description: | - A human-readable explanation of this requirement and - why it's not met. This is set for every type of requirement. - output: true - - name: 'documentationLink' - type: String - description: | - The url of Google Cloud public documentation explaining - this requirement. This is set for every type of requirement. - output: true - - name: 'regions' - type: Array - description: | - Regions used to explain this blocker in more - detail. These are region names formatted like "us-central1". This - will be set for some blockers (like INCOMPATIBLE_REGIONS) but does - not apply to others. - item_type: - type: String - output: true - - name: 'metros' - type: Array - description: | - Metros used to explain this blocker in more detail. - These are three-letter lowercase strings like "iad". This will be set - for some blockers (like NO_ATTACHMENTS_IN_METRO_AND_ZONE) but does - not apply to others. - item_type: - type: String - output: true - - name: 'zones' - type: Array - description: | - Zones used to explain this blocker in more detail. - Format is "zone1" and/or "zone2". This will be set for some blockers - (like MISSING_ZONE) but does not apply to others. - item_type: - type: String - output: true - - name: 'attachments' - type: Array - description: | - URLs of any particular Attachments to explain this - blocker in more detail. 
- item_type: - type: String - output: true diff --git a/mmv1/products/compute/InterconnectGroup.yaml b/mmv1/products/compute/InterconnectGroup.yaml deleted file mode 100644 index 233fc98a49c4..000000000000 --- a/mmv1/products/compute/InterconnectGroup.yaml +++ /dev/null @@ -1,265 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'InterconnectGroup' -kind: 'compute#InterconnectGroup' -description: | - An interconnect group resource allows customers to create, analyze, and - expand their redundant connections. 
-references: - guides: - 'Create a Dedicated Interconnect': 'https://cloud.google.com/network-connectivity/docs/interconnect/concepts/dedicated-overview' - api: 'https://cloud.google.com/compute/docs/reference/rest/v1/interconnects' -docs: -base_url: 'projects/{{project}}/global/interconnectGroups' -self_link: 'projects/{{project}}/global/interconnectGroups/{{name}}' -update_verb: 'PATCH' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false -examples: - - name: 'interconnect_group_basic' - primary_resource_id: 'example-interconnect-group' - vars: - interconnect_group_name: 'example-interconnect-group' - test_vars_overrides: - 'deletion_protection': 'false' -parameters: -properties: - - name: 'description' - type: String - description: | - An optional description of this resource. Provide this property when you create the resource. - - name: 'creationTimestamp' - type: Time - description: | - Creation timestamp in RFC3339 text format. - output: true - - name: 'name' - type: String - description: | - Name of the resource. Provided by the client when the resource is created. The name must be - 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters - long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first - character must be a lowercase letter, and all following characters must be a dash, - lowercase letter, or digit, except the last character, which cannot be a dash. - required: true - immutable: true - validation: - regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - - name: 'interconnects' - type: Map - description: | - Interconnects in the InterconnectGroup. Keys are arbitrary user-specified - strings. Users are encouraged, but not required, to use their preferred - format for resource links as keys. 
- Note that there are add-members and remove-members methods in gcloud. - The size of this map is limited by an "Interconnects per group" quota. - key_name: 'name' - key_description: | - Resource name - value_type: - name: mapObjectName - type: NestedObject - properties: - - name: 'interconnect' - type: String - description: | - The URL of an Interconnect in this group. All Interconnects in the group are unique. - - name: 'intent' - type: NestedObject - description: | - The user's intent for this group. This is the only required field besides - the name that must be specified on group creation. - properties: - - name: 'topologyCapability' - type: Enum - description: | - The reliability the user intends this group to be capable of, in terms - of the Interconnect product SLAs. - enum_values: - - 'PRODUCTION_NON_CRITICAL' - - 'PRODUCTION_CRITICAL' - - 'NO_SLA' - - 'AVAILABILITY_SLA_UNSPECIFIED' - required: true - - name: 'physicalStructure' - type: NestedObject - description: | - An analysis of the physical layout of Interconnects in this - group. Every Interconnect in the group is shown once in this structure. - output: true - properties: - - name: 'metros' - type: Array - description: | - The metros Interconnects in this group are in. - output: true - item_type: - type: NestedObject - properties: - - name: 'metro' - type: String - description: | - The name of the metro, as a three-letter lowercase string - like "iad". This is the first component of the location of - Interconnects underneath this. - output: true - - name: 'facilities' - type: Array - description: | - The facilities Interconnects in this metro are present - in. - output: true - item_type: - type: NestedObject - properties: - - name: 'facility' - type: String - description: | - The ID of this facility, as a numeric string like - "5467". This is the third component of the location of Interconnects - in this facility. 
- output: true - - name: 'zones' - type: Array - description: | - The zones that Interconnects in this facility are - present in. - output: true - item_type: - type: NestedObject - properties: - - name: 'zone' - type: String - description: | - The name of the zone, either "zone1" or "zone2". - This is the second component of the location of Interconnects in - this facility. - output: true - - name: 'interconnects' - type: Array - description: | - URLs of Interconnects in this redundancy group in the - given metro, facility, and zone. - item_type: - type: String - output: true - - name: 'configured' - type: NestedObject - description: | - The status of the group as configured. This has the same - structure as the operational field reported by the OperationalStatus - method, but does not take into account the operational status of each - resource. - output: true - properties: - - name: 'topologyCapability' - type: NestedObject - description: | - How reliable this topology is configured to be, and why - this group does or does not meet the requirements for the intended - capability. - output: true - properties: - - name: 'supportedSla' - type: Enum - description: | - Which level of reliability this group is configured to - support. - enum_values: - - 'PRODUCTION_CRITICAL' - - 'PRODUCTION_NON_CRITICAL' - - 'NO_SLA' - - 'UNSPECIFIED' - output: true - - name: 'intendedCapabilityBlockers' - type: Array - description: | - Reasons why configuration.topologyCapability.sla differs - from intent.topologyCapability. This list is empty if and only if those - are the same. - output: true - item_type: - type: NestedObject - properties: - - name: 'blockerType' - type: Enum - description: | - The category of an unmet SLA requirement. The Intended - SLA Blockers section below explains this field and how it relates to - other fields in intendedCapabilityBlockers. 
- enum_values: - - 'UNSPECIFIED' - - 'NO_INTERCONNECTS' - - 'INCOMPATIBLE_METROS' - - 'NO_INTERCONNECTS_IN_METRO_AND_ZONE' - - 'NOT_AVAILABLE' - - 'OTHER' - output: true - - name: 'explanation' - type: String - description: | - A human-readable explanation of this requirement and - why it's not met. This is set for every type of requirement. - output: true - - name: 'documentationLink' - type: String - description: | - The url of Google Cloud public documentation explaining - this requirement. This is set for every type of requirement. - output: true - - name: 'metros' - type: Array - description: | - Metros used to explain this blocker in more detail. - These are three-letter lowercase strings like "iad". A blocker like - INCOMPATIBLE_METROS will specify the problematic metros in this - field. - item_type: - type: String - output: true - - name: 'facilities' - type: Array - description: | - Facilities used to explain this blocker in more detail. - Like physicalStructure.metros.facilities.facility, this is a numeric - string like "5467". - item_type: - type: String - output: true - - name: 'zones' - type: Array - description: | - Zones used to explain this blocker in more detail. - Zone names are "zone1" and/or "zone2". - item_type: - type: String - output: true - - name: 'interconnects' - type: Array - description: | - Interconnects used to explain this blocker in more - detail. 
- item_type: - type: String - output: true diff --git a/mmv1/products/compute/Network.yaml b/mmv1/products/compute/Network.yaml index bc282210b110..34cb65993e95 100644 --- a/mmv1/products/compute/Network.yaml +++ b/mmv1/products/compute/Network.yaml @@ -36,7 +36,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true sweeper: dependencies: # - "google_network_security_intercept_endpoint_group" (beta only) @@ -177,7 +176,6 @@ properties: enum_values: - 'LEGACY' - 'STANDARD' - is_missing_in_cai: true - name: 'bgpAlwaysCompareMed' type: Boolean description: | @@ -187,7 +185,6 @@ properties: default_from_api: true update_url: 'projects/{{project}}/global/networks/{{name}}' update_verb: 'PATCH' - is_missing_in_cai: true - name: 'bgpInterRegionCost' type: Enum description: | @@ -199,7 +196,6 @@ properties: enum_values: - 'DEFAULT' - 'ADD_COST_TO_MED' - is_missing_in_cai: true - name: 'mtu' type: Integer description: | @@ -208,11 +204,8 @@ properties: Note that packets larger than 1500 bytes (standard Ethernet) can be subject to TCP-MSS clamping or dropped with an ICMP `Fragmentation-Needed` message if the packets are routed to the Internet or other VPCs with varying MTUs. 
+ immutable: true default_from_api: true - update_url: 'projects/{{project}}/global/networks/{{name}}' - update_verb: 'PATCH' - update_id: 'mtu' - fingerprint_name: 'fingerprint' - name: 'enableUlaInternalIpv6' type: Boolean description: | @@ -248,19 +241,3 @@ properties: following are valid URLs: * https://www.googleapis.com/compute/v1/projects/{projectId}/global/networkProfiles/{network_profile_name} * projects/{projectId}/global/networkProfiles/{network_profile_name} - diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the network. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. - api_name: resourceManagerTags - ignore_read: true diff --git a/mmv1/products/compute/NetworkAttachment.yaml b/mmv1/products/compute/NetworkAttachment.yaml index fce1f0218cd1..a4d6f97ca575 100644 --- a/mmv1/products/compute/NetworkAttachment.yaml +++ b/mmv1/products/compute/NetworkAttachment.yaml @@ -22,7 +22,7 @@ references: api: 'https://cloud.google.com/compute/docs/reference/rest/v1/networkAttachments' docs: base_url: 'projects/{{project}}/regions/{{region}}/networkAttachments' -update_verb: 'PATCH' +immutable: true timeouts: insert_minutes: 20 update_minutes: 20 @@ -62,7 +62,6 @@ parameters: description: | Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? 
which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. required: true - immutable: true - name: 'region' type: ResourceRef description: | @@ -103,7 +102,6 @@ properties: type: Enum description: | The connection preference of service attachment. The value can be set to ACCEPT_AUTOMATIC. An ACCEPT_AUTOMATIC service attachment is one that always accepts the connection from consumer forwarding rules. - immutable: true required: true enum_values: - 'ACCEPT_AUTOMATIC' diff --git a/mmv1/products/compute/NetworkEndpointGroup.yaml b/mmv1/products/compute/NetworkEndpointGroup.yaml index b15b69dda2c9..9ba059d1748f 100644 --- a/mmv1/products/compute/NetworkEndpointGroup.yaml +++ b/mmv1/products/compute/NetworkEndpointGroup.yaml @@ -143,9 +143,3 @@ properties: description: | The default port used if the port number is not specified in the network endpoint. - - name: 'generated_id' - type: Integer - api_name: 'id' - output: true - description: | - The uniquely generated identifier for the resource. This identifier is defined by the server. diff --git a/mmv1/products/compute/NetworkFirewallPolicy.yaml b/mmv1/products/compute/NetworkFirewallPolicy.yaml index 7d31a4affc07..e003f212f534 100644 --- a/mmv1/products/compute/NetworkFirewallPolicy.yaml +++ b/mmv1/products/compute/NetworkFirewallPolicy.yaml @@ -57,17 +57,6 @@ properties: - name: 'description' type: String description: An optional description of this resource. Provide this property when you create the resource. - - name: 'policyType' - type: Enum - immutable: true - description: | - Policy type is used to determine which resources (networks) the policy can be associated with. - A policy can be associated with a network only if the network has the matching policyType in its network profile. - Different policy types may support some of the Firewall Rules features. 
- min_version: 'beta' - default_from_api: true - enum_values: - - 'VPC_POLICY' - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. diff --git a/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml b/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml index cc36572932c0..5e4f5a5b2c50 100644 --- a/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/NetworkFirewallPolicyWithRules.yaml @@ -15,6 +15,7 @@ name: 'NetworkFirewallPolicyWithRules' api_resource_type_kind: FirewallPolicy description: "The Compute NetworkFirewallPolicy with rules resource" +min_version: 'beta' docs: base_url: 'projects/{{project}}/global/firewallPolicies' self_link: 'projects/{{project}}/global/firewallPolicies/{{name}}' @@ -56,6 +57,7 @@ properties: - name: 'creationTimestamp' type: String description: Creation timestamp in RFC3339 text format. + min_version: 'beta' output: true - name: 'name' type: String @@ -66,31 +68,24 @@ properties: the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + min_version: 'beta' required: true immutable: true - name: 'networkFirewallPolicyId' type: String description: The unique identifier for the resource. This identifier is defined by the server. api_name: id + min_version: 'beta' output: true - name: 'description' type: String description: An optional description of this resource. - - name: 'policyType' - type: Enum - immutable: true - description: | - Policy type is used to determine which resources (networks) the policy can be associated with. - A policy can be associated with a network only if the network has the matching policyType in its network profile. 
- Different policy types may support some of the Firewall Rules features. - default_from_api: true min_version: 'beta' - enum_values: - - 'VPC_POLICY' - name: 'rule' type: Array description: A list of firewall policy rules. api_name: rules + min_version: 'beta' required: true item_type: type: NestedObject @@ -99,23 +94,27 @@ properties: type: String description: | A description of the rule. + min_version: 'beta' - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. + min_version: 'beta' - name: 'priority' type: Integer description: | An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + min_version: 'beta' required: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + min_version: 'beta' required: true properties: - name: 'srcIpRanges' @@ -123,6 +122,7 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. + min_version: 'beta' item_type: type: String - name: 'destIpRanges' @@ -130,6 +130,7 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. + min_version: 'beta' item_type: type: String - name: 'srcAddressGroups' @@ -137,6 +138,7 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. + min_version: 'beta' item_type: type: String - name: 'destAddressGroups' @@ -144,6 +146,7 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. 
+ min_version: 'beta' item_type: type: String - name: 'srcFqdns' @@ -151,6 +154,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. + min_version: 'beta' item_type: type: String - name: 'destFqdns' @@ -158,6 +162,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. + min_version: 'beta' item_type: type: String - name: 'srcRegionCodes' @@ -167,6 +172,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. + min_version: 'beta' item_type: type: String - name: 'destRegionCodes' @@ -176,6 +182,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. + min_version: 'beta' item_type: type: String - name: 'srcNetworkScope' @@ -210,6 +217,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. + min_version: 'beta' item_type: type: String - name: 'destThreatIntelligences' @@ -217,6 +225,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. + min_version: 'beta' item_type: type: String - name: 'layer4Config' @@ -224,6 +233,7 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs + min_version: 'beta' required: true item_type: type: NestedObject @@ -236,6 +246,7 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. 
+ min_version: 'beta' required: true - name: 'ports' type: Array @@ -246,6 +257,7 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. + min_version: 'beta' item_type: type: String - name: 'srcSecureTag' @@ -257,6 +269,7 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags + min_version: 'beta' item_type: type: NestedObject properties: @@ -265,12 +278,14 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. + min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -290,6 +305,7 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags + min_version: 'beta' item_type: type: NestedObject properties: @@ -298,12 +314,14 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. + min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -313,11 +331,13 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". + min_version: 'beta' required: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. 
+ min_version: 'beta' enum_values: - 'INGRESS' - 'EGRESS' @@ -327,12 +347,14 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. + min_version: 'beta' send_empty_value: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. + min_version: 'beta' item_type: type: String - name: 'securityProfileGroup' @@ -342,11 +364,13 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. + min_version: 'beta' - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. + min_version: 'beta' - name: 'disabled' type: Boolean description: | @@ -354,9 +378,11 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + min_version: 'beta' - name: 'predefinedRules' type: Array description: A list of firewall policy pre-defined rules. + min_version: 'beta' output: true item_type: type: NestedObject @@ -365,12 +391,14 @@ properties: type: String description: | A description of the rule. + min_version: 'beta' output: true - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. + min_version: 'beta' output: true - name: 'priority' type: Integer @@ -378,12 +406,14 @@ properties: An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. 
Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + min_version: 'beta' output: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + min_version: 'beta' output: true properties: - name: 'srcIpRanges' @@ -391,6 +421,7 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. + min_version: 'beta' output: true item_type: type: String @@ -399,6 +430,7 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. + min_version: 'beta' output: true item_type: type: String @@ -407,6 +439,7 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. + min_version: 'beta' output: true item_type: type: String @@ -415,6 +448,7 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. + min_version: 'beta' output: true item_type: type: String @@ -423,6 +457,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. + min_version: 'beta' output: true item_type: type: String @@ -431,6 +466,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. + min_version: 'beta' output: true item_type: type: String @@ -441,6 +477,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. + min_version: 'beta' output: true item_type: type: String @@ -451,6 +488,7 @@ properties: of traffic. 
Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. + min_version: 'beta' output: true item_type: type: String @@ -459,6 +497,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. + min_version: 'beta' output: true item_type: type: String @@ -467,6 +506,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. + min_version: 'beta' output: true item_type: type: String @@ -475,6 +515,7 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs + min_version: 'beta' output: true item_type: type: NestedObject @@ -487,6 +528,7 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. + min_version: 'beta' output: true - name: 'ports' type: Array @@ -497,6 +539,7 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. + min_version: 'beta' output: true item_type: type: String @@ -509,6 +552,7 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags + min_version: 'beta' output: true item_type: type: NestedObject @@ -518,6 +562,7 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' output: true - name: 'state' type: Enum @@ -525,6 +570,7 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. 
+ min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -544,6 +590,7 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags + min_version: 'beta' output: true item_type: type: NestedObject @@ -553,6 +600,7 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' output: true - name: 'state' type: Enum @@ -560,6 +608,7 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. + min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -569,11 +618,13 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". + min_version: 'beta' output: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. + min_version: 'beta' output: true enum_values: - 'INGRESS' @@ -584,6 +635,7 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. + min_version: 'beta' output: true send_empty_value: true - name: 'targetServiceAccounts' @@ -591,6 +643,7 @@ properties: description: | A list of service accounts indicating the sets of instances that are applied with this rule. + min_version: 'beta' output: true item_type: type: String @@ -601,12 +654,14 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. 
+ min_version: 'beta' output: true - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. + min_version: 'beta' output: true - name: 'disabled' type: Boolean @@ -615,20 +670,25 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + min_version: 'beta' output: true - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. + min_version: 'beta' output: true - name: 'selfLink' type: String description: Server-defined URL for the resource. + min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: Server-defined URL for this resource with the resource id. + min_version: 'beta' output: true - name: 'ruleTupleCount' type: Integer description: Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. 
+ min_version: 'beta' output: true diff --git a/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml b/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml index 761096b0c7f3..515a3b57195c 100644 --- a/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml +++ b/mmv1/products/compute/NetworkPeeringRoutesConfig.yaml @@ -63,6 +63,19 @@ examples: peering_secondary_name: 'secondary-peering' network_primary_name: 'primary-network' network_secondary_name: 'secondary-network' + - name: 'network_peering_routes_config_gke' + primary_resource_id: 'peering_gke_routes' + vars: + network_name: 'container-network' + subnetwork_name: 'container-subnetwork' + gke_cluster_name: 'private-cluster' + deletion_protection: 'true' + test_vars_overrides: + 'deletion_protection': 'false' + oics_vars_overrides: + 'deletion_protection': 'false' + # currently failing + skip_vcr: true parameters: - name: 'network' type: ResourceRef diff --git a/mmv1/products/compute/NodeGroup.yaml b/mmv1/products/compute/NodeGroup.yaml index 3a854983c1db..326a1c13d986 100644 --- a/mmv1/products/compute/NodeGroup.yaml +++ b/mmv1/products/compute/NodeGroup.yaml @@ -95,7 +95,6 @@ properties: type: String description: | Name of the resource. - required: true - name: 'nodeTemplate' type: ResourceRef description: | diff --git a/mmv1/products/compute/NodeTemplate.yaml b/mmv1/products/compute/NodeTemplate.yaml index c5eb4ebb2461..e23d4317d85d 100644 --- a/mmv1/products/compute/NodeTemplate.yaml +++ b/mmv1/products/compute/NodeTemplate.yaml @@ -79,7 +79,6 @@ properties: - name: 'name' type: String description: 'Name of the resource.' 
- required: true - name: 'nodeAffinityLabels' type: KeyValuePairs description: | diff --git a/mmv1/products/compute/OrganizationSecurityPolicy.yaml b/mmv1/products/compute/OrganizationSecurityPolicy.yaml index f3b0a7039a4e..966188adf897 100644 --- a/mmv1/products/compute/OrganizationSecurityPolicy.yaml +++ b/mmv1/products/compute/OrganizationSecurityPolicy.yaml @@ -58,20 +58,15 @@ properties: - name: 'displayName' type: String description: | - User-provided name of the organization security policy. The name should be unique in the organization in which the security policy is created. This should only be used when SecurityPolicyType is FIREWALL. + A textual name of the security policy. min_version: 'beta' + required: true immutable: true - name: 'description' type: String description: | A textual description for the organization security policy. min_version: 'beta' - - name: 'shortName' - type: String - description: | - User-provided name of the organization security policy. The name should be unique in the organization in which the security policy is created. This should only be used when SecurityPolicyType is CLOUD_ARMOR. - min_version: 'beta' - immutable: true - name: 'fingerprint' type: Fingerprint description: | @@ -89,13 +84,11 @@ properties: - name: 'type' type: Enum description: | - The type indicates the intended use of the security policy. This field can be set only at resource creation time. + The type indicates the intended use of the security policy. + For organization security policies, the only supported type + is "FIREWALL". 
min_version: 'beta' immutable: true default_value: "FIREWALL" enum_values: - 'FIREWALL' - - 'CLOUD_ARMOR' - - 'CLOUD_ARMOR_EDGE' - - 'CLOUD_ARMOR_INTERNAL_SERVICE' - - 'CLOUD_ARMOR_NETWORK' diff --git a/mmv1/products/compute/PacketMirroring.yaml b/mmv1/products/compute/PacketMirroring.yaml index d4bf994a3a73..123fcc99c203 100644 --- a/mmv1/products/compute/PacketMirroring.yaml +++ b/mmv1/products/compute/PacketMirroring.yaml @@ -144,8 +144,6 @@ properties: properties: - name: 'subnetworks' type: Array - is_set: true - set_hash_func: tpgresource.NestedUrlSetHashFunc description: | All instances in one of these subnetworks will be mirrored. at_least_one_of: @@ -163,13 +161,10 @@ properties: The URL of the subnetwork where this rule should be active. required: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' - diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' resource: 'Subnetwork' imports: 'selfLink' - name: 'instances' type: Array - is_set: true - set_hash_func: tpgresource.NestedUrlSetHashFunc description: | All the listed instances will be mirrored. Specify at most 50. at_least_one_of: @@ -186,7 +181,6 @@ properties: The URL of the instances where this rule should be active. required: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' - diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' resource: 'Instance' imports: 'selfLink' - name: 'tags' diff --git a/mmv1/products/compute/PreviewFeature.yaml b/mmv1/products/compute/PreviewFeature.yaml deleted file mode 100644 index 45967adc14bd..000000000000 --- a/mmv1/products/compute/PreviewFeature.yaml +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -# API resource name -name: 'PreviewFeature' -kind: 'compute#PreviewFeature' -description: | - Represents a single Google Compute Engine preview feature such as Alpha API access, which can be enabled or disabled for a project. -min_version: 'beta' -references: - guides: - 'Use the Compute Engine alpha API': 'https://cloud.google.com/compute/docs/reference/rest/alpha' - api: 'https://cloud.google.com/compute/docs/reference/rest/beta/PreviewFeatures' -docs: -base_url: 'projects/{{project}}/global/previewFeatures' -has_self_link: false - -create_url: 'projects/{{project}}/global/previewFeatures/{{name}}' -create_verb: 'PATCH' -update_url: 'projects/{{project}}/global/previewFeatures/{{name}}' -update_verb: 'PATCH' -update_mask: true -exclude_delete: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - -autogen_async: true -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false - -examples: - - name: 'preview_feature_basic' - primary_resource_id: 'gce_preview_feature' - -parameters: - - name: 'name' - type: String - required: true - immutable: true - url_param_only: true - description: | - The name of the preview feature. - -properties: - - name: 'activationStatus' - type: Enum - description: 'The activation status of the preview feature.' - required: true - enum_values: - - 'ENABLED' - - 'DISABLED' - - name: 'rolloutOperation' - type: NestedObject - description: 'The rollout operation of the feature.' 
- ignore_read: true - properties: - - name: 'rolloutInput' - type: NestedObject - description: 'The input for the rollout operation.' - properties: - - name: 'predefinedRolloutPlan' - type: Enum - description: 'Predefined rollout plans.' - required: true - enum_values: - - 'ROLLOUT_PLAN_FAST_ROLLOUT' diff --git a/mmv1/products/compute/PublicDelegatedPrefix.yaml b/mmv1/products/compute/PublicDelegatedPrefix.yaml index 21a74f13da40..c84125c96061 100644 --- a/mmv1/products/compute/PublicDelegatedPrefix.yaml +++ b/mmv1/products/compute/PublicDelegatedPrefix.yaml @@ -122,58 +122,3 @@ properties: The IP address range, in CIDR format, represented by this public delegated prefix. required: true - - name: 'publicDelegatedSubPrefixs' - type: Array - output: true - description: | - List of sub public delegated fixes for BYO IP functionality. - Each item in this array represents a sub prefix that can be - used to create addresses or further allocations. - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - The name of the sub public delegated prefix. - - name: 'description' - type: String - description: | - An optional description of this sub public delegated prefix. - - name: 'region' - type: String - description: | - Output-only. The region of the sub public delegated prefix if it is regional. If absent, the sub prefix is global. - - name: 'status' - type: Enum - description: | - The status of the sub public delegated prefix. - enum_values: - - 'INITIALIZING' - - 'READY_TO_ANNOUNCE' - - 'ANNOUNCED' - - 'DELETING' - - name: 'ipCidrRange' - type: String - description: | - The IP address range in the CIDR format represented by this sub prefix. - - name: 'isAddress' - type: Boolean - description: | - Whether the sub prefix is delegated for address creation. - - name: 'mode' - type: Enum - description: | - The PublicDelegatedSubPrefix mode for IPv6 only. 
- enum_values: - - 'DELEGATION' - - 'EXTERNAL_IPV6_FORWARDING_RULE_CREATION' - - 'EXTERNAL_IPV6_SUBNETWORK_CREATION' - - name: 'allocatablePrefixLength' - type: Integer - description: | - The allocatable prefix length supported by this PublicDelegatedSubPrefix. - - name: 'delegatee_project' - type: String - description: | - Name of the project scoping this PublicDelegatedSubPrefix. diff --git a/mmv1/products/compute/RegionAutoscaler.yaml b/mmv1/products/compute/RegionAutoscaler.yaml index fc6895c803b1..c84ac30c0341 100644 --- a/mmv1/products/compute/RegionAutoscaler.yaml +++ b/mmv1/products/compute/RegionAutoscaler.yaml @@ -43,7 +43,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'region_autoscaler_basic' @@ -396,7 +395,7 @@ properties: A description of a scaling schedule. - name: 'target' type: String - # TODO: #303 resourceref once RegionIGM exists + # TODO(#303): resourceref once RegionIGM exists # resource: 'RegionInstanceGroupManager' # imports: 'selfLink' description: | diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index d022966f3458..821c5706fe5f 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -139,24 +139,6 @@ examples: instance_group_name: 'instance_group' network_name: 'network' exclude_test: true - - name: 'region_backend_service_dynamic_forwarding' - primary_resource_id: 'default' - min_version: 'beta' - vars: - region_backend_service_name: 'region-service' - - name: 'region_backend_service_ha_policy' - primary_resource_id: 'default' - vars: - region_backend_service_name: 'region-service' - network_name: 'rbs-net' - - name: 'region_backend_service_ha_policy_manual_leader' - primary_resource_id: 'default' - vars: - region_backend_service_name: 'region-service' - network_name: 'rbs-net' - subnetwork_name: 'rbs-subnet' - 
instance_name: 'rbs-instance' - neg_name: 'rbs-neg' parameters: - name: 'region' type: ResourceRef @@ -251,7 +233,7 @@ properties: Group resource using the fully-qualified URL, rather than a partial URL. required: true - diff_suppress_func: 'tpgresource.CompareSelfLinkCanonicalPaths' + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' custom_flatten: 'templates/terraform/custom_flatten/guard_self_link.go.tmpl' - name: 'maxConnections' type: Integer @@ -354,7 +336,7 @@ properties: description: | Settings controlling the volume of connections to a backend service. This field is applicable only when the `load_balancing_scheme` is set to INTERNAL_MANAGED - and the `protocol` is set to HTTP, HTTPS, HTTP2 or H2C. + and the `protocol` is set to HTTP, HTTPS, or HTTP2. properties: - name: 'connectTimeout' type: NestedObject @@ -459,7 +441,7 @@ properties: hashing. This field only applies when all of the following are true - * `load_balancing_scheme` is set to INTERNAL_MANAGED - * `protocol` is set to HTTP, HTTPS, HTTP2 or H2C + * `protocol` is set to HTTP, HTTPS, or HTTP2 * `locality_lb_policy` is set to MAGLEV or RING_HASH properties: - name: 'httpCookie' @@ -929,7 +911,7 @@ properties: locality_lb_policy is applicable to either: - * A regional backend service with the service_protocol set to HTTP, HTTPS, HTTP2 or H2C, + * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and loadBalancingScheme set to INTERNAL_MANAGED. * A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. * A regional backend service with loadBalancingScheme set to EXTERNAL (External Network @@ -993,7 +975,7 @@ properties: description: | Settings controlling eviction of unhealthy hosts from the load balancing pool. This field is applicable only when the `load_balancing_scheme` is set - to INTERNAL_MANAGED and the `protocol` is set to HTTP, HTTPS, HTTP2 or H2C. 
+ to INTERNAL_MANAGED and the `protocol` is set to HTTP, HTTPS, or HTTP2. properties: - name: 'baseEjectionTime' type: NestedObject @@ -1237,21 +1219,22 @@ properties: - name: 'protocol' type: Enum description: | - The protocol this BackendService uses to communicate with backends. - The default is HTTP. Possible values are HTTP, HTTPS, HTTP2, H2C, TCP, SSL, UDP - or GRPC. Refer to the documentation for the load balancers or for Traffic Director - for more information. + The protocol this RegionBackendService uses to communicate with backends. + The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer + types and may result in errors if used with the GA API. default_from_api: true + # This is removed to avoid breaking terraform, as default values cannot be + # unspecified. Providers should include this as needed via overrides + # default_value: :TCP enum_values: - 'HTTP' - 'HTTPS' - 'HTTP2' - - 'TCP' - 'SSL' + - 'TCP' - 'UDP' - 'GRPC' - 'UNSPECIFIED' - - 'H2C' - name: 'securityPolicy' type: String description: | @@ -1456,107 +1439,3 @@ properties: required: true enum_values: - 'CONSISTENT_HASH_SUBSETTING' - - name: 'subsetSize' - type: Integer - description: | - The number of backends per backend group assigned to each proxy instance or each service mesh client. - An input parameter to the CONSISTENT_HASH_SUBSETTING algorithm. Can only be set if policy is set to - CONSISTENT_HASH_SUBSETTING. Can only be set if load balancing scheme is INTERNAL_MANAGED or INTERNAL_SELF_MANAGED. - subsetSize is optional for Internal HTTP(S) load balancing and required for Traffic Director. - If you do not provide this value, Cloud Load Balancing will calculate it dynamically to optimize the number - of proxies/clients visible to each backend and vice versa. - Must be greater than 0. If subsetSize is larger than the number of backends/endpoints, then subsetting is disabled. 
- - name: 'dynamicForwarding' - type: NestedObject - description: | - Dynamic forwarding configuration. This field is used to configure the backend service with dynamic forwarding - feature which together with Service Extension allows customized and complex routing logic. - min_version: beta - properties: - - name: 'ipPortSelection' - type: NestedObject - description: | - IP:PORT based dynamic forwarding configuration. - min_version: beta - properties: - - name: 'enabled' - type: Boolean - min_version: beta - description: | - A boolean flag enabling IP:PORT based dynamic forwarding. - immutable: true - - name: 'haPolicy' - type: NestedObject - description: | - Configures self-managed High Availability (HA) for External and Internal Protocol Forwarding. - The backends of this regional backend service must only specify zonal network endpoint groups - (NEGs) of type GCE_VM_IP. Note that haPolicy is not for load balancing, and therefore cannot - be specified with sessionAffinity, connectionTrackingPolicy, and failoverPolicy. haPolicy - requires customers to be responsible for tracking backend endpoint health and electing a - leader among the healthy endpoints. Therefore, haPolicy cannot be specified with healthChecks. - haPolicy can only be specified for External Passthrough Network Load Balancers and Internal - Passthrough Network Load Balancers. - conflicts: - - sessionAffinity - - connectionTrackingPolicy - - failoverPolicy - - healthChecks - properties: - - name: 'fastIPMove' - type: Enum - description: | - Specifies whether fast IP move is enabled, and if so, the mechanism to achieve it. - Supported values are: - - * `DISABLED`: Fast IP Move is disabled. You can only use the haPolicy.leader API to - update the leader. - - * `GARP_RA`: Provides a method to very quickly define a new network endpoint as the - leader. This method is faster than updating the leader using the - haPolicy.leader API. 
Fast IP move works as follows: The VM hosting the - network endpoint that should become the new leader sends either a - Gratuitous ARP (GARP) packet (IPv4) or an ICMPv6 Router Advertisement(RA) - packet (IPv6). Google Cloud immediately but temporarily associates the - forwarding rule IP address with that VM, and both new and in-flight packets - are quickly delivered to that VM. - immutable: true - enum_values: - - 'DISABLED' - - 'GARP_RA' - - name: 'leader' - type: NestedObject - description: | - Selects one of the network endpoints attached to the backend NEGs of this service as the - active endpoint (the leader) that receives all traffic. - properties: - - name: 'backendGroup' - type: ResourceRef - description: | - A fully-qualified URL of the zonal Network Endpoint Group (NEG) that the leader is - attached to. - - name: 'networkEndpoint' - type: NestedObject - description: | - The network endpoint within the leader.backendGroup that is designated as the leader. - properties: - - name: 'instance' - type: String - description: | - The name of the VM instance of the leader network endpoint. The instance must - already be attached to the NEG specified in the haPolicy.leader.backendGroup. - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the region backend service. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. 
- api_name: resourceManagerTags - ignore_read: true - immutable: true diff --git a/mmv1/products/compute/RegionDisk.yaml b/mmv1/products/compute/RegionDisk.yaml index d4d219944771..df6fa60f6092 100644 --- a/mmv1/products/compute/RegionDisk.yaml +++ b/mmv1/products/compute/RegionDisk.yaml @@ -61,7 +61,6 @@ iam_policy: custom_code: encoder: 'templates/terraform/encoders/disk.tmpl' decoder: 'templates/terraform/decoders/disk.tmpl' - update_encoder: 'templates/terraform/update_encoder/hyper_disk.go.tmpl' pre_delete: 'templates/terraform/pre_delete/detach_disk.tmpl' custom_diff: - 'customdiff.ForceNewIfChange("size", IsDiskShrinkage)' @@ -85,11 +84,6 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-region-disk%s", context["random_suffix"])' vars: region_disk_name: 'my-region-features-disk' - - name: 'region_disk_hyperdisk_balanced_ha_write_many' - primary_resource_id: 'primary' - primary_resource_name: 'fmt.Sprintf("tf-test-my-region-disk%s", context["random_suffix"])' - vars: - region_disk_name: 'my-region-hyperdisk' parameters: - name: 'region' type: ResourceRef @@ -152,7 +146,7 @@ properties: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true - # TODO Change to ResourceRef once KMS is in Magic Modules + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeyName' type: String description: | @@ -170,7 +164,7 @@ properties: description: | Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. - # TODO Change to ResourceRef once KMS is in Magic Modules + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeyName' type: String description: | @@ -382,37 +376,6 @@ properties: description: 'An applicable license URI' resource: 'License' imports: 'selfLink' - - name: 'accessMode' - type: String - description: | - The access mode of the disk. 
- For example: - * READ_WRITE_SINGLE: The default AccessMode, means the disk can be attached to single instance in RW mode. - * READ_WRITE_MANY: The AccessMode means the disk can be attached to multiple instances in RW mode. - * READ_ONLY_SINGLE: The AccessMode means the disk can be attached to multiple instances in RO mode. - The AccessMode is only valid for Hyperdisk disk types. - required: false - immutable: false - default_from_api: true - update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=accessMode' - update_verb: 'PATCH' - - name: 'provisionedIops' - type: Integer - description: | - Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second - that the disk can handle. Values must be between 10,000 and 120,000. - For more details, see the Extreme persistent disk [documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). - default_from_api: true - update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=provisionedIops' - update_verb: 'PATCH' - - name: 'provisionedThroughput' - type: Integer - description: | - Indicates how much throughput to provision for the disk. This sets the number of throughput - mb per second that the disk can handle. Values must be greater than or equal to 1. 
- default_from_api: true - update_url: 'projects/{{project}}/regions/{{region}}/disks/{{name}}?paths=provisionedThroughput' - update_verb: 'PATCH' virtual_fields: - name: 'create_snapshot_before_destroy' type: Boolean diff --git a/mmv1/products/compute/RegionHealthCheck.yaml b/mmv1/products/compute/RegionHealthCheck.yaml index 4c8ce671bd14..bb952940b13a 100644 --- a/mmv1/products/compute/RegionHealthCheck.yaml +++ b/mmv1/products/compute/RegionHealthCheck.yaml @@ -112,16 +112,6 @@ examples: primary_resource_id: 'grpc-region-health-check' vars: health_check_name: 'grpc-region-health-check' - - name: 'region_health_check_grpc_with_tls' - primary_resource_id: 'grpc-with-tls-region-health-check' - min_version: 'beta' - vars: - health_check_name: 'grpc-with-tls-region-health-check' - - name: 'region_health_check_grpc_with_tls_full' - primary_resource_id: 'grpc-with-tls-region-health-check' - min_version: 'beta' - vars: - health_check_name: 'grpc-with-tls-region-health-check' parameters: - name: 'region' type: ResourceRef @@ -209,7 +199,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -335,7 +324,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -461,7 +449,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -568,7 +555,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'request' @@ -675,7 +661,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'host' @@ -801,7 
+786,6 @@ properties: - 'tcp_health_check' - 'ssl_health_check' - 'grpc_health_check' - - 'grpc_tls_health_check' diff_suppress_func: 'portDiffSuppress' properties: - name: 'port' @@ -866,66 +850,6 @@ properties: - 'grpc_health_check.0.port_name' - 'grpc_health_check.0.port_specification' - 'grpc_health_check.0.grpc_service_name' - - name: 'grpcTlsHealthCheck' - min_version: 'beta' - type: NestedObject - exactly_one_of: - - 'http_health_check' - - 'https_health_check' - - 'http2_health_check' - - 'tcp_health_check' - - 'ssl_health_check' - - 'grpc_health_check' - - 'grpc_tls_health_check' - diff_suppress_func: 'portDiffSuppress' - properties: - - name: 'port' - type: Integer - description: | - The port number for the health check request. - Must be specified if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535. - at_least_one_of: - - 'grpc_tls_health_check.0.port' - - 'grpc_tls_health_check.0.port_specification' - - 'grpc_tls_health_check.0.grpc_service_name' - - name: 'portSpecification' - type: Enum - description: | - Specifies how port is selected for health checking, can be one of the - following values: - - * `USE_FIXED_PORT`: The port number in `port` is used for health checking. - - * `USE_NAMED_PORT`: Not supported for GRPC with TLS health checking. - - * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each - network endpoint is used for health checking. For other backends, the - port or named port specified in the Backend Service is used for health - checking. - - If not specified, gRPC health check follows behavior specified in the `port` field. - at_least_one_of: - - 'grpc_tls_health_check.0.port' - - 'grpc_tls_health_check.0.port_specification' - - 'grpc_tls_health_check.0.grpc_service_name' - enum_values: - - 'USE_FIXED_PORT' - - 'USE_NAMED_PORT' - - 'USE_SERVING_PORT' - - name: 'grpcServiceName' - type: String - description: | - The gRPC service name for the health check. 
- The value of grpcServiceName has the following meanings by convention: - - * Empty serviceName means the overall status of all services at the backend. - * Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service. - - The grpcServiceName can only be ASCII. - at_least_one_of: - - 'grpc_tls_health_check.0.port' - - 'grpc_tls_health_check.0.port_specification' - - 'grpc_tls_health_check.0.grpc_service_name' - name: 'logConfig' type: NestedObject description: | diff --git a/mmv1/products/compute/RegionInstanceGroupManager.yaml b/mmv1/products/compute/RegionInstanceGroupManager.yaml index 5037bc480c1c..71b3e7cf6387 100644 --- a/mmv1/products/compute/RegionInstanceGroupManager.yaml +++ b/mmv1/products/compute/RegionInstanceGroupManager.yaml @@ -179,7 +179,7 @@ properties: The name of the managed instance group. The name must be 1-63 characters long, and comply with RFC1035. required: true - # TODO: Make namedPorts a NameValue(name[string], port[integer]) + # TODO(nelsonjr): Make namedPorts a NameValue(name[string], port[integer]) - name: 'namedPorts' type: Array description: diff --git a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml index 5cba46b0978e..454f66a0244a 100644 --- a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml +++ b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml @@ -163,7 +163,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'Network' imports: 'selfLink' - default_from_api: true - name: 'subnetwork' type: ResourceRef description: | diff --git a/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml b/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml index cc5143b725a2..92531c95af69 100644 --- a/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml +++ b/mmv1/products/compute/RegionNetworkFirewallPolicy.yaml @@ -38,11 +38,6 @@ examples: primary_resource_id: 'policy' 
vars: policy_name: 'tf-test-policy' - - name: 'region_network_firewall_policy_roce' - primary_resource_id: 'policy' - min_version: beta - vars: - policy_name: 'rnf-policy' parameters: - name: 'region' type: String @@ -68,18 +63,6 @@ properties: - name: 'description' type: String description: An optional description of this resource. Provide this property when you create the resource. - - name: 'policyType' - type: Enum - immutable: true - description: | - Policy type is used to determine which resources (networks) the policy can be associated with. - A policy can be associated with a network only if the network has the matching policyType in its network profile. - Different policy types may support some of the Firewall Rules features. - min_version: 'beta' - default_from_api: true - enum_values: - - 'VPC_POLICY' - - 'RDMA_ROCE_POLICY' - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. diff --git a/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml b/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml index 3d9bd318ac32..6dbaa6561153 100644 --- a/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml +++ b/mmv1/products/compute/RegionNetworkFirewallPolicyWithRules.yaml @@ -15,6 +15,7 @@ name: 'RegionNetworkFirewallPolicyWithRules' api_resource_type_kind: FirewallPolicy description: "The Compute NetworkFirewallPolicy with rules resource" +min_version: 'beta' docs: base_url: 'projects/{{project}}/regions/{{region}}/firewallPolicies' self_link: 'projects/{{project}}/regions/{{region}}/firewallPolicies/{{name}}' @@ -52,15 +53,11 @@ examples: tag_value: 'tag-value' test_env_vars: org_id: 'ORG_ID' - - name: 'compute_region_network_firewall_policy_with_rules_roce' - primary_resource_id: 'policy' - min_version: beta - vars: - policy_name: 'rnf-policy' parameters: - name: 'region' type: String description: The region of this resource. 
+ min_version: 'beta' url_param_only: true immutable: true default_from_api: true @@ -68,6 +65,7 @@ properties: - name: 'creationTimestamp' type: String description: Creation timestamp in RFC3339 text format. + min_version: 'beta' output: true - name: 'name' type: String @@ -78,32 +76,24 @@ properties: the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + min_version: 'beta' required: true immutable: true - name: 'networkFirewallPolicyId' type: String description: The unique identifier for the resource. This identifier is defined by the server. api_name: id + min_version: 'beta' output: true - name: 'description' type: String description: An optional description of this resource. - - name: 'policyType' - type: Enum - immutable: true - description: | - Policy type is used to determine which resources (networks) the policy can be associated with. - A policy can be associated with a network only if the network has the matching policyType in its network profile. - Different policy types may support some of the Firewall Rules features. - default_from_api: true min_version: 'beta' - enum_values: - - 'VPC_POLICY' - - 'RDMA_ROCE_POLICY' - name: 'rule' type: Array description: A list of firewall policy rules. api_name: rules + min_version: 'beta' required: true item_type: type: NestedObject @@ -112,23 +102,27 @@ properties: type: String description: | A description of the rule. + min_version: 'beta' - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. + min_version: 'beta' - name: 'priority' type: Integer description: | An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. 
Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + min_version: 'beta' required: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + min_version: 'beta' required: true properties: - name: 'srcIpRanges' @@ -136,6 +130,7 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. + min_version: 'beta' item_type: type: String - name: 'destIpRanges' @@ -143,6 +138,7 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. + min_version: 'beta' item_type: type: String - name: 'srcAddressGroups' @@ -150,6 +146,7 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. + min_version: 'beta' item_type: type: String - name: 'destAddressGroups' @@ -157,6 +154,7 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. + min_version: 'beta' item_type: type: String - name: 'srcFqdns' @@ -164,6 +162,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. + min_version: 'beta' item_type: type: String - name: 'destFqdns' @@ -171,6 +170,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. + min_version: 'beta' item_type: type: String - name: 'srcNetworkScope' @@ -207,6 +207,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. 
+ min_version: 'beta' item_type: type: String - name: 'destRegionCodes' @@ -216,6 +217,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. + min_version: 'beta' item_type: type: String - name: 'srcThreatIntelligences' @@ -223,6 +225,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. + min_version: 'beta' item_type: type: String - name: 'destThreatIntelligences' @@ -230,6 +233,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. + min_version: 'beta' item_type: type: String - name: 'layer4Config' @@ -237,6 +241,7 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs + min_version: 'beta' required: true item_type: type: NestedObject @@ -249,6 +254,7 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. + min_version: 'beta' required: true - name: 'ports' type: Array @@ -259,6 +265,7 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. + min_version: 'beta' item_type: type: String - name: 'srcSecureTag' @@ -270,6 +277,7 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags + min_version: 'beta' item_type: type: NestedObject properties: @@ -278,12 +286,14 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. 
A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. + min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -303,6 +313,7 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags + min_version: 'beta' item_type: type: NestedObject properties: @@ -311,12 +322,14 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' - name: 'state' type: Enum description: | [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. + min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -326,11 +339,13 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". + min_version: 'beta' required: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. + min_version: 'beta' enum_values: - 'INGRESS' - 'EGRESS' @@ -340,12 +355,14 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. + min_version: 'beta' send_empty_value: true - name: 'targetServiceAccounts' type: Array description: | A list of service accounts indicating the sets of instances that are applied with this rule. + min_version: 'beta' item_type: type: String - name: 'securityProfileGroup' @@ -355,11 +372,13 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. 
+ min_version: 'beta' - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. + min_version: 'beta' - name: 'disabled' type: Boolean description: | @@ -367,9 +386,11 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + min_version: 'beta' - name: 'predefinedRules' type: Array description: A list of firewall policy pre-defined rules. + min_version: 'beta' output: true item_type: type: NestedObject @@ -378,12 +399,14 @@ properties: type: String description: | A description of the rule. + min_version: 'beta' output: true - name: 'ruleName' type: String description: | An optional name for the rule. This field is not a unique identifier and can be updated. + min_version: 'beta' output: true - name: 'priority' type: Integer @@ -391,12 +414,14 @@ properties: An integer indicating the priority of a rule in the list. The priority must be a value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + min_version: 'beta' output: true - name: 'match' type: NestedObject description: A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + min_version: 'beta' output: true properties: - name: 'srcIpRanges' @@ -404,6 +429,7 @@ properties: description: | Source IP address range in CIDR format. Required for INGRESS rules. + min_version: 'beta' output: true item_type: type: String @@ -412,6 +438,7 @@ properties: description: | Destination IP address range in CIDR format. Required for EGRESS rules. 
+ min_version: 'beta' output: true item_type: type: String @@ -420,6 +447,7 @@ properties: description: | Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. + min_version: 'beta' output: true item_type: type: String @@ -428,6 +456,7 @@ properties: description: | Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. + min_version: 'beta' output: true item_type: type: String @@ -436,6 +465,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic source. Maximum number of source fqdn allowed is 100. + min_version: 'beta' output: true item_type: type: String @@ -444,6 +474,7 @@ properties: description: | Fully Qualified Domain Name (FQDN) which should be matched against traffic destination. Maximum number of destination fqdn allowed is 100. + min_version: 'beta' output: true item_type: type: String @@ -454,6 +485,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of source region codes allowed is 5000. + min_version: 'beta' output: true item_type: type: String @@ -464,6 +496,7 @@ properties: of traffic. Should be specified as 2 letter country code defined as per ISO 3166 alpha-2 country codes. ex."US" Maximum number of destination region codes allowed is 5000. + min_version: 'beta' output: true item_type: type: String @@ -472,6 +505,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic source. + min_version: 'beta' output: true item_type: type: String @@ -480,6 +514,7 @@ properties: description: | Names of Network Threat Intelligence lists. The IPs in these lists will be matched against traffic destination. 
+ min_version: 'beta' output: true item_type: type: String @@ -488,6 +523,7 @@ properties: description: | Pairs of IP protocols and ports that the rule should match. api_name: layer4Configs + min_version: 'beta' output: true item_type: type: NestedObject @@ -500,6 +536,7 @@ properties: This value can either be one of the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. + min_version: 'beta' output: true - name: 'ports' type: Array @@ -510,6 +547,7 @@ properties: applies to connections through any port. Example inputs include: ["22"], ["80","443"], and ["12345-12349"]. + min_version: 'beta' output: true item_type: type: String @@ -522,6 +560,7 @@ properties: and there is no srcIpRange, this rule will be ignored. Maximum number of source tag values allowed is 256. api_name: srcSecureTags + min_version: 'beta' output: true item_type: type: NestedObject @@ -531,6 +570,7 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' output: true - name: 'state' type: Enum @@ -538,6 +578,7 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. + min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -557,6 +598,7 @@ properties: to all instances on the specified network. Maximum number of target label tags allowed is 256. api_name: targetSecureTags + min_version: 'beta' output: true item_type: type: NestedObject @@ -566,6 +608,7 @@ properties: description: | Name of the secure tag, created with TagManager's TagValue API. @pattern tagValues/[0-9]+ + min_version: 'beta' output: true - name: 'state' type: Enum @@ -573,6 +616,7 @@ properties: [Output Only] State of the secure tag, either `EFFECTIVE` or `INEFFECTIVE`. A secure tag is `INEFFECTIVE` when it is deleted or its network is deleted. 
+ min_version: 'beta' output: true enum_values: - 'EFFECTIVE' @@ -582,11 +626,13 @@ properties: description: | The Action to perform when the client connection triggers the rule. Can currently be either "allow", "deny", "apply_security_profile_group" or "goto_next". + min_version: 'beta' output: true - name: 'direction' type: Enum description: | The direction in which this rule applies. If unspecified an INGRESS rule is created. + min_version: 'beta' output: true enum_values: - 'INGRESS' @@ -597,6 +643,7 @@ properties: Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. + min_version: 'beta' output: true send_empty_value: true - name: 'targetServiceAccounts' @@ -604,6 +651,7 @@ properties: description: | A list of service accounts indicating the sets of instances that are applied with this rule. + min_version: 'beta' output: true item_type: type: String @@ -614,12 +662,14 @@ properties: Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group Must be specified if action is 'apply_security_profile_group'. + min_version: 'beta' output: true - name: 'tlsInspect' type: Boolean description: | Boolean flag indicating if the traffic should be TLS decrypted. It can be set only if action = 'apply_security_profile_group' and cannot be set for other actions. + min_version: 'beta' output: true - name: 'disabled' type: Boolean @@ -628,20 +678,25 @@ properties: the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + min_version: 'beta' output: true - name: 'fingerprint' type: Fingerprint description: Fingerprint of the resource. This field is used internally during updates of this resource. 
+ min_version: 'beta' output: true - name: 'selfLink' type: String description: Server-defined URL for the resource. + min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: Server-defined URL for this resource with the resource id. + min_version: 'beta' output: true - name: 'ruleTupleCount' type: Integer description: Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. + min_version: 'beta' output: true diff --git a/mmv1/products/compute/RegionSecurityPolicy.yaml b/mmv1/products/compute/RegionSecurityPolicy.yaml index 4e0c2bf7e254..8cb057322d99 100644 --- a/mmv1/products/compute/RegionSecurityPolicy.yaml +++ b/mmv1/products/compute/RegionSecurityPolicy.yaml @@ -39,7 +39,6 @@ async: result: resource_inside_response: false custom_code: - constants: 'templates/terraform/constants/region_security_policy.go.tmpl' sweeper: url_substitutions: - region: "us-south1" @@ -132,58 +131,6 @@ properties: - 'ADVANCED' - 'ADVANCED_PREVIEW' - 'STANDARD' - - name: "advancedOptionsConfig" - type: NestedObject - description: | - Advanced Options Config of this security policy. - properties: - - name: "jsonParsing" - type: Enum - description: | - JSON body parsing. Supported values include: "DISABLED", "STANDARD", "STANDARD_WITH_GRAPHQL". - enum_values: - - "DISABLED" - - "STANDARD" - - "STANDARD_WITH_GRAPHQL" - - name: "jsonCustomConfig" - type: NestedObject - description: | - Custom configuration to apply the JSON parsing. Only applicable when JSON parsing is set to STANDARD. - properties: - - name: "contentTypes" - type: Array - description: | - A list of custom Content-Type header values to apply the JSON parsing. - item_type: - type: String - is_set: true - required: true - - name: "logLevel" - type: Enum - description: | - Logging level. Supported values include: "NORMAL", "VERBOSE". 
- enum_values: - - "NORMAL" - - "VERBOSE" - - name: "userIpRequestHeaders" - type: Array - description: | - An optional list of case-insensitive request header names to use for resolving the callers client IP address. - item_type: - type: String - is_set: true - - name: requestBodyInspectionSize - type: Enum - description: | - The maximum request size chosen by the customer with Waf enabled. Values supported are "8KB", "16KB, "32KB", "48KB" and "64KB". - Values are case insensitive. - enum_values: - - "8KB" - - "16KB" - - "32KB" - - "48KB" - - "64KB" - min_version: beta - name: 'selfLink' type: String description: | @@ -241,7 +188,6 @@ properties: description: | The set of rules that belong to this policy. There must always be a default rule (rule with priority 2147483647 and match "*"). If no rules are provided when creating a security policy, a default rule with action "allow" will be added. default_from_api: true - diff_suppress_func: 'resourceComputeRegionSecurityPolicySpecRulesDiffSuppress' item_type: type: NestedObject properties: @@ -513,7 +459,6 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. 
enum_values: - 'ALL' @@ -525,7 +470,6 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' - - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String @@ -555,7 +499,6 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -567,7 +510,6 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' - - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String diff --git a/mmv1/products/compute/RegionSecurityPolicyRule.yaml b/mmv1/products/compute/RegionSecurityPolicyRule.yaml index 1f0f8ab7a71b..b4ec0ffd7fb9 100644 --- a/mmv1/products/compute/RegionSecurityPolicyRule.yaml +++ b/mmv1/products/compute/RegionSecurityPolicyRule.yaml @@ -354,7 +354,6 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. 
If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -366,7 +365,6 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' - - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String @@ -396,7 +394,6 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. 
enum_values: - 'ALL' @@ -408,7 +405,6 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' - - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String diff --git a/mmv1/products/compute/RegionUrlMap.yaml b/mmv1/products/compute/RegionUrlMap.yaml index 4fe1431636ef..5d6689b7dc0c 100644 --- a/mmv1/products/compute/RegionUrlMap.yaml +++ b/mmv1/products/compute/RegionUrlMap.yaml @@ -36,9 +36,9 @@ collection_url_key: 'items' custom_code: sweeper: url_substitutions: - - region: 'us-central1' - - region: 'europe-west1' - - region: 'us-west1' + - region: "us-central1" + - region: "europe-west1" + - region: "us-west1" examples: - name: 'region_url_map_basic' primary_resource_id: 'regionurlmap' @@ -120,7 +120,7 @@ examples: regional_l7_xlb_map: 'regional-l7-xlb-map' l7_xlb_proxy: 'l7-xlb-proxy' l7_xlb_forwarding_rule: 'l7-xlb-forwarding-rule' - # Similar to other samples + # Similar to other samples exclude_test: true exclude_docs: true - name: 'region_url_map_path_template_match' @@ -131,45 +131,6 @@ examples: cart_backend_service_name: 'cart-service' user_backend_service_name: 'user-service' health_check_name: 'health-check' - - name: 'region_url_map_path_matcher_default_route_action' - primary_resource_id: 'regionurlmap' - vars: - region_url_map_name: 'regionurlmap' - login_region_backend_service_name: 'login' - home_region_backend_service_name: 'home' - region_health_check_name: 'health-check' - - name: 'region_url_map_default_mirror_percent' - primary_resource_id: 'regionurlmap' - min_version: 'beta' - vars: - region_url_map_name: 'regionurlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - region_health_check_name: 'health-check' - - name: 'region_url_map_path_matcher_default_mirror_percent' - primary_resource_id: 'regionurlmap' - min_version: 'beta' - vars: - region_url_map_name: 'regionurlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - region_health_check_name: 
'health-check' - - name: 'region_url_map_path_rule_mirror_percent' - primary_resource_id: 'regionurlmap' - min_version: 'beta' - vars: - region_url_map_name: 'regionurlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - region_health_check_name: 'health-check' - - name: 'region_url_map_route_rule_mirror_percent' - primary_resource_id: 'regionurlmap' - min_version: 'beta' - vars: - region_url_map_name: 'regionurlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - region_health_check_name: 'health-check' parameters: - name: 'region' type: ResourceRef @@ -283,7 +244,6 @@ properties: # exactly_one_of: # - path_matchers.0.default_service # - path_matchers.0.default_url_redirect - # - path_matchers.0.default_route_action.0.weighted_backend_services resource: 'RegionBackendService' imports: 'selfLink' - name: 'description' @@ -765,14 +725,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'RegionBackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -1235,14 +1187,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'RegionBackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. 
- validation: - function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -1516,7 +1460,6 @@ properties: # exactly_one_of: # - path_matchers.0.default_service # - path_matchers.0.default_url_redirect - # - path_matchers.0.default_route_action.0.weighted_backend_services description: | When none of the specified hostRules match, the request is redirected to a URL specified by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or @@ -1582,393 +1525,6 @@ properties: retained. This field is required to ensure an empty block is not set. The normal default value is false. required: true - - name: 'defaultRouteAction' - type: NestedObject - # TODO: (mbang) conflicts also won't work for array path matchers yet, uncomment here once supported. - # conflicts: - # - path_matcher.path_matcher.default_url_redirect - description: | - defaultRouteAction takes effect when none of the pathRules or routeRules match. The load balancer performs - advanced routing actions like URL rewrites, header transformations, etc. prior to forwarding the request - to the selected backend. If defaultRouteAction specifies any weightedBackendServices, defaultService must not be set. - Conversely if defaultService is set, defaultRouteAction cannot contain any weightedBackendServices. - - Only one of defaultRouteAction or defaultUrlRedirect must be set. - properties: - - name: 'weightedBackendServices' - type: Array - # TODO: (mbang) won't work for array path matchers yet, uncomment here once they are supported. - # (github.com/hashicorp/terraform-plugin-sdk/issues/470) - # exactly_one_of: - # - path_matchers.0.default_service - # - path_matchers.0.default_url_redirect - # - path_matchers.0.default_route_action.0.weighted_backend_services - description: | - A list of weighted backend services to send traffic to when a route match occurs. - The weights determine the fraction of traffic that flows to their corresponding backend service. 
- If all traffic needs to go to a single backend service, there must be one weightedBackendService - with weight set to a non-zero number. - - Once a backendService is identified and before forwarding the request to the backend service, - advanced routing actions like Url rewrites and header transformations are applied depending on - additional settings specified in this HttpRouteAction. - item_type: - type: NestedObject - properties: - - name: 'backendService' - type: ResourceRef - description: | - The full or partial URL to the default BackendService resource. Before forwarding the - request to backendService, the loadbalancer applies any relevant headerActions - specified as part of this backendServiceWeight. - custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' - resource: 'BackendService' - imports: 'selfLink' - - name: 'weight' - type: Integer - description: | - Specifies the fraction of traffic sent to backendService, computed as - weight / (sum of all weightedBackendService weights in routeAction) . - - The selection of a backend service is determined only for new traffic. Once a user's request - has been directed to a backendService, subsequent requests will be sent to the same backendService - as determined by the BackendService's session affinity policy. - - The value must be between 0 and 1000 - validation: - function: 'validation.IntBetween(0, 1000)' - - name: 'headerAction' - type: NestedObject - description: | - Specifies changes to request and response headers that need to take effect for - the selected backendService. - - headerAction specified here take effect before headerAction in the enclosing - HttpRouteRule, PathMatcher and UrlMap. - properties: - - name: 'requestHeadersToRemove' - type: Array - description: | - A list of header names for headers that need to be removed from the request prior to - forwarding the request to the backendService. 
- item_type: - type: String - - name: 'requestHeadersToAdd' - type: Array - description: | - Headers to add to a matching request prior to forwarding the request to the backendService. - item_type: - type: NestedObject - properties: - - name: 'headerName' - type: String - description: | - The name of the header to add. - - name: 'headerValue' - type: String - description: | - The value of the header to add. - - name: 'replace' - type: Boolean - description: | - If false, headerValue is appended to any values that already exist for the header. - If true, headerValue is set for the header, discarding any values that were set for that header. - default_value: false - - name: 'responseHeadersToRemove' - type: Array - description: | - A list of header names for headers that need to be removed from the response prior to sending the - response back to the client. - item_type: - type: String - - name: 'responseHeadersToAdd' - type: Array - description: | - Headers to add the response prior to sending the response back to the client. - item_type: - type: NestedObject - properties: - - name: 'headerName' - type: String - description: | - The name of the header to add. - - name: 'headerValue' - type: String - description: | - The value of the header to add. - - name: 'replace' - type: Boolean - description: | - If false, headerValue is appended to any values that already exist for the header. - If true, headerValue is set for the header, discarding any values that were set for that header. - default_value: false - - name: 'urlRewrite' - type: NestedObject - description: | - The spec to modify the URL of the request, prior to forwarding the request to the matched service. - properties: - - name: 'pathPrefixRewrite' - type: String - description: | - Prior to forwarding the request to the selected backend service, the matching portion of the - request's path is replaced by pathPrefixRewrite. - - The value must be between 1 and 1024 characters. 
- - name: 'hostRewrite' - type: String - description: | - Prior to forwarding the request to the selected service, the request's host header is replaced - with contents of hostRewrite. - - The value must be between 1 and 255 characters. - - name: 'pathTemplateRewrite' - type: string - description: | - If specified, the pattern rewrites the URL path (based on the :path header) using the HTTP template syntax. - - A corresponding pathTemplateMatch must be specified. Any template variables must exist in the pathTemplateMatch field. - - * At least one variable must be specified in the pathTemplateMatch field - * You can omit variables from the rewritten URL - * The * and ** operators cannot be matched unless they have a corresponding variable name - e.g. {format=*} or {var=**}. - - For example, a pathTemplateMatch of /static/{format=**} could be rewritten as /static/content/{format} to prefix - /content to the URL. Variables can also be re-ordered in a rewrite, so that /{country}/{format}/{suffix=**} can be - rewritten as /content/{format}/{country}/{suffix}. - - At least one non-empty routeRules[].matchRules[].path_template_match is required. - - Only one of pathPrefixRewrite or pathTemplateRewrite may be specified. - # TODO: (mbang) won't work for array path matchers yet, uncomment here once they are supported. - # (github.com/hashicorp/terraform-plugin-sdk/issues/470) - # exactly_one_of: - # - path_matchers.0.default_route_action.0.url_rewrite.path_prefix_rewrite - # - path_matchers.0.default_route_action.0.url_rewrite.path_template_rewrite - - name: 'timeout' - type: NestedObject - description: | - Specifies the timeout for the selected route. Timeout is computed from the time the request has been - fully processed (i.e. end-of-stream) up until the response has been completely processed. Timeout includes all retries. - - If not specified, will use the largest timeout among all backend services associated with the route. 
- default_from_api: true - properties: - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. - Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented - with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'maxStreamDuration' - type: NestedObject - description: | - Specifies the maximum duration (timeout) for streams on the selected route. - Unlike the `Timeout` field where the timeout duration starts from the time the request - has been fully processed (known as end-of-stream), the duration in this field - is computed from the beginning of the stream until the response has been processed, - including all retries. A stream that does not complete in this duration is closed. - default_from_api: true - properties: - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented - with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. - Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - required: true - - name: 'retryPolicy' - type: NestedObject - description: | - Specifies the retry policy associated with this route. - properties: - - name: 'retryConditions' - type: Array - description: | - Specfies one or more conditions when this retry rule applies. 
Valid values are: - - * 5xx: Loadbalancer will attempt a retry if the backend service responds with any 5xx response code, - or if the backend service does not respond at all, example: disconnects, reset, read timeout, - * connection failure, and refused streams. - * gateway-error: Similar to 5xx, but only applies to response codes 502, 503 or 504. - * connect-failure: Loadbalancer will retry on failures connecting to backend services, - for example due to connection timeouts. - * retriable-4xx: Loadbalancer will retry for retriable 4xx response codes. - Currently the only retriable error supported is 409. - * refused-stream:Loadbalancer will retry if the backend service resets the stream with a REFUSED_STREAM error code. - This reset type indicates that it is safe to retry. - * cancelled: Loadbalancer will retry if the gRPC status code in the response header is set to cancelled - * deadline-exceeded: Loadbalancer will retry if the gRPC status code in the response header is set to deadline-exceeded - * resource-exhausted: Loadbalancer will retry if the gRPC status code in the response header is set to resource-exhausted - * unavailable: Loadbalancer will retry if the gRPC status code in the response header is set to unavailable - item_type: - type: String - - name: 'numRetries' - type: Integer - description: | - Specifies the allowed number retries. This number must be > 0. If not specified, defaults to 1. - validation: - function: 'validation.IntAtLeast(1)' - default_value: 1 - - name: 'perTryTimeout' - type: NestedObject - description: | - Specifies a non-zero timeout per retry attempt. - - If not specified, will use the timeout set in HttpRouteAction. If timeout in HttpRouteAction is not set, - will use the largest timeout among all backend services associated with the route. - properties: - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. 
- Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are - represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'requestMirrorPolicy' - type: NestedObject - description: | - Specifies the policy on how requests intended for the route's backends are shadowed to a separate mirrored backend service. - Loadbalancer does not wait for responses from the shadow service. Prior to sending traffic to the shadow service, - the host / authority header is suffixed with -shadow. - properties: - - name: 'backendService' - type: ResourceRef - description: | - The full or partial URL to the BackendService resource being mirrored to. - required: true - custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' - resource: 'BackendService' - imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - - name: 'corsPolicy' - type: NestedObject - description: | - The specification for allowing client side cross-origin requests. Please see - [W3C Recommendation for Cross Origin Resource Sharing](https://www.w3.org/TR/cors/) - properties: - - name: 'allowOrigins' - type: Array - description: | - Specifies the list of origins that will be allowed to do CORS requests. - An origin is allowed if it matches either an item in allowOrigins or an item in allowOriginRegexes. - item_type: - type: String - - name: 'allowOriginRegexes' - type: Array - description: | - Specifies the regular expression patterns that match allowed origins. 
For regular expression grammar - please see en.cppreference.com/w/cpp/regex/ecmascript - An origin is allowed if it matches either an item in allowOrigins or an item in allowOriginRegexes. - item_type: - type: String - - name: 'allowMethods' - type: Array - description: | - Specifies the content for the Access-Control-Allow-Methods header. - item_type: - type: String - - name: 'allowHeaders' - type: Array - description: | - Specifies the content for the Access-Control-Allow-Headers header. - item_type: - type: String - - name: 'exposeHeaders' - type: Array - description: | - Specifies the content for the Access-Control-Expose-Headers header. - item_type: - type: String - - name: 'maxAge' - type: Integer - description: | - Specifies how long results of a preflight request can be cached in seconds. - This translates to the Access-Control-Max-Age header. - - name: 'allowCredentials' - type: Boolean - description: | - In response to a preflight request, setting this to true indicates that the actual request can include user credentials. - This translates to the Access-Control-Allow-Credentials header. - default_value: false - - name: 'disabled' - type: Boolean - description: | - If true, specifies the CORS policy is disabled. The default value is false, which indicates that the CORS policy is in effect. - default_value: false - - name: 'faultInjectionPolicy' - type: NestedObject - description: | - The specification for fault injection introduced into traffic to test the resiliency of clients to backend service failure. - As part of fault injection, when clients send requests to a backend service, delays can be introduced by Loadbalancer on a - percentage of requests before sending those request to the backend service. Similarly requests from clients can be aborted - by the Loadbalancer for a percentage of requests. - - timeout and retryPolicy will be ignored by clients that are configured with a faultInjectionPolicy. 
- properties: - - name: 'delay' - type: NestedObject - description: | - The specification for how client requests are delayed as part of fault injection, before being sent to a backend service. - properties: - - name: 'fixedDelay' - type: NestedObject - description: | - Specifies the value of the fixed delay interval. - properties: - - name: 'seconds' - type: String - description: | - Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. - Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - - name: 'nanos' - type: Integer - description: | - Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are - represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. - - name: 'percentage' - type: Double - description: | - The percentage of traffic (connections/operations/requests) on which delay will be introduced as part of fault injection. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - - name: 'abort' - type: NestedObject - description: | - The specification for how client requests are aborted as part of fault injection. - properties: - - name: 'httpStatus' - type: Integer - description: | - The HTTP status code used to abort the request. - The value must be between 200 and 599 inclusive. - validation: - function: 'validation.IntBetween(200, 599)' - - name: 'percentage' - type: Double - description: | - The percentage of traffic (connections/operations/requests) which will be aborted as part of fault injection. - The value must be between 0.0 and 100.0 inclusive. 
- validation: - function: 'validation.FloatBetween(0, 100)' - name: 'test' type: Array description: | @@ -2339,14 +1895,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'RegionBackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | diff --git a/mmv1/products/compute/Reservation.yaml b/mmv1/products/compute/Reservation.yaml index f459bee899aa..ee9aba108e44 100644 --- a/mmv1/products/compute/Reservation.yaml +++ b/mmv1/products/compute/Reservation.yaml @@ -58,21 +58,6 @@ examples: primary_resource_id: 'gce_reservation' vars: reservation_name: 'gce-reservation' - - name: 'reservation_basic_beta' - primary_resource_id: 'gce_reservation' - vars: - reservation_name: 'gce-reservation' - min_version: 'beta' - - name: 'reservation_source_instance_template' - primary_resource_id: 'gce_reservation_source_instance_template' - vars: - instance-template: 'instance-template' - reservation_name: 'gce-reservation-source-instance-template' - - name: 'reservation_sharing_policy' - primary_resource_id: 'gce_reservation_sharing_policy' - vars: - instance-template: 'instance-template' - reservation_name: 'gce-reservation-sharing-policy' - name: 'shared_reservation_basic' primary_resource_id: 'gce_reservation' vars: @@ -82,16 +67,8 @@ examples: org_id: 'ORG_ID' billing_account: 'BILLING_ACCT' exclude_docs: true - - name: 'shared_reservation_beta' - primary_resource_id: 'gce_reservation' - vars: - reservation_name: 'gce-shared-reservation-beta' - test_env_vars: - project: 'PROJECT_NAME' - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - exclude_docs: true - min_version: 'beta' + # Resource creation race + skip_vcr: true parameters: - 
name: 'zone' type: ResourceRef @@ -177,13 +154,6 @@ properties: type: String description: | The project id/number, should be same as the key of this project config in the project map. - - name: 'projects' - type: Array - description: | - List of project IDs with which the reservation is shared. - item_type: - type: String - min_version: 'beta' - name: 'specificReservation' type: NestedObject description: | @@ -208,11 +178,8 @@ properties: type: NestedObject description: | The instance properties for the reservation. + required: true immutable: true - default_from_api: true - exactly_one_of: - - 'specific_reservation.0.instance_properties' - - 'specific_reservation.0.source_instance_template' properties: - name: 'machineType' type: String @@ -278,73 +245,3 @@ properties: The size of the disk in base-2 GB. required: true immutable: true - - name: 'maintenanceInterval' - type: Enum - description: | - Specifies the frequency of planned maintenance events. - enum_values: - - 'AS_NEEDED' - - 'PERIODIC' - - 'RECURRENT' - min_version: 'beta' - immutable: true - - name: 'sourceInstanceTemplate' - type: String - description: | - Specifies the instance template to create the reservation. If you use this field, you must exclude the - instanceProperties field. - exactly_one_of: - - 'specific_reservation.0.instance_properties' - - 'specific_reservation.0.source_instance_template' - - name: 'deleteAtTime' - type: String - description: | - Absolute time in future when the reservation will be auto-deleted by Compute Engine. Timestamp is represented in RFC3339 text format. - Cannot be used with delete_after_duration. - immutable: true - default_from_api: true - conflicts: - - 'delete_after_duration.0.seconds' - - 'delete_after_duration.0.nanos' - - name: 'deleteAfterDuration' - type: NestedObject - description: | - Duration after which the reservation will be auto-deleted by Compute Engine. Cannot be used with delete_at_time. 
- ignore_read: true - properties: - - name: 'seconds' - type: String - description: | - Number of seconds for the auto-delete duration. - immutable: true - conflicts: - - 'delete_at_time' - - name: 'nanos' - type: Integer - description: | - Number of nanoseconds for the auto-delete duration. - immutable: true - conflicts: - - 'delete_at_time' - - name: 'reservationSharingPolicy' - type: NestedObject - description: | - Sharing policy for reservations with Google Cloud managed services. - default_from_api: true - properties: - - name: 'serviceShareType' - type: Enum - description: | - Sharing config for all Google Cloud services. - enum_values: - - 'ALLOW_ALL' - - 'DISALLOW_ALL' - default_from_api: true - immutable: true - - name: 'enableEmergentMaintenance' - type: Boolean - description: | - Indicates if this group of VMs have emergent maintenance enabled. - immutable: true - ignore_read: true - min_version: 'beta' diff --git a/mmv1/products/compute/ResourcePolicy.yaml b/mmv1/products/compute/ResourcePolicy.yaml index 9b3f3a15e35b..c4b0f1c71199 100644 --- a/mmv1/products/compute/ResourcePolicy.yaml +++ b/mmv1/products/compute/ResourcePolicy.yaml @@ -81,18 +81,22 @@ examples: name: 'gce-policy' - name: 'resource_policy_workload_policy' primary_resource_id: 'bar' + min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_workload_policy_accelerator_topology' primary_resource_id: 'bar' + min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_workload_policy_max_topology_distance' primary_resource_id: 'bar' + min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_placement_policy_gpu_topology' primary_resource_id: 'baz' + min_version: 'beta' vars: name: 'gce-policy' - name: 'resource_policy_placement_policy_tpu_topology' @@ -332,6 +336,7 @@ properties: conflicts: - group_placement_policy.0.max_distance immutable: true + min_version: 'beta' - name: 'tpuTopology' type: String description: | @@ -409,6 +414,7 @@ properties: type: 
NestedObject description: | Represents the workload policy. + min_version: 'beta' properties: - name: 'type' type: Enum diff --git a/mmv1/products/compute/Route.yaml b/mmv1/products/compute/Route.yaml index e8164df51c12..02cb782b4671 100644 --- a/mmv1/products/compute/Route.yaml +++ b/mmv1/products/compute/Route.yaml @@ -366,20 +366,3 @@ properties: - 'ACTIVE' for an active route - 'INACTIVE' for an inactive route output: true - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the route. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. The field is ignored when empty. - The field is immutable and causes resource replacement when mutated. This field is only - set at create time and modifying this field after creation will trigger recreation. - To apply tags to an existing resource, see the google_tags_tag_binding resource. - ignore_read: true diff --git a/mmv1/products/compute/Router.yaml b/mmv1/products/compute/Router.yaml index a800a8b0a04b..d762c7a40409 100644 --- a/mmv1/products/compute/Router.yaml +++ b/mmv1/products/compute/Router.yaml @@ -159,7 +159,7 @@ properties: This enum field has the one valid value: ALL_SUBNETS send_empty_value: true - # TODO: #324 enum? + # TODO(#324): enum? item_type: type: String - name: 'advertisedIpRanges' @@ -171,7 +171,7 @@ properties: ranges will be advertised in addition to any specified groups. Leave this field blank to advertise no custom IP ranges. 
send_empty_value: true - is_set: true + custom_flatten: 'templates/terraform/custom_flatten/compute_router_range.go.tmpl' item_type: type: NestedObject properties: @@ -233,19 +233,3 @@ properties: description: | Value of the key used for MD5 authentication. required: true - - name: 'params' - type: NestedObject - min_version: 'beta' - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the router. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. - api_name: resourceManagerTags - ignore_read: true diff --git a/mmv1/products/compute/RouterNat.yaml b/mmv1/products/compute/RouterNat.yaml index 8c35ac4e8a62..6c9a904460c8 100644 --- a/mmv1/products/compute/RouterNat.yaml +++ b/mmv1/products/compute/RouterNat.yaml @@ -257,36 +257,6 @@ properties: is_set: true item_type: type: String - - name: 'sourceSubnetworkIpRangesToNat64' - type: Enum - description: | - Specify the Nat option for NAT64, which can take one of the following values: - ALL_IPV6_SUBNETWORKS: All of the IP ranges in every Subnetwork are allowed to Nat. - LIST_OF_IPV6_SUBNETWORKS: A list of Subnetworks are allowed to Nat (specified in the field nat64Subnetwork below). - Note that if this field contains NAT64_ALL_V6_SUBNETWORKS no other Router.Nat section in this region can also enable NAT64 for any Subnetworks in this network. - Other Router.Nat sections can still be present to enable NAT44 only. - enum_values: - - 'ALL_IPV6_SUBNETWORKS' - - 'LIST_OF_IPV6_SUBNETWORKS' - - name: 'nat64Subnetwork' - type: Array - description: | - One or more subnetwork NAT configurations whose traffic should be translated by NAT64 Gateway. 
- Only used if `source_subnetwork_ip_ranges_to_nat64` is set to `LIST_OF_IPV6_SUBNETWORKS` - api_name: nat64Subnetworks - is_set: true - send_empty_value: true - set_hash_func: computeRouterNatSubnetworkHash - item_type: - type: NestedObject - properties: - - name: 'name' - type: ResourceRef - description: 'Self-link of the subnetwork resource that will use NAT64' - required: true - custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' - resource: 'Subnetwork' - imports: 'selfLink' - name: 'minPortsPerVm' type: Integer description: | @@ -512,7 +482,6 @@ properties: Must be one of: PREMIUM, STANDARD. If not specified, then the current project-level default tier is used. default_from_api: true - ignore_read: true enum_values: - 'PREMIUM' - 'STANDARD' diff --git a/mmv1/products/compute/RouterRoutePolicy.yaml b/mmv1/products/compute/RouterRoutePolicy.yaml index 44e9f8b8c266..33cd16b941e9 100644 --- a/mmv1/products/compute/RouterRoutePolicy.yaml +++ b/mmv1/products/compute/RouterRoutePolicy.yaml @@ -110,8 +110,7 @@ properties: - name: 'match' type: NestedObject description: | - CEL expression evaluated against a route to determine if this term applies (see Policy Language). - required: true + CEL expression evaluated against a route to determine if this term applies (see Policy Language). When not set, the term applies to all routes. properties: - name: 'expression' type: String diff --git a/mmv1/products/compute/SecurityPolicyRule.yaml b/mmv1/products/compute/SecurityPolicyRule.yaml index 1b80486b6dbc..3ccaeb449e14 100644 --- a/mmv1/products/compute/SecurityPolicyRule.yaml +++ b/mmv1/products/compute/SecurityPolicyRule.yaml @@ -370,7 +370,6 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. 
* TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. enum_values: - 'ALL' @@ -382,7 +381,6 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' - - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String @@ -412,7 +410,6 @@ properties: * SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. * REGION_CODE: The country/region from which the request originates. * TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * USER_IP: The IP address of the originating client, which is resolved based on "userIpRequestHeaders" configured with the security policy. If there is no "userIpRequestHeaders" configuration or an IP address cannot be resolved from it, the key type defaults to IP. 
enum_values: - 'ALL' @@ -424,7 +421,6 @@ properties: - 'SNI' - 'REGION_CODE' - 'TLS_JA3_FINGERPRINT' - - 'TLS_JA4_FINGERPRINT' - 'USER_IP' - name: 'enforceOnKeyName' type: String diff --git a/mmv1/products/compute/ServiceAttachment.yaml b/mmv1/products/compute/ServiceAttachment.yaml index f89c9250f4ee..4813b6d0f765 100644 --- a/mmv1/products/compute/ServiceAttachment.yaml +++ b/mmv1/products/compute/ServiceAttachment.yaml @@ -38,7 +38,6 @@ async: custom_code: constants: 'templates/terraform/constants/compute_service_attachment.go.tmpl' update_encoder: 'templates/terraform/update_encoder/compute_service_attachment.go.tmpl' - encoder: 'templates/terraform/encoders/compute_service_attachment.go.tmpl' sweeper: url_substitutions: - region: "us-west2" @@ -95,10 +94,6 @@ examples: producer_forwarding_rule_name: 'producer-forwarding-rule' consumer_address_name: 'psc-ilb-consumer-address' consumer_forwarding_rule_name: 'psc-ilb-consumer-forwarding-rule' - - name: 'service_attachment_cross_region_ilb' - primary_resource_id: 'psc_ilb_service_attachment' - vars: - name: 'sa' parameters: - name: 'region' type: ResourceRef @@ -234,13 +229,13 @@ properties: properties: - name: 'projectIdOrNum' type: String - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (laurensknoll): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | A project that is allowed to connect to this service attachment. Only one of project_id_or_num and network_url may be set. - name: 'networkUrl' type: String - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (laurensknoll): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | The network that is allowed to connect to this service attachment. 
Only one of project_id_or_num and network_url may be set. @@ -269,14 +264,5 @@ properties: If the connection preference of the service attachment is ACCEPT_MANUAL, the limit applies to each project or network that is listed in the consumer accept list. If the connection preference of the service attachment is ACCEPT_AUTOMATIC, the limit applies to each project that contains a connected endpoint. - If unspecified, the default propagated connection limit is 250. To explicitly send a zero value, set `send_propagated_connection_limit_if_zero = true`. + If unspecified, the default propagated connection limit is 250. default_from_api: true -virtual_fields: - - name: 'send_propagated_connection_limit_if_zero' - description: | - Controls the behavior of propagated_connection_limit. - When false, setting propagated_connection_limit to zero causes the provider to use to the API's default value. - When true, the provider will set propagated_connection_limit to zero. - Defaults to false. - type: Boolean - default_value: false diff --git a/mmv1/products/compute/Snapshot.yaml b/mmv1/products/compute/Snapshot.yaml index 4fafb7962670..088b93bb4c86 100644 --- a/mmv1/products/compute/Snapshot.yaml +++ b/mmv1/products/compute/Snapshot.yaml @@ -136,7 +136,7 @@ parameters: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true - # TODO Change to ResourceRef once KMS is in Magic Modules + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules - name: 'kmsKeySelfLink' type: String description: | diff --git a/mmv1/products/compute/SnapshotSettings.yaml b/mmv1/products/compute/SnapshotSettings.yaml deleted file mode 100644 index 93c194dd8e98..000000000000 --- a/mmv1/products/compute/SnapshotSettings.yaml +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'SnapshotSettings' -kind: 'compute#snapshotSettings' -description: | - Updates your project's snapshot settings and sets a new default storage location for snapshots. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/compute/docs/disks/snapshot-settings#snapshot_settings_how_to_update' - api: 'https://cloud.google.com/compute/docs/reference/rest/v1/snapshotSettings' -base_url: 'projects/{{project}}/global/snapshotSettings' -update_url: 'projects/{{project}}/global/snapshotSettings' -import_format: - - 'projects/{{project}}/global/snapshotSettings/' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -create_verb: 'PATCH' -update_verb: 'PATCH' -custom_code: - encoder: 'templates/terraform/encoders/compute_snapshot_settings.go.tmpl' -autogen_async: true -async: - actions: ['create', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - result: - resource_inside_response: false -# there is only a GET and PATCH endpoint -exclude_delete: true -examples: - - name: 'snapshot_settings_specific_locations' - exclude_test: true - primary_resource_id: 'tf-test-snapshot-settings' - vars: - policy: 'SPECIFIC_LOCATIONS' -parameters: -properties: - - name: 'storageLocation' - type: NestedObject - description: | - Policy of which storage location is going to be resolved, and additional data - that particularizes how the policy is going to be carried out - required: true - properties: - - name: 'policy' - type: Enum - 
description: | - The chosen location policy - required: true - enum_values: - - 'NEAREST_MULTI_REGION' - - 'LOCAL_REGION' - - 'SPECIFIC_LOCATIONS' - - name: 'locations' - type: Map - description: | - When the policy is SPECIFIC_LOCATIONS, snapshots will be stored in the - locations listed in this field. Keys are Cloud Storage bucket locations. - Only one location can be specified. - key_name: 'location' - key_description: | - Name of the location. It should be one of the Cloud Storage buckets. - Only one location can be specified. - value_type: - name: location - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Name of the location. It should be one of the Cloud Storage buckets. - Only one location can be specified. (should match location) - required: true diff --git a/mmv1/products/compute/StoragePool.yaml b/mmv1/products/compute/StoragePool.yaml index c6e457c51a60..80d869b02969 100644 --- a/mmv1/products/compute/StoragePool.yaml +++ b/mmv1/products/compute/StoragePool.yaml @@ -277,10 +277,6 @@ properties: - "STANDARD" - "ADVANCED" default_from_api: true - - name: "labels" - type: KeyValueLabels - description: | - Labels to apply to this storage pool. These can be later modified by the setLabels method. 
virtual_fields: - name: "deletion_protection" type: Boolean diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index f3ebd6ada0c9..c22c26b95c6b 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -45,7 +45,6 @@ references: docs: base_url: 'projects/{{project}}/regions/{{region}}/subnetworks' has_self_link: true -include_in_tgc_next_DO_NOT_USE: true immutable: true timeouts: insert_minutes: 20 @@ -68,7 +67,6 @@ custom_code: extra_schema_entry: 'templates/terraform/extra_schema_entry/subnetwork.tmpl' constants: 'templates/terraform/constants/subnetwork.tmpl' post_update: 'templates/terraform/post_update/compute_subnetwork.go.tmpl' - tgc_decoder: 'templates/tgc_next/decoders/compute_subnetwork.go.tmpl' custom_diff: - 'customdiff.ForceNewIfChange("ip_cidr_range", IsShrinkageIpCidr)' - 'sendSecondaryIpRangeIfEmptyDiff' @@ -359,6 +357,7 @@ properties: fingerprint_name: 'fingerprint' custom_flatten: 'templates/terraform/custom_flatten/subnetwork_log_config.go.tmpl' custom_expand: 'templates/terraform/custom_expand/subnetwork_log_config.go.tmpl' + diff_suppress_func: 'subnetworkLogConfigDiffSuppress' properties: - name: 'aggregationInterval' type: Enum @@ -473,7 +472,6 @@ properties: - name: 'ipCollection' type: String ignore_read: true - is_missing_in_cai: true description: | Resource reference of a PublicDelegatedPrefix. The PDP must be a sub-PDP in EXTERNAL_IPV6_SUBNETWORK_CREATION mode. @@ -509,7 +507,15 @@ properties: update_url: 'projects/{{project}}/regions/{{region}}/subnetworks/{{name}}' update_verb: 'PATCH' fingerprint_name: 'fingerprint' - is_missing_in_cai: true + - name: 'enableFlowLogs' + type: Boolean + description: | + Whether to enable flow logging for this subnetwork. If this field is not explicitly set, + it will not appear in get listings. 
If not set the default behavior is determined by the + org policy, if there is no org policy specified, then it will default to disabled. + This field isn't supported if the subnet purpose field is set to REGIONAL_MANAGED_PROXY. + default_from_api: true + deprecation_message: 'This field is being removed in favor of log_config. If log_config is present, flow logs are enabled.' - name: 'state' type: Enum description: | @@ -518,21 +524,3 @@ properties: set to INTERNAL_HTTPS_LOAD_BALANCER and indicates that connections to the load balancer are being drained. A subnetwork that is draining cannot be used or modified until it reaches a status of READY' output: true - - name: 'params' - type: NestedObject - ignore_read: true - immutable: true - description: | - Additional params passed with the request, but not persisted as part of resource payload - properties: - - name: 'resourceManagerTags' - type: KeyValuePairs - description: | - Resource manager tags to be bound to the subnetwork. Tag keys and values have the - same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, - and values are in the format tagValues/456. The field is ignored when empty. - The field is immutable and causes resource replacement when mutated. This field is only - set at create time and modifying this field after creation will trigger recreation. - To apply tags to an existing resource, see the google_tags_tag_binding resource. 
- ignore_read: true - immutable: true diff --git a/mmv1/products/compute/TargetHttpProxy.yaml b/mmv1/products/compute/TargetHttpProxy.yaml index d221a310f271..cd1ff2f7d3dc 100644 --- a/mmv1/products/compute/TargetHttpProxy.yaml +++ b/mmv1/products/compute/TargetHttpProxy.yaml @@ -58,13 +58,6 @@ examples: vars: target_http_proxy_name: 'test-https-redirect-proxy' url_map_name: 'url-map' - - name: 'target_http_proxy_fingerprint' - primary_resource_id: 'default' - vars: - target_http_proxy_name: 'test-fingerprint-proxy' - url_map_name: 'url-map' - backend_service_name: 'backend-service' - http_health_check_name: 'http-health-check' parameters: properties: - name: 'creationTimestamp' @@ -119,12 +112,3 @@ properties: value is 600 seconds, the minimum allowed value is 5 seconds, and the maximum allowed value is 600 seconds. For Global external HTTP(S) load balancer (classic), this option is not available publicly. - - name: 'fingerprint' - type: Fingerprint - description: | - Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. - This field will be ignored when inserting a TargetHttpProxy. An up-to-date fingerprint must be provided in order to - patch/update the TargetHttpProxy; otherwise, the request will fail with error 412 conditionNotMet. - To see the latest fingerprint, make a get() request to retrieve the TargetHttpProxy. - A base64-encoded string. 
- output: true diff --git a/mmv1/products/compute/TargetHttpsProxy.yaml b/mmv1/products/compute/TargetHttpsProxy.yaml index f0eed46422ae..a9082e23415b 100644 --- a/mmv1/products/compute/TargetHttpsProxy.yaml +++ b/mmv1/products/compute/TargetHttpsProxy.yaml @@ -79,14 +79,6 @@ examples: certificate_manager_certificate_name: 'my-certificate' url_map_name: 'url-map' backend_service_name: 'backend-service' - - name: 'target_https_proxy_fingerprint' - primary_resource_id: 'default' - vars: - target_https_proxy_name: 'test-fingerprint-proxy' - ssl_certificate_name: 'my-certificate' - url_map_name: 'url-map' - backend_service_name: 'backend-service' - http_health_check_name: 'http-health-check' parameters: properties: - name: 'creationTimestamp' @@ -246,12 +238,3 @@ properties: fingerprint_name: 'fingerprint' resource: 'ServerTlsPolicy' imports: 'selfLink' - - name: 'fingerprint' - type: Fingerprint - description: | - Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. - This field will be ignored when inserting a TargetHttpsProxy. An up-to-date fingerprint must be provided in order to - patch the TargetHttpsProxy; otherwise, the request will fail with error 412 conditionNotMet. - To see the latest fingerprint, make a get() request to retrieve the TargetHttpsProxy. - A base64-encoded string. 
- output: true diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index 5fbee1b216be..f137b5431645 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -36,7 +36,6 @@ async: result: resource_inside_response: false collection_url_key: 'items' -include_in_tgc_next_DO_NOT_USE: true custom_code: examples: - name: 'url_map_bucket_and_service' @@ -53,8 +52,6 @@ examples: url_map_name: 'urlmap' home_backend_service_name: 'home' health_check_name: 'health-check' - tgc_test_ignore_extra: - - 'path_matcher.route_rules.url_redirect.https_redirect' - name: 'url_map_traffic_director_route_partial' primary_resource_id: 'urlmap' vars: @@ -89,56 +86,6 @@ examples: service_a_backend_service_name: 'service-a' service_b_backend_service_name: 'service-b' health_check_name: 'health-check' - - name: 'url_map_default_mirror_percent' - primary_resource_id: 'urlmap' - min_version: 'beta' - vars: - url_map_name: 'urlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - health_check_name: 'health-check' - - name: 'url_map_path_matcher_default_mirror_percent' - primary_resource_id: 'urlmap' - min_version: 'beta' - vars: - url_map_name: 'urlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - health_check_name: 'health-check' - - name: 'url_map_path_rule_mirror_percent' - primary_resource_id: 'urlmap' - min_version: 'beta' - vars: - url_map_name: 'urlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - health_check_name: 'health-check' - - name: 'url_map_route_rule_mirror_percent' - primary_resource_id: 'urlmap' - min_version: 'beta' - vars: - url_map_name: 'urlmap' - home_backend_service_name: 'home' - mirror_backend_service_name: 'mirror' - health_check_name: 'health-check' - - name: 'url_map_test_headers' - primary_resource_id: 'urlmap' - vars: - url_map_name: 'urlmap' - backend_service_name: 'backend' - health_check_name: 
'health-check' - - name: 'url_map_test_expected_output_url' - primary_resource_id: 'urlmap' - vars: - url_map_name: 'urlmap' - backend_service_name: 'backend' - health_check_name: 'health-check' - - name: 'url_map_test_redirect_response_code' - primary_resource_id: 'urlmap' - vars: - url_map_name: 'urlmap' - backend_service_name: 'backend' - health_check_name: 'health-check' - name: 'external_http_lb_mig_backend' primary_resource_id: 'default' vars: @@ -168,29 +115,13 @@ examples: storage_bucket_name: 'static-asset-bucket' - name: 'url_map_custom_error_response_policy' primary_resource_id: 'urlmap' + min_version: 'beta' vars: url_map_name: 'urlmap' backend_service_name: 'login' http_health_check_name: 'health-check' storage_bucket_name: 'static-asset-bucket' error_backend_bucket_name: 'error-backend-bucket' - - name: 'url_map_http_filter_configs' - primary_resource_id: 'urlmap' - min_version: 'beta' - vars: - url_map_name: 'urlmap' - default_backend_service_name: 'default-backend' - service_a_backend_service_name: 'service-a-backend' - health_check_name: 'health-check' - - name: 'url_map_http_filter_metadata' - primary_resource_id: 'urlmap' - min_version: 'beta' - vars: - url_map_name: 'urlmap' - default_backend_service_name: 'default-backend' - service_a_backend_service_name: 'service-a-backend' - service_b_backend_service_name: 'service-b-backend' - health_check_name: 'health-check' parameters: properties: - name: 'creationTimestamp' @@ -396,6 +327,7 @@ properties: When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. 
+ min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -405,7 +337,6 @@ properties: For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. api_name: errorResponseRules - is_missing_in_cai: true item_type: type: NestedObject properties: @@ -557,6 +488,7 @@ properties: A PathRule for /coming_soon/ is configured for the error code 404. If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in PathRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. customErrorResponsePolicy is supported only for global external Application Load Balancers. + min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -566,7 +498,6 @@ properties: For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. api_name: errorResponseRules - is_missing_in_cai: true item_type: type: NestedObject properties: @@ -750,15 +681,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - description: | - The percentage of requests to be mirrored to backendService. 
- The value must be between 0.0 and 100.0 inclusive. - is_missing_in_cai: true - validation: - function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -775,7 +697,6 @@ properties: properties: - name: 'nanos' type: Integer - include_empty_value_in_cai: true description: | Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 `seconds` field and a positive @@ -1512,14 +1433,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - name: 'retryPolicy' type: NestedObject description: | @@ -1820,6 +1733,7 @@ properties: type: NestedObject description: | customErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendService or BackendBucket responds with an error. + min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -1869,62 +1783,6 @@ properties: If load balancer cannot reach the backendBucket, a simple Not Found Error will be returned, with the original response code (or overrideResponseCode if configured). resource: 'BackendBucket' imports: 'selfLink' - - name: 'httpFilterConfigs' - type: Array - min_version: 'beta' - is_missing_in_cai: true - description: | - Outbound route specific configuration for networkservices.HttpFilter resources enabled by Traffic Director. - httpFilterConfigs only applies for load balancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. - See ForwardingRule for more details. - - Not supported when the URL map is bound to a target gRPC proxy that has validateForProxyless field set to true. 
- item_type: - type: NestedObject - properties: - - name: 'filterName' - type: String - description: | - Name of the networkservices.HttpFilter resource this configuration belongs to. - This name must be known to the xDS client. Example: envoy.wasm - - name: 'configTypeUrl' - type: String - description: | - The fully qualified versioned proto3 type url of the protobuf that the filter expects for its contextual settings, - for example: type.googleapis.com/google.protobuf.Struct - - name: 'config' - type: String - description: | - The configuration needed to enable the networkservices.HttpFilter resource. - The configuration must be YAML formatted and only contain fields defined in the protobuf identified in configTypeUrl - - name: 'httpFilterMetadata' - type: Array - min_version: 'beta' - is_missing_in_cai: true - description: | - Outbound route specific metadata supplied to networkservices.HttpFilter resources enabled by Traffic Director. - httpFilterMetadata only applies for load balancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. - See ForwardingRule for more details. - - Not supported when the URL map is bound to a target gRPC proxy that has validateForProxyless field set to true. - item_type: - type: NestedObject - properties: - - name: 'filterName' - type: String - description: | - Name of the networkservices.HttpFilter resource this configuration belongs to. - This name must be known to the xDS client. Example: envoy.wasm - - name: 'configTypeUrl' - type: String - description: | - The fully qualified versioned proto3 type url of the protobuf that the filter expects for its contextual settings, - for example: type.googleapis.com/google.protobuf.Struct - - name: 'config' - type: String - description: | - The configuration needed to enable the networkservices.HttpFilter resource. 
- The configuration must be YAML formatted and only contain fields defined in the protobuf identified in configTypeUrl - name: 'defaultUrlRedirect' type: NestedObject # TODO: (mbang) won't work for array path matchers yet, uncomment here once they are supported. @@ -2243,15 +2101,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - type: Double - is_missing_in_cai: true - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | @@ -2379,6 +2228,7 @@ properties: When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. + min_version: 'beta' properties: - name: 'errorResponseRule' type: Array @@ -2388,7 +2238,6 @@ properties: For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. api_name: errorResponseRules - is_missing_in_cai: true item_type: type: NestedObject properties: @@ -2451,45 +2300,13 @@ properties: description: | Path portion of the URL. required: true - - name: 'headers' - type: Array - description: | - HTTP headers for this request. 
- item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - Header name. - required: true - - name: 'value' - type: String - description: | - Header value. - required: true - name: 'service' type: ResourceRef description: The backend service or backend bucket link that should be matched by this test. + required: true custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' - - name: 'expectedOutputUrl' - type: String - description: | - The expected output URL evaluated by the load balancer containing the scheme, host, path and query parameters. - - For rules that forward requests to backends, the test passes only when expectedOutputUrl matches the request forwarded by the load balancer to backends. For rules with urlRewrite, the test verifies that the forwarded request matches hostRewrite and pathPrefixRewrite in the urlRewrite action. When service is specified, expectedOutputUrl`s scheme is ignored. - - For rules with urlRedirect, the test passes only if expectedOutputUrl matches the URL in the load balancer's redirect response. If urlRedirect specifies httpsRedirect, the test passes only if the scheme in expectedOutputUrl is also set to HTTPS. If urlRedirect specifies stripQuery, the test passes only if expectedOutputUrl does not contain any query parameters. - - expectedOutputUrl is optional when service is specified. - - name: 'expectedRedirectResponseCode' - type: Integer - description: | - For rules with urlRedirect, the test passes only if expectedRedirectResponseCode matches the HTTP status code in load balancer's redirect response. - - expectedRedirectResponseCode cannot be set when service is set. 
- name: 'defaultUrlRedirect' type: NestedObject description: | @@ -2875,15 +2692,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/reference_to_backend.tmpl' resource: 'BackendService' imports: 'selfLink' - - name: 'mirrorPercent' - min_version: beta - is_missing_in_cai: true - type: Double - description: | - The percentage of requests to be mirrored to backendService. - The value must be between 0.0 and 100.0 inclusive. - validation: - function: 'validation.FloatBetween(0, 100)' - name: 'corsPolicy' type: NestedObject description: | diff --git a/mmv1/products/compute/VpnTunnel.yaml b/mmv1/products/compute/VpnTunnel.yaml index 6aa59f9b177a..63ec456d1ba1 100644 --- a/mmv1/products/compute/VpnTunnel.yaml +++ b/mmv1/products/compute/VpnTunnel.yaml @@ -52,18 +52,6 @@ examples: udp500_forwarding_rule_name: 'fr-udp500' udp4500_forwarding_rule_name: 'fr-udp4500' route_name: 'route1' - - name: 'vpn_tunnel_cipher_suite' - min_version: 'beta' - primary_resource_id: 'tunnel1' - vars: - vpn_tunnel_name: 'tunnel-cipher' - target_vpn_gateway_name: 'vpn-1' - network_name: 'network-1' - address_name: 'vpn-static-ip' - esp_forwarding_rule_name: 'fr-esp' - udp500_forwarding_rule_name: 'fr-udp500' - udp4500_forwarding_rule_name: 'fr-udp4500' - route_name: 'route1' parameters: - name: 'region' type: ResourceRef @@ -231,68 +219,3 @@ properties: type: String description: 'Detailed status message for the VPN tunnel.' output: true - - name: 'cipherSuite' - type: NestedObject - min_version: 'beta' - description: | - User specified list of ciphers to use for the phase 1 and phase 2 of the IKE protocol. - properties: - - name: 'phase1' - type: NestedObject - description: 'Cipher configuration for phase 1 of the IKE protocol.' - min_version: 'beta' - properties: - - name: 'encryption' - type: Array - description: 'Encryption algorithms.' - is_set: true - min_version: 'beta' - item_type: - type: String - - name: 'integrity' - type: Array - description: 'Integrity algorithms.' 
- is_set: true - min_version: 'beta' - item_type: - type: String - - name: 'prf' - type: Array - description: 'Pseudo-random functions.' - is_set: true - min_version: 'beta' - item_type: - type: String - - name: 'dh' - type: Array - description: 'Diffie-Hellman groups.' - is_set: true - min_version: 'beta' - item_type: - type: String - - name: 'phase2' - type: NestedObject - description: 'Cipher configuration for phase 2 of the IKE protocol.' - min_version: 'beta' - properties: - - name: 'encryption' - type: Array - description: 'Encryption algorithms.' - is_set: true - min_version: 'beta' - item_type: - type: String - - name: 'integrity' - type: Array - description: 'Integrity algorithms.' - is_set: true - min_version: 'beta' - item_type: - type: String - - name: 'pfs' - type: Array - description: 'Perfect forward secrecy groups.' - is_set: true - min_version: 'beta' - item_type: - type: String diff --git a/mmv1/products/compute/WireGroup.yaml b/mmv1/products/compute/WireGroup.yaml deleted file mode 100644 index b81bc5975952..000000000000 --- a/mmv1/products/compute/WireGroup.yaml +++ /dev/null @@ -1,212 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WireGroup' -kind: 'compute#wireGroup' -description: | - The WireGroup resource represents a group of redundant wires between interconnects in two different metros. Each WireGroup belongs to a CrossSiteNetwork. 
A wire group defines endpoints and the wires which exist between them. - -references: - guides: - 'Create a WireGroup': 'https://cloud.google.com/network-connectivity/docs/interconnect/how-to/cross-site/modify-network#add-wire-group' - api: 'https://cloud.google.com/compute/docs/reference/rest/beta/wireGroups' -min_version: beta -docs: -id_format: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' -base_url: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups' -self_link: 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/global/crossSiteNetworks/{{cross_site_network}}/wireGroups/{{name}}' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false -examples: - - name: 'compute_wire_group_basic' - primary_resource_id: 'example-test-wire-group' - vars: - name: 'test-wire-group' - description: 'Example Wire Group' - cross_site_network: 'test-cross-site-network' - min_version: 'beta' - test_env_vars: - project: 'PROJECT_NAME' -parameters: - - name: 'crossSiteNetwork' - type: ResourceRef - description: Required cross site network to which wire group belongs. - required: true - immutable: true - url_param_only: true - resource: 'CrossSiteNetwork' - imports: 'name' - diff_suppress_func: 'tpgresource.CompareResourceNames' - custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' - min_version: beta -properties: - - name: 'description' - type: String - description: | - An optional description of this resource. Provide this property when you create the resource. - - name: 'creationTimestamp' - type: Time - description: | - Creation timestamp in RFC3339 text format. 
- output: true - - name: 'name' - type: String - description: | - Name of the resource. Provided by the client when the resource is created. The name must be - 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters - long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first - character must be a lowercase letter, and all following characters must be a dash, - lowercase letter, or digit, except the last character, which cannot be a dash. - required: true - validation: - regex: '^[a-z]([-a-z0-9]*[a-z0-9])?$' - - name: endpoints - type: Map - description: | - Endpoints grouped by location, each mapping to interconnect configurations. - key_name: 'endpoint' - key_description: | - The name of the endpoint, which is a city name. - value_type: - type: NestedObject - properties: - - name: interconnects - type: Map - key_name: interconnect_name - key_description: | - The name of the interconnect. - value_type: - type: NestedObject - description: | - Map of interconnect details. - properties: - - name: interconnect - type: string - - name: vlan_tags - type: Array - description: | - VLAN tags for the interconnect. - item_type: - type: Integer - - name: adminEnabled - type: Boolean - description: | - Indicates whether the wire group is administratively enabled. - default_value: true - - name: wireGroupProperties - type: NestedObject - description: | - Properties specific to the wire group. - properties: - - name: type - type: enum - description: | - Type of wire group (enum). - WIRE: a single pseudowire over two Interconnect connections with no redundancy. - REDUNDANT: two pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. - BOX_AND_CROSS: four pseudowires over four Interconnect connections, with two connections in one metro and two connections in another metro. 
- enum_values: - - 'WIRE' - - 'REDUNDANT' - - 'BOX_AND_CROSS' - - name: wireProperties - type: NestedObject - description: | - Default properties for wires within the group. - properties: - - name: bandwidthUnmetered - type: Integer - description: | - The unmetered bandwidth setting. - - name: faultResponse - type: enum - description: | - Response when a fault is detected in a pseudowire: - NONE: default. - DISABLE_PORT: set the port line protocol down when inline probes detect a fault. This setting is only permitted on port mode pseudowires. - enum_values: - - 'NONE' - - 'DISABLE_PORT' - - name: wires - type: Array - description: | - The single/redundant wire(s) managed by the wire group. - output: true - item_type: - type: NestedObject - properties: - - name: label - type: string - output: true - - name: endpoints - type: Array - output: true - description: | - 'Wire endpoints are specific Interconnect connections.' - item_type: - type: NestedObject - properties: - - name: interconnect - type: string - output: true - - name: vlanTag - type: Integer - output: true - - name: wireProperties - type: NestedObject - output: true - properties: - - name: bandwidthUnmetered - type: Integer - output: true - - name: faultResponse - type: enum - output: true - enum_values: - - 'NONE' - - 'DISABLE_PORT' - - name: adminEnabled - type: Boolean - output: true - - name: topology - type: NestedObject - description: | - Topology details for the wire group configuration. 
- output: true - properties: - - name: endpoints - type: Array - output: true - item_type: - type: NestedObject - properties: - - name: label - type: string - output: true - - name: city - type: string - output: true diff --git a/mmv1/products/compute/product.yaml b/mmv1/products/compute/product.yaml index 012ca3739a61..a77b1c0718a7 100644 --- a/mmv1/products/compute/product.yaml +++ b/mmv1/products/compute/product.yaml @@ -17,9 +17,7 @@ display_name: 'Compute Engine' versions: - name: 'ga' base_url: 'https://compute.googleapis.com/compute/v1/' - cai_legacy_base_url: 'https://www.googleapis.com/compute/v1/' - name: 'beta' base_url: 'https://compute.googleapis.com/compute/beta/' - cai_legacy_base_url: 'https://www.googleapis.com/compute/v1/' scopes: - 'https://www.googleapis.com/auth/compute' diff --git a/mmv1/products/contactcenterinsights/AnalysisRule.yaml b/mmv1/products/contactcenterinsights/AnalysisRule.yaml deleted file mode 100644 index cd540ad80c8c..000000000000 --- a/mmv1/products/contactcenterinsights/AnalysisRule.yaml +++ /dev/null @@ -1,178 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: AnalysisRule -description: | - The CCAI Insights project wide analysis rule. - This rule will be applied to all conversations that match the filter defined in the rule. - For a conversation matches the filter, the annotators specified in the rule will be run. 
- If a conversation matches multiple rules, a union of all the annotators will be run. - One project can have multiple analysis rules. -references: - guides: - 'Configure analysis rules using the API': 'https://cloud.google.com/contact-center/insights/docs/analysis-rule' - api: 'https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations.analysisRules' -base_url: projects/{{project}}/locations/{{location}}/analysisRules -self_link: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} -create_url: projects/{{project}}/locations/{{location}}/analysisRules -delete_url: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} -update_verb: 'PATCH' -update_mask: true -id_format: projects/{{project}}/locations/{{location}}/analysisRules/{{name}} -import_format: - - projects/{{project}}/locations/{{location}}/analysisRules/{{name}} -autogen_status: QW5hbHlzaXNSdWxl -examples: - - name: 'contact_center_insights_analysis_rule_basic' - primary_resource_id: 'analysis_rule_basic' - - name: 'contact_center_insights_analysis_rule_full' - primary_resource_id: 'analysis_rule_full' - test_env_vars: - project_number: 'PROJECT_NUMBER' - - name: 'contact_center_insights_analysis_rule_profile' - primary_resource_id: 'analysis_rule_profile' - test_env_vars: - project_number: 'PROJECT_NUMBER' -parameters: - - name: location - type: String - description: Location of the resource. - immutable: true - url_param_only: true - required: true -properties: - - name: name - type: String - description: |- - The resource name of the analysis rule. Randomly generated by Insights. - output: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: createTime - type: String - description: Output only. The time at which this analysis rule was created. - output: true - - name: updateTime - type: String - description: Output only. The most recent time at which this analysis rule was updated. 
- output: true - - name: displayName - type: String - description: Display Name of the analysis rule. - - name: conversationFilter - type: String - description: |- - Filter for the conversations that should apply this analysis - rule. An empty filter means this analysis rule applies to all - conversations. - Refer to https://cloud.google.com/contact-center/insights/docs/filtering - for details. - - name: annotatorSelector - type: NestedObject - description: Selector of all available annotators and phrase matchers to run. - properties: - - name: runInterruptionAnnotator - type: Boolean - description: Whether to run the interruption annotator. - - name: phraseMatchers - type: Array - description: |- - The list of phrase matchers to run. If not provided, all active phrase - matchers will be used. If inactive phrase matchers are provided, they will - not be used. Phrase matchers will be run only if - run_phrase_matcher_annotator is set to true. Format: - projects/{project}/locations/{location}/phraseMatchers/{phrase_matcher} - item_type: - type: String - - name: runEntityAnnotator - type: Boolean - description: Whether to run the entity annotator. - - name: issueModels - type: Array - description: |- - The issue model to run. If not provided, the most recently deployed topic - model will be used. The provided issue model will only be used for - inference if the issue model is deployed and if run_issue_model_annotator - is set to true. If more than one issue model is provided, only the first - provided issue model will be used for inference. - item_type: - type: String - - name: runQaAnnotator - type: Boolean - description: Whether to run the QA annotator. - - name: runSilenceAnnotator - type: Boolean - description: Whether to run the silence annotator. - - name: runPhraseMatcherAnnotator - type: Boolean - description: Whether to run the active phrase matcher annotator(s). 
- - name: runSentimentAnnotator - type: Boolean - description: Whether to run the sentiment annotator. - - name: runIntentAnnotator - type: Boolean - description: Whether to run the intent annotator. - - name: runIssueModelAnnotator - type: Boolean - description: |- - Whether to run the issue model annotator. A model should have already been - deployed for this to take effect. - - name: runSummarizationAnnotator - type: Boolean - description: Whether to run the summarization annotator. - - name: summarizationConfig - type: NestedObject - description: Configuration for summarization. - properties: - - name: conversationProfile - type: String - description: |- - Resource name of the Dialogflow conversation profile. - Format: - projects/{project}/locations/{location}/conversationProfiles/{conversation_profile} - - name: summarizationModel - type: Enum - description: |- - Default summarization model to be used. - Possible values: - SUMMARIZATION_MODEL_UNSPECIFIED - BASELINE_MODEL - BASELINE_MODEL_V2_0 - enum_values: - - 'BASELINE_MODEL' - - 'BASELINE_MODEL_V2_0' - - name: qaConfig - type: NestedObject - description: Configuration for the QA feature. - properties: - - name: scorecardList - type: NestedObject - description: Container for a list of scorecards. - properties: - - name: qaScorecardRevisions - type: Array - description: List of QaScorecardRevisions. - item_type: - type: String - - name: analysisPercentage - type: Double - description: |- - Percentage of conversations that we should apply this analysis setting - automatically, between [0, 1]. For example, 0.1 means 10%. Conversations - are sampled in a determenestic way. The original runtime_percentage & - upload percentage will be replaced by defining filters on the conversation. - - name: active - type: Boolean - description: |- - If true, apply this rule to conversations. Otherwise, this rule is - inactive and saved as a draft. 
diff --git a/mmv1/products/contactcenterinsights/View.yaml b/mmv1/products/contactcenterinsights/View.yaml deleted file mode 100644 index c5e9fc90aa4a..000000000000 --- a/mmv1/products/contactcenterinsights/View.yaml +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: View -description: | - Insights View resource for filtering conversations -references: - api: 'https://cloud.google.com/contact-center/insights/docs/reference/rest/v1/projects.locations.views' -base_url: projects/{{project}}/locations/{{location}}/views -self_link: projects/{{project}}/locations/{{location}}/views/{{name}} -create_url: projects/{{project}}/locations/{{location}}/views -delete_url: projects/{{project}}/locations/{{location}}/views/{{name}} -update_verb: PATCH -update_mask: true -id_format: projects/{{project}}/locations/{{location}}/views/{{name}} -import_format: - - projects/{{project}}/locations/{{location}}/views/{{name}} -autogen_status: Vmlldw== -examples: - - name: 'contact_center_insights_view_basic' - primary_resource_id: 'basic_view' - - name: 'contact_center_insights_view_full' - primary_resource_id: 'full_view' -parameters: - - name: location - type: String - description: Location of the resource. - immutable: true - url_param_only: true - required: true -properties: - - name: name - type: String - description: |- - The resource name of the view. Randomly generated by Insights. 
- output: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: displayName - type: String - description: The human-readable display name of the view. - - name: createTime - type: String - description: Output only. The time at which this view was created. - output: true - - name: updateTime - type: String - description: Output only. The most recent time at which the view was updated. - output: true - - name: value - type: String - description: |- - A filter to reduce conversation results to a specific subset. - Refer to https://cloud.google.com/contact-center/insights/docs/filtering - for details. diff --git a/mmv1/products/contactcenterinsights/product.yaml b/mmv1/products/contactcenterinsights/product.yaml deleted file mode 100644 index 4c4c79a150b0..000000000000 --- a/mmv1/products/contactcenterinsights/product.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: 'ContactCenterInsights' -display_name: 'Contact Center AI Insights' -scopes: - - 'https://www.googleapis.com/auth/cloud-platform' -versions: - - base_url: 'https://contactcenterinsights.googleapis.com/v1/' - name: 'ga' diff --git a/mmv1/products/databasemigrationservice/PrivateConnection.yaml b/mmv1/products/databasemigrationservice/PrivateConnection.yaml index 7029eb22b30f..8b2187688ac6 100644 --- a/mmv1/products/databasemigrationservice/PrivateConnection.yaml +++ b/mmv1/products/databasemigrationservice/PrivateConnection.yaml @@ -23,7 +23,7 @@ docs: id_format: 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' base_url: 'projects/{{project}}/locations/{{location}}/privateConnections' self_link: 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/privateConnections?privateConnectionId={{private_connection_id}}&skip_validation={{create_without_validation}}' +create_url: 'projects/{{project}}/locations/{{location}}/privateConnections?privateConnectionId={{private_connection_id}}' immutable: true import_format: - 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' @@ -46,7 +46,6 @@ examples: vars: private_connection_id: 'my-connection' network_name: 'my-network' - create_without_validation: 'false' parameters: - name: 'privateConnectionId' type: String @@ -55,14 +54,6 @@ parameters: url_param_only: true required: true immutable: true - - name: 'create_without_validation' - type: Boolean - description: |- - If set to true, will skip validations. 
- url_param_only: true - required: false - immutable: true - default_value: false - name: 'location' type: String description: | diff --git a/mmv1/products/datafusion/Instance.yaml b/mmv1/products/datafusion/Instance.yaml index 2809e987d1ed..665c7e1f7a31 100644 --- a/mmv1/products/datafusion/Instance.yaml +++ b/mmv1/products/datafusion/Instance.yaml @@ -13,7 +13,6 @@ --- name: 'Instance' -# plugin_framework: true description: | Represents a Data Fusion instance. references: diff --git a/mmv1/products/dataplex/Datascan.yaml b/mmv1/products/dataplex/Datascan.yaml index 0875890a9c99..d595e1fa2e5a 100644 --- a/mmv1/products/dataplex/Datascan.yaml +++ b/mmv1/products/dataplex/Datascan.yaml @@ -84,28 +84,6 @@ examples: datascan_name: 'dataquality-full' test_env_vars: project_name: 'PROJECT_NAME' - exclude_test: true - - name: 'dataplex_datascan_full_quality_test' - primary_resource_id: 'full_quality_test' - vars: - datascan_name: 'dataquality-full-test' - test_env_vars: - project_name: 'PROJECT_NAME' - exclude_docs: true - - name: 'dataplex_datascan_basic_discovery' - primary_resource_id: 'basic_discovery' - vars: - datascan_name: 'datadiscovery-basic' - test_env_vars: - project_name: 'PROJECT_NAME' - location: 'REGION' - - name: 'dataplex_datascan_full_discovery' - primary_resource_id: 'full_discovery' - vars: - datascan_name: 'datadiscovery-full' - test_env_vars: - project_name: 'PROJECT_NAME' - location: 'REGION' parameters: - name: 'location' type: String @@ -184,7 +162,7 @@ properties: type: String description: | The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: - Cloud Storage bucket (//storage.googleapis.com/projects/PROJECT_ID/buckets/BUCKET_ID) for DataDiscoveryScan OR BigQuery table of type "TABLE" (/bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID) for DataProfileScan/DataQualityScan. 
+ (Cloud Storage bucket for DataDiscoveryScan)BigQuery table of type "TABLE" for DataProfileScan/DataQualityScan. immutable: true exactly_one_of: - 'data.0.entity' @@ -256,7 +234,6 @@ properties: - 'DATA_SCAN_TYPE_UNSPECIFIED' - 'DATA_QUALITY' - 'DATA_PROFILE' - - 'DATA_DISCOVERY' - name: 'dataQualitySpec' type: NestedObject description: | @@ -264,7 +241,6 @@ properties: exactly_one_of: - 'data_quality_spec' - 'data_profile_spec' - - 'data_discovery_spec' properties: - name: 'samplingPercent' type: Double @@ -351,7 +327,7 @@ properties: - name: 'dimension' type: String description: | - The dimension name a rule belongs to. Custom dimension name is supported with all uppercase letters and maximum length of 30 characters. + The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"] required: true - name: 'threshold' type: Double @@ -365,11 +341,6 @@ properties: The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter. - - name: 'suspended' - type: Boolean - description: | - Whether the Rule is active or suspended. Default = false. - default_value: false - name: 'description' type: String description: | @@ -506,10 +477,6 @@ properties: The SQL statement. required: true min_size: 1 - - name: 'catalogPublishingEnabled' - type: Boolean - description: | - If set, the latest DataScan job result will be published to Dataplex Catalog. - name: 'dataProfileSpec' type: NestedObject description: | @@ -519,7 +486,6 @@ properties: exactly_one_of: - 'data_quality_spec' - 'data_profile_spec' - - 'data_discovery_spec' properties: - name: 'samplingPercent' type: Double @@ -574,94 +540,3 @@ properties: For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'. 
item_type: type: String - - name: 'dataDiscoverySpec' - type: NestedObject - description: | - DataDiscoveryScan related setting. - send_empty_value: true - allow_empty_object: true - exactly_one_of: - - 'data_quality_spec' - - 'data_profile_spec' - - 'data_discovery_spec' - properties: - - name: 'bigqueryPublishingConfig' - type: NestedObject - description: | - Configuration for metadata publishing. - properties: - - name: tableType - type: Enum - description: | - Determines whether to publish discovered tables as BigLake external tables or non-BigLake external tables. - enum_values: - - TABLE_TYPE_UNSPECIFIED - - EXTERNAL - - BIGLAKE - - name: connection - type: String - description: | - The BigQuery connection used to create BigLake tables. Must be in the form `projects/{projectId}/locations/{locationId}/connections/{connection_id}`. - - name: location - type: String - description: | - The location of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. - - name: project - type: String - description: | - The project of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. If not specified, the project of the Cloud Storage bucket will be used. The format is "projects/{project_id_or_number}". - - name: 'storageConfig' - type: NestedObject - description: | - Configurations related to Cloud Storage as the data source. - properties: - - name: includePatterns - type: Array - description: | - Defines the data to include during discovery when only a subset of the data should be considered. Provide a list of patterns that identify the data to include. For Cloud Storage bucket assets, these patterns are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these patterns are interpreted as patterns to match table names. - item_type: - type: String - - name: excludePatterns - type: Array - description: | - Defines the data to exclude during discovery. 
Provide a list of patterns that identify the data to exclude. For Cloud Storage bucket assets, these patterns are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these patterns are interpreted as patterns to match table names. - item_type: - type: String - - name: csvOptions - type: NestedObject - description: | - Configuration for CSV data. - properties: - - name: headerRows - type: Integer - description: | - The number of rows to interpret as header rows that should be skipped when reading data rows. - - name: delimiter - type: String - description: | - The delimiter that is used to separate values. The default is `,` (comma). - - name: encoding - type: String - description: | - The character encoding of the data. The default is UTF-8. - - name: typeInferenceDisabled - type: Boolean - description: | - Whether to disable the inference of data types for CSV data. If true, all columns are registered as strings. - - name: quote - type: String - description: | - The character used to quote column values. Accepts `"` (double quotation mark) or `'` (single quotation mark). If unspecified, defaults to `"` (double quotation mark). - - name: jsonOptions - type: NestedObject - description: | - Configuration for JSON data. - properties: - - name: encoding - type: String - description: | - The character encoding of the data. The default is UTF-8. - - name: typeInferenceDisabled - type: Boolean - description: | - Whether to disable the inference of data types for JSON data. If true, all columns are registered as their primitive types (strings, number, or boolean). diff --git a/mmv1/products/dataplex/Entry.yaml b/mmv1/products/dataplex/Entry.yaml deleted file mode 100644 index f7684c993b7a..000000000000 --- a/mmv1/products/dataplex/Entry.yaml +++ /dev/null @@ -1,276 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Entry' -description: | - An entry represents a data asset for which you capture metadata, such as a BigQuery table. - The primary constituents of an entry are aspects, which provide thematically coherent information. - Examples include a table's schema, sensitive data protection profile, data quality information, or a simple tag. - - **Important Considerations:** - - * There is a limit of 99 aspects per entry. - * The entry resource has to use project numbers and not project IDs. Therefore, if - a dependency was already provisioned using project ID, it needs to be referenced explicitly as a resource name - containing the project number. 
- -references: - guides: - 'Manage entries and ingest custom sources': 'https://cloud.google.com/dataplex/docs/ingest-custom-sources' - api: 'https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.entryGroups.entries' - -base_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' -self_link: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries?entryId={{entry_id}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/locations/{{location}}/entryGroups/{{entry_group_id}}/entries/{{entry_id}}' - -custom_code: - constants: templates/terraform/constants/dataplex_entry.go.tmpl - decoder: templates/terraform/decoders/dataplex_entry.go.tmpl - encoder: templates/terraform/encoders/dataplex_entry.go.tmpl - custom_import: templates/terraform/custom_import/dataplex_entry.go.tmpl - pre_read: templates/terraform/pre_read/dataplex_entry.go.tmpl - pre_update: templates/terraform/pre_update/dataplex_entry.go.tmpl - -timeouts: - insert_minutes: 5 - update_minutes: 5 - delete_minutes: 5 - -examples: - - name: 'dataplex_entry_basic' - primary_resource_id: 'test_basic' - primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' - vars: - entry_id: 'entry-basic' - entry_group_name: 'entry-group-basic' - aspect_type_name: "aspect-type-basic" - entry_type_name: "entry-type-basic" - test_env_vars: - project_number: 'PROJECT_NUMBER' - - name: 'dataplex_entry_full' - primary_resource_id: 'test_entry_full' - primary_resource_name: 'fmt.Sprintf("tf-test-entry%s", context["random_suffix"])' - ignore_read_extra: - - 'aspects' - vars: - entry_id: 'entry-full/has/slashes' - entry_group_name: 'entry-group-full' - aspect_type_name: "aspect-type-full" - entry_type_name: "entry-type-full" - test_env_vars: - project_number: 'PROJECT_NUMBER' 
- -parameters: - - name: 'location' - type: String - url_param_only: true - immutable: true - description: | - The location where entry will be created. - - - name: 'entryGroupId' - type: String - url_param_only: true - immutable: true - description: | - The entry group id of the entry group the entry will be created in. - - - name: 'entryId' - type: String - url_param_only: true - immutable: true - description: | - The entry id of the entry. - -properties: - - name: 'name' - type: String - output: true - immutable: true - description: | - The relative resource name of the entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. - - - name: 'entryType' - type: String - required: true - immutable: true - validation: - function: ProjectNumberValidation - description: | - The relative resource name of the entry type that was used to create this entry, in the format projects/{project_number}/locations/{locationId}/entryTypes/{entryTypeId}. - - - name: 'createTime' - type: Time - output: true - description: | - The time when the Entry was created in Dataplex. - - - name: 'updateTime' - type: Time - output: true - description: | - The time when the entry was last updated in Dataplex. - - - name: 'aspects' - type: Array - custom_flatten: 'templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl' - description: | - The aspects that are attached to the entry. 
- - item_type: - type: NestedObject - properties: - - name: 'aspectKey' - type: String - required: true - validation: - function: AspectProjectNumberValidation - description: | - Depending on how the aspect is attached to the entry, the format of the aspect key can be one of the following: - - If the aspect is attached directly to the entry: {project_number}.{locationId}.{aspectTypeId} - If the aspect is attached to an entry's path: {project_number}.{locationId}.{aspectTypeId}@{path} - - - name: 'aspect' - type: NestedObject - required: true - properties: - - name: 'aspectType' - type: String - output: true - description: | - The resource name of the type used to create this Aspect. - - - name: 'path' - type: String - output: true - description: | - The path in the entry under which the aspect is attached. - - - name: 'createTime' - type: Time - output: true - description: | - The time when the Aspect was created. - - - name: 'updateTime' - type: Time - output: true - description: | - The time when the Aspect was last modified. - - - name: 'data' - type: String - required: true - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - description: | - The content of the aspect in JSON form, according to its aspect type schema. The maximum size of the field is 120KB (encoded as UTF-8). - - - name: 'parentEntry' - type: String - immutable: true - description: | - The resource name of the parent entry, in the format projects/{project_number}/locations/{locationId}/entryGroups/{entryGroupId}/entries/{entryId}. - - - name: 'fullyQualifiedName' - type: String - description: | - A name for the entry that can be referenced by an external system. For more information, see https://cloud.google.com/dataplex/docs/fully-qualified-names. 
- The maximum size of the field is 4000 characters. - - - name: 'entrySource' - type: NestedObject - default_from_api: true - properties: - - name: 'resource' - type: String - description: | - The name of the resource in the source system. Maximum length is 4,000 characters. - - - name: 'system' - type: String - description: | - The name of the source system. Maximum length is 64 characters. - - - name: 'platform' - type: String - description: | - The platform containing the source system. Maximum length is 64 characters. - - - name: 'displayName' - type: String - description: | - A user-friendly display name. Maximum length is 500 characters. - - - name: 'description' - type: String - description: | - A description of the data resource. Maximum length is 2,000 characters. - - - name: 'labels' - type: KeyValuePairs - description: | - User-defined labels. The maximum size of keys and values is 128 characters each. - An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. - - - name: 'ancestors' - type: Array - immutable: true - item_type: - type: NestedObject - properties: - - name: 'name' - type: String - description: | - The name of the ancestor resource. - - - name: 'type' - type: String - description: | - The type of the ancestor resource. - - description: | - The entries representing the ancestors of the data resource in the source system. - - - name: 'createTime' - type: Time - validation: - function: 'validation.IsRFC3339Time' - description: | - The time when the resource was created in the source system. - - - name: 'updateTime' - type: Time - validation: - function: 'validation.IsRFC3339Time' - description: | - The time when the resource was last updated in the source system. - If the entry exists in the system and its EntrySource has updateTime populated, - further updates to the EntrySource of the entry must provide incremental updates to its updateTime. 
- - - name: 'location' - type: String - output: true - description: |- - Location of the resource in the source system. You can search the entry by this location. - By default, this should match the location of the entry group containing this entry. - A different value allows capturing the source location for data external to Google Cloud. diff --git a/mmv1/products/dataplex/Glossary.yaml b/mmv1/products/dataplex/Glossary.yaml deleted file mode 100644 index ab539228dc4c..000000000000 --- a/mmv1/products/dataplex/Glossary.yaml +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: 'Glossary' -description: | - Represents a collection of categories and terms defined by the user. Glossary is a top level resource and is the GCP parent resource of all the categories and terms within it. 
-base_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}' -self_link: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/glossaries?glossary_id={{glossary_id}}' -update_verb: 'PATCH' -update_mask: true -timeouts: - insert_minutes: 15 - update_minutes: 15 - delete_minutes: 15 -autogen_async: true -import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}'] -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' -iam_policy: - method_name_separator: ':' - parent_resource_attribute: 'glossary_id' - import_format: - - 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}' - - '{{glossary_id}}' -examples: - - name: 'dataplex_glossary_basic' - primary_resource_id: 'glossary_test_id' - primary_resource_name: 'fmt.Sprintf("tf-test-glossary-basic%s", context["random_suffix"])' - vars: - glossary_name: 'glossary-basic' - - name: 'dataplex_glossary_full' - primary_resource_id: 'glossary_test_id_full' - primary_resource_name: 'fmt.Sprintf("tf-test-glossary-full%s", context["random_suffix"])' - vars: - glossary_name: 'glossary-full' -parameters: - - name: 'location' - type: String - description: | - The location where the glossary should reside. - url_param_only: true - required: true - immutable: true - - name: 'glossaryId' - type: String - description: | - The glossary id for creation. - url_param_only: true - required: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The resource name of the Glossary. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId} - output: true - - name: 'displayName' - type: String - description: | - User friendly display name of the glossary. This is user-mutable. This will be same as the glossaryId, if not specified. 
- required: false - - name: 'description' - type: String - description: | - The user-mutable description of the glossary. - required: false - - name: 'labels' - type: KeyValueLabels - description: | - User-defined labels for the Glossary. - required: false - - name: 'uid' - type: String - description: | - System generated unique id for the Glossary. This ID will be different if the Glossary is deleted and re-created with the same name. - output: true - - name: 'createTime' - type: Timestamp - description: | - The time at which the glossary was created. - output: true - - name: 'updateTime' - type: Timestamp - description: | - The time at which the glossary was last updated. - output: true - - name: 'termCount' - type: Integer - description: | - The number of terms in the glossary. - required: false - output: true - - name: 'categoryCount' - type: Integer - description: | - The number of categories in the glossary. - required: false - output: true diff --git a/mmv1/products/dataplex/GlossaryCategory.yaml b/mmv1/products/dataplex/GlossaryCategory.yaml deleted file mode 100644 index 9a8b13dbcbf1..000000000000 --- a/mmv1/products/dataplex/GlossaryCategory.yaml +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: 'GlossaryCategory' -description: | - Represents a collection of categories and terms within a Glossary that are related to each other. 
-base_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories/{{category_id}}' -self_link: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories/{{category_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories?category_id={{category_id}}' -update_verb: 'PATCH' -update_mask: true -timeouts: - insert_minutes: 15 - update_minutes: 15 - delete_minutes: 15 -autogen_async: true -import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/categories/{{category_id}}'] -examples: - - name: 'dataplex_glossary_category_basic' - primary_resource_id: 'category_test_id' - primary_resource_name: 'fmt.Sprintf("tf-test-category-basic%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' - vars: - category_name: 'category-basic' - - name: 'dataplex_glossary_category_full' - primary_resource_id: 'category_test_id_full' - primary_resource_name: 'fmt.Sprintf("tf-test-category-full%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' - vars: - category_name: 'category-full' -parameters: - - name: 'location' - type: String - description: | - The location where the glossary category should reside. - url_param_only: true - required: true - immutable: true - - name: 'glossaryId' - type: String - description: | - The glossary id for creation. - url_param_only: true - immutable: true - - name: 'categoryId' - type: String - description: | - The category id for creation. - url_param_only: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The resource name of the GlossaryCategory. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/categories/{categoryId} - output: true - - name: 'displayName' - type: String - description: | - User friendly display name of the GlossaryCategory. This is user-mutable. 
This will be same as the categoryId, if not specified. - required: false - - name: 'description' - type: String - description: | - The user-mutable description of the GlossaryCategory. - required: false - - name: 'labels' - type: KeyValueLabels - description: | - User-defined labels for the GlossaryCategory. - required: false - - name: 'uid' - type: String - description: | - System generated unique id for the GlossaryCategory. This ID will be different if the GlossaryCategory is deleted and re-created with the same name. - output: true - - name: 'createTime' - type: Timestamp - description: | - The time at which the GlossaryCategory was created. - output: true - - name: 'updateTime' - type: Timestamp - description: | - The time at which the GlossaryCategory was last updated. - output: true - - name: 'parent' - type: String - description: | - The immediate parent of the GlossaryCategory in the resource-hierarchy. It can either be a Glossary or a Category. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId} OR projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/categories/{categoryId} - required: true diff --git a/mmv1/products/dataplex/GlossaryTerm.yaml b/mmv1/products/dataplex/GlossaryTerm.yaml deleted file mode 100644 index e133549ed345..000000000000 --- a/mmv1/products/dataplex/GlossaryTerm.yaml +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: 'GlossaryTerm' -description: | - Represents a collection of terms within a Glossary that are related to each other. -base_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms/{{term_id}}' -self_link: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms/{{term_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms?term_id={{term_id}}' -update_verb: 'PATCH' -update_mask: true -timeouts: - insert_minutes: 15 - update_minutes: 15 - delete_minutes: 15 -autogen_async: true -import_format: ['projects/{{project}}/locations/{{location}}/glossaries/{{glossary_id}}/terms/{{term_id}}'] -examples: - - name: 'dataplex_glossary_term_basic' - primary_resource_id: 'term_test_id' - primary_resource_name: 'fmt.Sprintf("tf-test-term-basic%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' - vars: - term_name: 'term-basic' - - name: 'dataplex_glossary_term_full' - primary_resource_id: 'term_test_id_full' - primary_resource_name: 'fmt.Sprintf("tf-test-term-full%s", context["random_suffix"]), fmt.Sprintf("tf-test-glossary%s", context["random_suffix"])' - vars: - term_name: 'term-full' -parameters: - - name: 'location' - type: String - description: | - The location where the glossary term should reside. - url_param_only: true - required: true - immutable: true - - name: 'glossaryId' - type: String - description: | - The glossary id for creation. - url_param_only: true - immutable: true - - name: 'termId' - type: String - description: | - The term id for creation. - url_param_only: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The resource name of the GlossaryTerm. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/categories/{termId} - output: true - - name: 'displayName' - type: String - description: | - User friendly display name of the GlossaryTerm. This is user-mutable. 
This will be same as the termId, if not specified. - required: false - - name: 'description' - type: String - description: | - The user-mutable description of the GlossaryTerm. - required: false - - name: 'labels' - type: KeyValueLabels - description: | - User-defined labels for the GlossaryTerm. - required: false - - name: 'uid' - type: String - description: | - System generated unique id for the GlossaryTerm. This ID will be different if the GlossaryTerm is deleted and re-created with the same name. - output: true - - name: 'createTime' - type: Timestamp - description: | - The time at which the GlossaryTerm was created. - output: true - - name: 'updateTime' - type: Timestamp - description: | - The time at which the GlossaryTerm was last updated. - output: true - - name: 'parent' - type: String - description: | - The immediate parent of the GlossaryTerm in the resource-hierarchy. It can either be a Glossary or a Term. Format: projects/{projectId}/locations/{locationId}/glossaries/{glossaryId} OR projects/{projectId}/locations/{locationId}/glossaries/{glossaryId}/terms/{termId} - required: true diff --git a/mmv1/products/dataproc/Batch.yaml b/mmv1/products/dataproc/Batch.yaml index c2402f6f7b1d..4865221c64a8 100644 --- a/mmv1/products/dataproc/Batch.yaml +++ b/mmv1/products/dataproc/Batch.yaml @@ -377,18 +377,6 @@ properties: Subnetwork configuration for workload execution. conflicts: - environment_config.0.execution_config.0.network_uri - - name: 'authenticationConfig' - type: NestedObject - description: | - Authentication configuration for a workload is used to set the default identity for the workload execution. - properties: - - name: userWorkloadAuthenticationType - type: Enum - description: | - Authentication type for the user workload running in containers. 
- enum_values: - - SERVICE_ACCOUNT - - END_USER_CREDENTIALS - name: 'peripheralsConfig' type: NestedObject description: | diff --git a/mmv1/products/dataproc/SessionTemplate.yaml b/mmv1/products/dataproc/SessionTemplate.yaml deleted file mode 100644 index 06f77418bb16..000000000000 --- a/mmv1/products/dataproc/SessionTemplate.yaml +++ /dev/null @@ -1,256 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'SessionTemplate' -api_variant_patterns: - - 'projects/{project}/locations/{location}/sessionTemplates/{sessionTemplate}' -description: | - A Dataproc Serverless session template defines the configuration settings for - creating one or more Dataproc Serverless interactive sessions. 
-references: - guides: - 'Dataproc Serverless Session Templates': 'https://cloud.google.com/dataproc-serverless/docs/guides/create-serverless-sessions-templates#create-dataproc-serverless-session-template' - api: 'https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.sessionTemplates' -docs: -id_format: '{{name}}' -base_url: 'projects/{{project}}/locations/{{location}}/sessionTemplates' -self_link: '{{name}}' -update_verb: 'PATCH' -autogen_async: true -import_format: - - '{{name}}' -custom_code: - custom_import: 'templates/terraform/custom_import/set_id_name_with_slashes.go.tmpl' -collection_url_key: 'sessionTemplates' -examples: - - name: 'dataproc_session_templates_jupyter' - primary_resource_id: 'example_session_templates_jupyter' - primary_resource_name: 'fmt.Sprintf("projects/%s/locations/%s/sessionTemplates/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-session-templates-jupyter%s", context["random_suffix"]))' - vars: - name: 'jupyter-session-template' - subnetwork_name: 'default' - prevent_destroy: 'true' - test_env_vars: - project_name: 'PROJECT_NAME' - test_vars_overrides: - 'subnetwork_name': 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "jupyer-session-test-network", "jupyter-session-test-subnetwork")' - 'prevent_destroy': 'false' - ignore_read_extra: - - 'runtime_config.0.properties' - - name: 'dataproc_session_templates_jupyter_full' - primary_resource_id: 'dataproc_session_templates_jupyter_full' - primary_resource_name: 'fmt.Sprintf("projects/%s/locations/%s/sessionTemplates/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-session-templates-jupyter%s", context["random_suffix"]))' - vars: - name: 'jupyter-session-template' - subnetwork_name: 'default' - prevent_destroy: 'true' - kms_key_name: 'example-key' - bucket_name: 'dataproc-bucket' - test_env_vars: - project_name: 'PROJECT_NAME' - test_vars_overrides: - 
'subnetwork_name': 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "jupyer-session-test-network", "jupyter-session-test-subnetwork")' - 'prevent_destroy': 'false' - 'kms_key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-dataproc-session-template-key1").CryptoKey.Name' - ignore_read_extra: - - 'runtime_config.0.properties' - - name: 'dataproc_session_templates_spark_connect' - primary_resource_id: 'example_session_templates_spark_connect' - primary_resource_name: 'fmt.Sprintf("projects/%s/locations/%s/sessionTemplates/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-session-templates-jupyter%s", context["random_suffix"]))' - vars: - name: 'sc-session-template' - subnetwork_name: 'default' - prevent_destroy: 'true' - test_env_vars: - project_name: 'PROJECT_NAME' - test_vars_overrides: - 'subnetwork_name': 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "spark-connect-session-test-network", "spark-connect-session-test-subnetwork")' - 'prevent_destroy': 'false' - ignore_read_extra: - - 'runtime_config.0.properties' -parameters: - - name: 'location' - type: String - description: | - The location in which the session template will be created in. - url_param_only: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The resource name of the session template in the following format: - projects/{project}/locations/{location}/sessionTemplates/{template_id} - required: true - immutable: true - - name: 'uuid' - type: String - description: | - A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template. - output: true - - name: 'createTime' - type: String - description: | - The time when the session template was created. - output: true - - name: 'updateTime' - type: String - description: | - The time when the session template was updated. 
- output: true - - name: 'creator' - type: String - description: | - The email address of the user who created the session template. - output: true - - name: 'labels' - type: KeyValueLabels - description: | - The labels to associate with this session template. - - name: 'runtimeConfig' - type: NestedObject - description: | - Runtime configuration for the session template. - properties: - - name: 'version' - type: String - description: | - Version of the session runtime. - - name: 'containerImage' - type: String - description: | - Optional custom container image for the job runtime environment. If not specified, a default container image will be used. - - name: 'properties' - type: KeyValuePairs - description: | - A mapping of property names to values, which are used to configure workload execution. - - name: 'effective_properties' - type: KeyValuePairs - description: | - A mapping of property names to values, which are used to configure workload execution. - output: true - - name: 'environmentConfig' - type: NestedObject - description: | - Environment configuration for the session execution. - properties: - - name: 'executionConfig' - type: NestedObject - description: | - Execution configuration for a workload. - properties: - - name: 'serviceAccount' - type: String - description: | - Service account that used to execute workload. - default_from_api: true - - name: 'networkTags' - type: Array - description: | - Tags used for network traffic control. - item_type: - type: String - - name: 'kmsKey' - type: String - description: | - The Cloud KMS key to use for encryption. - - name: 'idleTtl' - type: String - description: | - The duration to keep the session alive while it's idling. - Exceeding this threshold causes the session to terminate. Minimum value is 10 minutes; maximum value is 14 day. - Defaults to 1 hour if not set. 
If both ttl and idleTtl are specified for an interactive session, the conditions - are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has - been exceeded, whichever occurs first. - - name: 'ttl' - type: String - description: | - The duration after which the workload will be terminated. - When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing - work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it - exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, - it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. - Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), - the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or - when ttl has been exceeded, whichever occurs first. - default_from_api: true - - name: 'stagingBucket' - type: String - description: | - A Cloud Storage bucket used to stage workload dependencies, config files, and store - workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, - Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, - and then create and manage project-level, per-location staging and temporary buckets. - This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket. - - name: 'subnetworkUri' - type: String - description: | - Subnetwork configuration for workload execution. - - name: 'authenticationConfig' - type: NestedObject - description: | - Authentication configuration for a workload is used to set the default identity for the workload execution. 
- properties: - - name: userWorkloadAuthenticationType - type: Enum - description: | - Authentication type for the user workload running in containers. - enum_values: - - SERVICE_ACCOUNT - - END_USER_CREDENTIALS - - name: 'peripheralsConfig' - type: NestedObject - description: | - Peripherals configuration that workload has access to. - default_from_api: true - allow_empty_object: true - properties: - - name: 'metastoreService' - type: String - description: | - Resource name of an existing Dataproc Metastore service. - - name: 'sparkHistoryServerConfig' - type: NestedObject - description: | - The Spark History Server configuration for the workload. - properties: - - name: 'dataprocCluster' - type: String - description: | - Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload. - - name: 'jupyterSession' - type: NestedObject - description: | - Jupyter configuration for an interactive session. - properties: - - name: 'kernel' - type: Enum - description: | - Kernel to be used with Jupyter interactive session. - enum_values: - - 'PYTHON' - - 'SCALA' - - name: 'displayName' - type: String - description: | - Display name, shown in the Jupyter kernelspec card. - - name: 'sparkConnectSession' - type: NestedObject - description: | - Spark connect configuration for an interactive session. - diff_suppress_func: 'tpgresource.EmptyOrUnsetBlockDiffSuppress' - allow_empty_object: true - send_empty_value: true - properties: - [] # Meant to be an empty object with no properties. 
diff --git a/mmv1/products/dataprocgdc/ServiceInstance.yaml b/mmv1/products/dataprocgdc/ServiceInstance.yaml index dd4156091f63..8643340b34ea 100644 --- a/mmv1/products/dataprocgdc/ServiceInstance.yaml +++ b/mmv1/products/dataprocgdc/ServiceInstance.yaml @@ -57,7 +57,6 @@ examples: project: "my-project" test_vars_overrides: 'project': '"gdce-cluster-monitoring"' - skip_test: https://github.com/hashicorp/terraform-provider-google/issues/21173 properties: - name: gdceCluster type: NestedObject diff --git a/mmv1/products/datastream/ConnectionProfile.yaml b/mmv1/products/datastream/ConnectionProfile.yaml index 99184fdd44d8..24e8cb23f246 100644 --- a/mmv1/products/datastream/ConnectionProfile.yaml +++ b/mmv1/products/datastream/ConnectionProfile.yaml @@ -23,7 +23,7 @@ docs: id_format: 'projects/{{project}}/locations/{{location}}/connectionProfiles/{{connection_profile_id}}' base_url: 'projects/{{project}}/locations/{{location}}/connectionProfiles' self_link: 'projects/{{project}}/locations/{{location}}/connectionProfiles/{{connection_profile_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/connectionProfiles?connectionProfileId={{connection_profile_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/connectionProfiles?connectionProfileId={{connection_profile_id}}&force={{create_without_validation}}' update_verb: 'PATCH' update_mask: true import_format: @@ -40,8 +40,6 @@ async: result: resource_inside_response: true custom_code: - constants: 'templates/terraform/constants/datastream_connection_profile.go.tmpl' - pre_create: 'templates/terraform/pre_create/datastream_connection_profile.go.tmpl' examples: - name: 'datastream_connection_profile_basic' primary_resource_id: 'default' @@ -124,7 +122,6 @@ parameters: required: false immutable: true default_value: false - diff_suppress_func: 'resourceDataStreamStreamCreateWithoutValidationDiffSuppress' - name: 'location' type: String description: | diff --git 
a/mmv1/products/datastream/PrivateConnection.yaml b/mmv1/products/datastream/PrivateConnection.yaml index 37aea53e9381..9c91ead1d608 100644 --- a/mmv1/products/datastream/PrivateConnection.yaml +++ b/mmv1/products/datastream/PrivateConnection.yaml @@ -28,9 +28,9 @@ immutable: true import_format: - 'projects/{{project}}/locations/{{location}}/privateConnections/{{private_connection_id}}' timeouts: - insert_minutes: 30 - update_minutes: 30 - delete_minutes: 30 + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 async: actions: ['create', 'delete', 'update'] type: 'OpAsync' @@ -51,18 +51,6 @@ examples: vars: private_connection_id: 'my-connection' network_name: 'my-network' - - name: 'datastream_private_connection_psc_interface' - primary_resource_id: 'default' - test_env_vars: - project: 'PROJECT_NAME' - project_number: 'PROJECT_NUMBER' - org_id: 'ORG_ID' - billing_account: 'BILLING_ACCT' - vars: - private_connection_id: 'my-connection' - network_attachment_name: 'my-network-attachment' - network_name: 'my-network' - subnetwork_name: 'my-subnetwork' parameters: - name: 'privateConnectionId' type: String @@ -128,9 +116,7 @@ properties: description: | The VPC Peering configuration is used to create VPC peering between Datastream and the consumer's VPC. - exactly_one_of: - - 'vpc_peering_config' - - 'psc_interface_config' + required: true properties: - name: 'vpc' type: String @@ -143,23 +129,3 @@ properties: description: | A free subnet for peering. (CIDR of /29) required: true - - name: 'pscInterfaceConfig' - type: NestedObject - description: | - The PSC Interface configuration is used to create PSC Interface - between Datastream and the consumer's PSC. - exactly_one_of: - - 'vpc_peering_config' - - 'psc_interface_config' - properties: - - name: 'networkAttachment' - type: String - description: | - Fully qualified name of the network attachment that Datastream will connect to. 
- Format: projects/{project}/regions/{region}/networkAttachments/{name} - - To get Datastream project for the accepted list: - `gcloud datastream private-connections create [PC ID] --location=[LOCATION] --network-attachment=[NA URI] --validate-only --display-name=[ANY STRING]` - Add Datastream project to the attachment accepted list: - `gcloud compute network-attachments update [NA URI] --region=[NA region] --producer-accept-list=[TP from prev command]` - required: true diff --git a/mmv1/products/datastream/Stream.yaml b/mmv1/products/datastream/Stream.yaml index fa4df7b8b3e0..10c6ae72349e 100644 --- a/mmv1/products/datastream/Stream.yaml +++ b/mmv1/products/datastream/Stream.yaml @@ -216,7 +216,6 @@ examples: - name: 'datastream_stream_salesforce' primary_resource_id: 'default' vars: - stream_id: 'sf-stream' source_connection_profile_id: 'source-profile' destination_connection_profile_id: 'destination-profile' exclude_test: true diff --git a/mmv1/products/developerconnect/InsightsConfig.yaml b/mmv1/products/developerconnect/InsightsConfig.yaml deleted file mode 100644 index acbff0377c39..000000000000 --- a/mmv1/products/developerconnect/InsightsConfig.yaml +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: InsightsConfig -description: Description -base_url: projects/{{project}}/locations/{{location}}/insightsConfigs -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/insightsConfigs/{{insights_config_id}} -create_url: projects/{{project}}/locations/{{location}}/insightsConfigs?insightsConfigId={{insights_config_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/{{location}}/insightsConfigs/{{insights_config_id}} -import_format: - - projects/{{project}}/locations/{{location}}/insightsConfigs/{{insights_config_id}} -examples: - - name: 'developer_connect_insights_config_basic' - external_providers: ["time"] - primary_resource_id: 'insights_config' - test_env_vars: - org_id: "ORG_ID" - billing_account: "BILLING_ACCT" - skip_vcr: true -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: '{{op_id}}' - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: SW5zaWdodHNDb25maWc= -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: insightsConfigId - type: String - description: ID of the requesting InsightsConfig. - immutable: true - url_param_only: true - required: true -properties: - - name: appHubApplication - type: String - description: |- - The name of the App Hub Application. - Format: - projects/{project}/locations/{location}/applications/{application} - required: true - - name: name - type: String - description: |- - Identifier. The name of the InsightsConfig. 
- Format: - projects/{project}/locations/{location}/insightsConfigs/{insightsConfig} - output: true - - name: updateTime - type: String - description: '[Output only] Update timestamp' - output: true - - name: artifactConfigs - type: Array - description: The artifact configurations of the artifacts that are deployed. - item_type: - type: NestedObject - properties: - - name: googleArtifactRegistry - type: NestedObject - description: Google Artifact Registry configurations. - properties: - - name: projectId - type: String - description: The host project of Artifact Registry. - required: true - - name: artifactRegistryPackage - type: String - description: The name of the artifact registry package. - immutable: true - required: true - - name: googleArtifactAnalysis - type: NestedObject - description: Google Artifact Analysis configurations. - properties: - - name: projectId - type: String - description: The project id of the project where the provenance is stored. - required: true - - name: uri - type: String - description: |- - The URI of the artifact that is deployed. - e.g. `us-docker.pkg.dev/my-project/my-repo/image`. - The URI does not include the tag / digest because it captures a lineage of - artifacts. - immutable: true - - name: annotations - type: KeyValueAnnotations - description: |- - User specified annotations. See https://google.aip.dev/148#annotations - for more details such as format and size limitations. - - name: labels - type: KeyValueLabels - description: Set of labels associated with an InsightsConfig. - ignore_read: true - - name: reconciling - type: Boolean - description: |- - Reconciling (https://google.aip.dev/128#reconciliation). - Set to true if the current state of InsightsConfig does not match the - user's intended state, and the service is actively updating the resource to - reconcile them. This can happen due to user-triggered updates or - system actions like failover or maintenance. 
- output: true - - name: errors - type: Array - description: |- - Any errors that occurred while setting up the InsightsConfig. - Each error will be in the format: `field_name: error_message`, e.g. - GetAppHubApplication: Permission denied while getting App Hub - application. Please grant permissions to the P4SA. - output: true - item_type: - type: NestedObject - properties: - - name: code - type: Integer - description: The status code, which should be an enum value of google.rpc.Code. - output: true - - name: message - type: String - description: |- - A developer-facing error message, which should be in English. Any - user-facing error message should be localized and sent in the - google.rpc.Status.details field, or localized by the client. - output: true - - name: details - type: Array - description: |- - A list of messages that carry the error details. There is a common set of - message types for APIs to use. - output: true - item_type: - type: NestedObject - properties: - - name: detail_message - type: String - description: |- - A message with details about the error. - output: true - - name: createTime - type: String - description: '[Output only] Create timestamp' - output: true - - name: runtimeConfigs - type: Array - description: The runtime configurations where the application is deployed. - output: true - item_type: - type: NestedObject - properties: - - name: uri - type: String - description: |- - The URI of the runtime configuration. - For GKE, this is the cluster name. - For Cloud Run, this is the service name. - immutable: true - required: true - - name: state - type: String - description: |- - The state of the Runtime. - Possible values: - STATE_UNSPECIFIED - LINKED - UNLINKED - output: true - - name: gkeWorkload - type: NestedObject - description: GKEWorkload represents the Google Kubernetes Engine runtime. - properties: - - name: cluster - type: String - description: |- - The name of the GKE cluster. 
- Format: - `projects/{project}/locations/{location}/clusters/{cluster}`. - immutable: true - required: true - - name: deployment - type: String - description: |- - The name of the GKE deployment. - Format: - `projects/{project}/locations/{location}/clusters/{cluster}/namespaces/{namespace}/deployments/{deployment}`. - output: true - - name: appHubWorkload - type: NestedObject - description: AppHubWorkload represents the App Hub Workload. - properties: - - name: criticality - type: String - description: The criticality of the App Hub Workload. - output: true - - name: environment - type: String - description: The environment of the App Hub Workload. - output: true - - name: workload - type: String - description: |- - Output only. The name of the App Hub Workload. - Format: - `projects/{project}/locations/{location}/applications/{application}/workloads/{workload}`. - output: true - immutable: false - required: false - - name: state - type: String - description: |- - The state of the InsightsConfig. - Possible values: - STATE_UNSPECIFIED - PENDING - COMPLETE - ERROR - output: true diff --git a/mmv1/products/dialogflow/ConversationProfile.yaml b/mmv1/products/dialogflow/ConversationProfile.yaml deleted file mode 100644 index 2ac0d248715e..000000000000 --- a/mmv1/products/dialogflow/ConversationProfile.yaml +++ /dev/null @@ -1,654 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: 'ConversationProfile' -description: | - A conversation profile configures a set of parameters that control the suggestions made to an agent. These parameters control the suggestions that are surfaced during runtime. Each profile configures either a Dialogflow virtual agent or a human agent for a conversation. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/dialogflow/docs/' - api: 'https://cloud.google.com/dialogflow/docs/reference/rest/v2beta1/projects/conversationProfiles' -docs: -id_format: '{{name}}' -base_url: 'projects/{{project}}/locations/{{location}}/conversationProfiles' -self_link: '{{name}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - '{{name}}' -timeouts: - insert_minutes: 40 - update_minutes: 40 - delete_minutes: 20 -custom_code: - post_create: 'templates/terraform/post_create/set_computed_name.tmpl' - custom_import: 'templates/terraform/custom_import/self_link_as_name_set_project.go.tmpl' -exclude_sweeper: true -examples: - - name: 'dialogflow_conversation_profile_basic' - primary_resource_id: 'basic_profile' - vars: - profile_name: 'dialogflow-profile' -parameters: - - name: 'location' - type: String - url_param_only: true - description: | - desc - required: true -properties: - - name: 'name' - type: String - description: | - name - output: true - - name: 'displayName' - type: String - description: | - Required. Human readable name for this profile. Max length 1024 bytes. - required: true - - type: NestedObject - name: 'automatedAgentConfig' - description: | - Configuration for an automated agent to use with this profile - properties: - - type: String - name: 'agent' - description: | - ID of the Dialogflow agent environment to use. - Expects the format "projects//locations//agent/environments/" - required: true - - type: String - name: 'sessionTtl' - description: | - Configure lifetime of the Dialogflow session. 
- - type: NestedObject - name: 'humanAgentAssistantConfig' - description: | - Configuration for connecting to a live agent - properties: - - type: NestedObject - name: 'notificationConfig' - description: | - Pub/Sub topic on which to publish new agent assistant events. - Expects the format "projects//locations//topics/" - properties: - - type: String - name: 'topic' - description: | - Name of the Pub/Sub topic to publish conversation events - - type: Enum - name: 'messageFormat' - description: | - Format of the message - enum_values: - - MESSAGE_FORMAT_UNSPECIFIED - - PROTO - - JSON - - type: NestedObject - name: 'humanAgentSuggestionConfig' - description: | - Configuration for agent assistance of human agent participant. - properties: - - type: Array - name: 'featureConfigs' - description: | - Configuration of different suggestion features. One feature can have only one config. - item_type: - type: NestedObject - properties: - - type: NestedObject - name: 'suggestionFeature' - description: | - The suggestion feature. - properties: - - type: String - name: 'type' - description: | - Type of Human Agent Assistant API feature to request. - - type: Boolean - name: 'enableEventBasedSuggestion' - description: | - Automatically iterates all participants and tries to compile suggestions. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, DIALOGFLOW_ASSIST, KNOWLEDGE_ASSIST. - - type: Boolean - name: 'disableAgentQueryLogging' - description: | - Disable the logging of search queries sent by human agents. It can prevent those queries from being stored at answer records. - This feature is only supported for types: KNOWLEDGE_SEARCH. - - type: Boolean - name: 'enableQuerySuggestionWhenNoAnswer' - description: | - Enable query suggestion even if we can't find its answer. By default, queries are suggested only if we find its answer. - This feature is only supported for types: KNOWLEDGE_ASSIST. 
- - type: Boolean - name: 'enableConversationAugmentedQuery' - description: | - Enable including conversation context during query answer generation. - This feature is only supported for types: KNOWLEDGE_SEARCH. - - type: Boolean - name: 'enableQuerySuggestionOnly' - description: | - Enable query suggestion only. - This feature is only supported for types: KNOWLEDGE_ASSIST - - type: NestedObject - name: 'suggestionTriggerSettings' - description: | - Settings of suggestion trigger. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ. - custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl' - properties: - - type: Boolean - name: 'noSmallTalk' - description: | - Do not trigger if last utterance is small talk. - - type: Boolean - name: 'onlyEndUser' - description: | - Only trigger suggestion if participant role of last utterance is END_USER. - - type: NestedObject - name: queryConfig - description: | - Configs of query. - properties: - - type: Integer - name: maxResults - default_value: 10 - description: | - Maximum number of results to return. - - type: Double - name: confidenceThreshold - description: | - Confidence threshold of query result. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, SMART_REPLY, SMART_COMPOSE, KNOWLEDGE_SEARCH, KNOWLEDGE_ASSIST, ENTITY_EXTRACTION. - - type: NestedObject - name: contextFilterSettings - description: | - Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped. - custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl' - properties: - - type: Boolean - name: 'dropHandoffMessages' - description: | - If set to true, the last message from virtual agent (hand off message) and the message before it (trigger message of hand off) are dropped. 
- - type: Boolean - name: 'dropVirtualAgentMessages' - description: | - If set to true, all messages from virtual agent are dropped. - - type: Boolean - name: 'dropIvrMessages' - description: | - If set to true, all messages from ivr stage are dropped. - - type: NestedObject - name: 'sections' - description: | - he customized sections chosen to return when requesting a summary of a conversation. - properties: - - type: Array - name: 'sectionTypes' - description: | - The selected sections chosen to return when requesting a summary of a conversation - If not provided the default selection will be "{SITUATION, ACTION, RESULT}". - item_type: - type: Enum - name: 'sectionType' - description: | - The selected sections chosen to return when requesting a summary of a conversation. A duplicate selected section will be treated as a single selected section. - enum_values: - - SECTION_TYPE_UNSPECIFIED - - SITUATION - - ACTION - - RESOLUTION - - REASON_FOR_CANCELLATION - - CUSTOMER_SATISFACTION - - ENTITIES - - type: NestedObject - name: 'dialogflowQuerySource' - description: | - Query from Dialogflow agent. - This feature is supported for types: DIALOGFLOW_ASSIST. - properties: - - type: String - name: 'agent' - required: true - description: | - he name of a Dialogflow virtual agent used for end user side intent detection and suggestion. Format: projects//locations//agent. - - type: NestedObject - name: 'humanAgentSideConfig' - description: | - The Dialogflow assist configuration for human agent. - properties: - - type: String - name: 'agent' - description: | - The name of a dialogflow virtual agent used for intent detection and suggestion triggered by human agent. Format: projects//locations//agent. - - type: NestedObject - name: conversationModelConfig - description: | - Configs of custom conversation model. - properties: - - type: String - name: 'model' - description: | - Conversation model resource name. Format: projects//conversationModels/. 
- - type: String - name: 'baselineModelVersion' - description: | - Version of current baseline model. It will be ignored if model is set. Valid versions are: Article Suggestion baseline model: - 0.9 - 1.0 (default) Summarization baseline model: - 1.0 - - type: NestedObject - name: 'conversationProcessConfig' - description: | - Config to process conversation. - properties: - - type: Integer - name: 'recentSentencesCount' - description: | - Number of recent non-small-talk sentences to use as context for article and FAQ suggestion - - type: Boolean - name: 'groupSuggestionResponses' - description: | - If groupSuggestionResponses is false, and there are multiple featureConfigs in event based suggestion or StreamingAnalyzeContent, we will try to deliver suggestions to customers as soon as we get new suggestion. Different type of suggestions based on the same context will be in separate Pub/Sub event or StreamingAnalyzeContentResponse. - - If groupSuggestionResponses set to true. All the suggestions to the same participant based on the same context will be grouped into a single Pub/Sub event or StreamingAnalyzeContentResponse. - - type: Array - name: 'generators' - description: | - List of various generator resource names used in the conversation profile. - item_type: - type: String - - type: Boolean - name: 'disableHighLatencyFeaturesSyncDelivery' - description: | - When disableHighLatencyFeaturesSyncDelivery is true and using the AnalyzeContent API, we will not deliver the responses from high latency features in the API response. The humanAgentAssistantConfig.notification_config must be configured and enableEventBasedSuggestion must be set to true to receive the responses from high latency features in Pub/Sub. High latency feature(s): KNOWLEDGE_ASSIST - - type: NestedObject - name: 'endUserSuggestionConfig' - description: | - Configuration for agent assistance of end user participant. 
- properties: - - type: Array - name: 'featureConfigs' - description: | - Configuration of different suggestion features. One feature can have only one config. - item_type: - type: NestedObject - properties: - - type: NestedObject - name: 'suggestionFeature' - description: | - The suggestion feature. - properties: - - type: String - name: 'type' - description: | - Type of Human Agent Assistant API feature to request. - - type: Boolean - name: 'enableEventBasedSuggestion' - description: | - Automatically iterates all participants and tries to compile suggestions. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, DIALOGFLOW_ASSIST, KNOWLEDGE_ASSIST. - - type: Boolean - name: 'disableAgentQueryLogging' - description: | - Disable the logging of search queries sent by human agents. It can prevent those queries from being stored at answer records. - This feature is only supported for types: KNOWLEDGE_SEARCH. - - type: Boolean - name: 'enableQuerySuggestionWhenNoAnswer' - description: | - Enable query suggestion even if we can't find its answer. By default, queries are suggested only if we find its answer. - This feature is only supported for types: KNOWLEDGE_ASSIST. - - type: Boolean - name: 'enableConversationAugmentedQuery' - description: | - Enable including conversation context during query answer generation. - This feature is only supported for types: KNOWLEDGE_SEARCH. - - type: Boolean - name: 'enableQuerySuggestionOnly' - description: | - Enable query suggestion only. - This feature is only supported for types: KNOWLEDGE_ASSIST - - type: NestedObject - name: 'suggestionTriggerSettings' - description: | - Settings of suggestion trigger. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ. - custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl' - properties: - - type: Boolean - name: 'noSmallTalk' - description: | - Do not trigger if last utterance is small talk. 
- - type: Boolean - name: 'onlyEndUser' - description: | - Only trigger suggestion if participant role of last utterance is END_USER. - - type: NestedObject - name: queryConfig - description: | - Configs of query. - properties: - - type: Integer - name: maxResults - default_value: 10 - description: | - Maximum number of results to return. - - type: Double - name: confidenceThreshold - description: | - Confidence threshold of query result. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ, SMART_REPLY, SMART_COMPOSE, KNOWLEDGE_SEARCH, KNOWLEDGE_ASSIST, ENTITY_EXTRACTION. - - type: NestedObject - name: contextFilterSettings - description: | - Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped. - custom_flatten: 'templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl' - properties: - - type: Boolean - name: 'dropHandoffMessages' - description: | - If set to true, the last message from virtual agent (hand off message) and the message before it (trigger message of hand off) are dropped. - - type: Boolean - name: 'dropVirtualAgentMessages' - description: | - If set to true, all messages from virtual agent are dropped. - - type: Boolean - name: 'dropIvrMessages' - description: | - If set to true, all messages from ivr stage are dropped. - - type: NestedObject - name: 'sections' - description: | - he customized sections chosen to return when requesting a summary of a conversation. - properties: - - type: Array - name: 'sectionTypes' - description: | - The selected sections chosen to return when requesting a summary of a conversation - If not provided the default selection will be "{SITUATION, ACTION, RESULT}". - item_type: - type: Enum - name: 'sectionType' - description: | - The selected sections chosen to return when requesting a summary of a conversation. A duplicate selected section will be treated as a single selected section. 
- enum_values: - - SECTION_TYPE_UNSPECIFIED - - SITUATION - - ACTION - - RESOLUTION - - REASON_FOR_CANCELLATION - - CUSTOMER_SATISFACTION - - ENTITIES - - type: NestedObject - name: 'knowledgeBaseQuerySource' - description: | - Query from knowledgebase. - This feature is only supported for types: ARTICLE_SUGGESTION, FAQ. - properties: - - type: Array - name: 'knowledgeBases' - required: true - description: | - Knowledge bases to query. Format: projects//locations//knowledgeBases/. - item_type: - type: String - - type: NestedObject - name: 'documentQuerySource' - description: | - Query from knowledge base document. - This feature is supported for types: SMART_REPLY, SMART_COMPOSE. - properties: - - type: Array - name: 'documents' - required: true - description: | - Knowledge documents to query from. Format: projects//locations//knowledgeBases//documents/. - item_type: - type: String - - type: NestedObject - name: 'dialogflowQuerySource' - description: | - Query from Dialogflow agent. - This feature is supported for types: DIALOGFLOW_ASSIST. - properties: - - type: String - name: 'agent' - required: true - description: | - he name of a Dialogflow virtual agent used for end user side intent detection and suggestion. Format: projects//locations//agent. - - type: NestedObject - name: 'humanAgentSideConfig' - description: | - The Dialogflow assist configuration for human agent. - properties: - - type: String - name: 'agent' - description: | - The name of a dialogflow virtual agent used for intent detection and suggestion triggered by human agent. Format: projects//locations//agent. - - type: NestedObject - name: conversationModelConfig - description: | - Configs of custom conversation model. - properties: - - type: String - name: 'model' - description: | - Conversation model resource name. Format: projects//conversationModels/. - - type: String - name: 'baselineModelVersion' - description: | - Version of current baseline model. It will be ignored if model is set. 
Valid versions are: Article Suggestion baseline model: - 0.9 - 1.0 (default) Summarization baseline model: - 1.0 - - type: NestedObject - name: 'conversationProcessConfig' - description: | - Config to process conversation. - properties: - - type: Integer - name: 'recentSentencesCount' - description: | - Number of recent non-small-talk sentences to use as context for article and FAQ suggestion - - type: Boolean - name: 'groupSuggestionResponses' - description: | - If groupSuggestionResponses is false, and there are multiple featureConfigs in event based suggestion or StreamingAnalyzeContent, we will try to deliver suggestions to customers as soon as we get new suggestion. Different type of suggestions based on the same context will be in separate Pub/Sub event or StreamingAnalyzeContentResponse. - - If groupSuggestionResponses set to true. All the suggestions to the same participant based on the same context will be grouped into a single Pub/Sub event or StreamingAnalyzeContentResponse. - - type: Array - name: 'generators' - description: | - List of various generator resource names used in the conversation profile. - item_type: - type: String - - type: Boolean - name: 'disableHighLatencyFeaturesSyncDelivery' - description: | - When disableHighLatencyFeaturesSyncDelivery is true and using the AnalyzeContent API, we will not deliver the responses from high latency features in the API response. The humanAgentAssistantConfig.notification_config must be configured and enableEventBasedSuggestion must be set to true to receive the responses from high latency features in Pub/Sub. High latency feature(s): KNOWLEDGE_ASSIST - - type: NestedObject - name: 'messageAnalysisConfig' - description: | - desc - properties: - - type: Boolean - name: 'enableEntityExtraction' - default_value: false - description: | - Enable entity extraction in conversation messages on agent assist stage. 
- - type: Boolean - name: 'enableSentimentAnalysis' - default_value: false - description: | - Enable sentiment analysis in conversation messages on agent assist stage. Sentiment analysis inspects user input and identifies the prevailing subjective opinion, especially to determine a user's attitude as positive, negative, or neutral. - - type: NestedObject - name: 'humanAgentHandoffConfig' - description: | - Defines the hand off to a live agent, typically on which external agent service provider to connect to a conversation. - properties: - - type: NestedObject - name: livePersonConfig - description: | - Config for using LivePerson. - properties: - - type: String - name: 'accountNumber' - required: true - description: | - Account number of the LivePerson account to connect. - - type: NestedObject - name: 'notificationConfig' - description: | - Pub/Sub topic on which to publish new agent assistant events. - Expects the format "projects//locations//topics/" - properties: - - type: String - name: 'topic' - description: | - Name of the Pub/Sub topic to publish conversation events - - type: Enum - name: 'messageFormat' - description: | - Format of the message - enum_values: - - MESSAGE_FORMAT_UNSPECIFIED - - PROTO - - JSON - - type: NestedObject - name: 'loggingConfig' - # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed - ignore_read: true - description: | - Defines logging behavior for conversation lifecycle events. - properties: - - type: Boolean - name: enableStackdriverLogging - description: | - Whether to log conversation events - - type: NestedObject - name: newMessageEventNotificationConfig - description: | - Pub/Sub topic on which to publish new agent assistant events. 
- Expects the format "projects//locations//topics/" - properties: - - type: String - name: 'topic' - description: | - Name of the Pub/Sub topic to publish conversation events - - type: Enum - name: 'messageFormat' - description: | - Format of the message - enum_values: - - MESSAGE_FORMAT_UNSPECIFIED - - PROTO - - JSON - - type: NestedObject - name: sttConfig - description: | - Settings for speech transcription. - properties: - - type: Enum - name: speechModelVariant - description: | - The speech model used in speech to text. - enum_values: - - SPEECH_MODEL_VARIANT_UNSPECIFIED - - USE_BEST_AVAILABLE - - USE_STANDARD - - USE_ENHANCED - - type: String - name: 'model' - description: | - Which Speech model to select. - Leave this field unspecified to use Agent Speech settings for model selection. - - type: Enum - name: 'audioEncoding' - description: | - Audio encoding of the audio content to process. - enum_values: - - AUDIO_ENCODING_UNSPECIFIED - - AUDIO_ENCODING_LINEAR_16 - - AUDIO_ENCODING_FLAC - - AUDIO_ENCODING_MULAW - - AUDIO_ENCODING_AMR - - AUDIO_ENCODING_AMR_WB - - AUDIO_ENCODING_OGG_OPUS - - AUDIOENCODING_SPEEX_WITH_HEADER_BYTE - - type: Integer - name: 'sampleRateHertz' - description: | - Sample rate (in Hertz) of the audio content sent in the query. - - type: String - name: 'languageCode' - description: | - The language of the supplied audio. - default_from_api: true - - type: Boolean - name: 'enableWordInfo' - description: | - If true, Dialogflow returns SpeechWordInfo in StreamingRecognitionResult with information about the recognized speech words. - - type: Boolean - name: 'useTimeoutBasedEndpointing' - description: | - Use timeout based endpointing, interpreting endpointer sensitivy as seconds of timeout value. - - type: String - name: 'languageCode' - description: | - Language code for the conversation profile. This should be a BCP-47 language tag. 
- default_from_api: true - - type: String - name: 'timeZone' - description: | - The time zone of this conversational profile. - - type: String - name: 'securitySettings' - description: | - Name of the CX SecuritySettings reference for the agent. - - type: NestedObject - name: 'ttsConfig' - description: | - Configuration for Text-to-Speech synthesization. If agent defines synthesization options as well, agent settings overrides the option here. - properties: - - type: Double - name: 'speakingRate' - description: | - Speaking rate/speed, in the range [0.25, 4.0]. - - type: Double - name: 'pitch' - description: | - Speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 semitones from the original pitch. -20 means decrease 20 semitones from the original pitch. - - type: Double - name: 'volumeGainDb' - description: | - Volume gain (in dB) of the normal native volume supported by the specific voice. - - type: Array - name: 'effectsProfileId' - description: | - An identifier which selects 'audio effects' profiles that are applied on (post synthesized) text to speech. Effects are applied on top of each other in the order they are given. - item_type: - type: String - - type: NestedObject - name: voice - description: | - The desired voice of the synthesized audio. - properties: - - type: String - name: 'name' - description: | - The name of the voice. - - type: Enum - name: 'ssmlGender' - description: | - The preferred gender of the voice. - enum_values: - - SSML_VOICE_GENDER_UNSPECIFIED - - SSML_VOICE_GENDER_MALE - - SSML_VOICE_GENDER_FEMALE - - SSML_VOICE_GENDER_NEUTRAL diff --git a/mmv1/products/dialogflow/EncryptionSpec.yaml b/mmv1/products/dialogflow/EncryptionSpec.yaml deleted file mode 100644 index cc9a8fda9cfc..000000000000 --- a/mmv1/products/dialogflow/EncryptionSpec.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2025 Google Inc. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'EncryptionSpec' -description: | - Initializes a location-level encryption key specification. -references: - guides: - 'Official ES Documentation': 'https://cloud.google.com/dialogflow/es/docs' - 'Official CX Documentation': 'https://cloud.google.com/dialogflow/cx/docs' - api: 'https://cloud.google.com/dialogflow/es/docs/reference/rest/v2/projects.locations.encryptionSpec' - -base_url: 'projects/{{project}}/locations/{{location}}/encryptionSpec' - -create_url: 'projects/{{project}}/locations/{{location}}/encryptionSpec:initialize' - -exclude_read: true -exclude_delete: true -exclude_import: true -exclude_sweeper: true - -immutable: true - -custom_code: - pre_create: 'templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl' - -async: - actions: ['create'] - operation: - base_url: '{{op_id}}' - -parameters: - - name: 'location' - type: String - required: true - ignore_read: true - description: | - The location in which the encryptionSpec is to be initialized. - -properties: - - name: 'encryptionSpec' - type: NestedObject - required: true - properties: - - name: 'kmsKey' - type: String - required: true - description: | - The name of customer-managed encryption key that is used to secure a resource and its sub-resources. - If empty, the resource is secured by the default Google encryption key. - Only the key in the same location as this resource is allowed to be used for encryption. 
- Format: projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{key} - -examples: - - name: "dialogflow_encryption_spec_basic" - primary_resource_id: "my-encryption-spec" - min_version: "beta" - exclude_import_test: true - vars: - project_id: 'my-proj' - kms_keyring: 'my-keyring' - kms_key: 'my-key' - test_env_vars: - org_id: 'ORG_ID' - billing_acct: 'BILLING_ACCT' - external_providers: - - "time" diff --git a/mmv1/products/dialogflow/EntityType.yaml b/mmv1/products/dialogflow/EntityType.yaml index c938ed0191b0..179920721f21 100644 --- a/mmv1/products/dialogflow/EntityType.yaml +++ b/mmv1/products/dialogflow/EntityType.yaml @@ -38,7 +38,7 @@ examples: - name: 'dialogflow_entity_type_basic' primary_resource_id: 'basic_entity_type' vars: - entity_type_name: 'basic-entity-type' + intent_name: 'basic-entity-type' exclude_test: true parameters: properties: diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml index 70b8bbd73300..ef9fe6f7e087 100644 --- a/mmv1/products/dialogflowcx/Agent.yaml +++ b/mmv1/products/dialogflowcx/Agent.yaml @@ -31,17 +31,6 @@ timeouts: update_minutes: 40 delete_minutes: 20 custom_code: - # An engine resource https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1/projects.locations.collections.engines - # will be automatically created when we specify dataStoreConnections in Flow, Page, or Tool resources associated with the Agent. - # When delete_chat_engine_on_destroy is set to true, we must - # 1. extract the engine ID from the agent in pre_delete. - # 2. delete the agent. - # 3. delete the engine in post_delete. - # We can't delete the chat engine first because the agent depends on the chat engine. Deleting chat engine then allows deleting the - # data store. The chain of dependency is agent -> engine -> data store. - # TODO: remove pre_delete and post_delete after the delete API is updated to clean up the engine resource. 
- pre_delete: 'templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl' - post_delete: 'templates/terraform/post_delete/dialogflowcx_agent.go.tmpl' exclude_sweeper: true examples: - name: 'dialogflowcx_agent_full' @@ -53,25 +42,6 @@ examples: - 'git_integration_settings.0.github_settings.0.access_token' - 'enable_stackdriver_logging' - 'advanced_settings.0.logging_settings' -virtual_fields: - - name: 'delete_chat_engine_on_destroy' - description: | - If set to `true`, Terraform will delete the chat engine associated with the agent when the agent is destroyed. - Otherwise, the chat engine will persist. - - This virtual field addresses a critical dependency chain: `agent` -> `engine` -> `data store`. The chat engine is automatically - provisioned when a data store is linked to the agent, meaning Terraform doesn't have direct control over its lifecycle as a managed - resource. This creates a problem when both the agent and data store are managed by Terraform and need to be destroyed. Without - delete_chat_engine_on_destroy set to true, the data store's deletion would fail because the unmanaged chat engine would still be - using it. This setting ensures that the entire dependency chain can be properly torn down. - See `mmv1/templates/terraform/examples/dialogflowcx_tool_data_store.tf.tmpl` as an example. - - Data store can be linked to an agent through the `knowledgeConnectorSettings` field of a [flow](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.flows#resource:-flow) - or a [page](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.flows.pages#resource:-page) - or the `dataStoreSpec` field of a [tool](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#resource:-tool). 
- The ID of the implicitly created engine is stored in the `genAppBuilderSettings` field of the [agent](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents#resource:-agent). - type: Boolean - default_value: false parameters: properties: - name: 'name' @@ -297,88 +267,3 @@ properties: custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' validation: function: 'validation.StringIsJSON' - - name: 'genAppBuilderSettings' - type: NestedObject - description: | - Gen App Builder-related agent-level settings. - default_from_api: true - properties: - - name: 'engine' - type: String - required: true - description: | - The full name of the Gen App Builder engine related to this agent if there is one. - Format: projects/{Project ID}/locations/{Location ID}/collections/{Collection ID}/engines/{Engine ID} - - name: 'startPlaybook' - type: String - ignore_read: true - description: | - Name of the start playbook in this agent. A start playbook will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: **projects//locations//agents//playbooks/**. Currently only the default playbook with id "00000000-0000-0000-0000-000000000000" is allowed. - conflicts: - - startFlow - - name: 'enableMultiLanguageTraining' - type: Boolean - description: | - Enable training multi-lingual models for this agent. These models will be trained on all the languages supported by the agent. - - name: 'locked' - type: Boolean - description: | - Indicates whether the agent is locked for changes. If the agent is locked, modifications to the agent will be rejected except for [agents.restore][]. - - name: 'satisfiesPzs' - type: Boolean - output: true - description: | - A read only boolean field reflecting Zone Separation status of the agent. - - name: 'satisfiesPzi' - type: Boolean - output: true - description: | - A read only boolean field reflecting Zone Isolation status of the agent. 
- - name: 'answerFeedbackSettings' - type: NestedObject - ignore_read: true - description: | - Answer feedback collection settings. - properties: - - name: 'enableAnswerFeedback' - type: Boolean - description: | - If enabled, end users will be able to provide [answer feedback](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.sessions/submitAnswerFeedback#body.AnswerFeedback) - to Dialogflow responses. Feature works only if interaction logging is enabled in the Dialogflow agent. - - name: 'personalizationSettings' - type: NestedObject - description: | - Settings for end user personalization. - properties: - - name: 'defaultEndUserMetadata' - type: String - description: | - Default end user metadata, used when processing DetectIntent requests. Recommended to be filled as a template instead of hard-coded value, for example { "age": "$session.params.age" }. - The data will be merged with the [QueryParameters.end_user_metadata](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/QueryParameters#FIELDS.end_user_metadata) - in [DetectIntentRequest.query_params](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.sessions/detectIntent#body.request_body.FIELDS.query_params) during query processing. - - This field uses JSON data as a string. The value provided must be a valid JSON representation documented in [Struct](https://protobuf.dev/reference/protobuf/google.protobuf/#struct). - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'clientCertificateSettings' - type: NestedObject - description: | - Settings for custom client certificates. 
- properties: - - name: 'sslCertificate' - type: String - required: true - description: | - The ssl certificate encoded in PEM format. This string must include the begin header and end footer lines. - - name: 'privateKey' - type: String - required: true - description: | - The name of the SecretManager secret version resource storing the private key encoded in PEM format. Format: **projects/{project}/secrets/{secret}/versions/{version}** - - name: 'passphrase' - type: String - description: | - The name of the SecretManager secret version resource storing the passphrase. 'passphrase' should be left unset if the private key is not encrypted. Format: **projects/{project}/secrets/{secret}/versions/{version}** diff --git a/mmv1/products/dialogflowcx/Flow.yaml b/mmv1/products/dialogflowcx/Flow.yaml index 2eab19e025e9..05d7643c9c54 100644 --- a/mmv1/products/dialogflowcx/Flow.yaml +++ b/mmv1/products/dialogflowcx/Flow.yaml @@ -49,17 +49,11 @@ examples: vars: agent_name: 'dialogflowcx-agent' bucket_name: 'dialogflowcx-bucket' - data-store: 'datastore-flow-full' - name: 'dialogflowcx_flow_default_start_flow' primary_resource_id: 'default_start_flow' vars: agent_name: 'dialogflowcx-agent' exclude_docs: true - - name: 'dialogflowcx_flow_custom_endpoint' - primary_resource_id: 'custom_endpoint_flow' - vars: - agent_name: 'dialogflowcx-agent' - exclude_docs: true virtual_fields: - name: 'is_default_start_flow' description: | @@ -507,12 +501,6 @@ properties: custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' validation: function: 'validation.StringIsJSON' - - name: 'enableGenerativeFallback' - type: Boolean - description: | - If the flag is true, the agent will utilize LLM to generate a text response. - If LLM generation fails, the defined responses in the fulfillment will be respected. - This flag is only useful for fulfillments associated with no-match event handlers. 
- name: 'targetPage' type: String description: | @@ -646,391 +634,3 @@ properties: type: Boolean description: | Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. - - name: 'knowledgeConnectorSettings' - type: NestedObject - description: | - Knowledge connector configuration. - properties: - - name: 'enabled' - type: Boolean - description: | - Whether Knowledge Connector is enabled or not. - - name: 'triggerFulfillment' - type: NestedObject - description: | - The fulfillment to be triggered. - When the answers from the Knowledge Connector are selected by Dialogflow, you can utitlize the request scoped parameter $request.knowledge.answers (contains up to the 5 highest confidence answers) and $request.knowledge.questions (contains the corresponding questions) to construct the fulfillment. - properties: - - name: 'messages' - type: Array - description: | - The list of rich message responses to present to the user. - item_type: - type: NestedObject - properties: - # 'responseType' is ignored when creating/updating resources, so we skip this field. See https://github.com/GoogleCloudPlatform/magic-modules/pull/8757/commits/65ad64bd008c60498d9b27b767dc7bc664d42c0b. - - name: 'channel' - type: String - description: | - The channel which the response is associated with. Clients can specify the channel via QueryParameters.channel, and only associated channel response will be returned. - - name: 'text' - type: NestedObject - description: | - The text response message. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'text' - type: Array - description: | - A collection of text response variants. 
If multiple variants are defined, only one text response variant is returned at runtime. - required: true - item_type: - type: String - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. - output: true - # This can be an arbitrary json blob, so we use a string instead of a NestedObject. - - name: 'payload' - type: String - description: | - Returns a response containing a custom, platform-specific payload. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'conversationSuccess' - type: NestedObject - description: | - Indicates that the conversation succeeded, i.e., the bot handled the issue that the customer talked to it about. - Dialogflow only uses this to determine which conversations should be counted as successful and doesn't process the metadata in this message in any way. Note that Dialogflow also considers conversations that get to the conversation end page as successful even if they don't return ConversationSuccess. - You may set this, for example: - * In the entryFulfillment of a Page if entering the page indicates that the conversation succeeded. - * In a webhook response when you determine that you handled the customer issue. 
- This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - # This can be an arbitrary json blob, so we use a string instead of a NestedObject. - - name: 'metadata' - type: String - description: | - Custom metadata. Dialogflow doesn't impose any structure on this. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'outputAudioText' - type: NestedObject - description: | - A text or ssml response that is preferentially used for TTS output audio synthesis, as described in the comment on the ResponseMessage message. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. - output: true - - name: 'text' - type: String - description: | - The raw text to be synthesized. - This field is part of a union field `source`: Only one of `text` or `ssml` may be set. - - name: 'ssml' - type: String - description: | - The SSML text to be synthesized. For more information, see SSML. - This field is part of a union field `source`: Only one of `text` or `ssml` may be set. - - name: 'liveAgentHandoff' - type: NestedObject - description: | - Indicates that the conversation should be handed off to a live agent. 
- Dialogflow only uses this to determine which conversations were handed off to a human agent for measurement purposes. What else to do with this signal is up to you and your handoff procedures. - You may set this, for example: - * In the entryFulfillment of a Page if entering the page indicates something went extremely wrong in the conversation. - * In a webhook response when you determine that the customer issue can only be handled by a human. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - # This can be an arbitrary json blob, so we use a string instead of a NestedObject. - - name: 'metadata' - type: String - description: | - Custom metadata. Dialogflow doesn't impose any structure on this. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'endInteraction' - type: NestedObject - description: | - This type has no fields. - Indicates that interaction with the Dialogflow agent has ended. This message is generated by Dialogflow only and not supposed to be defined by the user. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - output: true - allow_empty_object: true - properties: [] # Meant to be an empty object with no properties. - - name: 'playAudio' - type: NestedObject - description: | - Specifies an audio clip to be played by the client as part of the response. 
- This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'audioUri' - type: String - description: | - URI of the audio clip. Dialogflow does not impose any validation on this value. It is specific to the client that reads it. - required: true - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. - output: true - - name: 'mixedAudio' - type: NestedObject - description: | - Represents an audio message that is composed of both segments synthesized from the Dialogflow agent prompts and ones hosted externally at the specified URIs. The external URIs are specified via playAudio. This message is generated by Dialogflow only and not supposed to be defined by the user. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - output: true - properties: - - name: 'segments' - type: Array - description: | - Segments this audio response is composed of. - item_type: - type: NestedObject - properties: - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this segment can be interrupted by the end user's speech and the client should then start the next Dialogflow request. - output: true - - name: 'audio' - type: String - description: | - Raw audio synthesized from the Dialogflow agent's response using the output config specified in the request. - A base64-encoded string. - This field is part of a union field `content`: Only one of `audio` or `uri` may be set. 
- - name: 'uri' - type: String - description: | - Client-specific URI that points to an audio clip accessible to the client. Dialogflow does not impose any validation on it. - This field is part of a union field `content`: Only one of `audio` or `uri` may be set. - - name: 'telephonyTransferCall' - type: NestedObject - description: | - Represents the signal that telles the client to transfer the phone call connected to the agent to a third-party endpoint. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'phoneNumber' - type: String - description: | - Transfer the call to a phone number in E.164 format. - required: true - - name: 'knowledgeInfoCard' - type: NestedObject - description: | - This type has no fields. - Represents info card response. If the response contains generative knowledge prediction, Dialogflow will return a payload with Infobot Messenger compatible info card. - Otherwise, the info card response is skipped. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - allow_empty_object: true - send_empty_value: true - properties: [] # Meant to be an empty object with no properties. - # Although ResponseMessage has a field named "toolCall", we can't include it here because it references the Tool resource, which hasn't been available on Terraform. - # See https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3beta1/ResponseMessage - - name: 'webhook' - type: String - description: | - The webhook to call. Format: projects//locations//agents//webhooks/. 
- - name: 'returnPartialResponses' - type: Boolean - description: | - Whether Dialogflow should return currently queued fulfillment response messages in streaming APIs. If a webhook is specified, it happens before Dialogflow invokes webhook. Warning: 1) This flag only affects streaming API. Responses are still queued and returned once in non-streaming API. 2) The flag can be enabled in any fulfillment but only the first 3 partial responses will be returned. You may only want to apply it to fulfillments that have slow webhooks. - - name: 'tag' - type: String - description: | - The tag used by the webhook to identify which fulfillment is being called. This field is required if webhook is specified. - - name: 'setParameterActions' - type: Array - description: | - Set parameter values before executing the webhook. - item_type: - type: NestedObject - properties: - - name: 'parameter' - type: String - description: | - Display name of the parameter. - - name: 'value' - type: String - description: | - The new JSON-encoded value of the parameter. A null value clears the parameter. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'conditionalCases' - type: Array - description: | - Conditional cases for this fulfillment. - item_type: - type: NestedObject - properties: - - name: 'cases' - type: String - description: | - A JSON encoded list of cascading if-else conditions. Cases are mutually exclusive. The first one with a matching condition is selected, all the rest ignored. - See [Case](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/Fulfillment#case) for the schema. 
- state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'advancedSettings' - type: NestedObject - description: | - Hierarchical advanced settings for agent/flow/page/fulfillment/parameter. Settings exposed at lower level overrides the settings exposed at higher level. Overriding occurs at the sub-setting level. For example, the playbackInterruptionSettings at fulfillment level only overrides the playbackInterruptionSettings at the agent level, leaving other settings at the agent level unchanged. - DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. - Hierarchy: Agent->Flow->Page->Fulfillment/Parameter. - properties: - # This field currently can't be set. The API is not including the value in the API response, causing Acceptance Test to fail. - # - name: 'audioExportGcsDestination' - # type: NestedObject - # description: | - # If present, incoming audio is exported by Dialogflow to the configured Google Cloud Storage destination. Exposed at the following levels: - # * Agent level - # * Flow level - # properties: - # - name: 'uri' - # type: String - # description: | - # The Google Cloud Storage URI for the exported objects. A URI is of the form: gs://bucket/object-name-or-prefix Whether a full object name, or just a prefix, its usage depends on the Dialogflow operation. - # required: true - - name: 'speechSettings' - type: NestedObject - description: | - Settings for speech to text detection. Exposed at the following levels: - * Agent level - * Flow level - * Page level - * Parameter level - properties: - - name: 'endpointerSensitivity' - type: Integer - description: | - Sensitivity of the speech model that detects the end of speech. Scale from 0 to 100. 
- - name: 'noSpeechTimeout' - type: String - description: | - Timeout before detecting no speech. - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". - - name: 'useTimeoutBasedEndpointing' - type: Boolean - description: | - Use timeout based endpointing, interpreting endpointer sensitivity as seconds of timeout value. - - name: 'models' - type: KeyValuePairs - description: | - Mapping from language to Speech-to-Text model. The mapped Speech-to-Text model will be selected for requests from its corresponding language. For more information, see [Speech models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models). - An object containing a list of **"key": value** pairs. Example: **{ "name": "wrench", "mass": "1.3kg", "count": "3" }**. - - name: 'dtmfSettings' - type: NestedObject - description: | - Define behaviors for DTMF (dual tone multi frequency). DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. Exposed at the following levels: - * Agent level - * Flow level - * Page level - * Parameter level - properties: - - name: 'enabled' - type: Boolean - description: | - If true, incoming audio is processed for DTMF (dual tone multi frequtectency) events. For example, if the caller presses a button on their telephone keypad and DTMF processing is enabled, Dialogflow will de the event (e.g. a "3" was pressed) in the incoming audio and pass the event to the bot to drive business logic (e.g. when 3 is pressed, return the account balance). - - name: 'maxDigits' - type: Integer - description: | - Max length of DTMF digits. - - name: 'finishDigit' - type: String - description: | - The digit that terminates a DTMF digit sequence. - - name: 'interdigitTimeoutDuration' - type: String - description: | - Interdigit timeout setting for matching dtmf input to regex. - A duration in seconds with up to nine fractional digits, ending with 's'. 
Example: "3.500s". - - name: 'endpointingTimeoutDuration' - type: String - description: | - Endpoint timeout setting for matching dtmf input to regex. - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". - - name: 'loggingSettings' - type: NestedObject - # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed - ignore_read: true - description: | - Settings for logging. Settings for Dialogflow History, Contact Center messages, StackDriver logs, and speech logging. Exposed at the following levels: - * Agent level - properties: - - name: 'enableStackdriverLogging' - type: Boolean - description: | - Enables Google Cloud Logging. - - name: 'enableInteractionLogging' - type: Boolean - description: | - Enables DF Interaction logging. - - name: 'enableConsentBasedRedaction' - type: Boolean - description: | - Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. - - name: 'enableGenerativeFallback' - type: Boolean - description: | - If the flag is true, the agent will utilize LLM to generate a text response. If LLM generation fails, the defined responses in the fulfillment will be respected. This flag is only useful for fulfillments associated with no-match event handlers. - - name: 'dataStoreConnections' - type: Array - description: | - Optional. List of related data store connections. - item_type: - type: NestedObject - properties: - - name: 'dataStoreType' - type: Enum - description: | - The type of the connected data store. - * PUBLIC_WEB: A data store that contains public web content. - * UNSTRUCTURED: A data store that contains unstructured private data. - * STRUCTURED: A data store that contains structured data (for example FAQ). 
- enum_values: - - 'PUBLIC_WEB' - - 'UNSTRUCTURED' - - 'STRUCTURED' - - name: 'dataStore' - type: String - description: | - The full name of the referenced data store. Formats: projects/{project}/locations/{location}/collections/{collection}/dataStores/{dataStore} projects/{project}/locations/{location}/dataStores/{dataStore} - - name: 'documentProcessingMode' - type: Enum - description: | - The document processing mode for the data store connection. Should only be set for PUBLIC_WEB and UNSTRUCTURED data stores. If not set it is considered as DOCUMENTS, as this is the legacy mode. - * DOCUMENTS: Documents are processed as documents. - * CHUNKS: Documents are converted to chunks. - enum_values: - - 'DOCUMENTS' - - 'CHUNKS' - - name: 'targetPage' - type: String - description: | - The target page to transition to. Format: projects//locations//agents//flows//pages/. - The page must be in the same host flow (the flow that owns this `KnowledgeConnectorSettings`). - This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. - - name: 'targetFlow' - type: String - description: | - The target flow to transition to. Format: projects//locations//agents//flows/. - This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. diff --git a/mmv1/products/dialogflowcx/GenerativeSettings.yaml b/mmv1/products/dialogflowcx/GenerativeSettings.yaml deleted file mode 100644 index 16f029dbe8a6..000000000000 --- a/mmv1/products/dialogflowcx/GenerativeSettings.yaml +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'GenerativeSettings' -description: | - Settings for Generative AI. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/dialogflow/cx/docs' - api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents/getGenerativeSettings' -docs: -base_url: '{{parent}}/generativeSettings' -self_link: '{{parent}}/generativeSettings?languageCode={{language_code}}' -create_url: '{{parent}}/generativeSettings' -update_url: '{{parent}}/generativeSettings' -create_verb: 'PATCH' -update_verb: 'PATCH' -update_mask: true -exclude_delete: true -import_format: - - '{{parent}}/generativeSettings' -timeouts: - insert_minutes: 40 - update_minutes: 40 - delete_minutes: 20 -custom_code: - pre_create: 'templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl' - pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/dialogflowcx_set_location_skip_default_obj.go.tmpl' - custom_import: 'templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl' -exclude_sweeper: true -examples: - - name: 'dialogflowcx_generative_settings_full' - primary_resource_id: 'full_generative_settings' - vars: - agent_name: 'dialogflowcx-agent' -parameters: - - name: 'parent' - type: String - description: | - The agent to create a flow for. - Format: projects//locations//agents/. 
- url_param_only: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The unique identifier of the generativeSettings. - Format: projects//locations//agents//generativeSettings. - output: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: 'fallbackSettings' - type: NestedObject - description: | - Settings for Generative Fallback. - properties: - - name: 'selectedPrompt' - type: String - description: | - Display name of the selected prompt. - - name: 'promptTemplates' - type: Array - # ignore reading results for fallback_settings.prompt_templates because it includes data that is auto-generated on the server side, - # e.g., prompt_templates with display names "Default" and "Example". - ignore_read: true - description: | - Stored prompts that can be selected, for example default templates like "conservative" or "chatty", or user defined ones. - item_type: - type: NestedObject - properties: - - name: 'displayName' - type: String - description: | - Prompt name. - - name: 'promptText' - type: String - description: | - Prompt text that is sent to a LLM on no-match default, placeholders are filled downstream. For example: "Here is a conversation $conversation, a response is: " - - name: 'frozen' - type: Boolean - description: | - If the flag is true, the prompt is frozen and cannot be modified by users. - - name: 'generativeSafetySettings' - type: NestedObject - description: | - Settings for Generative Safety. - w - properties: - - name: 'defaultBannedPhraseMatchStrategy' - type: String - description: | - Optional. Default phrase match strategy for banned phrases. - See [PhraseMatchStrategy](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/GenerativeSettings#phrasematchstrategy) for valid values. - - name: 'bannedPhrases' - type: Array - description: | - Banned phrases for generated text. 
- item_type: - type: NestedObject - properties: - - name: 'text' - type: String - description: | - Text input which can be used for prompt or banned phrases. - required: true - - name: 'languageCode' - type: String - description: | - Language code of the phrase. - required: true - - name: 'knowledgeConnectorSettings' - type: NestedObject - description: | - Settings for knowledge connector. - properties: - - name: 'business' - type: String - description: | - Name of the company, organization or other entity that the agent represents. Used for knowledge connector LLM prompt and for knowledge search. - - name: 'agent' - type: String - description: | - Name of the virtual agent. Used for LLM prompt. Can be left empty. - - name: 'agentIdentity' - type: String - description: | - Identity of the agent, e.g. "virtual agent", "AI assistant". - - name: 'businessDescription' - type: String - description: | - Company description, used for LLM prompt, e.g. "a family company selling freshly roasted coffee beans".`` - - name: 'agentScope' - type: String - description: | - Agent scope, e.g. "Example company website", "internal Example company website for employees", "manual of car owner". - - name: 'disableDataStoreFallback' - type: Boolean - description: | - Whether to disable fallback to Data Store search results (in case the LLM couldn't pick a proper answer). Per default the feature is enabled. - - name: 'languageCode' - type: String - description: | - Language for this settings. - required: true - - name: 'llmModelSettings' - type: NestedObject - description: | - LLM model settings. - properties: - - name: 'model' - type: String - description: | - The selected LLM model. - - name: 'promptText' - type: String - description: | - The custom prompt to use. 
diff --git a/mmv1/products/dialogflowcx/Generator.yaml b/mmv1/products/dialogflowcx/Generator.yaml deleted file mode 100644 index 1be77ea63593..000000000000 --- a/mmv1/products/dialogflowcx/Generator.yaml +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Generator -description: - Generators contain prompt to be sent to the LLM model to generate text. - The prompt can contain parameters which will be resolved before calling the model. - It can optionally contain banned phrases to ensure the model responses are safe. 
-references: - guides: - 'Official Documentation': 'https://cloud.google.com/dialogflow/cx/docs' - api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.generators' -id_format: '{{parent}}/generators/{{name}}' -base_url: '{{parent}}/generators' -update_verb: 'PATCH' -update_mask: true -import_format: - - '{{parent}}/generators/{{name}}' -timeouts: - insert_minutes: 40 - update_minutes: 40 - delete_minutes: 20 -custom_code: - pre_create: 'templates/terraform/pre_create/dialogflowcx_generator.go.tmpl' - pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/dialogflowcx_set_location_skip_default_obj.go.tmpl' - custom_import: 'templates/terraform/custom_import/dialogflowcx_generator.go.tmpl' -exclude_sweeper: true -examples: - - name: 'dialogflowcx_generator_basic' - primary_resource_id: 'generator' - vars: - agent_name: 'dialogflowcx-agent-fucntion' -parameters: - - name: 'parent' - type: String - description: | - The agent to create a Generator for. - Format: projects//locations//agents/. - url_param_only: true - immutable: true - - name: 'languageCode' - type: String - description: | - The language to create generators for the following fields: - * Generator.prompt_text.text - If not specified, the agent's default language is used. - url_param_only: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The unique identifier of the Generator. - Format: projects//locations//agents//generators/. - output: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: 'displayName' - type: String - description: | - The human-readable name of the generator, unique within the agent. - required: true - - name: 'llmModelSettings' - type: NestedObject - description: | - The LLM model settings. 
- properties: - - name: 'model' - type: String - description: | - The selected LLM model. - - name: 'promptText' - type: String - description: | - The custom prompt to use. - - name: 'modelParameter' - type: NestedObject - description: | - Parameters passed to the LLM to configure its behavior. - properties: - - name: 'temperature' - type: Double - description: | - The temperature used for sampling. Temperature sampling occurs after both topP and topK have been applied. - Valid range: [0.0, 1.0] Low temperature = less random. High temperature = more random. - - name: 'maxDecodeSteps' - type: Integer - description: | - The maximum number of tokens to generate. - - name: 'topP' - type: Double - description: | - If set, only the tokens comprising the top topP probability mass are considered. - If both topP and topK are set, topP will be used for further refining candidates selected with topK. - Valid range: (0.0, 1.0]. Small topP = less random. Large topP = more random. - - name: 'topK' - type: Integer - description: | - If set, the sampling process in each step is limited to the topK tokens with highest probabilities. - Valid range: [1, 40] or 1000+. Small topK = less random. Large topK = more random. - - name: 'placeholders' - type: Array - description: | - List of custom placeholders in the prompt text. - item_type: - type: NestedObject - properties: - - name: 'id' - type: String - description: | - Unique ID used to map custom placeholder to parameters in fulfillment. - - name: 'name' - type: String - description: | - Custom placeholder value in the prompt text. - - name: 'promptText' - type: NestedObject - required: true - ignore_read: true - description: | - Prompt for the LLM model. - properties: - - name: 'text' - type: String - description: | - Text input which can be used for prompt or banned phrases. 
diff --git a/mmv1/products/dialogflowcx/Page.yaml b/mmv1/products/dialogflowcx/Page.yaml index d20ef5fb368f..4166d047f098 100644 --- a/mmv1/products/dialogflowcx/Page.yaml +++ b/mmv1/products/dialogflowcx/Page.yaml @@ -42,7 +42,6 @@ examples: primary_resource_id: 'basic_page' vars: agent_name: 'dialogflowcx-agent' - data-store: 'datastore-page-full' parameters: - name: 'parent' type: String @@ -1158,391 +1157,3 @@ properties: type: String description: | The digit that terminates a DTMF digit sequence. - - name: 'knowledgeConnectorSettings' - type: NestedObject - description: | - Knowledge connector configuration. - properties: - - name: 'enabled' - type: Boolean - description: | - Whether Knowledge Connector is enabled or not. - - name: 'triggerFulfillment' - type: NestedObject - description: | - The fulfillment to be triggered. - When the answers from the Knowledge Connector are selected by Dialogflow, you can utitlize the request scoped parameter $request.knowledge.answers (contains up to the 5 highest confidence answers) and $request.knowledge.questions (contains the corresponding questions) to construct the fulfillment. - properties: - - name: 'messages' - type: Array - description: | - The list of rich message responses to present to the user. - item_type: - type: NestedObject - properties: - # 'responseType' is ignored when creating/updating resources, so we skip this field. See https://github.com/GoogleCloudPlatform/magic-modules/pull/8757/commits/65ad64bd008c60498d9b27b767dc7bc664d42c0b. - - name: 'channel' - type: String - description: | - The channel which the response is associated with. Clients can specify the channel via QueryParameters.channel, and only associated channel response will be returned. - - name: 'text' - type: NestedObject - description: | - The text response message. 
- This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'text' - type: Array - description: | - A collection of text response variants. If multiple variants are defined, only one text response variant is returned at runtime. - required: true - item_type: - type: String - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. - output: true - # This can be an arbitrary json blob, so we use a string instead of a NestedObject. - - name: 'payload' - type: String - description: | - Returns a response containing a custom, platform-specific payload. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'conversationSuccess' - type: NestedObject - description: | - Indicates that the conversation succeeded, i.e., the bot handled the issue that the customer talked to it about. - Dialogflow only uses this to determine which conversations should be counted as successful and doesn't process the metadata in this message in any way. Note that Dialogflow also considers conversations that get to the conversation end page as successful even if they don't return ConversationSuccess. 
- You may set this, for example: - * In the entryFulfillment of a Page if entering the page indicates that the conversation succeeded. - * In a webhook response when you determine that you handled the customer issue. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - # This can be an arbitrary json blob, so we use a string instead of a NestedObject. - - name: 'metadata' - type: String - description: | - Custom metadata. Dialogflow doesn't impose any structure on this. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'outputAudioText' - type: NestedObject - description: | - A text or ssml response that is preferentially used for TTS output audio synthesis, as described in the comment on the ResponseMessage message. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. - output: true - - name: 'text' - type: String - description: | - The raw text to be synthesized. - This field is part of a union field `source`: Only one of `text` or `ssml` may be set. - - name: 'ssml' - type: String - description: | - The SSML text to be synthesized. For more information, see SSML. 
- This field is part of a union field `source`: Only one of `text` or `ssml` may be set. - - name: 'liveAgentHandoff' - type: NestedObject - description: | - Indicates that the conversation should be handed off to a live agent. - Dialogflow only uses this to determine which conversations were handed off to a human agent for measurement purposes. What else to do with this signal is up to you and your handoff procedures. - You may set this, for example: - * In the entryFulfillment of a Page if entering the page indicates something went extremely wrong in the conversation. - * In a webhook response when you determine that the customer issue can only be handled by a human. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - # This can be an arbitrary json blob, so we use a string instead of a NestedObject. - - name: 'metadata' - type: String - description: | - Custom metadata. Dialogflow doesn't impose any structure on this. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'endInteraction' - type: NestedObject - description: | - This type has no fields. - Indicates that interaction with the Dialogflow agent has ended. This message is generated by Dialogflow only and not supposed to be defined by the user. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. 
- output: true - allow_empty_object: true - properties: [] # Meant to be an empty object with no properties. - - name: 'playAudio' - type: NestedObject - description: | - Specifies an audio clip to be played by the client as part of the response. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'audioUri' - type: String - description: | - URI of the audio clip. Dialogflow does not impose any validation on this value. It is specific to the client that reads it. - required: true - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this message can be interrupted by the end user's speech and the client can then starts the next Dialogflow request. - output: true - - name: 'mixedAudio' - type: NestedObject - description: | - Represents an audio message that is composed of both segments synthesized from the Dialogflow agent prompts and ones hosted externally at the specified URIs. The external URIs are specified via playAudio. This message is generated by Dialogflow only and not supposed to be defined by the user. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - output: true - properties: - - name: 'segments' - type: Array - description: | - Segments this audio response is composed of. - item_type: - type: NestedObject - properties: - - name: 'allowPlaybackInterruption' - type: Boolean - description: | - Whether the playback of this segment can be interrupted by the end user's speech and the client should then start the next Dialogflow request. 
- output: true - - name: 'audio' - type: String - description: | - Raw audio synthesized from the Dialogflow agent's response using the output config specified in the request. - A base64-encoded string. - This field is part of a union field `content`: Only one of `audio` or `uri` may be set. - - name: 'uri' - type: String - description: | - Client-specific URI that points to an audio clip accessible to the client. Dialogflow does not impose any validation on it. - This field is part of a union field `content`: Only one of `audio` or `uri` may be set. - - name: 'telephonyTransferCall' - type: NestedObject - description: | - Represents the signal that telles the client to transfer the phone call connected to the agent to a third-party endpoint. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - properties: - - name: 'phoneNumber' - type: String - description: | - Transfer the call to a phone number in E.164 format. - required: true - - name: 'knowledgeInfoCard' - type: NestedObject - description: | - This type has no fields. - Represents info card response. If the response contains generative knowledge prediction, Dialogflow will return a payload with Infobot Messenger compatible info card. - Otherwise, the info card response is skipped. - This field is part of a union field `message`: Only one of `text`, `payload`, `conversationSuccess`, `outputAudioText`, `liveAgentHandoff`, `endInteraction`, `playAudio`, `mixedAudio`, `telephonyTransferCall`, or `knowledgeInfoCard` may be set. - allow_empty_object: true - send_empty_value: true - properties: [] # Meant to be an empty object with no properties. - # Although ResponseMessage has a field named "toolCall", we can't include it here because it references the Tool resource, which hasn't been available on Terraform. 
- # See https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3beta1/ResponseMessage - - name: 'webhook' - type: String - description: | - The webhook to call. Format: projects//locations//agents//webhooks/. - - name: 'returnPartialResponses' - type: Boolean - description: | - Whether Dialogflow should return currently queued fulfillment response messages in streaming APIs. If a webhook is specified, it happens before Dialogflow invokes webhook. Warning: 1) This flag only affects streaming API. Responses are still queued and returned once in non-streaming API. 2) The flag can be enabled in any fulfillment but only the first 3 partial responses will be returned. You may only want to apply it to fulfillments that have slow webhooks. - - name: 'tag' - type: String - description: | - The tag used by the webhook to identify which fulfillment is being called. This field is required if webhook is specified. - - name: 'setParameterActions' - type: Array - description: | - Set parameter values before executing the webhook. - item_type: - type: NestedObject - properties: - - name: 'parameter' - type: String - description: | - Display name of the parameter. - - name: 'value' - type: String - description: | - The new JSON-encoded value of the parameter. A null value clears the parameter. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'conditionalCases' - type: Array - description: | - Conditional cases for this fulfillment. - item_type: - type: NestedObject - properties: - - name: 'cases' - type: String - description: | - A JSON encoded list of cascading if-else conditions. Cases are mutually exclusive. The first one with a matching condition is selected, all the rest ignored. 
- See [Case](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/Fulfillment#case) for the schema. - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'advancedSettings' - type: NestedObject - description: | - Hierarchical advanced settings for agent/flow/page/fulfillment/parameter. Settings exposed at lower level overrides the settings exposed at higher level. Overriding occurs at the sub-setting level. For example, the playbackInterruptionSettings at fulfillment level only overrides the playbackInterruptionSettings at the agent level, leaving other settings at the agent level unchanged. - DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. - Hierarchy: Agent->Flow->Page->Fulfillment/Parameter. - properties: - # This field currently can't be set. The API is not including the value in the API response, causing Acceptance Test to fail. - # - name: 'audioExportGcsDestination' - # type: NestedObject - # description: | - # If present, incoming audio is exported by Dialogflow to the configured Google Cloud Storage destination. Exposed at the following levels: - # * Agent level - # * Flow level - # properties: - # - name: 'uri' - # type: String - # description: | - # The Google Cloud Storage URI for the exported objects. A URI is of the form: gs://bucket/object-name-or-prefix Whether a full object name, or just a prefix, its usage depends on the Dialogflow operation. - # required: true - - name: 'speechSettings' - type: NestedObject - description: | - Settings for speech to text detection. 
Exposed at the following levels: - * Agent level - * Flow level - * Page level - * Parameter level - properties: - - name: 'endpointerSensitivity' - type: Integer - description: | - Sensitivity of the speech model that detects the end of speech. Scale from 0 to 100. - - name: 'noSpeechTimeout' - type: String - description: | - Timeout before detecting no speech. - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". - - name: 'useTimeoutBasedEndpointing' - type: Boolean - description: | - Use timeout based endpointing, interpreting endpointer sensitivity as seconds of timeout value. - - name: 'models' - type: KeyValuePairs - description: | - Mapping from language to Speech-to-Text model. The mapped Speech-to-Text model will be selected for requests from its corresponding language. For more information, see [Speech models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models). - An object containing a list of **"key": value** pairs. Example: **{ "name": "wrench", "mass": "1.3kg", "count": "3" }**. - - name: 'dtmfSettings' - type: NestedObject - description: | - Define behaviors for DTMF (dual tone multi frequency). DTMF settings does not override each other. DTMF settings set at different levels define DTMF detections running in parallel. Exposed at the following levels: - * Agent level - * Flow level - * Page level - * Parameter level - properties: - - name: 'enabled' - type: Boolean - description: | - If true, incoming audio is processed for DTMF (dual tone multi frequtectency) events. For example, if the caller presses a button on their telephone keypad and DTMF processing is enabled, Dialogflow will de the event (e.g. a "3" was pressed) in the incoming audio and pass the event to the bot to drive business logic (e.g. when 3 is pressed, return the account balance). - - name: 'maxDigits' - type: Integer - description: | - Max length of DTMF digits. 
- - name: 'finishDigit' - type: String - description: | - The digit that terminates a DTMF digit sequence. - - name: 'interdigitTimeoutDuration' - type: String - description: | - Interdigit timeout setting for matching dtmf input to regex. - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". - - name: 'endpointingTimeoutDuration' - type: String - description: | - Endpoint timeout setting for matching dtmf input to regex. - A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.500s". - - name: 'loggingSettings' - type: NestedObject - # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed - ignore_read: true - description: | - Settings for logging. Settings for Dialogflow History, Contact Center messages, StackDriver logs, and speech logging. Exposed at the following levels: - * Agent level - properties: - - name: 'enableStackdriverLogging' - type: Boolean - description: | - Enables Google Cloud Logging. - - name: 'enableInteractionLogging' - type: Boolean - description: | - Enables DF Interaction logging. - - name: 'enableConsentBasedRedaction' - type: Boolean - description: | - Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. - - name: 'enableGenerativeFallback' - type: Boolean - description: | - If the flag is true, the agent will utilize LLM to generate a text response. If LLM generation fails, the defined responses in the fulfillment will be respected. This flag is only useful for fulfillments associated with no-match event handlers. - - name: 'dataStoreConnections' - type: Array - description: | - Optional. List of related data store connections. - item_type: - type: NestedObject - properties: - - name: 'dataStoreType' - type: Enum - description: | - The type of the connected data store. 
- * PUBLIC_WEB: A data store that contains public web content. - * UNSTRUCTURED: A data store that contains unstructured private data. - * STRUCTURED: A data store that contains structured data (for example FAQ). - enum_values: - - 'PUBLIC_WEB' - - 'UNSTRUCTURED' - - 'STRUCTURED' - - name: 'dataStore' - type: String - description: | - The full name of the referenced data store. Formats: projects/{project}/locations/{location}/collections/{collection}/dataStores/{dataStore} projects/{project}/locations/{location}/dataStores/{dataStore} - - name: 'documentProcessingMode' - type: Enum - description: | - The document processing mode for the data store connection. Should only be set for PUBLIC_WEB and UNSTRUCTURED data stores. If not set it is considered as DOCUMENTS, as this is the legacy mode. - * DOCUMENTS: Documents are processed as documents. - * CHUNKS: Documents are converted to chunks. - enum_values: - - 'DOCUMENTS' - - 'CHUNKS' - - name: 'targetPage' - type: String - description: | - The target page to transition to. Format: projects//locations//agents//flows//pages/. - The page must be in the same host flow (the flow that owns this `KnowledgeConnectorSettings`). - This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. - - name: 'targetFlow' - type: String - description: | - The target flow to transition to. Format: projects//locations//agents//flows/. - This field is part of a union field `target`: Only one of `targetPage` or `targetFlow` may be set. diff --git a/mmv1/products/dialogflowcx/Playbook.yaml b/mmv1/products/dialogflowcx/Playbook.yaml deleted file mode 100644 index 6912742b2203..000000000000 --- a/mmv1/products/dialogflowcx/Playbook.yaml +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Playbook' -description: | - Playbook is the basic building block to instruct the LLM how to execute a certain task. -references: - guides: - 'Official CX Documentation': 'https://cloud.google.com/dialogflow/cx/docs' - api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.playbooks' - -base_url: '{{parent}}/playbooks' -self_link: '{{parent}}/playbooks/{{name}}' - -create_url: '{{parent}}/playbooks' - -update_verb: 'PATCH' -update_mask: true - -custom_code: - pre_create: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_delete: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - custom_import: 'templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl' - -examples: - - name: "dialogflowcx_playbook_basic" - primary_resource_id: "my-playbook" - vars: - agent_name: 'dialogflowcx-agent-basic' - - name: "dialogflowcx_playbook_fulfillment" - primary_resource_id: "my-playbook" - vars: - agent_name: 'dialogflowcx-agent' - bucket_name: 'dialogflowcx-bucket' - -parameters: - - name: 'parent' - type: String - description: | - The agent to create a Playbook for. - Format: projects//locations//agents/. - url_param_only: true - immutable: true - -properties: - - name: 'name' - type: String - description: | - The unique identifier of the Playbook. - Format: projects//locations//agents//playbooks/. 
- output: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: 'displayName' - type: String - description: | - The human-readable name of the playbook, unique within an agent. - required: true - - name: 'goal' - type: String - description: | - High level description of the goal the playbook intend to accomplish. A goal should be concise since it's visible to other playbooks that may reference this playbook. - required: true - - name: 'instruction' - type: NestedObject - description: | - Instruction to accomplish target goal. - properties: - - name: 'guidelines' - type: String - description: | - General guidelines for the playbook. These are unstructured instructions that are not directly part of the goal, e.g. "Always be polite". It's valid for this text to be long and used instead of steps altogether. - - name: 'steps' - type: Array - description: | - Ordered list of step by step execution instructions to accomplish target goal. - item_type: - type: NestedObject - properties: - - name: 'steps' - type: String - description: | - Sub-processing needed to execute the current step. - - This field uses JSON data as a string. The value provided must be a valid JSON representation documented in [Step](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.playbooks#step). - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_value.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'text' - type: String - description: | - Step instruction in text format. - - name: 'tokenCount' - type: String - description: | - Estimated number of tokes current playbook takes when sent to the LLM. - output: true - - name: 'createTime' - type: Time - description: | - The timestamp of initial playbook creation. 
- - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - output: true - - name: 'updateTime' - type: Time - description: | - Last time the playbook version was updated. - - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. Offsets other than "Z" are also accepted. Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - output: true - - name: 'referencedPlaybooks' - type: Array - description: | - The resource name of other playbooks referenced by the current playbook in the instructions. - output: true - item_type: - type: String - - name: 'referencedFlows' - type: Array - description: | - The resource name of flows referenced by the current playbook in the instructions. - output: true - item_type: - type: String - - name: 'referencedTools' - type: Array - description: | - The resource name of tools referenced by the current playbook in the instructions. If not provided explicitly, they are will be implied using the tool being referenced in goal and steps. - item_type: - type: String - - name: 'llmModelSettings' - type: NestedObject - description: | - Llm model settings for the playbook. - properties: - - name: 'model' - type: String - description: | - The selected LLM model. - - name: 'promptText' - type: String - description: | - The custom prompt to use. - - name: 'playbookType' - type: Enum - description: Type of the playbook. 
- ignore_read: true - enum_values: - - 'PLAYBOOK_TYPE_UNSPECIFIED' - - 'TASK' - - 'ROUTINE' diff --git a/mmv1/products/dialogflowcx/SecuritySettings.yaml b/mmv1/products/dialogflowcx/SecuritySettings.yaml index 80054ed2b1dd..1dc22a343a54 100644 --- a/mmv1/products/dialogflowcx/SecuritySettings.yaml +++ b/mmv1/products/dialogflowcx/SecuritySettings.yaml @@ -13,7 +13,6 @@ --- name: 'SecuritySettings' -collection_url_key: 'securitySettings' description: | Represents the settings related to security issues, such as data redaction and data retention. It may take hours for updates on the settings to propagate to all the related components and take effect. Multiple security settings can be configured in each location. Each agent can specify the security settings to apply, and each setting can be applied to multiple agents in the same project and location. diff --git a/mmv1/products/dialogflowcx/Tool.yaml b/mmv1/products/dialogflowcx/Tool.yaml deleted file mode 100644 index 7f8671fb0f29..000000000000 --- a/mmv1/products/dialogflowcx/Tool.yaml +++ /dev/null @@ -1,307 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Tool' -description: | - A tool provides a list of actions which are available to the Playbook to attain its goal. - A Tool consists of a description of the tool's usage and a specification of the tool which contains the schema and authentication information. 
-references: - guides: - 'Official Documentation': 'https://cloud.google.com/dialogflow/cx/docs' - api: 'https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools' -id_format: '{{parent}}/tools/{{name}}' -base_url: '{{parent}}/tools' -update_verb: 'PATCH' -update_mask: true -import_format: - - '{{parent}}/tools/{{name}}' -timeouts: - insert_minutes: 40 - update_minutes: 40 - delete_minutes: 20 -custom_code: - pre_create: 'templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl' - pre_read: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_update: 'templates/terraform/pre_create/dialogflow_set_location.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/dialogflowcx_set_location_skip_default_obj.go.tmpl' - custom_import: 'templates/terraform/custom_import/dialogflowcx_tool.go.tmpl' -exclude_sweeper: true -examples: - - name: 'dialogflowcx_tool_open_api' - primary_resource_id: 'open_api_tool' - vars: - agent_name: 'dialogflowcx-agent-open-api' - - name: 'dialogflowcx_tool_data_store' - primary_resource_id: 'data_store_tool' - vars: - agent_name: 'dialogflowcx-agent-data-store' - data_store: 'datastore-tool' - - name: 'dialogflowcx_tool_function' - primary_resource_id: 'function_tool' - vars: - agent_name: 'dialogflowcx-agent-fucntion' -parameters: - - name: 'parent' - type: String - description: | - The agent to create a Tool for. - Format: projects//locations//agents/. - url_param_only: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The unique identifier of the Tool. - Format: projects//locations//agents//tools/. - output: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: 'displayName' - type: String - description: | - The human-readable name of the tool, unique within the agent. 
- required: true - - name: 'description' - type: String - description: | - High level description of the Tool and its usage. - required: true - - name: 'toolType' - type: Enum - description: | - The tool type. - output: true - - name: 'openApiSpec' - type: NestedObject - description: | - OpenAPI specification of the Tool. - This field is part of a union field `specification`: Only one of `openApiSpec`, `dataStoreSpec`, or `functionSpec` may be set. - properties: - - name: 'authentication' - type: NestedObject - description: | - Optional. Authentication information required by the API. - properties: - - name: 'apiKeyConfig' - type: NestedObject - description: | - Config for API key auth. - This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. - properties: - - name: 'keyName' - type: String - description: | - The parameter name or the header name of the API key. - E.g., If the API request is "https://example.com/act?X-Api-Key=", "X-Api-Key" would be the parameter name. - required: true - - name: 'apiKey' - type: String - description: | - Optional. The API key. If the `secretVersionForApiKey`` field is set, this field will be ignored. - sensitive: true - ignore_read: true - - name: 'secretVersionForApiKey' - type: String - description: | - Optional. The name of the SecretManager secret version resource storing the API key. - If this field is set, the apiKey field will be ignored. - Format: projects/{project}/secrets/{secret}/versions/{version} - - name: 'requestLocation' - type: String - description: | - Key location in the request. - See [RequestLocation](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#requestlocation) for valid values. - required: true - - name: 'oauthConfig' - type: NestedObject - description: | - Config for OAuth. 
- This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. - properties: - - name: 'oauthGrantType' - type: String - description: | - OAuth grant types. - See [OauthGrantType](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#oauthgranttype) for valid values - required: true - - name: 'clientId' - type: String - description: | - The client ID from the OAuth provider. - required: true - - name: 'clientSecret' - type: String - description: | - Optional. The client secret from the OAuth provider. If the `secretVersionForClientSecret` field is set, this field will be ignored. - sensitive: true - ignore_read: true - - name: 'secretVersionForClientSecret' - type: String - description: | - Optional. The name of the SecretManager secret version resource storing the client secret. - If this field is set, the clientSecret field will be ignored. - Format: projects/{project}/secrets/{secret}/versions/{version} - - name: 'tokenEndpoint' - type: String - description: | - The token endpoint in the OAuth provider to exchange for an access token. - required: true - - name: 'scopes' - type: Array - description: | - Optional. The OAuth scopes to grant. - item_type: - type: String - - name: 'serviceAgentAuthConfig' - type: NestedObject - description: | - Config for [Diglogflow service agent](https://cloud.google.com/iam/docs/service-agents#dialogflow-service-agent) auth. - This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. - properties: - - name: 'serviceAgentAuth' - type: String - description: | - Optional. Indicate the auth token type generated from the Diglogflow service agent. - The generated token is sent in the Authorization header. 
- See [ServiceAgentAuth](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.tools#serviceagentauth) for valid values. - - name: 'bearerTokenConfig' - type: NestedObject - description: | - Config for bearer token auth. - This field is part of a union field `auth_config`: Only one of `apiKeyConfig`, `oauthConfig`, `serviceAgentAuthConfig`, or `bearerTokenConfig` may be set. - properties: - - name: 'token' - type: String - description: | - Optional. The text token appended to the text Bearer to the request Authorization header. - [Session parameters reference](https://cloud.google.com/dialogflow/cx/docs/concept/parameter#session-ref) can be used to pass the token dynamically, e.g. `$session.params.parameter-id`. - sensitive: true - ignore_read: true - - name: 'secretVersionForToken' - type: String - description: | - Optional. The name of the SecretManager secret version resource storing the Bearer token. If this field is set, the `token` field will be ignored. - Format: projects/{project}/secrets/{secret}/versions/{version} - - name: 'tlsConfig' - type: NestedObject - description: | - Optional. TLS configuration for the HTTPS verification. - properties: - - name: 'caCerts' - type: Array - description: | - Specifies a list of allowed custom CA certificates for HTTPS verification. - required: true - item_type: - type: NestedObject - properties: - - name: 'displayName' - type: String - description: | - The name of the allowed custom CA certificates. This can be used to disambiguate the custom CA certificates. - required: true - - name: 'cert' - type: String - description: | - The allowed custom CA certificates (in DER format) for HTTPS verification. This overrides the default SSL trust store. - If this is empty or unspecified, Dialogflow will use Google's default trust store to verify certificates. - N.B. Make sure the HTTPS server certificates are signed with "subject alt name". 
- For instance a certificate can be self-signed using the following command: - ``` - openssl x509 -req -days 200 -in example.com.csr \ - -signkey example.com.key \ - -out example.com.crt \ - -extfile <(printf "\nsubjectAltName='DNS:www.example.com'") - ``` - A base64-encoded string. - required: true - - name: 'serviceDirectoryConfig' - type: NestedObject - description: | - Optional. Service Directory configuration. - properties: - - name: 'service' - type: String - description: | - The name of [Service Directory](https://cloud.google.com/service-directory/docs) service. - Format: projects//locations//namespaces//services/. LocationID of the service directory must be the same as the location of the agent. - required: true - - name: 'textSchema' - type: String - description: | - The OpenAPI schema specified as a text. - This field is part of a union field `schema`: only one of `textSchema` may be set. - required: true - - name: 'dataStoreSpec' - type: NestedObject - description: | - Data store search tool specification. - This field is part of a union field `specification`: Only one of `openApiSpec`, `dataStoreSpec`, or `functionSpec` may be set. - properties: - - name: 'dataStoreConnections' - type: Array - description: | - List of data stores to search. - required: true - item_type: - type: NestedObject - properties: - - name: 'dataStoreType' - type: String - description: | - The type of the connected data store. - See [DataStoreType](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/DataStoreConnection#datastoretype) for valid values. - - name: 'dataStore' - type: String - description: | - The full name of the referenced data store. Formats: projects/{project}/locations/{location}/collections/{collection}/dataStores/{dataStore} projects/{project}/locations/{location}/dataStores/{dataStore} - - name: 'documentProcessingMode' - type: String - description: | - The document processing mode for the data store connection. 
Should only be set for PUBLIC_WEB and UNSTRUCTURED data stores. If not set it is considered as DOCUMENTS, as this is the legacy mode. - See [DocumentProcessingMode](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/DataStoreConnection#documentprocessingmode) for valid values. - - name: 'fallbackPrompt' - type: NestedObject - description: | - Fallback prompt configurations to use. - required: true - allow_empty_object: true - send_empty_value: true - properties: [] # Meant to be an empty object with no properties. - - name: 'functionSpec' - type: NestedObject - description: | - Client side executed function specification. - This field is part of a union field `specification`: Only one of `openApiSpec`, `dataStoreSpec`, or `functionSpec` may be set. - properties: - - name: 'inputSchema' - type: String - description: | - Optional. The JSON schema is encapsulated in a [google.protobuf.Struct](https://protobuf.dev/reference/protobuf/google.protobuf/#struct) to describe the input of the function. - This input is a JSON object that contains the function's parameters as properties of the object - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: 'outputSchema' - type: String - description: | - Optional. The JSON schema is encapsulated in a [google.protobuf.Struct](https://protobuf.dev/reference/protobuf/google.protobuf/#struct) to describe the output of the function. 
- This output is a JSON object that contains the function's parameters as properties of the object - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' diff --git a/mmv1/products/dialogflowcx/Webhook.yaml b/mmv1/products/dialogflowcx/Webhook.yaml index 126b21034e9f..57b270cc6f73 100644 --- a/mmv1/products/dialogflowcx/Webhook.yaml +++ b/mmv1/products/dialogflowcx/Webhook.yaml @@ -38,20 +38,8 @@ custom_code: custom_import: 'templates/terraform/custom_import/dialogflowcx_webhook.go.tmpl' exclude_sweeper: true examples: - - name: 'dialogflowcx_webhook_standard' - primary_resource_id: 'standard_webhook' - vars: - agent_name: 'dialogflowcx-agent' - - name: 'dialogflowcx_webhook_flexible' - primary_resource_id: 'flexible_webhook' - vars: - agent_name: 'dialogflowcx-agent' - - name: 'dialogflowcx_webhook_service_directory_standard' - primary_resource_id: 'standard_webhook' - vars: - agent_name: 'dialogflowcx-agent' - - name: 'dialogflowcx_webhook_service_directory_flexible' - primary_resource_id: 'flexible_webhook' + - name: 'dialogflowcx_webhook_full' + primary_resource_id: 'basic_webhook' vars: agent_name: 'dialogflowcx-agent' parameters: @@ -83,134 +71,27 @@ properties: type: Boolean description: | Indicates whether the webhook is disabled. - - name: genericWebService + - name: 'genericWebService' type: NestedObject - description: Represents configuration for a generic web service. + description: | + Configuration for a generic web service. properties: - - name: allowedCaCerts + - name: 'uri' + type: String + description: | + Whether to use speech adaptation for speech recognition. + required: true + - name: 'requestHeaders' + type: KeyValuePairs + description: | + The HTTP request headers to send together with webhook requests. 
+ immutable: true + - name: 'allowedCaCerts' type: Array - description: |- - Specifies a list of allowed custom CA certificates (in DER format) for - HTTPS verification. This overrides the default SSL trust store. If this - is empty or unspecified, Dialogflow will use Google's default trust store - to verify certificates. - N.B. Make sure the HTTPS server certificates are signed with "subject alt - name". For instance a certificate can be self-signed using the following - command, - openssl x509 -req -days 200 -in example.com.csr \ - -signkey example.com.key \ - -out example.com.crt \ - -extfile <(printf "\nsubjectAltName='DNS:www.example.com'") + description: | + Specifies a list of allowed custom CA certificates (in DER format) for HTTPS verification. item_type: type: String - - name: httpMethod - type: Enum - description: |- - HTTP method for the flexible webhook calls. Standard webhook always uses - POST. - enum_values: - - 'POST' - - 'GET' - - 'HEAD' - - 'PUT' - - 'DELETE' - - 'PATCH' - - 'OPTIONS' - - name: oauthConfig - type: NestedObject - description: |- - Represents configuration of OAuth client credential flow for 3rd party - API authentication. - properties: - - name: clientId - type: String - description: The client ID provided by the 3rd party platform. - required: true - - name: clientSecret - type: String - description: |- - The client secret provided by the 3rd party platform. If the - `secret_version_for_client_secret` field is set, this field will be - ignored. - ignore_read: true - - name: scopes - type: Array - description: The OAuth scopes to grant. - item_type: - type: String - - name: secretVersionForClientSecret - type: String - description: |- - The name of the SecretManager secret version resource storing the - client secret. If this field is set, the `client_secret` field will be - ignored. 
- Format: `projects/{project}/secrets/{secret}/versions/{version}` - - name: tokenEndpoint - type: String - description: |- - The token endpoint provided by the 3rd party platform to exchange an - access token. - required: true - - name: parameterMapping - type: KeyValuePairs - description: |- - Maps the values extracted from specific fields of the flexible webhook - response into session parameters. - - Key: session parameter name - - Value: field path in the webhook response - - name: requestBody - type: String - description: Defines a custom JSON object as request body to send to flexible webhook. - - name: requestHeaders - type: KeyValuePairs - description: The HTTP request headers to send together with webhook requests. - - name: secretVersionForUsernamePassword - type: String - description: |- - The SecretManager secret version resource storing the username:password - pair for HTTP Basic authentication. - Format: `projects/{project}/secrets/{secret}/versions/{version}` - - name: secretVersionsForRequestHeaders - type: Map - description: |- - The HTTP request headers to send together with webhook requests. Header - values are stored in SecretManager secret versions. - - When the same header name is specified in both `request_headers` and - `secret_versions_for_request_headers`, the value in - `secret_versions_for_request_headers` will be used. - key_name: 'key' - value_type: - name: 'secretVersionsForRequestHeader' - type: NestedObject - properties: - - name: secretVersion - type: String - description: | - The SecretManager secret version resource storing the header value. - Format: `projects/{project}/secrets/{secret}/versions/{version}` - required: true - - name: serviceAgentAuth - type: Enum - description: |- - Indicate the auth token type generated from the [Diglogflow service - agent](https://cloud.google.com/iam/docs/service-agents#dialogflow-service-agent). - The generated token is sent in the Authorization header. 
- enum_values: - - 'NONE' - - 'ID_TOKEN' - - 'ACCESS_TOKEN' - - name: uri - type: String - description: The webhook URI for receiving POST requests. It must use https protocol. - required: true - - name: webhookType - type: Enum - description: |- - Type of the webhook. - enum_values: - - 'STANDARD' - - 'FLEXIBLE' - name: 'serviceDirectory' type: NestedObject description: | @@ -221,148 +102,42 @@ properties: description: | The name of Service Directory service. required: true - - name: genericWebService + - name: 'genericWebService' type: NestedObject - description: Represents configuration for a generic web service. + description: | + The name of Service Directory service. + required: true properties: - - name: allowedCaCerts + - name: 'uri' + type: String + description: | + Whether to use speech adaptation for speech recognition. + required: true + - name: 'requestHeaders' + type: KeyValuePairs + description: | + The HTTP request headers to send together with webhook requests. + immutable: true + - name: 'allowedCaCerts' type: Array - description: |- - Specifies a list of allowed custom CA certificates (in DER format) for - HTTPS verification. This overrides the default SSL trust store. If this - is empty or unspecified, Dialogflow will use Google's default trust store - to verify certificates. - N.B. Make sure the HTTPS server certificates are signed with "subject alt - name". For instance a certificate can be self-signed using the following - command, - openssl x509 -req -days 200 -in example.com.csr \ - -signkey example.com.key \ - -out example.com.crt \ - -extfile <(printf "\nsubjectAltName='DNS:www.example.com'") + description: | + Specifies a list of allowed custom CA certificates (in DER format) for HTTPS verification. item_type: type: String - - name: httpMethod - type: Enum - description: |- - HTTP method for the flexible webhook calls. Standard webhook always uses - POST. 
- enum_values: - - 'POST' - - 'GET' - - 'HEAD' - - 'PUT' - - 'DELETE' - - 'PATCH' - - 'OPTIONS' - - name: oauthConfig - type: NestedObject - description: |- - Represents configuration of OAuth client credential flow for 3rd party - API authentication. - properties: - - name: clientId - type: String - description: The client ID provided by the 3rd party platform. - required: true - - name: clientSecret - type: String - description: |- - The client secret provided by the 3rd party platform. If the - `secret_version_for_client_secret` field is set, this field will be - ignored. - ignore_read: true - - name: scopes - type: Array - description: The OAuth scopes to grant. - item_type: - type: String - - name: secretVersionForClientSecret - type: String - description: |- - The name of the SecretManager secret version resource storing the - client secret. If this field is set, the `client_secret` field will be - ignored. - Format: `projects/{project}/secrets/{secret}/versions/{version}` - - name: tokenEndpoint - type: String - description: |- - The token endpoint provided by the 3rd party platform to exchange an - access token. - required: true - - name: parameterMapping - type: KeyValuePairs - description: |- - Maps the values extracted from specific fields of the flexible webhook - response into session parameters. - - Key: session parameter name - - Value: field path in the webhook response - - name: requestBody - type: String - description: Defines a custom JSON object as request body to send to flexible webhook. - - name: requestHeaders - type: KeyValuePairs - description: The HTTP request headers to send together with webhook requests. - - name: secretVersionForUsernamePassword - type: String - description: |- - The SecretManager secret version resource storing the username:password - pair for HTTP Basic authentication. 
- Format: `projects/{project}/secrets/{secret}/versions/{version}` - - name: secretVersionsForRequestHeaders - type: Map - description: |- - The HTTP request headers to send together with webhook requests. Header - values are stored in SecretManager secret versions. - - When the same header name is specified in both `request_headers` and - `secret_versions_for_request_headers`, the value in - `secret_versions_for_request_headers` will be used. - key_name: 'key' - value_type: - name: 'secretVersionsForRequestHeader' - type: NestedObject - properties: - - name: secretVersion - type: String - description: | - The SecretManager secret version resource storing the header value. - Format: `projects/{project}/secrets/{secret}/versions/{version}` - required: true - - name: serviceAgentAuth - type: Enum - description: |- - Indicate the auth token type generated from the [Diglogflow service - agent](https://cloud.google.com/iam/docs/service-agents#dialogflow-service-agent). - The generated token is sent in the Authorization header. - enum_values: - - 'NONE' - - 'ID_TOKEN' - - 'ACCESS_TOKEN' - - name: uri - type: String - description: The webhook URI for receiving POST requests. It must use https protocol. - required: true - - name: webhookType - type: Enum - description: |- - Type of the webhook. - enum_values: - - 'STANDARD' - - 'FLEXIBLE' - name: 'startFlow' type: String description: | - Deprecated. Name of the start flow in this agent. A start flow will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: projects//locations//agents//flows/. + Name of the start flow in this agent. A start flow will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: projects//locations//agents//flows/. output: true - name: 'securitySettings' type: String description: | - Deprecated. Name of the SecuritySettings reference for the agent. 
Format: projects//locations//securitySettings/. + Name of the SecuritySettings reference for the agent. Format: projects//locations//securitySettings/. - name: 'enableStackdriverLogging' type: Boolean description: | - Deprecated. Determines whether this agent should log conversation queries. + Determines whether this agent should log conversation queries. - name: 'enableSpellCorrection' type: Boolean description: | - Deprecated. Indicates if automatic spell correction is enabled in detect intent requests. + Indicates if automatic spell correction is enabled in detect intent requests. diff --git a/mmv1/products/discoveryengine/CmekConfig.yaml b/mmv1/products/discoveryengine/CmekConfig.yaml deleted file mode 100644 index 2ac10fce4c33..000000000000 --- a/mmv1/products/discoveryengine/CmekConfig.yaml +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'CmekConfig' -description: | - CmekConfig represents configurations used to enable CMEK data encryption with - Cloud KMS keys. -references: - guides: - api: 'https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1/projects.locations.cmekConfigs' -base_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs' -self_link: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}' -# Update API is also used as create API. 
-create_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}?&setDefault={{set_default}}' -create_verb: 'PATCH' -update_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}?&setDefault={{set_default}}' -update_verb: 'PATCH' -delete_url: 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}' -import_format: - - 'projects/{{project}}/locations/{{location}}/cmekConfigs/{{cmek_config_id}}' -timeouts: - insert_minutes: 60 - update_minutes: 60 - delete_minutes: 60 -autogen_async: false -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - timeouts: - insert_minutes: 60 - update_minutes: 60 - delete_minutes: 60 - result: - resource_inside_response: true -custom_code: - update_encoder: 'templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl' -sweeper: - url_substitutions: - - location: "us" -examples: - - name: 'discoveryengine_cmekconfig_default' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf_test_cmek_config%s", context["random_suffix"])' - vars: - cmek_config_id: 'cmek-config-id' - kms_key_name: 'kms-key-name' - test_vars_overrides: - kms_key_name: 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us", "tftest-shared-key-5").CryptoKey.Name' - ignore_read_extra: - - 'project' -parameters: - - name: 'location' - type: String - description: | - The geographic location where the CMEK config should reside. The value can - only be one of "us" and "eu". - url_param_only: true - required: true - immutable: true - - name: 'cmekConfigId' - type: String - description: | - The unique id of the cmek config. - url_param_only: true - immutable: true - - name: 'setDefault' - type: Boolean - description: | - Set the following CmekConfig as the default to be used for child resources - if one is not specified. The default value is true. 
- url_param_only: true - default_value: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The unique full resource name of the cmek config. Values are of the format - `projects/{project}/locations/{location}/cmekConfigs/{cmek_config_id}`. - This field must be a UTF-8 encoded string with a length limit of 1024 - characters. - output: true - - name: 'kmsKey' - type: String - description: | - KMS key resource name which will be used to encrypt resources - `projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{keyId}`. - required: true - immutable: true - - name: 'kmsKeyVersion' - type: String - description: | - KMS key version resource name which will be used to encrypt resources - `/cryptoKeyVersions/{keyVersion}`. - output: true - - name: 'state' - type: String - description: | - The state of the CmekConfig. - output: true - - name: 'isDefault' - type: Boolean - description: | - The default CmekConfig for the Customer. - output: true - - name: 'lastRotationTimestampMicros' - type: Integer - description: | - The timestamp of the last key rotation. - output: true - - name: 'singleRegionKeys' - type: Array - description: | - Single-regional CMEKs that are required for some VAIS features. - item_type: - type: NestedObject - properties: - - name: 'kmsKey' - type: String - description: | - Single-regional kms key resource name which will be used to encrypt - resources - `projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{keyId}`. - required: true - - name: 'notebooklmState' - type: String - description: | - Whether the NotebookLM Corpus is ready to be used. 
- output: true diff --git a/mmv1/products/discoveryengine/DataStore.yaml b/mmv1/products/discoveryengine/DataStore.yaml index 83503bd187da..63c2b5839720 100644 --- a/mmv1/products/discoveryengine/DataStore.yaml +++ b/mmv1/products/discoveryengine/DataStore.yaml @@ -57,13 +57,6 @@ examples: primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' vars: data_store_id: 'data-store-id' - - name: 'discoveryengine_datastore_kms_key_name' - primary_resource_id: 'kms_key_name' - vars: - data_store_id: 'data-store-id' - kms_key_name: 'kms-key' - test_vars_overrides: - kms_key_name: 'acctest.BootstrapKMSKeyInLocation(t, "us").CryptoKey.Name' - name: 'discoveryengine_datastore_document_processing_config' primary_resource_id: 'document_processing_config' primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' @@ -81,12 +74,6 @@ examples: vars: data_store_id: 'data-store-id' exclude_docs: true - - name: 'discoveryengine_datastore_document_processing_config_layout_full' - primary_resource_id: 'document_processing_config_layout_full' - primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' - vars: - data_store_id: 'data-store-id' - exclude_docs: true - name: 'discoveryengine_datastore_advanced_site_search_config' primary_resource_id: 'advanced_site_search_config' primary_resource_name: 'fmt.Sprintf("tf_test_data_store%s", context["random_suffix"])' @@ -198,17 +185,6 @@ properties: type: Boolean description: If set true, automatic refresh is disabled for the DataStore. required: false - - name: 'kmsKeyName' - type: String - description: | - KMS key resource name which will be used to encrypt resources: - `/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{keyId}` - The KMS key to be used to protect this DataStore at creation time. Must be - set for requests that need to comply with CMEK Org Policy protections. 
- If this field is set and processed successfully, the DataStore will be - protected by the KMS key, as indicated in the cmek_config field. - required: false - ignore_read: true - name: 'documentProcessingConfig' type: NestedObject description: | @@ -298,44 +274,7 @@ properties: - 'default_parsing_config.0.ocr_parsing_config' - 'default_parsing_config.0.layout_parsing_config' properties: - - name: 'enableTableAnnotation' - type: Boolean - description: | - If true, the LLM based annotation is added to the table during parsing. - required: false - - name: 'enableImageAnnotation' - type: Boolean - description: | - If true, the LLM based annotation is added to the image during parsing. - required: false - - name: 'structuredContentTypes' - type: Array - description: | - Contains the required structure types to extract from the document. Supported values: `shareholder-structure`. - required: false - item_type: - type: String - - name: 'excludeHtmlElements' - type: Array - description: | - List of HTML elements to exclude from the parsed content. - required: false - item_type: - type: String - - name: 'excludeHtmlClasses' - type: Array - description: | - List of HTML classes to exclude from the parsed content. - required: false - item_type: - type: String - - name: 'excludeHtmlIds' - type: Array - description: | - List of HTML ids to exclude from the parsed content. - required: false - item_type: - type: String + [] - name: 'parsingConfigOverrides' type: Map description: | @@ -388,44 +327,7 @@ properties: - 'default_parsing_config.0.ocr_parsing_config' - 'default_parsing_config.0.layout_parsing_config' properties: - - name: 'enableTableAnnotation' - type: Boolean - description: | - If true, the LLM based annotation is added to the table during parsing. - required: false - - name: 'enableImageAnnotation' - type: Boolean - description: | - If true, the LLM based annotation is added to the image during parsing. 
- required: false - - name: 'structuredContentTypes' - type: Array - description: | - Contains the required structure types to extract from the document. Supported values: `shareholder-structure`. - required: false - item_type: - type: String - - name: 'excludeHtmlElements' - type: Array - description: | - List of HTML elements to exclude from the parsed content. - required: false - item_type: - type: String - - name: 'excludeHtmlClasses' - type: Array - description: | - List of HTML classes to exclude from the parsed content. - required: false - item_type: - type: String - - name: 'excludeHtmlIds' - type: Array - description: | - List of HTML ids to exclude from the parsed content. - required: false - item_type: - type: String + [] - name: 'createTime' type: Time description: | diff --git a/mmv1/products/discoveryengine/RecommendationEngine.yaml b/mmv1/products/discoveryengine/RecommendationEngine.yaml deleted file mode 100644 index a28b51213529..000000000000 --- a/mmv1/products/discoveryengine/RecommendationEngine.yaml +++ /dev/null @@ -1,212 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'RecommendationEngine' -api_resource_type_kind: Engine -description: | - Vertex AI Search recommendation apps. 
-references: - guides: - 'Create a Recommendation Engine': 'https://cloud.google.com/generative-ai-app-builder/docs/create-generic-recommendations-app' - api: 'https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1/projects.locations.collections.engines' -base_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' -self_link: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines?engineId={{engine_id}}' -update_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' -update_verb: 'PATCH' -update_mask: true -delete_url: 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' -import_format: - - 'projects/{{project}}/locations/{{location}}/collections/default_collection/engines/{{engine_id}}' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -autogen_async: false -async: - actions: ['create', 'delete'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -custom_code: - encoder: 'templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl' -examples: - - name: 'discoveryengine_recommendationengine_generic' - primary_resource_id: 'generic' - vars: - engine_id: 'recommendation-engine-id' - data_store_id: 'recommendation-datastore-id' - - name: 'discoveryengine_recommendationengine_media' - primary_resource_id: 'media' - vars: - engine_id: 'recommendation-engine-id' - data_store_id: 'recommendation-datastore-id' -parameters: - - name: 'engineId' - type: String - description: | - Unique ID to use for Recommendation Engine. 
- url_param_only: true - required: true - immutable: true - - name: 'location' - type: String - description: | - The geographic location where the data store should reside. The value can - only be one of "global", "us" and "eu". - url_param_only: true - required: true - immutable: true -properties: - - name: 'name' - type: String - description: | - The unique full resource name of the recommendation engine. Values are of the format - `projects/{project}/locations/{location}/collections/{collection}/engines/{engine_id}`. - This field must be a UTF-8 encoded string with a length limit of 1024 characters. - output: true - - name: 'displayName' - type: String - description: | - Required. The display name of the engine. Should be human readable. UTF-8 encoded string with limit of 1024 characters. - required: true - - name: 'createTime' - type: Time - description: | - Timestamp the Engine was created at. - output: true - - name: 'updateTime' - type: Time - description: | - Timestamp the Engine was last updated. - output: true - - name: 'dataStoreIds' - type: Array - description: | - The data stores associated with this engine. For SOLUTION_TYPE_RECOMMENDATION type of engines, they can only associate with at most one data store. - required: true - item_type: - type: String - - name: 'industryVertical' - type: Enum - description: | - The industry vertical that the engine registers. The restriction of the Engine industry vertical is based on DataStore: If unspecified, default to GENERIC. Vertical on Engine has to match vertical of the DataStore liniked to the engine. - immutable: true - ignore_read: true - default_value: "GENERIC" - enum_values: - - 'GENERIC' - - 'MEDIA' - - name: 'mediaRecommendationEngineConfig' - type: NestedObject - description: | - Configurations for a Media Recommendation Engine. Only applicable on the data stores - with SOLUTION_TYPE_RECOMMENDATION solution type and MEDIA industry vertical. 
- properties: - - name: 'type' - type: String - description: | - The type of engine. e.g., `recommended-for-you`. - This field together with MediaRecommendationEngineConfig.optimizationObjective describes - engine metadata to use to control engine training and serving. - Currently supported values: `recommended-for-you`, `others-you-may-like`, - `more-like-this`, `most-popular-items`. - - name: 'optimizationObjective' - type: String - description: | - The optimization objective. e.g., `cvr`. - This field together with MediaRecommendationEngineConfig.type describes - engine metadata to use to control engine training and serving. - Currently supported values: `ctr`, `cvr`. - If not specified, we choose default based on engine type. Default depends on type of recommendation: - `recommended-for-you` => `ctr` - `others-you-may-like` => `ctr` - - name: 'optimizationObjectiveConfig' - type: NestedObject - description: | - Name and value of the custom threshold for cvr optimization_objective. - For target_field `watch-time`, target_field_value must be an integer - value indicating the media progress time in seconds between (0, 86400] - (excludes 0, includes 86400) (e.g., 90). - For target_field `watch-percentage`, the target_field_value must be a - valid float value between (0, 1.0] (excludes 0, includes 1.0) (e.g., 0.5). - properties: - - name: 'targetField' - type: String - description: | - The name of the field to target. Currently supported values: `watch-percentage`, `watch-time`. - - name: 'targetFieldValueFloat' - type: Double - description: | - The threshold to be applied to the target (e.g., 0.5). - - name: 'trainingState' - type: Enum - description: | - The training state that the engine is in (e.g. `TRAINING` or `PAUSED`). - Since part of the cost of running the service - is frequency of training - this can be used to determine when to train - engine in order to control cost. If not specified: the default value for - `CreateEngine` method is `TRAINING`. 
The default value for - `UpdateEngine` method is to keep the state the same as before. - enum_values: - - 'PAUSED' - - 'TRAINING' - - name: 'engineFeaturesConfig' - type: NestedObject - description: | - More feature configs of the selected engine type. - exactly_one_of: - - recommended_for_you_config - - most_popular_config - properties: - - name: 'recommendedForYouConfig' - type: NestedObject - description: | - Additional feature configurations for creating a `recommended-for-you` engine. - properties: - - name: 'contextEventType' - type: String - description: | - The type of event with which the engine is queried at prediction time. - If set to `generic`, only `view-item`, `media-play`,and - `media-complete` will be used as `context-event` in engine training. If - set to `view-home-page`, `view-home-page` will also be used as - `context-events` in addition to `view-item`, `media-play`, and - `media-complete`. Currently supported for the `recommended-for-you` - engine. Currently supported values: `view-home-page`, `generic`. - - name: 'mostPopularConfig' - type: NestedObject - description: | - Feature configurations that are required for creating a Most Popular engine. - properties: - - name: 'timeWindowDays' - type: Integer - description: | - The time window of which the engine is queried at training and - prediction time. Positive integers only. The value translates to the - last X days of events. Currently required for the `most-popular-items` - engine. - - name: 'commonConfig' - type: NestedObject - description: | - Common config spec that specifies the metadata of the engine. - immutable: true - ignore_read: true - properties: - - name: 'companyName' - type: String - description: | - The name of the company, business or entity that is associated with the engine. 
Setting this may help improve LLM related features.cd diff --git a/mmv1/products/dlp/DeidentifyTemplate.yaml b/mmv1/products/dlp/DeidentifyTemplate.yaml index 8ff5250fb9d6..198b67d9aec4 100644 --- a/mmv1/products/dlp/DeidentifyTemplate.yaml +++ b/mmv1/products/dlp/DeidentifyTemplate.yaml @@ -349,8 +349,7 @@ properties: description: | Replace each matching finding with the name of the info type. custom_flatten: 'templates/terraform/custom_flatten/object_to_bool.go.tmpl' - # THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS - custom_expand: 'templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl' + custom_expand: 'templates/terraform/custom_expand/bool_to_object.go.tmpl' - name: 'characterMaskConfig' type: NestedObject description: | diff --git a/mmv1/products/dlp/DiscoveryConfig.yaml b/mmv1/products/dlp/DiscoveryConfig.yaml index df8330e5bf74..6e0493f05907 100644 --- a/mmv1/products/dlp/DiscoveryConfig.yaml +++ b/mmv1/products/dlp/DiscoveryConfig.yaml @@ -62,7 +62,6 @@ examples: test_env_vars: project: 'PROJECT_NAME' organization: 'ORG_ID' - location: 'REGION' - name: 'dlp_discovery_config_conditions_cadence' primary_resource_id: 'conditions_cadence' test_env_vars: diff --git a/mmv1/products/dns/ManagedZone.yaml b/mmv1/products/dns/ManagedZone.yaml index fc5960ab94cc..68b65cbd99e0 100644 --- a/mmv1/products/dns/ManagedZone.yaml +++ b/mmv1/products/dns/ManagedZone.yaml @@ -116,9 +116,6 @@ examples: primary_resource_id: 'cloud-logging-enabled-zone' vars: zone_name: 'cloud-logging-enabled-zone' - dns_name: 'services.example.com.' - test_vars_overrides: - 'dns_name': '"services.example.com-" + acctest.RandString(t, 10) + "."' virtual_fields: - name: 'force_destroy' description: 'Set this true to delete all records in the zone.' 
@@ -317,7 +314,7 @@ properties: item_type: type: NestedObject properties: - # TODO: Make 'networkUrl' a ResourceRef once cross-module references + # TODO(drebes): Make 'networkUrl' a ResourceRef once cross-module references # are possible. - name: 'networkUrl' type: String @@ -361,6 +358,7 @@ properties: - name: 'domainName' type: String description: 'Fully qualified domain name for the forwarding target.' + min_version: 'beta' - name: 'forwardingPath' type: Enum description: | @@ -384,7 +382,7 @@ properties: description: 'The network with which to peer.' required: true properties: - # TODO: Make 'networkUrl' a ResourceRef once cross-module references + # TODO(drebes): Make 'networkUrl' a ResourceRef once cross-module references # are possible. - name: 'networkUrl' type: String @@ -405,8 +403,7 @@ properties: min_version: 'beta' immutable: true custom_flatten: 'templates/terraform/custom_flatten/object_to_bool.go.tmpl' - # THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS - custom_expand: 'templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl' + custom_expand: 'templates/terraform/custom_expand/bool_to_object.go.tmpl' - name: 'serviceDirectoryConfig' type: NestedObject description: diff --git a/mmv1/products/dns/Policy.yaml b/mmv1/products/dns/Policy.yaml index 7f982a4bc9b9..545fcf938f12 100644 --- a/mmv1/products/dns/Policy.yaml +++ b/mmv1/products/dns/Policy.yaml @@ -103,20 +103,6 @@ properties: update_url: 'projects/{{project}}/policies/{{name}}' update_verb: 'PATCH' default_value: "Managed by Terraform" - - name: 'dns64Config' - type: NestedObject - default_from_api: true - description: Configurations related to DNS64 for this Policy. - properties: - - name: 'scope' - type: NestedObject - description: The scope to which DNS64 config will be applied to. - required: true - properties: - - name: 'allQueries' - type: Boolean - description: Controls whether DNS64 is enabled globally at the network level. 
- send_empty_value: true - name: 'enableInboundForwarding' type: Boolean description: | diff --git a/mmv1/products/eventarc/Pipeline.yaml b/mmv1/products/eventarc/Pipeline.yaml index fb2d8c34646c..ed4a14eb74e9 100644 --- a/mmv1/products/eventarc/Pipeline.yaml +++ b/mmv1/products/eventarc/Pipeline.yaml @@ -41,8 +41,11 @@ examples: vars: pipeline_name: some-pipeline topic_name: some-topic + network_attachment_name: some-network-attachment test_env_vars: project_id: 'PROJECT_NAME' + test_vars_overrides: + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_http_destination primary_resource_id: primary vars: @@ -51,14 +54,17 @@ examples: test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_workflow_destination primary_resource_id: primary vars: pipeline_name: some-pipeline workflow_name: some-workflow + network_attachment_name: some-network-attachment test_env_vars: project_id: 'PROJECT_NAME' + test_vars_overrides: + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_oidc_and_json_format primary_resource_id: 
primary vars: @@ -68,7 +74,7 @@ examples: project_id: 'PROJECT_NAME' service_account: 'SERVICE_ACCT' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_oauth_and_protobuf_format primary_resource_id: primary vars: @@ -78,7 +84,7 @@ examples: project_id: 'PROJECT_NAME' service_account: 'SERVICE_ACCT' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' - name: eventarc_pipeline_with_cmek_and_avro_format primary_resource_id: primary bootstrap_iam: @@ -91,7 +97,7 @@ examples: test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-pipeline-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network")))' 
'key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name' parameters: - name: location @@ -310,12 +316,9 @@ properties: type: String description: |- Name of the NetworkAttachment that allows access to the consumer VPC. - Format: `projects/{PROJECT_ID}/regions/{REGION}/networkAttachments/{NETWORK_ATTACHMENT_NAME}` - - Required for HTTP endpoint destinations. Must not be specified for - Workflows, MessageBus, or Topic destinations. + required: true - name: httpEndpoint type: NestedObject description: Represents a HTTP endpoint destination. diff --git a/mmv1/products/eventarc/Trigger.yaml b/mmv1/products/eventarc/Trigger.yaml index 2bd878675905..fcb4cd0a7a6c 100644 --- a/mmv1/products/eventarc/Trigger.yaml +++ b/mmv1/products/eventarc/Trigger.yaml @@ -48,9 +48,8 @@ examples: primary_resource_id: primary vars: trigger_name: some-trigger - network_attachment_name: network-attachment test_vars_overrides: - 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-bootstrap-eventarc-trigger-na", acctest.BootstrapSubnet(t, "tf-bootstrap-eventarc-trigger-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-bootstrap-eventarc-trigger-network")))' + 'network_attachment_name': 'acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-trigger-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-trigger-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-trigger-network")))' test_env_vars: project_id: 'PROJECT_NAME' service_account: 'SERVICE_ACCT' diff --git a/mmv1/products/filestore/Instance.yaml b/mmv1/products/filestore/Instance.yaml index 726598df1f53..a2139d6c22ee 100644 --- a/mmv1/products/filestore/Instance.yaml +++ b/mmv1/products/filestore/Instance.yaml @@ -209,12 +209,6 @@ properties: An integer representing the anonymous group id with a default value of 65534. Anon_gid may only be set with squashMode of ROOT_SQUASH. 
An error will be returned if this field is specified for other squashMode settings. - - name: 'network' - type: String - min_version: beta - description: | - The source VPC network for `ip_ranges`. - Required for instances using Private Service Connect, optional otherwise. max_size: 10 max_size: 1 - name: 'networks' @@ -279,22 +273,6 @@ properties: enum_values: - 'DIRECT_PEERING' - 'PRIVATE_SERVICE_ACCESS' - - 'PRIVATE_SERVICE_CONNECT' - - name: 'pscConfig' - type: NestedObject - min_version: beta - description: | - Private Service Connect configuration. - Should only be set when connect_mode is PRIVATE_SERVICE_CONNECT. - properties: - - name: endpointProject - type: String - description: | - Consumer service project in which the Private Service Connect endpoint - would be set up. This is optional, and only relevant in case the network - is a shared VPC. If this is not specified, the endpoint would be set up - in the VPC host project. - immutable: true min_size: 1 - name: 'etag' type: String @@ -370,9 +348,9 @@ properties: type: NestedObject description: | Replication configuration, once set, this cannot be updated. - Additionally this should be specified on the replica instance only, indicating the active as the peer_instance + Addtionally this should be specified on the replica instance only, indicating the active as the peer_instance + url_param_only: true immutable: true - ignore_read: true properties: - name: 'role' type: Enum @@ -402,11 +380,6 @@ properties: description: | Output only fields for replication configuration. properties: - - name: 'role' - type: Enum - description: | - The replication role. - output: true - name: 'replicas' type: Array description: | @@ -414,11 +387,6 @@ properties: item_type: type: NestedObject properties: - - name: 'peerInstance' - type: String - description: | - The peer instance. 
- output: true - name: 'state' type: Enum description: | @@ -438,50 +406,3 @@ properties: A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z" output: true - - name: 'directoryServices' - type: NestedObject - min_version: beta - description: | - Directory Services configuration. - Should only be set if protocol is "NFS_V4_1". - immutable: true - properties: - - name: 'ldap' - type: NestedObject - description: | - Configuration for LDAP servers. - immutable: true - properties: - - name: 'domain' - type: String - required: true - description: | - The LDAP domain name in the format of `my-domain.com`. - immutable: true - - name: 'servers' - required: true - type: Array - description: | - The servers names are used for specifying the LDAP servers names. - The LDAP servers names can come with two formats: - 1. DNS name, for example: `ldap.example1.com`, `ldap.example2.com`. - 2. IP address, for example: `10.0.0.1`, `10.0.0.2`, `10.0.0.3`. - All servers names must be in the same format: either all DNS names or all - IP addresses. - immutable: true - item_type: - type: String - - name: 'usersOu' - type: String - description: | - The users Organizational Unit (OU) is optional. This parameter is a hint - to allow faster lookup in the LDAP namespace. In case that this parameter - is not provided, Filestore instance will query the whole LDAP namespace. - immutable: true - - name: 'groupsOu' - type: String - description: | - The groups Organizational Unit (OU) is optional. This parameter is a hint - to allow faster lookup in the LDAP namespace. In case that this parameter - is not provided, Filestore instance will query the whole LDAP namespace. 
- immutable: true diff --git a/mmv1/products/firebaseapphosting/DefaultDomain.yaml b/mmv1/products/firebaseapphosting/DefaultDomain.yaml deleted file mode 100644 index 84e94a7a75da..000000000000 --- a/mmv1/products/firebaseapphosting/DefaultDomain.yaml +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: DefaultDomain -description: A domain name that is associated with a backend. -base_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} -create_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}}?update_mask=disabled -create_verb: PATCH -update_verb: PATCH -id_format: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} -# The default domain can't be deleted, only disabled -exclude_delete: true -exclude_sweeper: true -import_format: - - projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} - - "{{project}}/{{location}}/{{backend}}/{{domain_id}}" - - "{{location}}/{{backend}}/{{domain_id}}" -examples: - - name: firebase_app_hosting_default_domain_minimal - primary_resource_id: example - vars: - backend_id: 'dd-mini' - service_act_id: 'service-account' - test_env_vars: - project_id: 'PROJECT_NAME' - test_vars_overrides: - # prevent tests from colliding with 
each other - service_act_id: '"tf-test-dd-mi"' - - name: firebase_app_hosting_default_domain_full - primary_resource_id: example - vars: - backend_id: 'dd-full' - service_act_id: 'service-account' - test_env_vars: - project_id: 'PROJECT_NAME' - test_vars_overrides: - # prevent tests from colliding with each other - service_act_id: '"tf-test-dd-full"' - - name: firebase_app_hosting_default_domain_disabled - primary_resource_id: example - vars: - backend_id: 'dd-disabled' - service_act_id: 'service-account' - test_env_vars: - project_id: 'PROJECT_NAME' - test_vars_overrides: - # prevent tests from colliding with each other - service_act_id: '"tf-test-dd-disabled"' -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - base_url: "{{op_id}}" - actions: - - create - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: RG9tYWlu -parameters: - - name: location - type: String - description: The location of the Backend that this Domain is associated with - immutable: true - url_param_only: true - required: true - - name: backend - type: String - description: The ID of the Backend that this Domain is associated with - immutable: true - url_param_only: true - required: true - - name: domainId - type: String - description: |- - Id of the domain. For default domain, it should be {{backend}}--{{project_id}}.{{location}}.hosted.app - immutable: true - url_param_only: true - required: true -properties: - - name: disabled - type: Boolean - description: Whether the domain is disabled. Defaults to false. - default_from_api: true - - name: name - type: String - description: |- - Identifier. The resource name of the domain, e.g. - `projects/{project}/locations/{locationId}/backends/{backendId}/domains/{domainId}` - output: true - - name: uid - type: String - description: System-assigned, unique identifier. 
- output: true - - name: etag - type: String - description: |- - Server-computed checksum based on other values; may be sent - on update or delete to ensure operation is done on expected resource. - output: true - - name: updateTime - type: String - description: Time at which the domain was last updated. - output: true - - name: createTime - type: String - description: Time at which the domain was created. - output: true diff --git a/mmv1/products/firebaseapphosting/Domain.yaml b/mmv1/products/firebaseapphosting/Domain.yaml deleted file mode 100644 index f27993655a87..000000000000 --- a/mmv1/products/firebaseapphosting/Domain.yaml +++ /dev/null @@ -1,425 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Domain -description: A domain name that is associated with a backend. 
-base_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} -create_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains?domainId={{domain_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} -import_format: - - projects/{{project}}/locations/{{location}}/backends/{{backend}}/domains/{{domain_id}} - - "{{project}}/{{location}}/{{backend}}/{{domain_id}}" - - "{{location}}/{{backend}}/{{domain_id}}" -autogen_async: true -async: - operation: - timeouts: - update_minutes: 20 - delete_minutes: 20 - base_url: "{{op_id}}" - actions: - - update - - delete - type: PollAsync - include_project: false - check_response_func_existence: 'transport_tpg.PollCheckForExistence' - check_response_func_absence: 'transport_tpg.PollCheckForAbsence' - # Errors are on the Domain resource itself - suppress_error: true -examples: - - name: firebase_app_hosting_domain_minimal - primary_resource_id: example - vars: - backend_id: 'domain-mini' - service_act_id: 'sa-id' - domain_id: example.com - test_env_vars: - project_id: 'PROJECT_NAME' - test_vars_overrides: - domain_id: '"my-domain-m.com"' - # prevent tests from colliding with each other - service_act_id: '"tf-test-domain-m"' - - name: firebase_app_hosting_domain_full - primary_resource_id: example - vars: - backend_id: 'domain-full' - service_act_id: 'sa-id' - domain_id: example.com - test_env_vars: - project_id: 'PROJECT_NAME' - test_vars_overrides: - domain_id: '"my-domain.com"' - # prevent tests from colliding with each other - service_act_id: '"tf-test-domain"' -autogen_status: RG9tYWlu -parameters: - - name: location - type: String - description: The location of the Backend that this Domain is associated with - immutable: true - url_param_only: true - required: true - - name: backend - type: String - 
description: The ID of the Backend that this Domain is associated with - immutable: true - url_param_only: true - required: true - - name: domainId - type: String - description: |- - Id of the domain to create. - Must be a valid domain name, such as "foo.com" - immutable: true - url_param_only: true - required: true -properties: - - name: customDomainStatus - type: NestedObject - description: The status of a custom domain's linkage to the Backend. - output: true - properties: - - name: certState - type: String - output: true - description: |- - Possible values: - CERT_PREPARING - CERT_VALIDATING - CERT_PROPAGATING - CERT_ACTIVE - CERT_EXPIRING_SOON - CERT_EXPIRED - - name: requiredDnsUpdates - type: Array - description: |- - Lists the records that must added or removed to a custom domain's DNS - in order to finish setup and start serving content. - Field is present during onboarding. Also present after onboarding if one - or more of the above states is not *_ACTIVE, indicating the domain's DNS - records are in a bad state. - output: true - item_type: - type: NestedObject - properties: - - name: domainName - type: String - description: The domain name the DNS updates pertain to. - output: true - - name: discovered - type: Array - description: The set of DNS records App Hosting discovered when inspecting a domain. - output: true - item_type: - type: NestedObject - properties: - - name: domainName - type: String - description: The domain name the record set pertains to. - output: true - - name: checkError - type: NestedObject - output: true - description: |- - The `Status` type defines a logical error model that is suitable for - different programming environments, including REST APIs and RPC APIs. It is - used by [gRPC](https://github.com/grpc). Each `Status` message contains - three pieces of data: error code, error message, and error details. 
- - You can find out more about this error model and how to work with it in the - [API Design Guide](https://cloud.google.com/apis/design/errors). - properties: - - name: code - type: Integer - description: The status code, which should be an enum value of google.rpc.Code. - output: true - - name: message - type: String - output: true - description: |- - A developer-facing error message, which should be in English. Any - user-facing error message should be localized and sent in the - google.rpc.Status.details field, or localized by the client. - - name: details - type: String - output: true - description: | - A list of messages that carry the error details. - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: records - type: Array - description: Records on the domain. - output: true - item_type: - type: NestedObject - properties: - - name: domainName - type: String - description: The domain the record pertains to, e.g. `foo.bar.com.`. - output: true - - name: type - type: String - description: |- - The record's type, which determines what data the record contains. - Possible values: - A - CNAME - TXT - AAAA - CAA - output: true - - name: rdata - type: String - description: |- - The data of the record. The meaning of the value depends on record type: - - A and AAAA: IP addresses for the domain. - - CNAME: Another domain to check for records. - - TXT: Arbitrary text strings associated with the domain. App Hosting - uses TXT records to determine which Firebase projects have - permission to act on the domain's behalf. - - CAA: The record's flags, tag, and value, e.g. `0 issue "pki.goog"`. - output: true - - name: requiredAction - type: String - description: |- - An enum that indicates the a required action for this record. 
Populated - when the record is part of a required change in a `DnsUpdates` - `discovered` or `desired` record set. - Possible values: - NONE - ADD - REMOVE - output: true - - name: relevantState - type: Array - description: |- - An enum that indicates which state(s) this DNS record applies to. Populated - for all records with an `ADD` or `REMOVE` required action. - output: true - item_type: - type: String - - name: desired - type: Array - description: |- - The set of DNS records App Hosting needs in order to be able to serve - secure content on the domain. - output: true - item_type: - type: NestedObject - properties: - - name: domainName - type: String - description: The domain name the record set pertains to. - output: true - - name: checkError - type: NestedObject - output: true - description: |- - The `Status` type defines a logical error model that is suitable for - different programming environments, including REST APIs and RPC APIs. It is - used by [gRPC](https://github.com/grpc). Each `Status` message contains - three pieces of data: error code, error message, and error details. - - You can find out more about this error model and how to work with it in the - [API Design Guide](https://cloud.google.com/apis/design/errors). - properties: - - name: code - type: Integer - output: true - description: The status code, which should be an enum value of google.rpc.Code. - - name: message - type: String - output: true - description: |- - A developer-facing error message, which should be in English. Any - user-facing error message should be localized and sent in the - google.rpc.Status.details field, or localized by the client. - - name: details - type: String - output: true - description: | - A list of messages that carry the error details. 
- custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: records - type: Array - description: Records on the domain. - output: true - item_type: - type: NestedObject - properties: - - name: requiredAction - type: String - description: |- - An enum that indicates the a required action for this record. Populated - when the record is part of a required change in a `DnsUpdates` - `discovered` or `desired` record set. - Possible values: - NONE - ADD - REMOVE - output: true - - name: relevantState - type: Array - description: |- - An enum that indicates which state(s) this DNS record applies to. Populated - for all records with an `ADD` or `REMOVE` required action. - output: true - item_type: - type: String - - name: domainName - type: String - description: The domain the record pertains to, e.g. `foo.bar.com.`. - output: true - - name: type - type: String - description: |- - The record's type, which determines what data the record contains. - Possible values: - A - CNAME - TXT - AAAA - CAA - output: true - - name: rdata - type: String - description: |- - The data of the record. The meaning of the value depends on record type: - - A and AAAA: IP addresses for the domain. - - CNAME: Another domain to check for records. - - TXT: Arbitrary text strings associated with the domain. App Hosting - uses TXT records to determine which Firebase projects have - permission to act on the domain's behalf. - - CAA: The record's flags, tag, and value, e.g. `0 issue "pki.goog"`. - output: true - - name: checkTime - type: String - description: The last time App Hosting checked your custom domain's DNS records. - output: true - - name: issues - type: Array - description: |- - A list of issues with domain configuration. Allows users to self-correct - problems with DNS records. 
- output: true - item_type: - type: NestedObject - properties: - - name: code - type: Integer - output: true - description: The status code, which should be an enum value of google.rpc.Code. - - name: message - type: String - output: true - description: |- - A developer-facing error message, which should be in English. Any - user-facing error message should be localized and sent in the - google.rpc.Status.details field, or localized by the client. - - name: details - type: String - output: true - description: | - A list of messages that carry the error details. - custom_flatten: 'templates/terraform/custom_flatten/json_schema.tmpl' - custom_expand: 'templates/terraform/custom_expand/json_schema.tmpl' - validation: - function: 'validation.StringIsJSON' - - name: hostState - type: String - output: true - description: |- - Possible values: - HOST_UNHOSTED - HOST_UNREACHABLE - HOST_NON_FAH - HOST_CONFLICT - HOST_WRONG_SHARD - HOST_ACTIVE - - name: ownershipState - type: String - output: true - description: |- - Possible values: - OWNERSHIP_MISSING - OWNERSHIP_UNREACHABLE - OWNERSHIP_MISMATCH - OWNERSHIP_CONFLICT - OWNERSHIP_PENDING - OWNERSHIP_ACTIVE - - name: name - type: String - description: |- - Identifier. The resource name of the domain, e.g. - `projects/{project}/locations/{locationId}/backends/{backendId}/domains/{domainId}` - output: true - - name: uid - type: String - description: System-assigned, unique identifier. - output: true - - name: etag - type: String - description: |- - Server-computed checksum based on other values; may be sent - on update or delete to ensure operation is done on expected resource. - output: true - - name: serve - type: NestedObject - description: |- - The serving behavior of the domain. If specified, the domain will - serve content other than its Backend's live content. - properties: - - name: redirect - type: NestedObject - description: Specifies redirect behavior for a domain. 
- properties: - - name: uri - type: String - description: |- - The URI of the redirect's intended destination. This URI will be - prepended to the original request path. URI without a scheme are - assumed to be HTTPS. - required: true - - name: status - type: String - description: |- - The status code to use in a redirect response. Must be a valid HTTP 3XX - status code. Defaults to 302 if not present. - - name: updateTime - type: String - description: Time at which the domain was last updated. - output: true - - name: purgeTime - type: String - description: |- - Time at which a soft-deleted domain will be purged, rendering in - permanently deleted. - output: true - - name: deleteTime - type: String - description: Time at which the domain was deleted. - output: true - - name: createTime - type: String - description: Time at which the domain was created. - output: true diff --git a/mmv1/products/firebaseapphosting/Traffic.yaml b/mmv1/products/firebaseapphosting/Traffic.yaml deleted file mode 100644 index 958fa0e659bb..000000000000 --- a/mmv1/products/firebaseapphosting/Traffic.yaml +++ /dev/null @@ -1,200 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Traffic -description: Controls traffic configuration for a backend. 
-base_url: projects/{{project}}/locations/{{location}}/backends/{{backend}} -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic -create_url: projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic?update_mask=* -create_verb: PATCH -update_verb: PATCH -exclude_delete: true # Traffic config cannot be deleted -exclude_sweeper: true -id_format: projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic -import_format: - - "projects/{{project}}/locations/{{location}}/backends/{{backend}}/traffic" - - "{{project}}/{{location}}/{{backend}}" - - "{{location}}/{{backend}}" -examples: - - name: firebase_app_hosting_traffic_target - primary_resource_id: example - vars: - backend_id: "traffic-tg" - build_id: "target-build" - service_act_id: "firebase-app-hosting-compute" - test_env_vars: - project_id: "PROJECT_NAME" - test_vars_overrides: - # prevent tests from colliding with each other - service_act_id: '"tf-test-traffic-tg"' - - name: firebase_app_hosting_traffic_rollout_policy - primary_resource_id: example - vars: - backend_id: "traffic-rp" - service_act_id: "firebase-app-hosting-compute" - branch: "main" - test_env_vars: - project_id: "PROJECT_NAME" - test_vars_overrides: - # prevent tests from colliding with each other - service_act_id: '"tf-test-traffic-rp"' - - name: firebase_app_hosting_traffic_rollout_policy_disabled - primary_resource_id: example - vars: - backend_id: "traffic-rpd" - service_act_id: "firebase-app-hosting-compute" - branch: "main" - test_env_vars: - project_id: "PROJECT_NAME" - test_vars_overrides: - # prevent tests from colliding with each other - service_act_id: '"tf-test-traffic-rpd"' -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: "{{op_id}}" - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -parameters: - - 
name: location - type: String - description: The location the Backend that this Traffic config applies to - immutable: true - url_param_only: true - required: true - - name: backend - type: String - description: Id of the backend that this Traffic config applies to - immutable: true - url_param_only: true - required: true -properties: - - name: etag - type: Fingerprint - description: |- - Server-computed checksum based on other values; may be sent - on update or delete to ensure operation is done on expected resource. - output: true - - name: uid - type: String - description: System-assigned, unique identifier. - output: true - - name: createTime - type: Time - description: Time at which the backend was created. - output: true - - name: updateTime - type: Time - description: Time at which the backend was last updated. - output: true - - name: deleteTime - type: Time - description: Time at which the backend was deleted. - output: true - - name: name - type: String - description: |- - Identifier. The resource name of the backend traffic config - - Format: - - `projects/{project}/locations/{locationId}/backends/{backendId}/traffic`. - output: true - - name: current - type: NestedObject - output: true - description: |- - Current state of traffic allocation for the backend. - When setting `target`, this field may differ for some time until the desired state is reached. - properties: - - name: splits - type: Array - description: A list of traffic splits that together represent where traffic is being routed. - output: true - item_type: - type: NestedObject - description: The traffic allocation for the backend. - properties: - - name: build - type: String - output: true - description: |- - The build that traffic is being routed to. - - name: percent - type: Integer - output: true - description: |- - The percentage of traffic to send to the build. Currently must be 100 or 0. 
- - name: target - type: NestedObject - description: |- - Set to manually control the desired traffic for the backend. This will - cause current to eventually match this value. The percentages must add - up to 100. - exactly_one_of: - - rolloutPolicy - - target - properties: - - name: splits - type: Array - description: A list of traffic splits that together represent where traffic is being routed. - required: true - item_type: - type: NestedObject - description: The traffic allocation for the backend. - properties: - - name: build - type: String - required: true - description: |- - The build that traffic is being routed to. - - name: percent - type: Integer - required: true - description: |- - The percentage of traffic to send to the build. Currently must be 100 or 0. - - name: rolloutPolicy - type: NestedObject - description: |- - The policy for how builds and rollouts are triggered and rolled out. - exactly_one_of: - - rolloutPolicy - - target - properties: - - name: disabled - type: Boolean - description: |- - A flag that, if true, prevents rollouts from being created via this RolloutPolicy. - default_value: false - - name: disabledTime - type: Time - output: true - description: |- - If disabled is set, the time at which the rollouts were disabled. - - name: codebaseBranch - type: String - description: |- - Specifies a branch that triggers a new build to be started with this - policy. If not set, no automatic rollouts will happen. 
diff --git a/mmv1/products/firebaseextensions/Instance.yaml b/mmv1/products/firebaseextensions/Instance.yaml index 85d6a897f8b7..71791e8e5c60 100644 --- a/mmv1/products/firebaseextensions/Instance.yaml +++ b/mmv1/products/firebaseextensions/Instance.yaml @@ -52,7 +52,6 @@ examples: instance-id: 'storage-resize-images' bucket_id: 'bucket-id' service-account-id: 's-a' - location: "us-central1" test_env_vars: project_id: 'PROJECT_NAME' test_vars_overrides: diff --git a/mmv1/products/firebasehosting/Site.yaml b/mmv1/products/firebasehosting/Site.yaml index de08fa279010..a32466eb8da2 100644 --- a/mmv1/products/firebasehosting/Site.yaml +++ b/mmv1/products/firebasehosting/Site.yaml @@ -35,8 +35,6 @@ timeouts: delete_minutes: 20 custom_code: pre_create: 'templates/terraform/pre_create/firebasehosting_site.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/firebasehosting_site.go.tmpl' - test_check_destroy: 'templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl' examples: - name: 'firebasehosting_site_basic' primary_resource_id: 'default' @@ -55,11 +53,6 @@ examples: project_id: 'PROJECT_NAME' test_vars_overrides: 'display_name': '"tf-test Test web app for Firebase Hosting"' - - name: 'firebasehosting_site_default' - primary_resource_id: 'default' - min_version: 'beta' - test_env_vars: - project_id: 'PROJECT_NAME' parameters: - name: 'site_id' type: String @@ -95,7 +88,3 @@ properties: The default URL for the site in the form of https://{name}.web.app min_version: 'beta' output: true - - name: 'type' - type: String - output: true - description: The type of Hosting site, either 'DEFAULT_SITE' or `USER_SITE` diff --git a/mmv1/products/firestore/Database.yaml b/mmv1/products/firestore/Database.yaml index d20ac330081e..0040822f0829 100644 --- a/mmv1/products/firestore/Database.yaml +++ b/mmv1/products/firestore/Database.yaml @@ -72,22 +72,6 @@ examples: - 'project' - 'etag' - 'deletion_policy' - - name: 'firestore_database_with_tags' - 
primary_resource_id: 'database' - vars: - database_id: 'database-with-tags-id' - delete_protection_state: 'DELETE_PROTECTION_ENABLED' - tag_key_id: 'keyname' - tag_value_id: 'valuename' - test_env_vars: - project_id: 'PROJECT_NAME' - test_vars_overrides: - 'delete_protection_state': '"DELETE_PROTECTION_DISABLED"' - ignore_read_extra: - - 'project' - - 'etag' - - 'deletion_policy' - exclude_test: true - name: 'firestore_cmek_database' primary_resource_id: 'database' vars: @@ -161,9 +145,6 @@ virtual_fields: See also `delete_protection`. type: String default_value: "ABANDON" - # `deletion_policy` is deprecated and will be removed in a future major release. - # Once that release happens, you should use `delete_protection_state` instead. - # For now though, setting this field is necessary if you wish for your Firestore databases to be deleted upon `terraform destroy`. parameters: properties: - name: 'name' @@ -331,14 +312,3 @@ properties: output: true item_type: type: String - - name: 'tags' - type: KeyValuePairs - description: | - Input only. A map of resource manager tags. Resource manager tag keys - and values have the same definition as resource manager tags. - Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. - The field is ignored when empty. The field is immutable and causes - resource replacement when mutated. To apply tags to an existing resource, see - the `google_tags_tag_value` resource. 
- immutable: true - ignore_read: true diff --git a/mmv1/products/firestore/Index.yaml b/mmv1/products/firestore/Index.yaml index c3e29a56845a..026cb9b3594d 100644 --- a/mmv1/products/firestore/Index.yaml +++ b/mmv1/products/firestore/Index.yaml @@ -50,6 +50,7 @@ async: custom_code: constants: 'templates/terraform/constants/firestore_index.go.tmpl' encoder: 'templates/terraform/encoders/index.go.tmpl' + post_create: 'templates/terraform/post_create/index.go.tmpl' custom_import: 'templates/terraform/custom_import/index_self_link_as_name_set_project.go.tmpl' error_retry_predicates: @@ -92,12 +93,6 @@ examples: database_id: 'database-id-sparse-any' test_env_vars: project_id: 'PROJECT_NAME' - - name: 'firestore_index_unique' - primary_resource_id: 'my-index' - vars: - database_id: 'database-id-unique' - test_env_vars: - project_id: 'PROJECT_NAME' parameters: properties: - name: 'name' @@ -158,12 +153,6 @@ properties: definition reach or traverse an array, except via an explicit array index. Violations will result in errors. Note this field only applies to indexes with MONGODB_COMPATIBLE_API ApiScope. - - name: 'unique' - type: Boolean - default_from_api: true - description: - Whether it is an unique index. Unique index ensures all values for the - indexed field(s) are unique across documents. - name: 'fields' type: Array description: | @@ -184,7 +173,7 @@ properties: Name of the field. - name: 'order' type: Enum - # TODO: Exactly one of order, arrayConfig, or vectorConfig must be set + # TODO (mbang): Exactly one of order, arrayConfig, or vectorConfig must be set description: | Indicates that this field supports ordering by the specified order or comparing using =, <, <=, >, >=. Only one of `order`, `arrayConfig`, and `vectorConfig` can be specified. 
@@ -193,7 +182,7 @@ properties: - 'DESCENDING' - name: 'arrayConfig' type: Enum - # TODO: Exactly one of order, arrayConfig, or vectorConfig must be set + # TODO (mbang): Exactly one of order, arrayConfig, or vectorConfig must be set description: | Indicates that this field supports operations on arrayValues. Only one of `order`, `arrayConfig`, and `vectorConfig` can be specified. @@ -201,7 +190,7 @@ properties: - 'CONTAINS' - name: 'vectorConfig' type: NestedObject - # TODO: Exactly one of order, arrayConfig, or vectorConfig must be set + # TODO (mbang): Exactly one of order, arrayConfig, or vectorConfig must be set description: | Indicates that this field supports vector search operations. Only one of `order`, `arrayConfig`, and `vectorConfig` can be specified. Vector Fields should come after the field path `__name__`. @@ -218,7 +207,7 @@ properties: send_empty_value: true allow_empty_object: true properties: - # Meant to be an empty object with no properties. + # Meant to be an empty object with no properties. [] # Most composite indexes require at least two fields, but it is possible # for a user to require a single field index such as `__name__ DESC`. 
diff --git a/mmv1/products/gemini/CodeRepositoryIndex.yaml b/mmv1/products/gemini/CodeRepositoryIndex.yaml index 29f78c38f19b..6391c6286028 100644 --- a/mmv1/products/gemini/CodeRepositoryIndex.yaml +++ b/mmv1/products/gemini/CodeRepositoryIndex.yaml @@ -21,7 +21,6 @@ references: base_url: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes self_link: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code_repository_index_id}} create_url: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes?codeRepositoryIndexId={{code_repository_index_id}} -delete_url: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code_repository_index_id}}?force={{force_destroy}} update_verb: 'PATCH' update_mask: true id_format: projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code_repository_index_id}} @@ -54,12 +53,11 @@ async: result: resource_inside_response: true include_project: false +custom_code: + pre_delete: templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl error_retry_predicates: - 'transport_tpg.IsCodeRepositoryIndexUnreadyError' - 'transport_tpg.IsRepositoryGroupQueueError' -sweeper: - url_substitutions: - - force_destroy: true virtual_fields: - name: 'force_destroy' description: diff --git a/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml b/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml index cb11c67cd586..acfac1f91529 100644 --- a/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml +++ b/mmv1/products/gemini/DataSharingWithGoogleSetting.yaml @@ -67,7 +67,4 @@ properties: description: Labels as key value pairs. - name: enablePreviewDataSharing type: Boolean - description: Whether data sharing should be enabled in Preview products. - - name: enableDataSharing - type: Boolean - description: Whether data sharing should be enabled in GA products. + description: Whether preview data sharing should be enabled. 
diff --git a/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml b/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml index aef767df4267..8331e3b38226 100644 --- a/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml +++ b/mmv1/products/gemini/DataSharingWithGoogleSettingBinding.yaml @@ -75,9 +75,11 @@ parameters: required: true properties: - name: product - type: String + type: Enum description: |- - Product type of the setting binding. Values include GEMINI_IN_BIGQUERY, GEMINI_CLOUD_ASSIST, etc. See [product reference](https://cloud.google.com/gemini/docs/api/reference/rest/v1/projects.locations.dataSharingWithGoogleSettings.settingBindings) for a complete list. + Product type of the setting binding. + enum_values: + - 'GEMINI_CLOUD_ASSIST' default_from_api: true - name: name type: String diff --git a/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml b/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml index 0020c68ffd8f..5d1eec4e8c51 100644 --- a/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml +++ b/mmv1/products/gemini/GeminiGcpEnablementSetting.yaml @@ -68,16 +68,3 @@ properties: - name: enableCustomerDataSharing type: Boolean description: Whether customer data sharing should be enabled. - - name: disableWebGrounding - type: Boolean - description: |- - Whether web grounding should be disabled. - deprecation_message: |- - `disable_web_grounding` is deprecated. Use `web_grounding_type` instead. - - name: webGroundingType - type: String - description: |- - Web grounding type. - Possible values: - GROUNDING_WITH_GOOGLE_SEARCH - WEB_GROUNDING_FOR_ENTERPRISE diff --git a/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml b/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml index 344a41c87e71..c756c36f740c 100644 --- a/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml +++ b/mmv1/products/gemini/GeminiGcpEnablementSettingBinding.yaml @@ -82,9 +82,11 @@ properties: description: Target of the binding. 
required: true - name: product - type: String + type: Enum description: |- - Product type of the setting binding. Values include GEMINI_IN_BIGQUERY, GEMINI_CLOUD_ASSIST, etc. See [product reference](https://cloud.google.com/gemini/docs/api/reference/rest/v1/projects.locations.dataSharingWithGoogleSettings.settingBindings) for a complete list. + Product type of the setting binding. + enum_values: + - 'GEMINI_IN_BIGQUERY' default_from_api: true - name: name type: String diff --git a/mmv1/products/gemini/LoggingSettingBinding.yaml b/mmv1/products/gemini/LoggingSettingBinding.yaml index 1793330954de..57b29f646e9c 100644 --- a/mmv1/products/gemini/LoggingSettingBinding.yaml +++ b/mmv1/products/gemini/LoggingSettingBinding.yaml @@ -91,7 +91,6 @@ properties: Product type of the setting binding. enum_values: - 'GEMINI_CODE_ASSIST' - default_from_api: true - name: name type: String description: |- diff --git a/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml b/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml index 93380e0e2cf2..5bef6995417d 100644 --- a/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml +++ b/mmv1/products/gemini/ReleaseChannelSettingBinding.yaml @@ -105,4 +105,3 @@ properties: enum_values: - 'GEMINI_CLOUD_ASSIST' - 'GEMINI_CODE_ASSIST' - default_from_api: true diff --git a/mmv1/products/gemini/RepositoryGroup.yaml b/mmv1/products/gemini/RepositoryGroup.yaml index 883506b5e2b5..0fa7043ccfd1 100644 --- a/mmv1/products/gemini/RepositoryGroup.yaml +++ b/mmv1/products/gemini/RepositoryGroup.yaml @@ -28,12 +28,18 @@ mutex: 'projects/{{project}}/locations/{{location}}/codeRepositoryIndexes/{{code examples: - name: "gemini_repository_group_basic" primary_resource_id: "example" + primary_resource_name: 'acctest.BootstrapSharedCodeRepositoryIndex(t, "basic-rg-gen-example", "us-central1", "", map[string]string{"ccfe_debug_note":"terraform_e2e_do_not_delete"}), fmt.Sprintf("tf-test-gen-repository-group-%s", context["random_suffix"])' vars: 
repository_group_id: "example-repository-group" git_repository_link_id: 'example-git-repository-link-id' cri_id: "cri-example" repository_resource: "projects/example-project/locations/us-central1/connections/example-connection/gitRepositoryLinks/example-repo" connection_id: "example-connection-id" + test_vars_overrides: + git_repository_link_id: 'acctest.BootstrapGitRepository(t, "basic", "us-central1", "https://github.com/CC-R-github-robot/tf-test.git", acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648))' + cri_id: 'acctest.BootstrapSharedCodeRepositoryIndex(t, "basic-rg-gen-example", "us-central1", "", map[string]string{"ccfe_debug_note":"terraform_e2e_do_not_delete"})' + repository_resource: '"projects/"+envvar.GetTestProjectFromEnv()+"/locations/us-central1/connections/"+acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648)+"/gitRepositoryLinks/"+acctest.BootstrapGitRepository(t, "basic", "us-central1", "https://github.com/CC-R-github-robot/tf-test.git", acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648))' + connection_id: 'acctest.BootstrapDeveloperConnection(t, "basic", "us-central1", "projects/502367051001/secrets/tf-test-cloudaicompanion-github-oauthtoken-c42e5c/versions/1", 54180648)' exclude_test: true timeouts: insert_minutes: 30 diff --git a/mmv1/products/gkebackup/BackupChannel.yaml b/mmv1/products/gkebackup/BackupChannel.yaml index 22387f21ab0f..727ae15333c8 100644 --- a/mmv1/products/gkebackup/BackupChannel.yaml +++ b/mmv1/products/gkebackup/BackupChannel.yaml @@ -78,7 +78,7 @@ properties: description: | The project where Backups are allowed to be stored. The format is `projects/{project}`. 
- {project} can be project number or project id. + {project} can only be a project number. required: true immutable: true - name: 'description' diff --git a/mmv1/products/gkebackup/BackupPlan.yaml b/mmv1/products/gkebackup/BackupPlan.yaml index 6f5208e93e32..9910c9df2d31 100644 --- a/mmv1/products/gkebackup/BackupPlan.yaml +++ b/mmv1/products/gkebackup/BackupPlan.yaml @@ -318,7 +318,7 @@ properties: function: 'verify.ValidateDuration()' - name: 'singleOccurrenceDate' type: NestedObject - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (cmfeng): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | No recurrence. The exclusion window occurs only once and on this date in UTC. Only one of singleOccurrenceDate, daily and daysOfWeek may be set. @@ -337,14 +337,14 @@ properties: Day of a month. - name: 'daily' type: Boolean - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (cmfeng): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | The exclusion window occurs every day if set to "True". Specifying this field to "False" is an error. Only one of singleOccurrenceDate, daily and daysOfWeek may be set. - name: 'daysOfWeek' type: NestedObject - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (cmfeng): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | The exclusion window occurs on these days of each week in UTC. Only one of singleOccurrenceDate, daily and daysOfWeek may be set. 
diff --git a/mmv1/products/gkebackup/RestoreChannel.yaml b/mmv1/products/gkebackup/RestoreChannel.yaml index 8cd932beb137..98809c2a0b31 100644 --- a/mmv1/products/gkebackup/RestoreChannel.yaml +++ b/mmv1/products/gkebackup/RestoreChannel.yaml @@ -78,7 +78,7 @@ properties: description: | The project where Backups will be restored. The format is `projects/{project}`. - {project} can be project number or project id. + {project} can only be a project number. required: true immutable: true - name: 'description' diff --git a/mmv1/products/gkehub/Membership.yaml b/mmv1/products/gkehub/Membership.yaml index f599106c5b9f..ef242bb3e385 100644 --- a/mmv1/products/gkehub/Membership.yaml +++ b/mmv1/products/gkehub/Membership.yaml @@ -122,6 +122,12 @@ properties: description: | The unique identifier of the membership. output: true + - name: 'description' + type: String + description: | + The name of this entity type to be displayed on the console. This field is unavailable in v1 of the API. + min_version: 'beta' + deprecation_message: '`description` is deprecated and will be removed in a future major release.' 
- name: 'labels' type: KeyValueLabels description: | diff --git a/mmv1/products/gkehub/product.yaml b/mmv1/products/gkehub/product.yaml index db104a135598..eb701f7c8923 100644 --- a/mmv1/products/gkehub/product.yaml +++ b/mmv1/products/gkehub/product.yaml @@ -17,7 +17,7 @@ legacy_name: 'gke_hub' display_name: 'GKEHub' versions: - name: 'beta' - base_url: 'https://gkehub.googleapis.com/v1beta/' + base_url: 'https://gkehub.googleapis.com/v1beta1/' - name: 'ga' base_url: 'https://gkehub.googleapis.com/v1/' scopes: diff --git a/mmv1/products/gkehub2/Feature.yaml b/mmv1/products/gkehub2/Feature.yaml index c2f1884a9f4e..8833d725b200 100644 --- a/mmv1/products/gkehub2/Feature.yaml +++ b/mmv1/products/gkehub2/Feature.yaml @@ -51,8 +51,6 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/features/{{name}}' - '{{name}}' custom_code: - pre_create: templates/terraform/pre_create/gkehub_existing_feature.go.tmpl - pre_delete: templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl # Skip sweeper gen since this is a child resource. exclude_sweeper: true legacy_long_form_project: true @@ -105,10 +103,6 @@ examples: primary_resource_id: 'feature' primary_resource_name: 'fmt.Sprint("clusterupgrade")' exclude_test: true - - name: 'gkehub_feature_rbacrolebinding_actuation' - primary_resource_id: 'feature' - primary_resource_name: 'fmt.Sprint("rbacrolebindingactuation")' - exclude_test: true parameters: - name: 'location' type: String @@ -247,15 +241,6 @@ properties: description: | Amount of time to "soak" after a rollout has been finished before marking it COMPLETE. Cannot exceed 30 days. required: true - - name: 'rbacrolebindingactuation' - type: NestedObject - description: RBACRolebinding Actuation feature spec. - properties: - - name: 'allowedCustomRoles' - type: Array - description: 'The list of allowed custom roles (ClusterRoles). If a custom role is not part of this list, it cannot be used in a fleet scope RBACRoleBinding. 
If a custom role in this list is in use, it cannot be removed from the list until the scope RBACRolebindings using it are deleted.' - item_type: - type: String - name: 'fleetDefaultMemberConfig' type: NestedObject description: Optional. Fleet Default Membership Configuration. diff --git a/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml b/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml index 858bcd52cb8a..2b960e60a0fa 100644 --- a/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml +++ b/mmv1/products/gkehub2/ScopeRBACRoleBinding.yaml @@ -51,12 +51,6 @@ examples: primary_resource_name: 'fmt.Sprintf(\"tf-test-scope%s\", context[\"random_suffix\"]), fmt.Sprintf(\"tf-test-rbac-role-binding%s\", context[\"random_suffix\"])' test_env_vars: project: 'PROJECT_NAME' - - name: 'gkehub_scope_rbac_custom_role_binding_basic' - primary_resource_id: 'scope_rbac_role_binding' - primary_resource_name: 'fmt.Sprintf(\"tf-test-scope%s\", context[\"random_suffix\"]), fmt.Sprintf(\"tf-test-custom-rbac-role-binding%s\", context[\"random_suffix\"])' - test_env_vars: - project: 'PROJECT_NAME' - exclude_test: true parameters: - name: 'scope_id' type: String @@ -149,16 +143,6 @@ properties: - 'ADMIN' - 'EDIT' - 'VIEW' - exactly_one_of: - - 'role.0.predefined_role' - - 'role.0.custom_role' - - name: 'customRole' - type: String - description: | - CustomRole is the custom Kubernetes ClusterRole to be used. The custom role format must be allowlisted in the rbacrolebindingactuation feature and RFC 1123 compliant. 
- exactly_one_of: - - 'role.0.predefined_role' - - 'role.0.custom_role' - name: 'labels' type: KeyValueLabels description: | diff --git a/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml b/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml index a4e19e2f0564..517a96b4b48c 100644 --- a/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml +++ b/mmv1/products/gkeonprem/BareMetalAdminCluster.yaml @@ -14,8 +14,6 @@ --- name: 'BareMetalAdminCluster' description: "A Google Bare Metal Admin Cluster." -references: - api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.bareMetalAdminClusters' docs: id_format: 'projects/{{project}}/locations/{{location}}/bareMetalAdminClusters/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/bareMetalAdminClusters' diff --git a/mmv1/products/gkeonprem/BareMetalCluster.yaml b/mmv1/products/gkeonprem/BareMetalCluster.yaml index 7bc573baf0d5..ba188bc3a769 100644 --- a/mmv1/products/gkeonprem/BareMetalCluster.yaml +++ b/mmv1/products/gkeonprem/BareMetalCluster.yaml @@ -14,8 +14,6 @@ --- name: 'BareMetalCluster' description: "A Google Bare Metal User Cluster." -references: - api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.bareMetalClusters' docs: id_format: 'projects/{{project}}/locations/{{location}}/bareMetalClusters/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/bareMetalClusters' @@ -499,7 +497,7 @@ properties: If true, avoid using IPs ending in .0 or .255. This avoids buggy consumer devices mistakenly dropping IPv4 traffic for those special IP addresses. - name: 'manualAssign' - type: Boolean + type: String description: | If true, prevent IP addresses from being automatically assigned. 
- name: 'loadBalancerNodePoolConfig' diff --git a/mmv1/products/gkeonprem/BareMetalNodePool.yaml b/mmv1/products/gkeonprem/BareMetalNodePool.yaml index 5e5cbe38ef4a..1b0f9d92d8a4 100644 --- a/mmv1/products/gkeonprem/BareMetalNodePool.yaml +++ b/mmv1/products/gkeonprem/BareMetalNodePool.yaml @@ -14,8 +14,6 @@ --- name: 'BareMetalNodePool' description: 'A Google Bare Metal Node Pool.' -references: - api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.bareMetalClusters.bareMetalNodePools' docs: id_format: 'projects/{{project}}/locations/{{location}}/bareMetalClusters/{{bare_metal_cluster}}/bareMetalNodePools/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/bareMetalClusters/{{bare_metal_cluster}}/bareMetalNodePools' diff --git a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml index 9cc09f9d9a29..74f6a344bea3 100644 --- a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml @@ -13,15 +13,14 @@ --- name: "VmwareAdminCluster" -description: "A Google VMware Admin Cluster." -references: - api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareAdminClusters' +min_version: beta base_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters" create_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters?vmware_admin_cluster_id={{name}}" update_url: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}" self_link: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}" update_verb: "PATCH" update_mask: true +description: "A Google VMware Admin Cluster." 
exclude_delete: true id_format: "projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}" import_format: ["projects/{{project}}/locations/{{location}}/vmwareAdminClusters/{{name}}"] @@ -41,18 +40,21 @@ taint_resource_on_failed_create: true examples: - name: "gkeonprem_vmware_admin_cluster_basic" primary_resource_id: "admin-cluster-basic" + min_version: beta vars: name: "basic" test_env_vars: project: "fake-backend-360322" - name: 'gkeonprem_vmware_admin_cluster_full' primary_resource_id: 'admin-cluster-full' + min_version: beta vars: name: 'full' test_env_vars: project: 'fake-backend-360322' - name: 'gkeonprem_vmware_admin_cluster_metallb' primary_resource_id: 'admin-cluster-metallb' + min_version: beta vars: name: 'metallb' test_env_vars: @@ -688,14 +690,4 @@ properties: - type: Boolean name: enableAdvancedCluster description: If set, the advanced cluster feature is enabled. - default_from_api: true - - type: NestedObject - name: privateRegistryConfig - description: Configuration for private registry. - properties: - - type: String - name: 'address' - description: The registry address. - - type: String - name: 'caCert' - description: The CA certificate public key for private registry. + output: true diff --git a/mmv1/products/gkeonprem/VmwareCluster.yaml b/mmv1/products/gkeonprem/VmwareCluster.yaml index 2be6a7d95568..65117f190f89 100644 --- a/mmv1/products/gkeonprem/VmwareCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareCluster.yaml @@ -14,8 +14,6 @@ --- name: 'VmwareCluster' description: 'A Google VMware User Cluster.' 
-references: - api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareClusters' docs: id_format: 'projects/{{project}}/locations/{{location}}/vmwareClusters/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/vmwareClusters' diff --git a/mmv1/products/gkeonprem/VmwareNodePool.yaml b/mmv1/products/gkeonprem/VmwareNodePool.yaml index 5936de1cca32..6207952441b8 100644 --- a/mmv1/products/gkeonprem/VmwareNodePool.yaml +++ b/mmv1/products/gkeonprem/VmwareNodePool.yaml @@ -14,8 +14,6 @@ --- name: 'VmwareNodePool' description: "A Google Vmware Node Pool." -references: - api: 'https://cloud.google.com/kubernetes-engine/distributed-cloud/reference/on-prem-api/rest/v1/projects.locations.vmwareClusters.vmwareNodePools' docs: id_format: 'projects/{{project}}/locations/{{location}}/vmwareClusters/{{vmware_cluster}}/vmwareNodePools/{{name}}' base_url: 'projects/{{project}}/locations/{{location}}/vmwareClusters/{{vmware_cluster}}/vmwareNodePools' @@ -127,7 +125,6 @@ properties: - name: 'image' type: String description: The OS image name in vCenter, only valid when using Windows. - default_from_api: true - name: 'bootDiskSizeGb' type: Integer description: VMware disk size to be used during creation. 
diff --git a/mmv1/products/iambeta/WorkloadIdentityPool.yaml b/mmv1/products/iambeta/WorkloadIdentityPool.yaml index 829f398fac0b..4ab9768f124d 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPool.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPool.yaml @@ -19,8 +19,6 @@ description: | references: guides: 'Managing workload identity pools': 'https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#pools' - 'Configure managed workload identity authentication for Compute Engine': 'https://cloud.google.com/iam/docs/create-managed-workload-identities' - 'Configure managed workload identity authentication for GKE': 'https://cloud.google.com/iam/docs/create-managed-workload-identities-gke' api: 'https://cloud.google.com/iam/docs/reference/rest/v1/projects.locations.workloadIdentityPools' docs: base_url: 'projects/{{project}}/locations/global/workloadIdentityPools' @@ -42,16 +40,6 @@ async: base_url: '{{op_id}}' result: resource_inside_response: false -iam_policy: - parent_resource_attribute: 'workload_identity_pool_id' - method_name_separator: ':' - fetch_iam_policy_verb: 'POST' - import_format: - - 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}' - - '{{workload_identity_pool_id}}' - allowed_iam_role: 'roles/iam.workloadIdentityPoolViewer' - iam_conditions_request_type: 'REQUEST_BODY' - min_version: beta custom_code: constants: 'templates/terraform/constants/iam_workload_identity_pool.go.tmpl' decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' @@ -59,26 +47,12 @@ custom_code: examples: - name: 'iam_workload_identity_pool_basic' primary_resource_id: 'example' - primary_resource_name: - 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' vars: workload_identity_pool_id: 'example-pool' - - name: 'iam_workload_identity_pool_full_federation_only_mode' + - name: 'iam_workload_identity_pool_full' primary_resource_id: 'example' - primary_resource_name: - 
'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' vars: workload_identity_pool_id: 'example-pool' - min_version: beta - external_providers: - - 'random' - - name: 'iam_workload_identity_pool_full_trust_domain_mode' - primary_resource_id: 'example' - primary_resource_name: - 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' - vars: - workload_identity_pool_id: 'example-pool' - min_version: beta parameters: properties: - name: 'workloadIdentityPoolId' @@ -96,11 +70,11 @@ properties: type: Enum description: | The state of the pool. - * `STATE_UNSPECIFIED`: State unspecified. - * `ACTIVE`: The pool is active, and may be used in Google Cloud policies. - * `DELETED`: The pool is soft-deleted. Soft-deleted pools are permanently deleted after + * STATE_UNSPECIFIED: State unspecified. + * ACTIVE: The pool is active, and may be used in Google Cloud policies. + * DELETED: The pool is soft-deleted. Soft-deleted pools are permanently deleted after approximately 30 days. You can restore a soft-deleted pool using - `UndeleteWorkloadIdentityPool`. You cannot reuse the ID of a soft-deleted pool until it is + UndeleteWorkloadIdentityPool. You cannot reuse the ID of a soft-deleted pool until it is permanently deleted. While a pool is deleted, you cannot use it to exchange tokens, or use existing tokens to access resources. If the pool is undeleted, existing tokens grant access again. @@ -127,133 +101,3 @@ properties: Whether the pool is disabled. You cannot use a disabled pool to exchange tokens, or use existing tokens to access resources. If the pool is re-enabled, existing tokens grant access again. - - name: mode - type: Enum - description: | - The mode for the pool is operating in. Pools with an unspecified mode will operate as if they - are in `FEDERATION_ONLY` mode. - - - ~> **Note** This field cannot be changed after the Workload Identity Pool is created. 
While - `terraform plan` may show an update if you change this field's value, `terraform apply` - **will fail with an API error** (such as `Error 400: Attempted to update an immutable field.`). - To specify a different `mode`, please create a new Workload Identity Pool resource. - - * `FEDERATION_ONLY`: Pools can only be used for federating external workload identities into - Google Cloud. Unless otherwise noted, no structure or format constraints are applied to - workload identities in a `FEDERATION_ONLY` mode pool, and you may not create any resources - within the pool besides providers. - * `TRUST_DOMAIN`: Pools can be used to assign identities to Google Cloud workloads. All - identities within a `TRUST_DOMAIN` mode pool must consist of a single namespace and individual - workload identifier. The subject identifier for all identities must conform to the following - format: `ns//sa/`. - `google_iam_workload_identity_pool_provider`s cannot be created within `TRUST_DOMAIN` - mode pools. - min_version: beta - enum_values: - - 'FEDERATION_ONLY' - - 'TRUST_DOMAIN' - - name: 'inlineCertificateIssuanceConfig' - type: NestedObject - description: | - Represents configuration for generating mutual TLS (mTLS) certificates for the identities - within this pool. Defines the Certificate Authority (CA) pool resources and configurations - required for issuance and rotation of mTLS workload certificates. - min_version: beta - properties: - - name: 'caPools' - type: KeyValuePairs - description: | - A required mapping of a cloud region to the CA pool resource located in that region used - for certificate issuance, adhering to these constraints: - - * **Key format:** A supported cloud region name equivalent to the location identifier in - the corresponding map entry's value. 
- * **Value format:** A valid CA pool resource path format like: - `projects/{project}/locations/{location}/caPools/{ca_pool}` - * **Region Matching:** Workloads are ONLY issued certificates from CA pools within the - same region. Also the CA pool region (in value) must match the workload's region (key). - required: true - - name: 'lifetime' - type: String - description: | - Lifetime of the workload certificates issued by the CA pool in seconds. Must be between - `86400s` (24 hours) to `2592000s` (30 days), ends in the suffix "`s`" (indicating seconds) - and is preceded by the number of seconds. If unspecified, this will be defaulted to - `86400s` (24 hours). - default_from_api: true - - name: 'rotationWindowPercentage' - type: Integer - description: | - Rotation window percentage indicating when certificate rotation should be initiated based - on remaining lifetime. Must be between `50` - `80`. If unspecified, this will be defaulted - to `50`. - default_from_api: true - - name: 'keyAlgorithm' - type: Enum - description: | - Key algorithm to use when generating the key pair. This key pair will be used to create - the certificate. If unspecified, this will default to `ECDSA_P256`. - - * `RSA_2048`: Specifies RSA with a 2048-bit modulus. - * `RSA_3072`: Specifies RSA with a 3072-bit modulus. - * `RSA_4096`: Specifies RSA with a 4096-bit modulus. - * `ECDSA_P256`: Specifies ECDSA with curve P256. - * `ECDSA_P384`: Specifies ECDSA with curve P384. - default_from_api: true - enum_values: - - 'RSA_2048' - - 'RSA_3072' - - 'RSA_4096' - - 'ECDSA_P256' - - 'ECDSA_P384' - - name: 'inlineTrustConfig' - type: NestedObject - description: | - Represents config to add additional trusted trust domains. Defines configuration for extending - trust to additional trust domains. By establishing trust with another domain, the current - domain will recognize and accept certificates issued by entities within the trusted domains. 
- Note that a trust domain automatically trusts itself, eliminating the need for explicit - configuration. - min_version: beta - properties: - - name: 'additionalTrustBundles' - type: Map - description: | - Maps specific trust domains (e.g., "example.com") to their corresponding `TrustStore` - objects, which contain the trusted root certificates for that domain. There can be a - maximum of `10` trust domain entries in this map. - - Note that a trust domain automatically trusts itself and don't need to be specified here. - If however, this `WorkloadIdentityPool`'s trust domain contains any trust anchors in the - `additional_trust_bundles` map, those trust anchors will be *appended to* the Trust Bundle - automatically derived from your `InlineCertificateIssuanceConfig`'s `ca_pools`. - key_name: trust_domain - key_description: | - The trusted trust domains (e.g., "example.com") to be extended trust to additional trust - domains to. - value_type: - name: trustStore - type: NestedObject - description: | - Trust store that contains trust anchors and optional intermediate CAs used in PKI to - build trust chain and verify client's identity. - properties: - - name: 'trustAnchors' - type: Array - description: | - List of Trust Anchors to be used while performing validation against a given - `TrustStore`. The incoming end entity's certificate must be chained up to one of the - trust anchors here. - required: true - item_type: - type: NestedObject - description: | - Represents a root of trust. - properties: - - name: 'pemCertificate' - type: String - description: | - PEM certificate of the PKI used for validation. Must only contain one ca - certificate(either root or intermediate cert). 
- required: true diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml deleted file mode 100644 index 09b36fded96c..000000000000 --- a/mmv1/products/iambeta/WorkloadIdentityPoolManagedIdentity.yaml +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WorkloadIdentityPoolManagedIdentity' -description: | - Represents a managed identity for a workload identity pool namespace. 
-references: - guides: - 'Configure managed workload identity authentication for Compute Engine': 'https://cloud.google.com/iam/docs/create-managed-workload-identities' - 'Configure managed workload identity authentication for GKE': 'https://cloud.google.com/iam/docs/create-managed-workload-identities-gke' - api: 'https://cloud.google.com/iam/docs/reference/rest/v1/projects.locations.workloadIdentityPools.namespaces.managedIdentities' -min_version: beta -base_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities' -self_link: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities/{{workload_identity_pool_managed_identity_id}}' -create_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities?workloadIdentityPoolManagedIdentityId={{workload_identity_pool_managed_identity_id}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities/{{workload_identity_pool_managed_identity_id}}' -autogen_async: true -custom_code: - constants: 'templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl' - decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' - post_create: 'templates/terraform/post_create/iam_workload_identity_pool_managed_identity.go.tmpl' - post_read: 'templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl' - pre_create: 'templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl' - test_check_destroy: 
'templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl' -examples: - - name: 'iam_workload_identity_pool_managed_identity_basic' - primary_resource_id: 'example' - vars: - workload_identity_pool_id: 'example-pool' - workload_identity_pool_namespace_id: 'example-namespace' - workload_identity_pool_managed_identity_id: 'example-managed-identity' - - name: 'iam_workload_identity_pool_managed_identity_full' - primary_resource_id: 'example' - vars: - workload_identity_pool_id: 'example-pool' - workload_identity_pool_namespace_id: 'example-namespace' - workload_identity_pool_managed_identity_id: 'example-managed-identity' - test_env_vars: - project: 'PROJECT_NUMBER' -parameters: - - name: 'workload_identity_pool_id' - type: String - required: true - immutable: true - url_param_only: true - description: | - The ID to use for the pool, which becomes the final component of the resource name. This - value should be 4-32 characters, and may contain the characters [a-z0-9-]. The prefix - `gcp-` is reserved for use by Google, and may not be specified. - - name: 'workload_identity_pool_namespace_id' - type: String - required: true - immutable: true - url_param_only: true - description: | - The ID to use for the namespace. This value must: - * contain at most 63 characters - * contain only lowercase alphanumeric characters or `-` - * start with an alphanumeric character - * end with an alphanumeric character - - - The prefix `gcp-` will be reserved for future uses. - - name: 'workload_identity_pool_managed_identity_id' - type: String - required: true - immutable: true - url_param_only: true - description: | - The ID to use for the managed identity. This value must: - * contain at most 63 characters - * contain only lowercase alphanumeric characters or `-` - * start with an alphanumeric character - * end with an alphanumeric character - - - The prefix `gcp-` will be reserved for future uses. 
- validation: - function: 'ValidateWorkloadIdentityPoolManagedIdentityId' -properties: - - name: 'name' - type: String - description: | - The resource name of the managed identity as - `projects/{project_number}/locations/global/workloadIdentityPools/{workload_identity_pool_id}/namespaces/{workload_identity_pool_namespace_id}/managedIdentities/{workload_identity_pool_managed_identity_id}`. - output: true - - name: 'description' - type: String - description: | - A description of the managed identity. Cannot exceed 256 characters. - - name: 'state' - type: Enum - description: | - The current state of the managed identity. - * `ACTIVE`: The managed identity is active. - * `DELETED`: The managed identity is soft-deleted. Soft-deleted managed identities are - permanently deleted after approximately 30 days. You can restore a soft-deleted managed - identity using UndeleteWorkloadIdentityPoolManagedIdentity. You cannot reuse the ID of a - soft-deleted managed identity until it is permanently deleted. - output: true - enum_values: - - 'ACTIVE' - - 'DELETED' - - name: 'disabled' - type: Boolean - description: | - Whether the managed identity is disabled. If disabled, credentials may no longer be issued for - the identity, however existing credentials will still be accepted until they expire. - - name: 'attestationRules' - type: Array - description: | - Defines which workloads can receive an identity within a pool. When an AttestationRule is - defined under a managed identity, matching workloads may receive that identity. A maximum of - 50 AttestationRules can be set. 
- update_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}/managedIdentities/{{workload_identity_pool_managed_identity_id}}:setAttestationRules' - update_verb: 'POST' - is_set: true - item_type: - type: NestedObject - properties: - - name: 'googleCloudResource' - type: String - description: | - A single workload operating on Google Cloud. For example: - `//compute.googleapis.com/projects/123/uid/zones/us-central1-a/instances/12345678`. - required: true diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml deleted file mode 100644 index 45bf3ac09eb2..000000000000 --- a/mmv1/products/iambeta/WorkloadIdentityPoolNamespace.yaml +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WorkloadIdentityPoolNamespace' -description: | - Represents a namespace for a workload identity pool. Namespaces are used to segment identities - within the pool. 
-references: - guides: - 'Configure managed workload identity authentication for Compute Engine': 'https://cloud.google.com/iam/docs/create-managed-workload-identities' - 'Configure managed workload identity authentication for GKE': 'https://cloud.google.com/iam/docs/create-managed-workload-identities-gke' - api: 'https://cloud.google.com/iam/docs/reference/rest/v1/projects.locations.workloadIdentityPools.namespaces' -min_version: beta -base_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces' -self_link: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}' -create_url: 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces?workloadIdentityPoolNamespaceId={{workload_identity_pool_namespace_id}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/locations/global/workloadIdentityPools/{{workload_identity_pool_id}}/namespaces/{{workload_identity_pool_namespace_id}}' -autogen_async: true -custom_code: - constants: 'templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl' - decoder: 'templates/terraform/decoders/treat_deleted_state_as_gone.go.tmpl' - test_check_destroy: 'templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl' -examples: - - name: 'iam_workload_identity_pool_namespace_basic' - primary_resource_id: 'example' - vars: - workload_identity_pool_id: 'example-pool' - workload_identity_pool_namespace_id: 'example-namespace' - - name: 'iam_workload_identity_pool_namespace_full' - primary_resource_id: 'example' - vars: - workload_identity_pool_id: 'example-pool' - workload_identity_pool_namespace_id: 'example-namespace' -parameters: - - name: 'workload_identity_pool_id' - type: String - required: true - immutable: true - url_param_only: true - description: | - The ID to use for the pool, which 
becomes the final component of the resource name. This - value should be 4-32 characters, and may contain the characters [a-z0-9-]. The prefix - `gcp-` is reserved for use by Google, and may not be specified. - - name: 'workload_identity_pool_namespace_id' - type: String - required: true - immutable: true - url_param_only: true - description: | - The ID to use for the namespace. This value must: - * contain at most 63 characters - * contain only lowercase alphanumeric characters or `-` - * start with an alphanumeric character - * end with an alphanumeric character - - - The prefix `gcp-` will be reserved for future uses. - validation: - function: 'ValidateWorkloadIdentityPoolNamespaceId' -properties: - - name: 'name' - type: String - description: | - The resource name of the namespace as - `projects/{project_number}/locations/global/workloadIdentityPools/{workload_identity_pool_id}/namespaces/{workload_identity_pool_namespace_id}`. - output: true - - name: 'description' - type: String - description: | - A description of the namespace. Cannot exceed 256 characters. - - name: 'state' - type: Enum - description: | - The current state of the namespace. - * `ACTIVE`: The namespace is active. - * `DELETED`: The namespace is soft-deleted. Soft-deleted namespaces are permanently deleted - after approximately 30 days. You can restore a soft-deleted namespace using - UndeleteWorkloadIdentityPoolNamespace. You cannot reuse the ID of a soft-deleted namespace - until it is permanently deleted. - output: true - enum_values: - - 'ACTIVE' - - 'DELETED' - - name: 'disabled' - type: Boolean - description: | - Whether the namespace is disabled. If disabled, credentials may no longer be issued for - identities within this namespace, however existing credentials will still be accepted until - they expire. - - name: 'ownerService' - type: NestedObject - description: | - Defines the owner that is allowed to mutate this resource. 
If present, this resource can only - be mutated by the owner. - output: true - properties: - - name: 'principalSubject' - type: String - description: | - The service agent principal subject, e.g. - `serviceAccount:service-1234@gcp-sa-gkehub.iam.gserviceaccount.com`. - output: true diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml index c3ce3eb2acb0..177fbf91c251 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml @@ -310,7 +310,6 @@ properties: } ``` required: false - state_func: 'func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }' - name: 'saml' type: NestedObject description: diff --git a/mmv1/products/iamworkforcepool/OauthClientCredential.yaml b/mmv1/products/iamworkforcepool/OauthClientCredential.yaml index bacf221fadcd..b6eb2abd111e 100644 --- a/mmv1/products/iamworkforcepool/OauthClientCredential.yaml +++ b/mmv1/products/iamworkforcepool/OauthClientCredential.yaml @@ -85,7 +85,6 @@ properties: more, see [OAuth client and credential security risks and mitigations](https://cloud.google.com/iam/docs/workforce-oauth-app#security) output: true - sensitive: true - name: displayName type: String description: |- diff --git a/mmv1/products/iamworkforcepool/WorkforcePool.yaml b/mmv1/products/iamworkforcepool/WorkforcePool.yaml index af11bfd15a44..66675f58baff 100644 --- a/mmv1/products/iamworkforcepool/WorkforcePool.yaml +++ b/mmv1/products/iamworkforcepool/WorkforcePool.yaml @@ -50,32 +50,19 @@ custom_code: exclude_sweeper: true examples: - name: 'iam_workforce_pool_basic' - primary_resource_name: 'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' primary_resource_id: 'example' - region_override: 'global' vars: workforce_pool_id: 'example-pool' test_env_vars: org_id: 'ORG_ID' - name: 'iam_workforce_pool_full' - primary_resource_name: 
'fmt.Sprintf("tf-test-example-pool%s", context["random_suffix"])' primary_resource_id: 'example' - region_override: 'global' vars: workforce_pool_id: 'example-pool' test_env_vars: org_id: 'ORG_ID' -iam_policy: - parent_resource_attribute: 'workforce_pool_id' - method_name_separator: ':' - example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' - allowed_iam_role: 'roles/iam.workforcePoolViewer' - admin_iam_role: 'roles/iam.workforcePoolAdmin' - fetch_iam_policy_verb: 'POST' - import_format: - - 'locations/{{location}}/workforcePools/{{workforce_pool_id}}' - - '{{workforce_pool_id}}' parameters: +properties: - name: 'location' type: String description: The location for the resource. @@ -93,7 +80,6 @@ parameters: immutable: true validation: function: 'ValidateWorkforcePoolId' -properties: - name: 'name' type: String description: | diff --git a/mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml b/mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml deleted file mode 100644 index 1082c3fca870..000000000000 --- a/mmv1/products/iamworkforcepool/WorkforcePoolProviderKey.yaml +++ /dev/null @@ -1,158 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WorkforcePoolProviderKey' -description: | - Represents a public key configuration for a Workforce Pool Provider. The key can be configured in your identity provider to encrypt SAML assertions. 
- Google holds the corresponding private key, which it uses to decrypt encrypted tokens. -references: - guides: - 'Workforce Identity Federation Overview': 'https://cloud.google.com/iam/docs/workforce-identity-federation' - 'Configure a provider within the workforce pool': 'https://cloud.google.com/iam/docs/manage-workforce-identity-pools-providers#configure_a_provider_within_the_workforce_pool' - api: 'https://cloud.google.com/iam/docs/reference/rest/v1/locations.workforcePools.providers.keys' -base_url: 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys' -self_link: 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys/{{key_id}}' -create_url: 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys?workforcePoolProviderKeyId={{key_id}}' -immutable: true -import_format: - - 'locations/{{location}}/workforcePools/{{workforce_pool_id}}/providers/{{provider_id}}/keys/{{key_id}}' -timeouts: - insert_minutes: 20 - delete_minutes: 20 -autogen_async: true -async: - actions: ['create', 'delete'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: false -custom_code: - constants: 'templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl' -examples: - - name: 'iam_workforce_pool_provider_saml_key_basic' - primary_resource_id: "example" - vars: - workforce_pool_id: 'example-pool' - provider_id: 'example-prvdr' - key_id: 'example-key' - test_env_vars: - org_id: 'ORG_ID' -parameters: - - name: 'location' - type: String - description: | - The location for the resource. - url_param_only: true - required: true - immutable: true - - name: 'workforcePoolId' - type: String - description: | - The ID of the workforce pool. - url_param_only: true - required: true - immutable: true - - name: 'providerId' - type: String - description: | - The ID of the provider. 
- url_param_only: true - required: true - immutable: true - - name: 'keyId' - type: String - description: | - The ID to use for the key, which becomes the final component of the resource name. This value must be 4-32 characters, and may contain the characters [a-z0-9-]. - url_param_only: true - required: true - immutable: true - validation: - function: 'ValidateWorkforcePoolProviderKeyId' -properties: - - name: 'name' - type: String - description: | - Identifier. The resource name of the key. - Format: `locations/{location}/workforcePools/{workforcePoolId}/providers/{providerId}/keys/{keyId}` - output: true - - name: 'keyData' - type: 'NestedObject' - description: | - Immutable. Public half of the asymmetric key. - required: true - properties: - - name: 'format' - type: Enum - description: | - The format of the key. - output: true - enum_values: - - 'RSA_X509_PEM' - - name: 'notBeforeTime' - type: String - description: | - Earliest timestamp when this key is valid. Attempts to use this key before this time will fail. - Only present if the key data represents a X.509 certificate. - - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. - Offsets other than "Z" are also accepted. - Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - output: true - - name: 'notAfterTime' - type: String - description: | - Latest timestamp when this key is valid. Attempts to use this key after this time will fail. - Only present if the key data represents a X.509 certificate. - - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. - Offsets other than "Z" are also accepted. - Examples: "2014-10-02T15:01:23Z", "2014-10-02T15:01:23.045123456Z" or "2014-10-02T15:01:23+05:30". - output: true - - name: 'key' - type: String - description: | - The key data. The format of the key is represented by the format field. 
- output: true - - name: 'keySpec' - type: Enum - description: | - The specifications for the key. - required: true - enum_values: - - 'RSA_2048' - - 'RSA_3072' - - 'RSA_4096' - - name: 'state' - type: Enum - description: | - The state of the key. - output: true - enum_values: - - 'STATE_UNSPECIFIED' - - 'ACTIVE' - - 'DELETED' - - name: 'use' - type: Enum - description: | - The purpose of the key. - required: true - enum_values: - - 'ENCRYPTION' - - name: 'expireTime' - type: String - description: | - The time after which the key will be permanently deleted and cannot be recovered. - Note that the key may get purged before this time if the total limit of keys per provider is exceeded. - output: true diff --git a/mmv1/products/iap/Brand.yaml b/mmv1/products/iap/Brand.yaml index 6f070c3b4b27..15fc48816b79 100644 --- a/mmv1/products/iap/Brand.yaml +++ b/mmv1/products/iap/Brand.yaml @@ -13,8 +13,6 @@ --- name: 'Brand' -deprecation_message: >- - after July 2025, the `google_iap_brand` Terraform resource will no longer function as intended due to the deprecation of the IAP OAuth Admin API description: | OAuth brand data. Only "Organization Internal" brands can be created programmatically via API. 
To convert it into an external brands @@ -52,6 +50,7 @@ async: identity: - name custom_code: + post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_import: 'templates/terraform/custom_import/iap_brand.go.tmpl' examples: - name: 'iap_brand' diff --git a/mmv1/products/iap/Client.yaml b/mmv1/products/iap/Client.yaml index ff5a04384e51..3aae432eee14 100644 --- a/mmv1/products/iap/Client.yaml +++ b/mmv1/products/iap/Client.yaml @@ -14,8 +14,6 @@ --- name: 'Client' api_resource_type_kind: IdentityAwareProxyClient -deprecation_message: >- - After July 2025, the `google_iap_client` Terraform resource will no longer function as intended due to the deprecation of the IAP OAuth Admin API description: | Contains the data that describes an Identity Aware Proxy owned client. @@ -33,13 +31,12 @@ self_link: '{{brand}}/identityAwareProxyClients/{{client_id}}' immutable: true import_format: - '{{brand}}/identityAwareProxyClients/{{client_id}}' -datasource: - generate: true timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 custom_code: + post_create: 'templates/terraform/post_create/iap_client.go.tmpl' custom_import: 'templates/terraform/custom_import/iap_client.go.tmpl' exclude_sweeper: true error_retry_predicates: diff --git a/mmv1/products/iap/ForwardingRuleRegionalService.yaml b/mmv1/products/iap/ForwardingRuleRegionalService.yaml deleted file mode 100644 index 6c33585b4841..000000000000 --- a/mmv1/products/iap/ForwardingRuleRegionalService.yaml +++ /dev/null @@ -1,43 +0,0 @@ -name: 'WebRegionForwardingRuleService' -description: | - Only used to generate IAM resources -# This resource is only used to generate IAM resources. They do not correspond to real -# GCP resources, and should not be used to generate anything other than IAM support. 
-exclude_resource: true -docs: -id_format: 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' -base_url: 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' -self_link: 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' -import_format: - - 'projects/{{project}}/iap_web/forwarding_rule-{{region}}/services/{{name}}' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -iam_policy: - method_name_separator: ':' - parent_resource_type: 'google_compute_forwarding_rule' - fetch_iam_policy_verb: 'POST' - allowed_iam_role: 'roles/iap.httpsResourceAccessor' - parent_resource_attribute: 'forwarding_rule_region_service_name' - iam_conditions_request_type: 'REQUEST_BODY' - example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' -exclude_tgc: true -examples: - - name: 'forwarding_rule_region_service_basic' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-forwarding-rule-region-service%s", context["random_suffix"])' - vars: - forwarding_rule_region_service_name: 'forwarding-rule-region-service' - regional_health_check_name: 'tf-test-region-health-check' - regional_backend_service_name: 'regional-bs' - regional_url_map_name: 'regional-url-map' - regional_target_http_proxy_name: 'regional-target-http-proxy' - compute_network_name: 'tf-test-network-name' - compute_subnetwork_name: 'tf-test-subnetwork-name' -parameters: -properties: - - name: 'name' - type: String - description: Name or self link of a regional forwarding rule service. - required: true diff --git a/mmv1/products/iap/ForwardingRuleService.yaml b/mmv1/products/iap/ForwardingRuleService.yaml deleted file mode 100644 index c65f35deb029..000000000000 --- a/mmv1/products/iap/ForwardingRuleService.yaml +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2025 Google Inc. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'WebForwardingRuleService' -description: | - Only used to generate IAM resources -# This resource is only used to generate IAM resources. They do not correspond to real -# GCP resources, and should not be used to generate anything other than IAM support. -exclude_resource: true -docs: -id_format: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' -base_url: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' -self_link: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' -import_format: - - 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -iam_policy: - method_name_separator: ':' - parent_resource_type: 'google_compute_global_forwarding_rule' - fetch_iam_policy_verb: 'POST' - allowed_iam_role: 'roles/iap.httpsResourceAccessor' - parent_resource_attribute: 'forwarding_rule_service_name' - iam_conditions_request_type: 'REQUEST_BODY' - example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' -custom_code: -exclude_tgc: true -examples: - - name: 'forwarding_rule_service_basic' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-forwarding-rule-service%s", context["random_suffix"])' - vars: - forwarding_rule_service_name: 'forwarding-rule-service' - target_http_proxy_name: 'target-http-proxy-name' - url_map_name: 'url-map-name' - 
backend_service_name: 'backend-service-name' - health_check_name: 'health-check-name' -parameters: -properties: - - name: 'name' - type: String - description: Name or self link of a forwarding rule service. - required: true diff --git a/mmv1/products/identityplatform/Tenant.yaml b/mmv1/products/identityplatform/Tenant.yaml index 316f37493103..97255b481867 100644 --- a/mmv1/products/identityplatform/Tenant.yaml +++ b/mmv1/products/identityplatform/Tenant.yaml @@ -31,6 +31,8 @@ timeouts: insert_minutes: 20 update_minutes: 20 delete_minutes: 20 +custom_code: + post_create: 'templates/terraform/post_create/gcip_tenant.go.tmpl' examples: - name: 'identity_platform_tenant_basic' primary_resource_id: 'tenant' diff --git a/mmv1/products/integrationconnectors/Connection.yaml b/mmv1/products/integrationconnectors/Connection.yaml index 6b8efd70e2be..b156e7b33787 100644 --- a/mmv1/products/integrationconnectors/Connection.yaml +++ b/mmv1/products/integrationconnectors/Connection.yaml @@ -306,6 +306,12 @@ properties: type: NestedObject description: | User password for Authentication. + exactly_one_of: + - 'auth_config.0.user_password' + - 'auth_config.0.oauth2_jwt_bearer' + - 'auth_config.0.oauth2_client_credentials' + - 'auth_config.0.ssh_public_key' + - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'username' type: String @@ -327,6 +333,12 @@ properties: type: NestedObject description: | OAuth2 JWT Bearer for Authentication. + exactly_one_of: + - 'auth_config.0.user_password' + - 'auth_config.0.oauth2_jwt_bearer' + - 'auth_config.0.oauth2_client_credentials' + - 'auth_config.0.ssh_public_key' + - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'clientKey' type: NestedObject @@ -362,6 +374,12 @@ properties: type: NestedObject description: | OAuth3 Client Credentials for Authentication. 
+ exactly_one_of: + - 'auth_config.0.user_password' + - 'auth_config.0.oauth2_jwt_bearer' + - 'auth_config.0.oauth2_client_credentials' + - 'auth_config.0.ssh_public_key' + - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'clientId' type: String @@ -383,6 +401,12 @@ properties: type: NestedObject description: | SSH Public Key for Authentication. + exactly_one_of: + - 'auth_config.0.user_password' + - 'auth_config.0.oauth2_jwt_bearer' + - 'auth_config.0.oauth2_client_credentials' + - 'auth_config.0.ssh_public_key' + - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'username' type: String @@ -419,6 +443,12 @@ properties: type: NestedObject description: | Parameters to support Oauth 2.0 Auth Code Grant Authentication. + exactly_one_of: + - 'auth_config.0.user_password' + - 'auth_config.0.oauth2_jwt_bearer' + - 'auth_config.0.oauth2_client_credentials' + - 'auth_config.0.ssh_public_key' + - 'auth_config.0.oauth2_auth_code_flow' properties: - name: 'clientId' type: String @@ -539,16 +569,6 @@ properties: description: | Enabled represents whether logging is enabled or not for a connection. required: true - - name: 'level' - type: Enum - description: | - Log configuration level. - default_from_api: true - enum_values: - - 'LOG_LEVEL_UNSPECIFIED' - - 'ERROR' - - 'INFO' - - 'DEBUG' - name: 'sslConfig' type: NestedObject description: | diff --git a/mmv1/products/kms/AutokeyConfig.yaml b/mmv1/products/kms/AutokeyConfig.yaml index 3f6b5bfdc208..d25d82d5fb73 100644 --- a/mmv1/products/kms/AutokeyConfig.yaml +++ b/mmv1/products/kms/AutokeyConfig.yaml @@ -22,6 +22,7 @@ description: | ~> **Note:** AutokeyConfigs cannot be deleted from Google Cloud Platform. 
Destroying a Terraform-managed AutokeyConfig will remove it from state but *will not delete the resource from the project.* +min_version: 'beta' references: guides: 'Cloud KMS with Autokey': 'https://cloud.google.com/kms/docs/kms-with-autokey' @@ -58,7 +59,6 @@ exclude_sweeper: true examples: - name: 'kms_autokey_config_all' primary_resource_id: 'example-autokeyconfig' - # test depends upon google_project_service_identity service which is still in beta, so we need to keep test limited to beta min_version: 'beta' vars: folder_name: 'my-folder' @@ -72,6 +72,7 @@ parameters: type: String description: | The folder for which to retrieve config. + min_version: 'beta' url_param_only: true required: true immutable: true @@ -83,7 +84,4 @@ properties: The target key project for a given folder where KMS Autokey will provision a CryptoKey for any new KeyHandle the Developer creates. Should have the form `projects/`. - - name: 'etag' - type: String - description: 'The etag of the AutokeyConfig for optimistic concurrency control.' - output: true + min_version: 'beta' diff --git a/mmv1/products/kms/CryptoKey.yaml b/mmv1/products/kms/CryptoKey.yaml index 9f00694f76a6..f43efc93960e 100644 --- a/mmv1/products/kms/CryptoKey.yaml +++ b/mmv1/products/kms/CryptoKey.yaml @@ -75,9 +75,8 @@ parameters: If set to true, the request will create a CryptoKey without any CryptoKeyVersions. You must use the `google_kms_crypto_key_version` resource to create a new CryptoKeyVersion or `google_kms_key_ring_import_job` resource to import the CryptoKeyVersion. - This field is only applicable during initial CryptoKey creation. 
url_param_only: true - ignore_read: true + immutable: true properties: - name: 'name' type: String diff --git a/mmv1/products/kms/KeyHandle.yaml b/mmv1/products/kms/KeyHandle.yaml index 7d4e968958af..a65e9206c0b9 100644 --- a/mmv1/products/kms/KeyHandle.yaml +++ b/mmv1/products/kms/KeyHandle.yaml @@ -21,6 +21,7 @@ description: | ~> **Note:** KeyHandles cannot be deleted from Google Cloud Platform. Destroying a Terraform-managed KeyHandle will remove it from state but *will not delete the resource from the project.* +min_version: 'beta' references: guides: 'Cloud KMS with Autokey': 'https://cloud.google.com/kms/docs/kms-with-autokey' @@ -51,7 +52,6 @@ custom_code: examples: - name: 'kms_key_handle_basic' primary_resource_id: 'example-keyhandle' - # test depends upon google_project_service_identity service which is still in beta, so we need to keep test limited to beta min_version: 'beta' vars: folder_name: 'my-folder' @@ -67,6 +67,7 @@ parameters: description: | The location for the KeyHandle. A full list of valid locations can be found by running `gcloud kms locations list`. + min_version: 'beta' url_param_only: true required: true properties: @@ -74,6 +75,7 @@ properties: type: String description: | The resource name for the KeyHandle. + min_version: 'beta' required: true immutable: true - name: 'kmsKey' @@ -82,11 +84,13 @@ properties: A reference to a Cloud KMS CryptoKey that can be used for CMEK in the requested product/project/location, for example `projects/1/locations/us-east1/keyRings/foo/cryptoKeys/bar-ffffff` + min_version: 'beta' output: true - name: 'resourceTypeSelector' type: String description: | Selector of the resource type where we want to protect resources. For example, `storage.googleapis.com/Bucket`. 
+ min_version: 'beta' required: true immutable: true diff --git a/mmv1/products/logging/Metric.yaml b/mmv1/products/logging/Metric.yaml index 04a748a71323..6689245b2927 100644 --- a/mmv1/products/logging/Metric.yaml +++ b/mmv1/products/logging/Metric.yaml @@ -36,6 +36,7 @@ timeouts: update_minutes: 20 delete_minutes: 20 custom_code: + post_create: 'templates/terraform/post_create/set_computed_name.tmpl' custom_import: 'templates/terraform/custom_import/self_link_as_name.tmpl' examples: - name: 'logging_metric_basic' diff --git a/mmv1/products/looker/Instance.yaml b/mmv1/products/looker/Instance.yaml index 228836f05401..e0e7d93acb77 100644 --- a/mmv1/products/looker/Instance.yaml +++ b/mmv1/products/looker/Instance.yaml @@ -409,9 +409,6 @@ properties: - LOOKER_CORE_NONPROD_STANDARD_ANNUAL: nonprod subscription standard instance - LOOKER_CORE_NONPROD_ENTERPRISE_ANNUAL: nonprod subscription enterprise instance - LOOKER_CORE_NONPROD_EMBED_ANNUAL: nonprod subscription embed instance - - LOOKER_CORE_TRIAL_STANDARD: A standard trial edition of Looker (Google Cloud core) product. - - LOOKER_CORE_TRIAL_ENTERPRISE: An enterprise trial edition of Looker (Google Cloud core) product. - - LOOKER_CORE_TRIAL_EMBED: An embed trial edition of Looker (Google Cloud core) product. 
immutable: true default_value: "LOOKER_CORE_TRIAL" enum_values: @@ -423,9 +420,6 @@ properties: - 'LOOKER_CORE_NONPROD_STANDARD_ANNUAL' - 'LOOKER_CORE_NONPROD_ENTERPRISE_ANNUAL' - 'LOOKER_CORE_NONPROD_EMBED_ANNUAL' - - 'LOOKER_CORE_TRIAL_STANDARD' - - 'LOOKER_CORE_TRIAL_ENTERPRISE' - - 'LOOKER_CORE_TRIAL_EMBED' - name: 'privateIpEnabled' type: Boolean description: | diff --git a/mmv1/products/lustre/Instance.yaml b/mmv1/products/lustre/Instance.yaml index 5d12dbee9ad7..2e190dfc347a 100644 --- a/mmv1/products/lustre/Instance.yaml +++ b/mmv1/products/lustre/Instance.yaml @@ -14,10 +14,6 @@ --- name: Instance description: A Managed Lustre instance -references: - guides: - 'Official Documentation': 'https://cloud.google.com/managed-lustre/docs/create-instance' - api: 'https://cloud.google.com/managed-lustre/docs/reference/rest/v1/projects.locations.instances' base_url: projects/{{project}}/locations/{{location}}/instances update_mask: true self_link: projects/{{project}}/locations/{{location}}/instances/{{instance_id}} @@ -26,11 +22,7 @@ update_verb: PATCH id_format: projects/{{project}}/locations/{{location}}/instances/{{instance_id}} import_format: - projects/{{project}}/locations/{{location}}/instances/{{instance_id}} -sweeper: - url_substitutions: - - location: "us-central1-a" examples: - - name: lustre_instance_basic primary_resource_id: 'instance' vars: @@ -44,7 +36,7 @@ autogen_async: true async: operation: timeouts: - insert_minutes: 120 + insert_minutes: 20 update_minutes: 20 delete_minutes: 20 base_url: '{{op_id}}' @@ -67,7 +59,7 @@ parameters: - name: instanceId type: String description: |- - The name of the Managed Lustre instance. + Required. The name of the Managed Lustre instance. * Must contain only lowercase letters, numbers, and hyphens. * Must start with a letter. 
@@ -77,41 +69,10 @@ parameters: url_param_only: true required: true properties: - - name: capacityGib - type: String - description: |- - The storage capacity of the instance in gibibytes (GiB). Allowed values - are from `18000` to `954000`, in increments of 9000. - required: true - - name: updateTime - type: String - description: Timestamp when the instance was last updated. - output: true - - name: gkeSupportEnabled - type: Boolean - description: |- - Indicates whether you want to enable support for GKE clients. By default, - GKE clients are not supported. - - name: filesystem - type: String - description: |- - The filesystem name for this instance. This name is used by client-side - tools, including when mounting the instance. Must be eight characters or - less and can only contain letters and numbers. - immutable: true - required: true - - name: network - type: String - description: |- - The full name of the VPC network to which the instance is connected. - Must be in the format - `projects/{project_id}/global/networks/{network_name}`. - immutable: true - required: true - name: state type: String description: |- - The state of the instance. + Output only. The state of the instance. Possible values: STATE_UNSPECIFIED ACTIVE @@ -123,26 +84,50 @@ properties: output: true - name: mountPoint type: String - description: Mount point of the instance in the format `IP_ADDRESS@tcp:/FILESYSTEM`. + description: Output only. Mount point of the instance in the format `IP_ADDRESS@tcp:/FILESYSTEM`. output: true + - name: labels + type: KeyValueLabels + description: Optional. Labels as key value pairs. + - name: capacityGib + type: String + description: |- + Required. The storage capacity of the instance in gibibytes (GiB). Allowed values + are from 18000 to 954000, in increments of 9000. + required: true - name: createTime type: String - description: Timestamp when the instance was created. + description: Output only. Timestamp when the instance was created. 
output: true - - name: description + - name: updateTime type: String - description: A user-readable description of the instance. - - name: labels - type: KeyValueLabels - description: Labels as key value pairs. - - name: perUnitStorageThroughput + description: Output only. Timestamp when the instance was last updated. + output: true + - name: description type: String + description: Optional. A user-readable description of the instance. + - name: gkeSupportEnabled + type: Boolean description: |- - The throughput of the instance in MB/s/TiB. - Valid values are 125, 250, 500, 1000. - required: true - immutable: true + Optional. Indicates whether you want to enable support for GKE clients. By default, + GKE clients are not supported. - name: name type: String description: Identifier. The name of the instance. output: true + - name: filesystem + type: String + description: |- + Required. Immutable. The filesystem name for this instance. This name is used by client-side + tools, including when mounting the instance. Must be 8 characters or less + and may only contain letters and numbers. + immutable: true + required: true + - name: network + type: String + description: |- + Required. Immutable. The full name of the VPC network to which the instance is connected. + Must be in the format + `projects/{project_id}/global/networks/{network_name}`. + immutable: true + required: true diff --git a/mmv1/products/managedkafka/Acl.yaml b/mmv1/products/managedkafka/Acl.yaml deleted file mode 100644 index 3aab9dff1067..000000000000 --- a/mmv1/products/managedkafka/Acl.yaml +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -# API resource name -name: 'Acl' -# Resource description for the provider documentation. -description: | - A Managed Service for Apache Kafka ACL. Apache Kafka is a trademark owned by the Apache Software Foundation. - -docs: -id_format: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls/{{acl_id}}' -base_url: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls' -self_link: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls/{{acl_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls?aclId={{acl_id}}' -update_verb: 'PATCH' -update_mask: true -import_format: - - 'projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/acls/{{%acl_id}}' - -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - -custom_code: - post_create: 'templates/terraform/post_create/sleep.go.tmpl' - post_update: 'templates/terraform/post_create/sleep.go.tmpl' - -examples: - - name: 'managedkafka_acl_basic' - primary_resource_id: 'example' - vars: - acl_id: 'topic/mytopic' - cluster_id: 'my-cluster' - test_vars_overrides: - 'acl_id': '"topic/mytopic"' - -parameters: - - name: 'location' - type: String - description: "ID of the location of the Kafka resource. See - https://cloud.google.com/managed-kafka/docs/locations for a list of - supported locations." - url_param_only: true - required: true - immutable: true - - name: 'cluster' - type: String - description: "The cluster name." 
- url_param_only: true - required: true - immutable: true - - name: 'aclId' - type: String - description: "The ID to use for the acl, which will become the final - component of the acl's name. The structure of `aclId` defines the Resource Pattern (resource_type, - resource_name, pattern_type) of the acl. `aclId` is structured like one of the following: - - For acls on the cluster: - `cluster` - - For acls on a single resource within the cluster: - `topic/{resource_name}` - `consumerGroup/{resource_name}` - `transactionalId/{resource_name}` - - For acls on all resources that match a prefix: - `topicPrefixed/{resource_name}` - `consumerGroupPrefixed/{resource_name}` - `transactionalIdPrefixed/{resource_name}` - - For acls on all resources of a given type (i.e. the wildcard literal '*''): - `allTopics` (represents `topic/*`) - `allConsumerGroups` (represents `consumerGroup/*`) - `allTransactionalIds` (represents `transactionalId/*`)." - url_param_only: true - required: true - immutable: true - -properties: - - name: 'name' - type: String - description: "The name of the acl. The `ACL_ID` segment is used when - connecting directly to the cluster. Must be in the format `projects/PROJECT_ID/locations/LOCATION/clusters/CLUSTER_ID/acls/ACL_ID`." - output: true - - name: 'aclEntries' - type: Array - is_set: true - required: true - description: "The acl entries that apply to the resource pattern. The maximum number of allowed - entries is 100." - item_type: - type: NestedObject - properties: - - name: 'principal' - type: String - description: 'The principal. Specified as Google Cloud account, with the Kafka - StandardAuthorizer prefix User:". For example: "User:test-kafka-client@test-project.iam.gserviceaccount.com". - Can be the wildcard "User:*" to refer to all users.' - required: true - - name: 'permissionType' - type: String - default_value: "ALLOW" - description: 'The permission type. Accepted values are (case insensitive): ALLOW, DENY.' 
- - name: 'operation' - type: String - description: | - The operation type. Allowed values are (case insensitive): ALL, READ, - WRITE, CREATE, DELETE, ALTER, DESCRIBE, CLUSTER_ACTION, DESCRIBE_CONFIGS, - ALTER_CONFIGS, and IDEMPOTENT_WRITE. See https://kafka.apache.org/documentation/#operations_resources_and_protocols - for valid combinations of resource_type and operation for different Kafka API requests. - required: true - - name: 'host' - type: String - default_value: "*" - description: 'The host. Must be set to "*" for Managed Service for Apache Kafka.' - - name: 'etag' - type: Fingerprint - output: true - description: | - `etag` is used for concurrency control. An `etag` is returned in the - response to `GetAcl` and `CreateAcl`. Callers are required to put that etag - in the request to `UpdateAcl` to ensure that their change will be applied - to the same version of the acl that exists in the Kafka Cluster. - - A terminal 'T' character in the etag indicates that the AclEntries were - truncated due to repeated field limits. - - name: 'resourceType' - type: String - description: | - The acl resource type derived from the name. One of: CLUSTER, TOPIC, GROUP, TRANSACTIONAL_ID. - output: true - - name: 'resourceName' - type: String - description: | - The acl resource name derived from the name. For cluster resource_type, this is always "kafka-cluster". Can be the wildcard literal "*". - output: true - - name: 'patternType' - type: String - description: "The acl pattern type derived from the name. One of: LITERAL, PREFIXED." 
- output: true diff --git a/mmv1/products/managedkafka/Cluster.yaml b/mmv1/products/managedkafka/Cluster.yaml index 1cede7806e0e..a6b23a066ccd 100644 --- a/mmv1/products/managedkafka/Cluster.yaml +++ b/mmv1/products/managedkafka/Cluster.yaml @@ -44,11 +44,6 @@ examples: cluster_id: 'my-cluster' key_name: 'example-key' keyring_name: 'example-keyring' - - name: 'managedkafka_cluster_mtls' - primary_resource_id: 'example' - vars: - cluster_id: 'my-cluster' - ca_pool_id: 'my-ca-pool' - name: 'managedkafka_cluster_cmek' primary_resource_id: 'example' min_version: 'beta' @@ -156,34 +151,3 @@ properties: type: String description: "The current state of the cluster. Possible values: `STATE_UNSPECIFIED`, `CREATING`, `ACTIVE`, `DELETING`." output: true - - name: 'tlsConfig' - type: NestedObject - default_from_api: true - description: "TLS configuration for the Kafka cluster. This is used to configure mTLS authentication. To clear our a TLS configuration that has been previously set, please explicitly add an empty `tls_config` block." - properties: - - name: 'trustConfig' - type: NestedObject - allow_empty_object: true - description: "The configuration of the broker truststore. If specified, clients can use mTLS for authentication." - properties: - - name: 'casConfigs' - type: Array - description: "Configuration for the Google Certificate Authority Service. To support mTLS, you must specify at least one `cas_configs` block. A maximum of 10 CA pools can be specified. Additional CA pools may be specified with additional `cas_configs` blocks." - item_type: - type: NestedObject - properties: - - name: 'caPool' - type: String - description: "The name of the CA pool to pull CA certificates from. The CA pool does not need - to be in the same project or location as the Kafka cluster. Must be in the format `projects/PROJECT_ID/locations/LOCATION/caPools/CA_POOL_ID." 
- required: true - diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - - name: 'sslPrincipalMappingRules' - type: String - description: "The rules for mapping mTLS certificate Distinguished Names (DNs) to - shortened principal names for Kafka ACLs. This field corresponds exactly - to the ssl.principal.mapping.rules broker config and matches the format - and syntax defined in the Apache Kafka documentation. Setting or - modifying this field will trigger a rolling restart of the Kafka - brokers to apply the change. An empty string means that the default - Kafka behavior is used. Example: `RULE:^CN=(.?),OU=ServiceUsers.$/$1@example.com/,DEFAULT`" diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 9d937470dc09..7307a3e961e1 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -56,21 +56,14 @@ examples: 'prevent_destroy': 'false' - name: 'memorystore_instance_full' primary_resource_id: 'instance-full' - bootstrap_iam: - - member: "serviceAccount:service-{project_number}@gcp-sa-memorystore.iam.gserviceaccount.com" - role: "roles/cloudkms.cryptoKeyEncrypterDecrypter" vars: instance_name: 'full-instance' policy_name: 'my-policy' subnet_name: 'my-subnet' network_name: 'my-network' prevent_destroy: 'true' - kms_key_name: 'my-key' test_vars_overrides: 'prevent_destroy': 'false' - 'kms_key_name': 'acctest.BootstrapKMSKeyInLocation(t, "us-central1").CryptoKey.Name' - ignore_read_extra: - - 'update_time' - name: 'memorystore_instance_persistence_aof' primary_resource_id: 'instance-persistence-aof' vars: @@ -108,34 +101,10 @@ examples: 'secondary_instance_prevent_destroy': 'false' virtual_fields: - name: 'desired_psc_auto_connections' - description: "`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead `terraform import` will only work with desired_auto_created_endpoints`." 
- deprecation_message: '`desired_psc_auto_connections` is deprecated. Use `desired_auto_created_endpoints` instead. `terraform import` will only work with desired_auto_created_endpoints`.' - type: Array - immutable: true - conflicts: - - desiredAutoCreatedEndpoints - item_type: - type: NestedObject - properties: - - type: String - name: project_id - description: - "Required. The consumer project_id where the forwarding rule is - created from. " - required: true - - type: String - name: network - description: - "Required. The consumer network where the IP address resides, in - the form of\nprojects/{project_id}/global/networks/{network_id}. " - required: true - - name: 'desired_auto_created_endpoints' description: "Immutable. User inputs for the auto-created - endpoints connections. " + PSC connections. " type: Array immutable: true - conflicts: - - desiredPscAutoConnections item_type: type: NestedObject properties: @@ -265,7 +234,6 @@ properties: "Optional. Number of replica nodes per shard. If omitted the default is 0 replicas. " default_from_api: true - send_empty_value: true - name: 'authorizationMode' type: String description: @@ -287,14 +255,8 @@ properties: - name: 'discoveryEndpoints' type: Array description: - "Deprecated. Output only. Endpoints clients can connect to the instance through." - deprecation_message: - This field is deprecated. As a result it will not be populated - if the connections are created using `desired_auto_created_endpoints` - parameter or `google_memorystore_instance_desired_user_created_endpoints` - resource. Instead of this parameter, for discovery, use - `endpoints.connections.pscConnection` and `endpoints.connections.pscAutoConnection` - with `connectionType` CONNECTION_TYPE_DISCOVERY. + "Output only. Endpoints clients can connect to the instance through. + Currently only one\ndiscovery endpoint is supported. " output: true item_type: type: NestedObject @@ -713,7 +675,6 @@ properties: description: "Output only. 
User inputs and resource details of the auto-created PSC connections. " - deprecation_message: '`psc_auto_connections` is deprecated Use `endpoints.connections.pscAutoConnections` instead.' output: true item_type: type: NestedObject @@ -789,7 +750,7 @@ properties: is_set: true description: | URIs of the GCS objects to import. - Example: gs://bucket1/object1, gs://bucket2/folder2/object2 + Example: gs://bucket1/object1, gs//bucket2/folder2/object2 immutable: true required: true item_type: @@ -805,7 +766,7 @@ properties: - name: 'backup' type: String description: | - Example: `projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup}`. + Example: //memorystore.googleapis.com/projects/{project}/locations/{location}/backups/{backupId}. In this case, it assumes the backup is under memorystore.googleapis.com. immutable: true required: true - name: 'backupCollection' @@ -814,25 +775,3 @@ properties: The backup collection full resource name. Example: projects/{project}/locations/{location}/backupCollections/{collection} output: true - - name: kmsKey - type: String - description: The KMS key used to encrypt the at-rest data of the cluster - immutable: true - - name: 'managedServerCa' - type: NestedObject - output: true - description: Instance's Certificate Authority. 
This field will only be populated if instance's transit_encryption_mode is SERVER_AUTHENTICATION - properties: - - name: 'caCerts' - type: Array - output: true - description: The PEM encoded CA certificate chains for managed server authentication - item_type: - type: NestedObject - properties: - - name: 'certificates' - type: Array - output: true - description: The certificates that form the CA chain, from leaf to root order - item_type: - type: String diff --git a/mmv1/products/metastore/Service.yaml b/mmv1/products/metastore/Service.yaml index af8aab692d4d..d10d902d95e5 100644 --- a/mmv1/products/metastore/Service.yaml +++ b/mmv1/products/metastore/Service.yaml @@ -71,8 +71,6 @@ examples: primary_resource_id: 'default' vars: metastore_service_name: 'example-service' - kms_key_name: 'example-key' - test_vars_overrides: 'kms_key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-metastore-service-key1").CryptoKey.Name' exclude_docs: true skip_vcr: true @@ -522,3 +520,11 @@ properties: enum_values: - 'LEGACY' - 'JSON' + - name: 'tags' + type: KeyValuePairs + description: | + A map of resource manager tags. + Resource manager tag keys and values have the same definition as resource manager tags. + Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_value_id}. + immutable: true + ignore_read: true diff --git a/mmv1/products/modelarmor/Template.yaml b/mmv1/products/modelarmor/Template.yaml deleted file mode 100644 index fa564361710d..000000000000 --- a/mmv1/products/modelarmor/Template.yaml +++ /dev/null @@ -1,266 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Template -description: | - A `Template` is a resource of Model Armor that lets you configure how Model Armor screens prompts and responses. - It functions as sets of customized filters and thresholds for different safety and security confidence levels, allowing control over what content is flagged. -base_url: projects/{{project}}/locations/{{location}}/templates -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/templates/{{template_id}} -create_url: projects/{{project}}/locations/{{location}}/templates?templateId={{template_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/{{location}}/templates/{{template_id}} -import_format: - - projects/{{project}}/locations/{{location}}/templates/{{template_id}} -examples: - - name: 'modelarmor_template_basic' - primary_resource_id: 'template-basic' - test_vars_overrides: - templateId: '"modelarmor1"' - location: '"us-central1"' - - name: 'modelarmor_template_filter_config' - primary_resource_id: 'template-filter-config' - test_vars_overrides: - templateId: '"modelarmor2"' - location: '"us-central1"' - filter_config_rai_settings_rai_filters_0_filter_type: '"HATE_SPEECH"' - filter_config_rai_settings_rai_filters_0_confidence_level: '"HIGH"' - sdp_settings_config_type: '"basic_config"' - filter_config_sdp_settings_basic_config_filter_enforcement: '"ENABLED"' - filter_config_pi_and_jailbreak_filter_settings_filter_enforcement: '"ENABLED"' - filter_config_pi_and_jailbreak_filter_settings_confidence_level: '"MEDIUM_AND_ABOVE"' - 
filter_config_malicious_uri_filter_settings_filter_enforcement: '"ENABLED"' - template_metadata_multi_language_detection_enable_multi_language_detection: false - - name: 'modelarmor_template_template_metadata' - primary_resource_id: 'template-template-metadata' - test_vars_overrides: - templateId: '"modelarmor3"' - location: '"us-central1"' - filter_config_rai_settings_rai_filters_0_filter_type: '"HARASSMENT"' - filter_config_rai_settings_rai_filters_0_confidence_level: '"MEDIUM_AND_ABOVE"' - template_metadata_log_template_operations: true - template_metadata_log_sanitize_operations: false - template_metadata_multi_language_detection_enable_multi_language_detection: true - template_metadata_ignore_partial_invocation_failures: false - template_metadata_custom_llm_response_safety_error_message: '"This is a custom error message for LLM response"' - template_metadata_custom_prompt_safety_error_code: 400 - template_metadata_custom_prompt_safety_error_message: '"This is a custom error message for prompt"' - template_metadata_custom_llm_response_safety_error_code: 401 - template_metadata_enforcement_type: '"INSPECT_ONLY"' - - name: 'modelarmor_template_label' - primary_resource_id: 'template-label-advanced-config' - test_vars_overrides: - templateId: '"modelarmor4"' - location: '"us-central1"' - label_test_label: '"template-test-label"' - filter_config_rai_settings_rai_filters_0_filter_type: '"DANGEROUS"' - filter_config_rai_settings_rai_filters_0_confidence_level: '"MEDIUM_AND_ABOVE"' - sdp_settings_config_type: '"advanced_config"' - filter_config_sdp_settings_advanced_config_inspect_template: '"projects/llm-firewall-demo/locations/us-central1/inspectTemplates/t3"' - filter_config_sdp_settings_advanced_config_deidentify_template: '"projects/llm-firewall-demo/locations/us-central1/deidentifyTemplates/t2"' - filter_config_sdp_settings_basic_config_filter_enforcement: '"ENABLED"' - template_metadata_multi_language_detection_enable_multi_language_detection: false 
-autogen_status: VGVtcGxhdGU= -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: templateId - type: String - description: |- - Id of the requesting object - If auto-generating Id server-side, remove this field and - template_id from the method_signature of Create RPC - immutable: true - url_param_only: true - required: true -properties: - - name: name - type: String - description: Identifier. name of resource - output: true - - name: createTime - type: String - description: 'Create time stamp' - output: true - - name: updateTime - type: String - description: 'Update time stamp' - output: true - - name: labels - type: KeyValueLabels - description: Labels as key value pairs - - name: filterConfig - type: NestedObject - description: Filters configuration. - required: true - send_empty_value: true - allow_empty_object: true - properties: - - name: maliciousUriFilterSettings - type: NestedObject - description: Malicious URI filter settings. - properties: - - name: filterEnforcement - type: String - description: |- - Tells whether the Malicious URI filter is enabled or disabled. - Possible values: - ENABLED - DISABLED - - name: raiSettings - type: NestedObject - description: Responsible AI Filter settings. - properties: - - name: raiFilters - type: Array - description: List of Responsible AI filters enabled for template. - required: true - item_type: - type: NestedObject - properties: - - name: filterType - type: String - description: |- - Possible values: - SEXUALLY_EXPLICIT - HATE_SPEECH - HARASSMENT - DANGEROUS - required: true - - name: confidenceLevel - type: String - description: |- - Possible values: - LOW_AND_ABOVE - MEDIUM_AND_ABOVE - HIGH - - name: sdpSettings - type: NestedObject - description: Sensitive Data Protection settings. 
- properties: - - name: advancedConfig - type: NestedObject - conflicts: - - filter_config.0.sdp_settings.0.basic_config - description: Sensitive Data Protection Advanced configuration. - properties: - - name: inspectTemplate - type: String - description: |- - Sensitive Data Protection inspect template resource name - If only inspect template is provided (de-identify template not provided), - then Sensitive Data Protection InspectContent action is performed during - Sanitization. All Sensitive Data Protection findings identified during - inspection will be returned as SdpFinding in SdpInsepctionResult. - e.g:- - `projects/{project}/locations/{location}/inspectTemplates/{inspect_template}` - - name: deidentifyTemplate - type: String - description: |- - Optional Sensitive Data Protection Deidentify template resource name. - If provided then DeidentifyContent action is performed during Sanitization - using this template and inspect template. The De-identified data will - be returned in SdpDeidentifyResult. - Note that all info-types present in the deidentify template must be present - in inspect template. - e.g. - `projects/{project}/locations/{location}/deidentifyTemplates/{deidentify_template}` - - name: basicConfig - type: NestedObject - conflicts: - - filter_config.0.sdp_settings.0.advanced_config - description: Sensitive Data Protection basic configuration. - properties: - - name: filterEnforcement - type: String - description: |- - Tells whether the Sensitive Data Protection basic config is enabled or - disabled. - Possible values: - ENABLED - DISABLED - - name: piAndJailbreakFilterSettings - type: NestedObject - description: Prompt injection and Jailbreak Filter settings. - properties: - - name: filterEnforcement - type: String - description: |- - Tells whether Prompt injection and Jailbreak filter is enabled or - disabled. 
- Possible values: - ENABLED - DISABLED - - name: confidenceLevel - type: String - description: |- - Possible values: - LOW_AND_ABOVE - MEDIUM_AND_ABOVE - HIGH - - name: templateMetadata - type: NestedObject - description: Message describing TemplateMetadata - allow_empty_object: true - properties: - - name: logTemplateOperations - type: Boolean - description: If true, log template crud operations. - - name: logSanitizeOperations - type: Boolean - description: If true, log sanitize operations. - - name: multiLanguageDetection - type: NestedObject - description: Metadata to enable multi language detection via template. - custom_flatten: 'templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl' - properties: - - name: enableMultiLanguageDetection - type: Boolean - description: If true, multi language detection will be enabled. - required: true - send_empty_value: true - - name: ignorePartialInvocationFailures - type: Boolean - description: If true, partial detector failures should be ignored. - - name: customPromptSafetyErrorCode - type: Integer - description: |- - Indicates the custom error code set by the user to be returned to the end - user by the service extension if the prompt trips Model Armor filters. - - name: customPromptSafetyErrorMessage - type: String - description: |- - Indicates the custom error message set by the user to be returned to the - end user if the prompt trips Model Armor filters. - - name: customLlmResponseSafetyErrorCode - type: Integer - description: |- - Indicates the custom error code set by the user to be returned to the end - user if the LLM response trips Model Armor filters. - - name: customLlmResponseSafetyErrorMessage - type: String - description: |- - Indicates the custom error message set by the user to be returned to the - end user if the LLM response trips Model Armor filters. 
- - name: enforcementType - type: String - description: |- - Possible values: - INSPECT_ONLY - INSPECT_AND_BLOCK diff --git a/mmv1/products/modelarmor/product.yaml b/mmv1/products/modelarmor/product.yaml deleted file mode 100644 index 7d842c2933d7..000000000000 --- a/mmv1/products/modelarmor/product.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: ModelArmor -display_name: Model Armor -scopes: - - https://www.googleapis.com/auth/cloud-platform -versions: - - name: ga - base_url: 'https://modelarmor.{{location}}.rep.googleapis.com/v1/' - cai_base_url: "https://modelarmor.googleapis.com/v1/" - - name: beta - base_url: 'https://modelarmor.{{location}}.rep.googleapis.com/v1beta/' - cai_base_url: "https://modelarmor.googleapis.com/v1beta/" diff --git a/mmv1/products/modelarmorglobal/Floorsetting.yaml b/mmv1/products/modelarmorglobal/Floorsetting.yaml deleted file mode 100644 index ef138bfdd1df..000000000000 --- a/mmv1/products/modelarmorglobal/Floorsetting.yaml +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Floorsetting -description: | - Model Armor floor settings define rules that dictate minimum requirements for all Model Armor templates created at a specific point in the Google Cloud resource hierarchy (that is, at an organization, folder, or project level). If there are - multiple floor settings that conflict with each other, the settings lower in the resource hierarchy take precedence - For more information, checkout https://cloud.google.com/security-command-center/docs/model_armor_floor_settings -base_url: '{{parent}}/locations/{{location}}/floorSetting' -update_mask: true -id_format: '{{parent}}/locations/{{location}}/floorSetting' -self_link: '{{parent}}/locations/{{location}}/floorSetting' -create_url: '{{parent}}/locations/{{location}}/floorSetting' -# This is a singleton resource that is already created, so create -# is really an update, and therefore should be PATCHed. -create_verb: 'PATCH' -update_url: '{{parent}}/locations/{{location}}/floorSetting' -update_verb: 'PATCH' -# This is a singleton resource that cannot be deleted, so skip delete. 
-exclude_delete: true -import_format: - - '{{parent}}/locations/{{location}}/floorSetting' -custom_code: - custom_import: 'templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl' - post_create: 'templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl' - post_update: 'templates/terraform/post_create/modelarmor_floorsetting_sleep.go.tmpl' -examples: - # Excluding tests as they modify a singleton resource and hence will result in race conditions - - name: 'modelarmor_floorsetting_basic' - primary_resource_id: 'floorsetting-basic' - test_env_vars: - project_id: 'PROJECT_NAME' - exclude_test: true - - name: 'modelarmor_floorsetting_filter_config' - primary_resource_id: 'floorsetting-filter-config' - test_env_vars: - project_id: 'PROJECT_NAME' - exclude_test: true - - name: 'modelarmor_floorsetting_ai_platform_metadata' - primary_resource_id: 'floorsetting-integrated-metadata' - test_env_vars: - project_id: 'PROJECT_NAME' - exclude_test: true -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: parent - type: String - description: | - Will be any one of these: - - * `projects/{project}` - * `folders/{folder}` - * `organizations/{organizationId}` - immutable: true - url_param_only: true - required: true -properties: - - name: name - type: String - description: Identifier. The resource name. - output: true - - name: createTime - type: String - description: '[Output only] Create timestamp' - output: true - - name: updateTime - type: String - description: '[Output only] Update timestamp' - output: true - - name: filterConfig - type: NestedObject - description: Filters configuration. 
- required: true - send_empty_value: true - allow_empty_object: true - properties: - - name: maliciousUriFilterSettings - type: NestedObject - description: Malicious URI filter settings. - properties: - - name: filterEnforcement - type: String - description: |- - Tells whether the Malicious URI filter is enabled or disabled. - Possible values: - ENABLED - DISABLED - - name: raiSettings - type: NestedObject - description: Responsible AI Filter settings. - properties: - - name: raiFilters - type: Array - description: List of Responsible AI filters enabled for template. - required: true - item_type: - type: NestedObject - properties: - - name: filterType - type: String - description: |- - Possible values: - SEXUALLY_EXPLICIT - HATE_SPEECH - HARASSMENT - DANGEROUS - required: true - - name: confidenceLevel - type: String - description: |- - Possible values: - LOW_AND_ABOVE - MEDIUM_AND_ABOVE - HIGH - - name: sdpSettings - type: NestedObject - description: Sensitive Data Protection settings. - properties: - - name: advancedConfig - type: NestedObject - conflicts: - - filter_config.0.sdp_settings.0.basic_config - description: Sensitive Data Protection Advanced configuration. - properties: - - name: inspectTemplate - type: String - description: |- - Sensitive Data Protection inspect template resource name - - If only inspect template is provided (de-identify template not provided), - then Sensitive Data Protection InspectContent action is performed during - Sanitization. All Sensitive Data Protection findings identified during - inspection will be returned as SdpFinding in SdpInsepctionResult. - - e.g:- - `projects/{project}/locations/{location}/inspectTemplates/{inspect_template}` - - name: deidentifyTemplate - type: String - description: |- - Optional Sensitive Data Protection Deidentify template resource name. - - If provided then DeidentifyContent action is performed during Sanitization - using this template and inspect template. 
The De-identified data will - be returned in SdpDeidentifyResult. - Note that all info-types present in the deidentify template must be present - in inspect template. - - e.g. - `projects/{project}/locations/{location}/deidentifyTemplates/{deidentify_template}` - - name: basicConfig - type: NestedObject - conflicts: - - filter_config.0.sdp_settings.0.advanced_config - description: Sensitive Data Protection basic configuration. - properties: - - name: filterEnforcement - type: String - description: |- - Tells whether the Sensitive Data Protection basic config is enabled or - disabled. - Possible values: - ENABLED - DISABLED - - name: piAndJailbreakFilterSettings - type: NestedObject - description: Prompt injection and Jailbreak Filter settings. - properties: - - name: filterEnforcement - type: String - description: |- - Tells whether Prompt injection and Jailbreak filter is enabled or - disabled. - Possible values: - ENABLED - DISABLED - - name: confidenceLevel - type: String - description: |- - Possible values: - LOW_AND_ABOVE - MEDIUM_AND_ABOVE - HIGH - - name: enableFloorSettingEnforcement - type: Boolean - description: Floor Settings enforcement status. - send_empty_value: true - - name: integratedServices - type: Array - description: List of integrated services for which the floor setting is applicable. - item_type: - type: String - description: |- - Possible values: - AI_PLATFORM - - name: aiPlatformFloorSetting - type: NestedObject - description: AI Platform floor setting. - allow_empty_object: true - properties: - - name: inspectOnly - type: Boolean - description: |- - If true, Model Armor filters will be run in inspect only mode. No action - will be taken on the request. - exactly_one_of: - - ai_platform_floor_setting.0.inspect_only - - ai_platform_floor_setting.0.inspect_and_block - - name: inspectAndBlock - type: Boolean - description: |- - If true, Model Armor filters will be run in inspect and block mode. 
- Requests that trip Model Armor filters will be blocked. - exactly_one_of: - - ai_platform_floor_setting.0.inspect_only - - ai_platform_floor_setting.0.inspect_and_block - - name: enableCloudLogging - type: Boolean - description: If true, log Model Armor filter results to Cloud Logging. - - name: floorSettingMetadata - type: NestedObject - description: Metadata to enable multi language detection via floor setting. - properties: - - name: multiLanguageDetection - type: NestedObject - description: Metadata for multi language detection. - custom_flatten: 'templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl' - properties: - - name: enableMultiLanguageDetection - type: Boolean - description: If true, multi language detection will be enabled. - required: true - send_empty_value: true diff --git a/mmv1/products/modelarmorglobal/product.yaml b/mmv1/products/modelarmorglobal/product.yaml deleted file mode 100644 index 84ab6097edad..000000000000 --- a/mmv1/products/modelarmorglobal/product.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: ModelArmorGlobal -legacy_name: model_armor -display_name: Model Armor -scopes: - - https://www.googleapis.com/auth/cloud-platform -versions: - - name: ga - base_url: 'https://modelarmor.googleapis.com/v1/' - cai_base_url: "https://modelarmor.googleapis.com/v1/" - - name: beta - base_url: 'https://modelarmor.googleapis.com/v1beta/' - cai_base_url: "https://modelarmor.googleapis.com/v1beta/" diff --git a/mmv1/products/monitoring/MetricDescriptor.yaml b/mmv1/products/monitoring/MetricDescriptor.yaml index 7294d5b177b5..1fd268827b59 100644 --- a/mmv1/products/monitoring/MetricDescriptor.yaml +++ b/mmv1/products/monitoring/MetricDescriptor.yaml @@ -156,15 +156,20 @@ properties: The supported units are a subset of The Unified Code for Units of Measure standard. More info can be found in the API documentation (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors). + immutable: true - name: 'description' type: String description: A detailed description of the metric, which can be used in documentation. + required: true + immutable: true - name: 'displayName' type: String description: A concise name for the metric, which can be displayed in user interfaces. Use sentence case without an ending period, for example "Request count". + required: true + immutable: true - name: 'metadata' type: NestedObject description: Metadata which can be used to guide usage of the metric. diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 7cfac9e299c3..67f7fdd0e791 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -247,19 +247,19 @@ properties: type: String description: The password to authenticate. 
exactly_one_of: - - 'http_check.0.auth_info.0.password_wo' - - 'http_check.0.auth_info.0.password' + - 'password' + - 'password_wo' sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' - name: 'passwordWo' type: String description: The password to authenticate. exactly_one_of: - - 'http_check.0.auth_info.0.password_wo' - - 'http_check.0.auth_info.0.password' + - 'passwordWo' + - 'password' required_with: - 'http_check.0.auth_info.0.password_wo_version' - write_only_legacy: true + write_only: true - name: 'passwordWoVersion' type: String immutable: true diff --git a/mmv1/products/netapp/Backup.yaml b/mmv1/products/netapp/Backup.yaml index 5511bd0332de..05828c3088fd 100644 --- a/mmv1/products/netapp/Backup.yaml +++ b/mmv1/products/netapp/Backup.yaml @@ -68,9 +68,8 @@ examples: volume_name: 'backup-volume' backup_vault_name: 'backup-vault' backup_name: 'test-backup' - network_name: 'network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String @@ -144,13 +143,3 @@ properties: Format: `projects/{{projectId}}/locations/{{location}}/volumes/{{volumename}}/snapshots/{{snapshotname}}`` required: false diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - - name: 'volumeRegion' - type: String - description: | - Region of the volume from which the backup was created. - output: true - - name: 'backupRegion' - type: String - description: | - Region in which backup is stored. 
- output: true diff --git a/mmv1/products/netapp/BackupVault.yaml b/mmv1/products/netapp/BackupVault.yaml index a40e43d2a02a..d5b9b2d9dd3d 100644 --- a/mmv1/products/netapp/BackupVault.yaml +++ b/mmv1/products/netapp/BackupVault.yaml @@ -88,57 +88,3 @@ properties: description: | Labels as key value pairs. Example: `{ "owner": "Bob", "department": "finance", "purpose": "testing" }`. required: false - - name: 'backupVaultType' - type: Enum - description: | - Type of the backup vault to be created. Default is IN_REGION. - enum_values: - - 'BACKUP_VAULT_TYPE_UNSPECIFIED' - - 'IN_REGION' - - 'CROSS_REGION' - default_from_api: true - - name: 'backupRegion' - type: String - description: | - Region in which backup is stored. - - name: 'sourceRegion' - type: String - description: | - Region in which the backup vault is created. - output: true - - name: 'sourceBackupVault' - type: String - description: | - Name of the Backup vault created in source region. - output: true - - name: 'destinationBackupVault' - type: String - description: | - Name of the Backup vault created in backup region. - output: true - - name: 'backupRetentionPolicy' - type: NestedObject - description: | - Backup retention policy defining the retention of the backups. - properties: - - name: 'backupMinimumEnforcedRetentionDays' - type: Integer - description: | - Minimum retention duration in days for backups in the backup vault. - required: true - - name: 'dailyBackupImmutable' - type: Boolean - description: | - Indicates if the daily backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. - - name: 'weeklyBackupImmutable' - type: Boolean - description: | - Indicates if the weekly backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. 
- - name: 'monthlyBackupImmutable' - type: Boolean - description: | - Indicates if the monthly backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. - - name: 'manualBackupImmutable' - type: Boolean - description: | - Indicates if the manual backups are immutable. At least one of daily_backup_immutable, weekly_backup_immutable, monthly_backup_immutable and manual_backup_immutable must be true. diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index 084d89c9f67f..be122b485c1f 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -35,6 +35,7 @@ description: | the next apply. You can trigger a manual [zone switch](https://cloud.google.com/netapp/volumes/docs/configure-and-use/storage-pools/edit-or-delete-storage-pool#switch_active_and_replica_zones) via Terraform by swapping the value of the `zone` and `replica_zone` parameters in your HCL code. + Note: Custom Performance FLEX storage pools are supported in beta provider currently. references: guides: @@ -70,20 +71,11 @@ custom_code: exclude_sweeper: true examples: - name: 'Storage_pool_create' - primary_resource_id: 'test_pool' - vars: - pool_name: 'test-pool' - network_name: 'test-network' - test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' - exclude_docs: true - - name: 'Storage_pool_create_doc' primary_resource_id: 'test_pool' vars: pool_name: 'test-pool' network_name: 'test-network' global_name: 'test-address' - exclude_test: true parameters: - name: 'location' type: String @@ -180,46 +172,20 @@ properties: description: | Optional. True if the storage pool supports Auto Tiering enabled volumes. Default is false. 
Auto-tiering can be enabled after storage pool creation but it can't be disabled once enabled. + immutable: true - name: 'customPerformanceEnabled' type: Boolean description: | Optional. True if using Independent Scaling of capacity and performance (Hyperdisk). Default is false. immutable: true + min_version: 'beta' - name: 'totalThroughputMibps' type: String description: | Optional. Custom Performance Total Throughput of the pool (in MiB/s). - default_from_api: true + min_version: 'beta' - name: 'totalIops' type: String description: | Optional. Custom Performance Total IOPS of the pool If not provided, it will be calculated based on the totalThroughputMibps - default_from_api: true - - name: 'hotTierSizeGib' - type: String - description: | - Total hot tier capacity for the Storage Pool. It is applicable only to Flex service level. - It should be less than the minimum storage pool size and cannot be more than the current storage pool size. It cannot be decreased once set. min_version: 'beta' - - name: 'enableHotTierAutoResize' - type: Boolean - send_empty_value: true - ignore_read: true - description: | - Flag indicating that the hot-tier threshold will be auto-increased by 10% of the hot-tier when it hits 100%. Default is true. - The increment will kick in only if the new size after increment is still less than or equal to storage pool size. - min_version: 'beta' - - name: 'qosType' - type: Enum - description: | - QoS (Quality of Service) type of the storage pool. - Possible values are: AUTO, MANUAL. - enum_values: - - 'QOS_TYPE_UNSPECIFIED' - - 'AUTO' - - 'MANUAL' - - name: 'availableThroughputMibps' - type: Double - description: | - Available throughput of the storage pool (in MiB/s). 
- output: true diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 9bd31101c5d6..87c09a8b5343 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -56,7 +56,7 @@ examples: pool_name: 'test-pool' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' ignore_read_extra: - 'deletion_policy' virtual_fields: @@ -490,7 +490,6 @@ properties: type: Boolean description: | Optional. Flag indicating if the volume will be a large capacity volume or a regular volume. - immutable: true - name: 'multipleEndpoints' type: Boolean description: | @@ -508,7 +507,6 @@ properties: update_mask_fields: - 'tiering_policy.cooling_threshold_days' - 'tiering_policy.tier_action' - - 'tiering_policy.hot_tier_bypass_mode_enabled' properties: - name: 'coolingThresholdDays' type: Integer @@ -523,52 +521,3 @@ properties: enum_values: - 'ENABLED' - 'PAUSED' - - name: 'hotTierBypassModeEnabled' - type: Boolean - description: | - Optional. Flag indicating that the hot tier bypass mode is enabled. Default is false. - Only applicable to Flex service level. - min_version: 'beta' - - name: 'hybridReplicationParameters' - type: NestedObject - description: |- - The Hybrid Replication parameters for the volume. - properties: - - name: 'replication' - type: String - description: | - Required. Desired name for the replication of this volume. - - name: 'peerVolumeName' - type: String - description: | - Required. Name of the user's local source volume to be peered with the destination volume. - - name: 'peerClusterName' - type: String - description: | - Required. 
Name of the user's local source cluster to be peered with the destination cluster. - - name: 'peerSvmName' - type: String - description: | - Required. Name of the user's local source vserver svm to be peered with the destination vserver svm. - - name: 'peerIpAddresses' - type: String - description: | - Required. List of node ip addresses to be peered with. - - name: 'clusterLocation' - type: String - description: | - Optional. Name of source cluster location associated with the Hybrid replication. This is a free-form field for the display purpose only. - - name: 'description' - type: String - description: | - Optional. Description of the replication. - - name: 'labels' - type: KeyValuePairs - description: | - Optional. Labels to be added to the replication as the key value pairs. - An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. - - name: 'throughputMibps' - type: Double - description: | - Optional. Custom Performance Total Throughput of the pool (in MiB/s). 
- default_from_api: true diff --git a/mmv1/products/netapp/VolumeQuotaRule.yaml b/mmv1/products/netapp/VolumeQuotaRule.yaml index bd80d2b0fe0c..930ef3c0e7eb 100644 --- a/mmv1/products/netapp/VolumeQuotaRule.yaml +++ b/mmv1/products/netapp/VolumeQuotaRule.yaml @@ -52,7 +52,7 @@ examples: network_name: 'test-network' quota_rule_name: 'test-volume-quota-rule' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/products/netapp/VolumeReplication.yaml b/mmv1/products/netapp/VolumeReplication.yaml index d7f4904eedc7..58ad3df736a7 100644 --- a/mmv1/products/netapp/VolumeReplication.yaml +++ b/mmv1/products/netapp/VolumeReplication.yaml @@ -71,7 +71,7 @@ examples: destination_volume: 'destination-volume' network_name: 'test-network' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' ignore_read_extra: - 'delete_destination_volume' - 'replication_enabled' @@ -297,50 +297,3 @@ properties: type: String description: | An description of this resource. - - name: 'hybridReplicationType' - type: String - description: | - Hybrid replication type. - output: true - - name: 'hybridPeeringDetails' - type: NestedObject - description: |- - HybridPeeringDetails contains details about the hybrid peering. - output: true - properties: - - name: 'subnetIp' - type: String - description: | - Optional. 
IP address of the subnet. - output: true - - name: 'command' - type: String - description: | - Optional. Copy-paste-able commands to be used on user's ONTAP to accept peering requests. - output: true - - name: 'commandExpiryTime' - type: String - description: | - Optional. Expiration time for the peering command to be executed on user's ONTAP. - Uses RFC 3339, where generated output will always be Z-normalized and uses 0, 3, 6 or 9 fractional digits. Offsets other than "Z" are also accepted. - output: true - - name: 'passphrase' - type: String - description: | - Optional. Temporary passphrase generated to accept cluster peering command. - output: true - - name: 'peerVolumeName' - type: String - description: | - Optional. Name of the user's local source volume to be peered with the destination volume. - output: true - - name: 'peerClusterName' - type: String - description: | - Optional. Name of the user's local source cluster to be peered with the destination cluster. - output: true - - name: 'peerSvmName' - type: String - description: | - Optional. Name of the user's local source vserver svm to be peered with the destination vserver svm. 
- output: true diff --git a/mmv1/products/netapp/VolumeSnapshot.yaml b/mmv1/products/netapp/VolumeSnapshot.yaml index 48f0174ae2bd..1180e3898898 100644 --- a/mmv1/products/netapp/VolumeSnapshot.yaml +++ b/mmv1/products/netapp/VolumeSnapshot.yaml @@ -56,7 +56,7 @@ examples: network_name: 'test-network' snap_name: 'testvolumesnap' test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog"))' parameters: - name: 'location' type: String diff --git a/mmv1/products/networkconnectivity/InternalRange.yaml b/mmv1/products/networkconnectivity/InternalRange.yaml index fff15a04e858..f909d72624e7 100644 --- a/mmv1/products/networkconnectivity/InternalRange.yaml +++ b/mmv1/products/networkconnectivity/InternalRange.yaml @@ -71,16 +71,6 @@ examples: internal_range_name: 'migration' network_name: 'internal-ranges' source_subnet_name: 'source-subnet' - - name: 'network_connectivity_internal_ranges_allocation_algoritms' - primary_resource_id: 'default' - vars: - internal_range_name: 'allocation-algorithms' - network_name: 'internal-ranges' - - name: 'network_connectivity_internal_ranges_allocation_algoritms_random_first_n' - primary_resource_id: 'default' - vars: - internal_range_name: 'allocation-algorithms-random-first-n' - network_name: 'internal-ranges' parameters: properties: - name: 'name' @@ -151,30 +141,6 @@ properties: Only IPv4 CIDR ranges are supported. item_type: type: String - - name: 'allocationOptions' - type: NestedObject - description: | - Options for automatically allocating a free range with a size given by prefixLength. 
- immutable: true - properties: - - name: 'allocationStrategy' - type: Enum - enum_values: - - 'RANDOM' - - 'FIRST_AVAILABLE' - - 'RANDOM_FIRST_N_AVAILABLE' - - 'FIRST_SMALLEST_FITTING' - description: | - Optional. Sets the strategy used to automatically find a free range of a size given by prefixLength. Can be set only when trying to create a reservation that automatically finds the free range to reserve. - immutable: true - - name: 'firstAvailableRangesLookupSize' - type: Integer - description: | - Must be set when allocation_strategy is RANDOM_FIRST_N_AVAILABLE, otherwise must remain unset. Defines the size of the set of free ranges from which RANDOM_FIRST_N_AVAILABLE strategy randomy selects one, - in other words it sets the N in the RANDOM_FIRST_N_AVAILABLE. - validation: - function: 'validation.IntAtLeast(1)' - immutable: true - name: 'users' type: Array description: | @@ -217,8 +183,3 @@ properties: may not exist yet. For example /projects/{project}/regions/{region}/subnetworks/{subnet} required: true - - name: 'immutable' - type: Boolean - description: | - Immutable ranges cannot have their fields modified, except for labels and description. - immutable: true diff --git a/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml b/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml index c5d2e90ffa48..2d80d2bd4e53 100644 --- a/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml +++ b/mmv1/products/networkconnectivity/PolicyBasedRoute.yaml @@ -87,7 +87,6 @@ properties: required: true enum_values: - 'IPV4' - - 'IPV6' # probably could have been an enum, but it's a string in the API - name: 'ipProtocol' type: String @@ -97,12 +96,12 @@ properties: - name: 'srcRange' type: String description: | - The source IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0". + The source IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0" if protocol version is IPv4. 
default_value: "0.0.0.0/0" - name: 'destRange' type: String description: | - The destination IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0". + The destination IP range of outgoing packets that this policy-based route applies to. Default is "0.0.0.0/0" if protocol version is IPv4. default_value: "0.0.0.0/0" - name: 'nextHopOtherRoutes' type: Enum diff --git a/mmv1/products/networkconnectivity/RegionalEndpoint.yaml b/mmv1/products/networkconnectivity/RegionalEndpoint.yaml index fd1765d57014..5e809e34fcf1 100644 --- a/mmv1/products/networkconnectivity/RegionalEndpoint.yaml +++ b/mmv1/products/networkconnectivity/RegionalEndpoint.yaml @@ -89,7 +89,7 @@ properties: - name: 'targetGoogleApi' type: String description: | - The service endpoint this private regional endpoint connects to. Format: `{apiname}.{region}.rep.googleapis.com` Example: \"cloudkms.us-central1.rep.googleapis.com\". + The service endpoint this private regional endpoint connects to. Format: `{apiname}.{region}.p.rep.googleapis.com` Example: \"cloudkms.us-central1.p.rep.googleapis.com\". required: true - name: 'network' type: String diff --git a/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml b/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml index abccb1705741..83a39ea98884 100644 --- a/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml +++ b/mmv1/products/networkconnectivity/ServiceConnectionPolicy.yaml @@ -118,29 +118,6 @@ properties: required: true item_type: type: String - - name: 'producerInstanceLocation' - type: Enum - description: | - ProducerInstanceLocation is used to specify which authorization mechanism to use to determine which projects - the Producer instance can be within. 
- default_from_api: true - enum_values: - - 'PRODUCER_INSTANCE_LOCATION_UNSPECIFIED' - - 'CUSTOM_RESOURCE_HIERARCHY_LEVELS' - - name: 'allowedGoogleProducersResourceHierarchyLevel' - type: Array - description: | - List of Projects, Folders, or Organizations from where the Producer instance can be within. For example, - a network administrator can provide both 'organizations/foo' and 'projects/bar' as - allowed_google_producers_resource_hierarchy_levels. This allowlists this network to connect with any Producer - instance within the 'foo' organization or the 'bar' project. By default, - allowedGoogleProducersResourceHierarchyLevel is empty. The format for each - allowedGoogleProducersResourceHierarchyLevel is / where is one of 'projects', 'folders', or 'organizations' - and is either the ID or the number of the resource type. Format for each - allowedGoogleProducersResourceHierarchyLevel value: 'projects/' or 'folders/' or 'organizations/' Eg. - [projects/my-project-id, projects/567, folders/891, organizations/123] - item_type: - type: String - name: 'limit' type: String description: | diff --git a/mmv1/products/networkconnectivity/Spoke.yaml b/mmv1/products/networkconnectivity/Spoke.yaml index 57b3bcb1c965..831d45f6f387 100644 --- a/mmv1/products/networkconnectivity/Spoke.yaml +++ b/mmv1/products/networkconnectivity/Spoke.yaml @@ -226,6 +226,7 @@ properties: type: Array description: The list of router appliance instances required: true + immutable: true item_type: description: The list of router appliance instances type: NestedObject @@ -233,11 +234,13 @@ properties: - name: 'virtualMachine' type: String description: The URI of the virtual machine resource + immutable: true required: true diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - name: 'ipAddress' type: String description: The IP address on the VM to use for peering. 
+ immutable: true required: true - name: 'siteToSiteDataTransfer' type: Boolean @@ -254,14 +257,12 @@ properties: - name: 'linkedVpcNetwork' type: NestedObject description: VPC network that is associated with the spoke. + immutable: true conflicts: - linked_interconnect_attachments - linked_router_appliance_instances - linked_vpn_tunnels - linked_producer_vpc_network - update_mask_fields: - - 'linkedVpcNetwork.excludeExportRanges' - - 'linkedVpcNetwork.includeExportRanges' properties: - name: 'uri' type: String diff --git a/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml b/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml index aeaad17276f0..f03e3ca43683 100644 --- a/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml +++ b/mmv1/products/networkmanagement/VpcFlowLogsConfig.yaml @@ -15,7 +15,7 @@ name: 'VpcFlowLogsConfig' description: VPC Flow Logs Config is a resource that lets you configure - Flow Logs for Networks, Subnets, Interconnect attachments or VPN Tunnels. + Flow Logs for VPC, Interconnect attachments or VPN Tunnels. 
id_format: 'projects/{{project}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' base_url: 'projects/{{project}}/locations/{{location}}/vpcFlowLogsConfigs' self_link: 'projects/{{project}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' @@ -41,6 +41,13 @@ sweeper: url_substitutions: - region: "global" examples: + - name: 'network_management_vpc_flow_logs_config_interconnect_full' + primary_resource_id: 'interconnect-test' + vars: + network_name: 'full-interconnect-test-network' + router_name: 'full-interconnect-test-router' + vpc_flow_logs_config_id: 'full-interconnect-test-id' + interconnect_attachment_name: 'full-interconnect-test-id' - name: 'network_management_vpc_flow_logs_config_interconnect_basic' primary_resource_id: 'interconnect-test' vars: @@ -60,19 +67,18 @@ examples: esp_forwarding_rule_name: 'basic-test-fresp' route_name: 'basic-test-route' vpc_flow_logs_config_id: 'basic-test-id' - - name: 'network_management_vpc_flow_logs_config_network_basic' - primary_resource_id: 'network-test' - min_version: 'beta' - vars: - network_name: 'basic-network-test-network' - vpc_flow_logs_config_id: 'basic-network-test-id' - - name: 'network_management_vpc_flow_logs_config_subnet_basic' - primary_resource_id: 'subnet-test' - min_version: 'beta' + - name: 'network_management_vpc_flow_logs_config_vpn_full' + primary_resource_id: 'vpn-test' vars: - network_name: 'basic-subnet-test-network' - subnetwork_name: 'basic-subnet-test-subnetwork' - vpc_flow_logs_config_id: 'basic-subnet-test-id' + network_name: 'full-test-network' + vpn_tunnel_name: 'full-test-tunnel' + target_vpn_gateway_name: 'full-test-gateway' + address_name: 'full-test-address' + udp500_forwarding_rule_name: 'full-test-fr500' + udp4500_forwarding_rule_name: 'full-test-fr4500' + esp_forwarding_rule_name: 'full-test-fresp' + route_name: 'full-test-route' + vpc_flow_logs_config_id: 'full-test-id' parameters: - name: 'location' type: String @@ -106,14 +112,13 @@ 
properties: default_from_api: true description: | Optional. The state of the VPC Flow Log configuration. Default value - is ENABLED. When creating a new configuration, it must be enabled. - Possible values: STATE_UNSPECIFIED ENABLED DISABLED + is ENABLED. When creating a new configuration, it must be enabled. Possible - name: 'aggregationInterval' type: String default_from_api: true description: | Optional. The aggregation interval for the logs. Default value is - INTERVAL_5_SEC. Possible values: AGGREGATION_INTERVAL_UNSPECIFIED INTERVAL_5_SEC INTERVAL_30_SEC INTERVAL_1_MIN INTERVAL_5_MIN INTERVAL_10_MIN INTERVAL_15_MIN + INTERVAL_5_SEC. Possible values: AGGREGATION_INTERVAL_UNSPECIFIED INTERVAL_5_SEC INTERVAL_30_SEC INTERVAL_1_MIN INTERVAL_5_MIN INTERVAL_10_MIN INTERVAL_15_MIN" - name: 'flowSampling' type: Double default_from_api: true @@ -148,17 +153,6 @@ properties: type: String description: | Traffic will be logged from the VPN Tunnel. Format: projects/{project_id}/regions/{region}/vpnTunnels/{name} - - name: 'subnet' - min_version: 'beta' - type: String - description: | - Traffic will be logged from VMs within the subnetwork. Format: projects/{project_id}/regions/{region}/subnetworks/{name} - - name: 'network' - min_version: 'beta' - type: String - description: | - Traffic will be logged from VMs, VPN tunnels and Interconnect Attachments within the network. Format: projects/{project_id}/global/networks/{name} - - name: 'labels' type: KeyValueLabels description: | @@ -173,13 +167,3 @@ properties: description: | Output only. The time the config was updated. output: true - - name: 'targetResourceState' - type: String - description: |- - Describes the state of the configured target resource for diagnostic - purposes. 
- Possible values: - TARGET_RESOURCE_STATE_UNSPECIFIED - TARGET_RESOURCE_EXISTS - TARGET_RESOURCE_DOES_NOT_EXIST - output: true diff --git a/mmv1/products/networksecurity/AddressGroup.yaml b/mmv1/products/networksecurity/AddressGroup.yaml index f1454643f4e1..e9269dcccc72 100644 --- a/mmv1/products/networksecurity/AddressGroup.yaml +++ b/mmv1/products/networksecurity/AddressGroup.yaml @@ -15,7 +15,7 @@ name: 'AddressGroup' description: | AddressGroup is a resource that specifies how a collection of IP/DNS used in Firewall Policy. - # TODO: change the url to GA once it is available. + # TODO(diogoesteves): change the url to GA once it is available. references: guides: 'Use AddressGroups': 'https://cloud.google.com/vpc/docs/use-address-groups-firewall-policies' @@ -135,7 +135,6 @@ properties: description: | Capacity of the Address Group. required: true - immutable: true - name: 'purpose' type: Array description: | diff --git a/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml b/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml index 27bc817ba6f3..ef0243f2b11a 100644 --- a/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml +++ b/mmv1/products/networksecurity/BackendAuthenticationConfig.yaml @@ -24,6 +24,7 @@ create_url: 'projects/{{project}}/locations/{{location}}/backendAuthenticationCo update_verb: 'PATCH' update_mask: true autogen_async: true +min_version: 'beta' async: actions: ['create', 'delete', 'update'] type: 'OpAsync' @@ -43,12 +44,6 @@ examples: resource_name: 'my-backend-authentication-config' certificate_name: 'my-certificate' trust_config_name: 'my-trust-config' - - name: 'backend_service_tls_settings' - primary_resource_id: 'default' - vars: - backend_service_name: 'backend-service' - health_check_name: 'health-check' - authentication_name: 'authentication' parameters: - name: 'name' diff --git a/mmv1/products/networksecurity/InterceptDeployment.yaml b/mmv1/products/networksecurity/InterceptDeployment.yaml index 
14c349a9183b..3a0942bf0089 100644 --- a/mmv1/products/networksecurity/InterceptDeployment.yaml +++ b/mmv1/products/networksecurity/InterceptDeployment.yaml @@ -18,6 +18,7 @@ description: |- GENEVE-encapsulated traffic, e.g. a zonal instance group fronted by an internal passthrough load balancer. Deployments are always part of a global deployment group which represents a global intercept service. +min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptDeployments/{{intercept_deployment_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptDeployments' @@ -60,6 +61,7 @@ parameters: type: String description: |- The cloud location of the deployment, e.g. `us-central1-a` or `asia-south1-b`. + min_version: 'beta' url_param_only: true required: true immutable: true @@ -68,6 +70,7 @@ parameters: description: |- The ID to use for the new deployment, which will become the final component of the deployment's resource name. + min_version: 'beta' url_param_only: true required: true immutable: true @@ -78,29 +81,34 @@ properties: The resource name of this deployment, for example: `projects/123456789/locations/us-central1-a/interceptDeployments/my-dep`. See https://google.aip.dev/122 for more details. + min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. + min_version: 'beta' - name: 'forwardingRule' type: String description: |- The regional forwarding rule that fronts the interceptors, for example: `projects/123456789/regions/us-central1/forwardingRules/my-rule`. 
See https://google.aip.dev/124. + min_version: 'beta' required: true immutable: true - name: 'interceptDeploymentGroup' @@ -109,6 +117,7 @@ properties: The deployment group that this deployment is a part of, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/124. + min_version: 'beta' required: true immutable: true - name: 'state' @@ -123,6 +132,7 @@ properties: DELETING OUT_OF_SYNC DELETE_FAILED + min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -131,9 +141,11 @@ properties: and the system is working to reconcile them. This part of the normal operation (e.g. linking a new association to the parent group). See https://google.aip.dev/128. + min_version: 'beta' output: true - name: 'description' type: String description: |- User-provided description of the deployment. Used as additional context for the deployment. + min_version: 'beta' diff --git a/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml b/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml index 5678948f59dc..8bfca204c402 100644 --- a/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml +++ b/mmv1/products/networksecurity/InterceptDeploymentGroup.yaml @@ -17,6 +17,7 @@ description: |- A deployment group aggregates many zonal intercept backends (deployments) into a single global intercept service. Consumers can connect this service using an endpoint group. +min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptDeploymentGroups/{{intercept_deployment_group_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptDeploymentGroups' @@ -57,6 +58,7 @@ parameters: type: String description: |- The cloud location of the deployment group, currently restricted to `global`. 
+ min_version: 'beta' url_param_only: true required: true immutable: true @@ -65,6 +67,7 @@ parameters: description: |- The ID to use for the new deployment group, which will become the final component of the deployment group's resource name. + min_version: 'beta' url_param_only: true required: true immutable: true @@ -75,33 +78,39 @@ properties: The resource name of this deployment group, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/122 for more details. + min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. + min_version: 'beta' - name: 'network' type: String description: |- The network that will be used for all child deployments, for example: `projects/{project}/global/networks/{network}`. See https://google.aip.dev/124. + min_version: 'beta' required: true immutable: true - name: 'connectedEndpointGroups' type: Array + min_version: 'beta' description: |- The list of endpoint groups that are connected to this resource. output: true @@ -114,6 +123,7 @@ properties: The connected endpoint group's resource name, for example: `projects/123456789/locations/global/interceptEndpointGroups/my-eg`. See https://google.aip.dev/124. + min_version: 'beta' output: true - name: 'state' type: String @@ -125,6 +135,7 @@ properties: ACTIVE CREATING DELETING + min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -133,17 +144,20 @@ properties: and the system is working to reconcile them. This is part of the normal operation (e.g. 
adding a new deployment to the group) See https://google.aip.dev/128. + min_version: 'beta' output: true - name: 'description' type: String description: |- User-provided description of the deployment group. Used as additional context for the deployment group. + min_version: 'beta' - name: 'locations' type: Array is_set: true description: |- The list of locations where the deployment group is present. + min_version: 'beta' output: true item_type: type: NestedObject @@ -156,9 +170,11 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC + min_version: 'beta' output: true - name: 'location' type: String description: |- The cloud location, e.g. `us-central1-a` or `asia-south1-b`. + min_version: 'beta' output: true diff --git a/mmv1/products/networksecurity/InterceptEndpointGroup.yaml b/mmv1/products/networksecurity/InterceptEndpointGroup.yaml index e72884477419..58e89ba17649 100644 --- a/mmv1/products/networksecurity/InterceptEndpointGroup.yaml +++ b/mmv1/products/networksecurity/InterceptEndpointGroup.yaml @@ -19,6 +19,7 @@ description: |- - An association between their network and the endpoint group. - A security profile that points to the endpoint group. - A firewall rule that references the security profile (group). +min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroups/{{intercept_endpoint_group_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroups' @@ -59,6 +60,7 @@ parameters: type: String description: |- The cloud location of the endpoint group, currently restricted to `global`. + min_version: 'beta' url_param_only: true required: true immutable: true @@ -67,6 +69,7 @@ parameters: description: |- The ID to use for the endpoint group, which will become the final component of the endpoint group's resource name. 
+ min_version: 'beta' url_param_only: true required: true immutable: true @@ -77,29 +80,34 @@ properties: The resource name of this endpoint group, for example: `projects/123456789/locations/global/interceptEndpointGroups/my-eg`. See https://google.aip.dev/122 for more details. + min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. + min_version: 'beta' - name: 'interceptDeploymentGroup' type: String description: |- The deployment group that this endpoint group is connected to, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/124. + min_version: 'beta' required: true immutable: true - name: 'state' @@ -115,6 +123,7 @@ properties: DELETING OUT_OF_SYNC DELETE_FAILED + min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -123,17 +132,20 @@ properties: and the system is working to reconcile them. This is part of the normal operation (e.g. adding a new association to the group). See https://google.aip.dev/128. + min_version: 'beta' output: true - name: description type: String description: |- User-provided description of the endpoint group. Used as additional context for the endpoint group. + min_version: 'beta' - name: associations type: Array is_set: true description: |- List of associations to this endpoint group. 
+ min_version: 'beta' output: true item_type: type: NestedObject @@ -144,6 +156,7 @@ properties: The connected association's resource name, for example: `projects/123456789/locations/global/interceptEndpointGroupAssociations/my-ega`. See https://google.aip.dev/124. + min_version: 'beta' output: true - name: network type: String @@ -151,6 +164,7 @@ properties: The associated network, for example: projects/123456789/global/networks/my-network. See https://google.aip.dev/124. + min_version: 'beta' output: true - name: state type: String @@ -164,11 +178,13 @@ properties: CLOSED OUT_OF_SYNC DELETE_FAILED + min_version: 'beta' output: true - name: connectedDeploymentGroup type: NestedObject description: |- The endpoint group's view of a connected deployment group. + min_version: 'beta' output: true properties: - name: name @@ -177,12 +193,14 @@ properties: The connected deployment group's resource name, for example: `projects/123456789/locations/global/interceptDeploymentGroups/my-dg`. See https://google.aip.dev/124. + min_version: 'beta' output: true - name: locations type: Array is_set: true description: |- The list of locations where the deployment group is present. + min_version: 'beta' output: true item_type: type: NestedObject @@ -200,4 +218,5 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC + min_version: 'beta' output: true diff --git a/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml b/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml index 299d5c5b3311..b54b4e1c4b0e 100644 --- a/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml +++ b/mmv1/products/networksecurity/InterceptEndpointGroupAssociation.yaml @@ -21,6 +21,7 @@ description: |- network to the endpoint group, but does not enable intercept by itself. To enable intercept, the user must also create a network firewall policy containing intercept rules and associate it with the network. 
+min_version: 'beta' docs: id_format: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroupAssociations/{{intercept_endpoint_group_association_id}}' base_url: 'projects/{{project}}/locations/{{location}}/interceptEndpointGroupAssociations' @@ -61,6 +62,7 @@ parameters: type: String description: |- The cloud location of the association, currently restricted to `global`. + min_version: 'beta' url_param_only: true required: true immutable: true @@ -70,6 +72,7 @@ parameters: The ID to use for the new association, which will become the final component of the endpoint group's resource name. If not provided, the server will generate a unique ID. + min_version: 'beta' url_param_only: true immutable: true properties: @@ -79,29 +82,34 @@ properties: The resource name of this endpoint group association, for example: `projects/123456789/locations/global/interceptEndpointGroupAssociations/my-eg-association`. See https://google.aip.dev/122 for more details. + min_version: 'beta' output: true - name: 'createTime' type: String description: |- The timestamp when the resource was created. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'updateTime' type: String description: |- The timestamp when the resource was most recently updated. See https://google.aip.dev/148#timestamps. + min_version: 'beta' output: true - name: 'labels' type: KeyValueLabels description: |- Labels are key/value pairs that help to organize and filter resources. + min_version: 'beta' - name: 'interceptEndpointGroup' type: String description: |- The endpoint group that this association is connected to, for example: `projects/123456789/locations/global/interceptEndpointGroups/my-eg`. See https://google.aip.dev/124. + min_version: 'beta' required: true immutable: true - name: 'network' @@ -110,6 +118,7 @@ properties: The VPC network that is associated. for example: `projects/123456789/global/networks/my-network`. See https://google.aip.dev/124. 
+ min_version: 'beta' required: true immutable: true - name: 'locationsDetails' @@ -120,12 +129,14 @@ properties: of the association itself. deprecation_message: |- `locationsDetails` is deprecated and will be removed in a future major release. Use `locations` instead. + min_version: 'beta' output: true item_type: type: NestedObject properties: - name: 'location' type: String + min_version: 'beta' description: |- The cloud location, e.g. `us-central1-a` or `asia-south1`. output: true @@ -137,6 +148,7 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC + min_version: 'beta' output: true - name: 'state' type: String @@ -150,6 +162,7 @@ properties: CLOSED OUT_OF_SYNC DELETE_FAILED + min_version: 'beta' output: true - name: 'reconciling' type: Boolean @@ -158,6 +171,7 @@ properties: and the system is working to reconcile them. This part of the normal operation (e.g. adding a new location to the target deployment group). See https://google.aip.dev/128. + min_version: 'beta' output: true - name: locations type: Array @@ -165,6 +179,7 @@ properties: description: |- The list of locations where the association is configured. This information is retrieved from the linked endpoint group. + min_version: 'beta' output: true item_type: type: NestedObject @@ -173,6 +188,7 @@ properties: type: String description: |- The cloud location, e.g. `us-central1-a` or `asia-south1-b`. 
+ min_version: 'beta' output: true - name: state type: String @@ -182,4 +198,5 @@ properties: STATE_UNSPECIFIED ACTIVE OUT_OF_SYNC + min_version: 'beta' output: true diff --git a/mmv1/products/networksecurity/ServerTlsPolicy.yaml b/mmv1/products/networksecurity/ServerTlsPolicy.yaml index e7192d07212f..94b868ebbcf0 100644 --- a/mmv1/products/networksecurity/ServerTlsPolicy.yaml +++ b/mmv1/products/networksecurity/ServerTlsPolicy.yaml @@ -41,7 +41,6 @@ async: delete_minutes: 30 result: resource_inside_response: false -include_in_tgc_next_DO_NOT_USE: true custom_code: sweeper: url_substitutions: @@ -53,27 +52,19 @@ examples: primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' - tgc_test_ignore_extra: - - 'allow_open' # Ignore its false value in Terraform configuration - name: 'network_security_server_tls_policy_advanced' primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' - tgc_test_ignore_extra: - - 'allow_open' # Ignore its false value in Terraform configuration - name: 'network_security_server_tls_policy_server_cert' primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' - tgc_test_ignore_extra: - - 'allow_open' # Ignore its false value in Terraform configuration - name: 'network_security_server_tls_policy_mtls' primary_resource_id: 'default' vars: resource_name: 'my-server-tls-policy' trust_config_name: 'my-trust-config' - tgc_test_ignore_extra: - - 'allow_open' # Ignore its false value in Terraform configuration parameters: - name: 'name' type: String @@ -103,7 +94,6 @@ properties: - name: 'labels' type: KeyValueLabels description: Set of label tags associated with the ServerTlsPolicy resource. 
- is_missing_in_cai: true - name: 'description' type: String description: | diff --git a/mmv1/products/networkservices/EdgeCacheOrigin.yaml b/mmv1/products/networkservices/EdgeCacheOrigin.yaml index 1680c2a76e98..8fbb43ddcbd0 100644 --- a/mmv1/products/networkservices/EdgeCacheOrigin.yaml +++ b/mmv1/products/networkservices/EdgeCacheOrigin.yaml @@ -332,26 +332,3 @@ properties: item_type: type: String max_size: 5 - - name: 'flexShielding' - type: NestedObject - description: | - The FlexShieldingOptions to be used for all routes to this origin. - - If not set, defaults to a global caching layer in front of the origin. - properties: - - name: flexShieldingRegions - type: Array - description: | - Whenever possible, content will be fetched from origin and cached in or - near the specified origin. Best effort. - - You must specify exactly one FlexShieldingRegion. - item_type: - type: Enum - description: | - Available regions for flexible shielding caching layer. - enum_values: - - 'AFRICA_SOUTH1' - - 'ME_CENTRAL1' - min_size: 1 - max_size: 1 diff --git a/mmv1/products/networkservices/EdgeCacheService.yaml b/mmv1/products/networkservices/EdgeCacheService.yaml index 7b0c5dd0f621..33ad8ba9c796 100644 --- a/mmv1/products/networkservices/EdgeCacheService.yaml +++ b/mmv1/products/networkservices/EdgeCacheService.yaml @@ -42,7 +42,6 @@ async: result: resource_inside_response: false custom_code: - encoder: 'templates/terraform/encoders/network_services_edge_cache_service.go.tmpl' examples: - name: 'network_services_edge_cache_service_basic' primary_resource_id: 'instance' diff --git a/mmv1/products/networkservices/Gateway.yaml b/mmv1/products/networkservices/Gateway.yaml index 6404cdf9f2b4..39d4048e3c73 100644 --- a/mmv1/products/networkservices/Gateway.yaml +++ b/mmv1/products/networkservices/Gateway.yaml @@ -227,9 +227,7 @@ properties: - 'DEBUG_HEADERS' - name: 'routingMode' type: Enum - diff_suppress_func: 'tpgresource.SuppressRoutingModeDefault' description: | The routing 
mode of the Gateway. This field is configurable only for gateways of type SECURE_WEB_GATEWAY. This field is required for gateways of type SECURE_WEB_GATEWAY. enum_values: - 'NEXT_HOP_ROUTING_MODE' - - 'EXPLICIT_ROUTING_MODE' diff --git a/mmv1/products/networkservices/LbTrafficExtension.yaml b/mmv1/products/networkservices/LbTrafficExtension.yaml index d5ddcdd1d34e..a71a99c345c2 100644 --- a/mmv1/products/networkservices/LbTrafficExtension.yaml +++ b/mmv1/products/networkservices/LbTrafficExtension.yaml @@ -201,7 +201,6 @@ properties: For more information, refer to [Choosing a load balancer](https://cloud.google.com/load-balancing/docs/backend-service) and [Supported application load balancers](https://cloud.google.com/service-extensions/docs/callouts-overview#supported-lbs). immutable: true - required: true enum_values: - 'INTERNAL_MANAGED' - 'EXTERNAL_MANAGED' diff --git a/mmv1/products/notebooks/Instance.yaml b/mmv1/products/notebooks/Instance.yaml index 2cd080c9abfa..e5f82d4a64bb 100644 --- a/mmv1/products/notebooks/Instance.yaml +++ b/mmv1/products/notebooks/Instance.yaml @@ -70,8 +70,6 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'notebooks-instance' - ignore_read_extra: - - 'update_time' - name: 'notebook_instance_basic_stopped' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' @@ -80,24 +78,18 @@ examples: instance_name: 'notebooks-instance' ignore_read_extra: - 'desired_state' - - 'update_time' - skip_test: https://github.com/hashicorp/terraform-provider-google/issues/17593#issuecomment-2888583933 - name: 'notebook_instance_basic_container' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' region_override: 'us-west1-a' vars: instance_name: 'notebooks-instance' - ignore_read_extra: - - 'update_time' - name: 'notebook_instance_basic_gpu' primary_resource_id: 'instance' 
primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' region_override: 'us-west1-a' vars: instance_name: 'notebooks-instance' - ignore_read_extra: - - 'update_time' - name: 'notebook_instance_full' primary_resource_id: 'instance' primary_resource_name: 'fmt.Sprintf("tf-test-notebooks-instance%s", context["random_suffix"])' @@ -109,8 +101,6 @@ examples: service_account: 'SERVICE_ACCT' test_vars_overrides: 'key_name': 'acctest.BootstrapKMSKeyInLocation(t, "global").CryptoKey.Name' - ignore_read_extra: - - 'update_time' virtual_fields: - name: 'desired_state' description: | diff --git a/mmv1/products/notebooks/Location.yaml b/mmv1/products/notebooks/Location.yaml new file mode 100644 index 000000000000..1438575441d3 --- /dev/null +++ b/mmv1/products/notebooks/Location.yaml @@ -0,0 +1,43 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'Location' +kind: 'compute#zone' +description: 'Represents a Location resource.' +deprecation_message: >- + `google_notebooks_location` is deprecated and will be removed in a future major release. + This resource is not functional. 
+readonly: true +docs: +base_url: 'projects/{{project}}/locations' +has_self_link: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: true +collection_url_key: 'items' +custom_code: +parameters: +properties: + - name: 'name' + type: String + description: 'Name of the Location resource.' + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' diff --git a/mmv1/products/notebooks/Runtime.yaml b/mmv1/products/notebooks/Runtime.yaml index 720d217f5e80..4d73e00b2ff6 100644 --- a/mmv1/products/notebooks/Runtime.yaml +++ b/mmv1/products/notebooks/Runtime.yaml @@ -13,11 +13,6 @@ --- name: 'Runtime' -# This resource should not be removed until the 2025 major release or later. -# Check instance availability first before fully removing. -deprecation_message: >- - `google_notebook_runtime` is deprecated and will be removed in a future major release. - Use `google_workbench_instance` instead. description: | A Cloud AI Platform Notebook runtime. 
diff --git a/mmv1/products/oracledatabase/AutonomousDatabase.yaml b/mmv1/products/oracledatabase/AutonomousDatabase.yaml index 67e6d993c696..e3b225c4149a 100644 --- a/mmv1/products/oracledatabase/AutonomousDatabase.yaml +++ b/mmv1/products/oracledatabase/AutonomousDatabase.yaml @@ -56,7 +56,7 @@ examples: - 'deletion_protection' test_vars_overrides: deletion_protection: 'false' - project: '"oci-terraform-testing-prod"' + project: '"oci-terraform-testing"' database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' - name: 'oracledatabase_autonomous_database_full' primary_resource_id: 'myADB' @@ -69,40 +69,10 @@ examples: ignore_read_extra: - 'deletion_protection' test_vars_overrides: - project: '"oci-terraform-testing-prod"' + project: '"oci-terraform-testing"' deletion_protection: 'false' database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' endpoint_name: 'fmt.Sprintf("tftestendpoint%s", acctest.RandString(t, 10))' - - name: 'oracledatabase_autonomous_database_odbnetwork' - primary_resource_id: 'myADB' - vars: - project: 'my-project' - autonomous_database_id: 'my-instance' - database_name: 'mydatabase' - odb_network: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork' - odb_subnet: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork/odbSubnets/my-odbsubnet' - deletion_protection: 'true' - ignore_read_extra: - - 'deletion_protection' - test_vars_overrides: - deletion_protection: 'false' - project: '"oci-terraform-testing-prod"' - database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' - odb_network: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork"' - odb_subnet: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork/odbSubnets/tf-test-permanent-client-odbsubnet"' - - name: 'oracledatabase_autonomous_database_publicip' - primary_resource_id: 'myADB' - vars: - project: 
'my-project' - autonomous_database_id: 'my-instance' - database_name: 'mydatabase' - deletion_protection: 'true' - ignore_read_extra: - - 'deletion_protection' - test_vars_overrides: - deletion_protection: 'false' - project: '"oci-terraform-testing-prod"' - database_name: 'fmt.Sprintf("tftestdatabase%s", acctest.RandString(t, 10))' virtual_fields: - name: 'deletion_protection' type: Boolean @@ -653,27 +623,11 @@ properties: type: String description: "The name of the VPC network used by the Autonomous Database.\nFormat: projects/{project}/global/networks/{network} " - required: false + required: true - name: 'cidr' type: String description: 'The subnet CIDR range for the Autonmous Database. ' - required: false - - name: odbNetwork - type: String - description: |- - The name of the OdbNetwork associated with the Autonomous Database. - Format: - projects/{project}/locations/{location}/odbNetworks/{odb_network} - It is optional but if specified, this should match the parent ODBNetwork of - the odb_subnet and backup_odb_subnet. - default_from_api: true - - name: odbSubnet - type: String - description: |- - The name of the OdbSubnet associated with the Autonomous Database for - IP allocation. Format: - projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} - default_from_api: true + required: true - name: 'createTime' type: String description: 'The date and time that the Autonomous Database was created. 
' diff --git a/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml b/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml index 2976447ffaf5..ac66ce05f378 100644 --- a/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml +++ b/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml @@ -54,7 +54,7 @@ examples: ignore_read_extra: - 'deletion_protection' test_vars_overrides: - 'project': '"oci-terraform-testing-prod"' + 'project': '"oci-terraform-testing"' 'deletion_protection': 'false' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 @@ -70,7 +70,7 @@ examples: ignore_read_extra: - 'deletion_protection' test_vars_overrides: - 'project': '"oci-terraform-testing-prod"' + 'project': '"oci-terraform-testing"' 'deletion_protection': 'false' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 diff --git a/mmv1/products/oracledatabase/CloudVmCluster.yaml b/mmv1/products/oracledatabase/CloudVmCluster.yaml index a83c31dfc8ed..34d8b6a7bb14 100644 --- a/mmv1/products/oracledatabase/CloudVmCluster.yaml +++ b/mmv1/products/oracledatabase/CloudVmCluster.yaml @@ -58,37 +58,13 @@ examples: - 'deletion_protection' test_vars_overrides: 'deletion_protection': 'false' - 'project': '"oci-terraform-testing-prod"' + 'project': '"oci-terraform-testing"' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 # As a result these resources are not sweepable # See: https://github.com/hashicorp/terraform-provider-google/issues/20599 cloud_vm_cluster_id: 'fmt.Sprintf("ofake-tf-test-vmcluster-basic-%s", acctest.RandString(t, 10))' cloud_exadata_infrastructure_id: 
'fmt.Sprintf("ofake-tf-test-exadata-for-vmcluster-basic-%s", acctest.RandString(t, 10))' - - name: 'oracledatabase_cloud_vmcluster_odbnetwork' - primary_resource_id: 'my_vmcluster' - vars: - project: 'my-project' - cloud_vm_cluster_id: 'my-instance' - cloud_exadata_infrastructure_id: 'my-exadata' - odb_network: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork' - odb_subnet: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork/odbSubnets/my-odbsubnet' - backup_odb_subnet: 'projects/my-project/locations/europe-west2/odbNetworks/my-odbnetwork/odbSubnets/my-backup-odbsubnet' - deletion_protection: 'true' - ignore_read_extra: - - 'deletion_protection' - test_vars_overrides: - deletion_protection: 'false' - project: '"oci-terraform-testing-prod"' - # ofake- prefix is needed to create a dummy resource for testing purposes only - # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 - # As a result these resources are not sweepable - # See: https://github.com/hashicorp/terraform-provider-google/issues/20599 - cloud_vm_cluster_id: 'fmt.Sprintf("ofake-tf-test-vmcluster-odbnetwork-%s", acctest.RandString(t, 10))' - cloud_exadata_infrastructure_id: 'fmt.Sprintf("ofake-tf-test-exadata-for-vmcluster-odbnetwork-%s", acctest.RandString(t, 10))' - odb_network: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork"' - odb_subnet: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork/odbSubnets/tf-test-permanent-client-odbsubnet"' - backup_odb_subnet: '"projects/oci-terraform-testing-prod/locations/europe-west2/odbNetworks/tf-test-permanent-odbnetwork/odbSubnets/tf-test-permanent-backup-odbsubnet"' - name: 'oracledatabase_cloud_vmcluster_full' primary_resource_id: 'my_vmcluster' vars: @@ -100,7 +76,7 @@ examples: - 'deletion_protection' test_vars_overrides: 'deletion_protection': 'false' - 'project': 
'"oci-terraform-testing-prod"' + 'project': '"oci-terraform-testing"' # ofake- prefix is needed to create a dummy resource for testing purposes only # See: https://github.com/hashicorp/terraform-provider-google/issues/19983#issuecomment-2516403770 # As a result these resources are not sweepable @@ -314,35 +290,12 @@ properties: - name: 'cidr' type: String description: 'Network settings. CIDR to use for cluster IP allocation. ' - required: false + required: true - name: 'backupSubnetCidr' type: String description: 'CIDR range of the backup subnet. ' - required: false + required: true - name: 'network' type: String description: "The name of the VPC network.\nFormat: projects/{project}/global/networks/{network} " - required: false - - name: odbNetwork - type: String - description: |- - The name of the OdbNetwork associated with the VM Cluster. - Format: - projects/{project}/locations/{location}/odbNetworks/{odb_network} - It is optional but if specified, this should match the parent ODBNetwork of - the odb_subnet and backup_odb_subnet. - default_from_api: true - - name: odbSubnet - type: String - description: |- - The name of the OdbSubnet associated with the VM Cluster for - IP allocation. Format: - projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} - default_from_api: true - - name: backupOdbSubnet - type: String - description: |- - The name of the backup OdbSubnet associated with the VM Cluster. - Format: - projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} - default_from_api: true + required: true diff --git a/mmv1/products/oracledatabase/OdbNetwork.yaml b/mmv1/products/oracledatabase/OdbNetwork.yaml deleted file mode 100644 index 28dc955f5b92..000000000000 --- a/mmv1/products/oracledatabase/OdbNetwork.yaml +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2024 Google Inc. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'OdbNetwork' -description: An OdbNetwork resource which represents a private network providing connectivity between OracleDatabase resources and Google Cloud VPC network. -references: - guides: - 'OracleDatabase@Google Cloud': https://cloud.google.com/oracle/database/docs/overview' -base_url: 'projects/{{project}}/locations/{{location}}/odbNetworks' -immutable: true -self_link: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odb_network_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/odbNetworks?odbNetworkId={{odb_network_id}}' -id_format: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odb_network_id}}' -import_format: - - 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odb_network_id}}' -custom_code: - pre_delete: 'templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl' -examples: - - name: oracledatabase_odbnetwork - primary_resource_id: my-odbnetwork - vars: - project: my-project - odb_network_id: my-odbnetwork - deletion_protection: 'true' - ignore_read_extra: - - 'deletion_protection' - test_vars_overrides: - deletion_protection: 'false' - project: '"oci-terraform-testing-prod"' - odb_network_id: 'fmt.Sprintf("tf-test-odbnetwork-%s", acctest.RandString(t, 10))' -virtual_fields: - - name: 'deletion_protection' - type: Boolean - default_value: true - description: 'Whether or not to allow Terraform to destroy the instance. 
- Unless this field is set to false in Terraform state, a terraform destroy - or terraform apply that would delete the instance will fail.' -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: '{{op_id}}' - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: T2RiTmV0d29yaw== -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: odbNetworkId - type: String - description: |- - The ID of the OdbNetwork to create. This value is restricted - to (^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$) and must be a maximum of 63 - characters in length. The value must start with a letter and end with - a letter or a number. - immutable: true - url_param_only: true - required: true -properties: - - name: createTime - type: String - description: The date and time that the OdbNetwork was created. - output: true - - name: entitlementId - type: String - description: The ID of the subscription entitlement associated with the OdbNetwork. - output: true - - name: labels - type: KeyValueLabels - description: Labels or tags associated with the resource. - - name: name - type: String - description: |- - Identifier. The name of the OdbNetwork resource in the following format: - projects/{project}/locations/{region}/odbNetworks/{odb_network} - output: true - - name: network - type: String - description: |- - The name of the VPC network in the following format: - projects/{project}/global/networks/{network} - required: true - - name: state - type: String - description: |- - State of the ODB Network. 
- Possible values: - PROVISIONING - AVAILABLE - TERMINATING - FAILED - output: true diff --git a/mmv1/products/oracledatabase/OdbSubnet.yaml b/mmv1/products/oracledatabase/OdbSubnet.yaml deleted file mode 100644 index ca3b6d28d5bb..000000000000 --- a/mmv1/products/oracledatabase/OdbSubnet.yaml +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'OdbSubnet' -description: 'An OdbSubnet resource which represents a subnet under an OdbNetwork.' 
-references: - guides: - 'OracleDatabase@Google Cloud': https://cloud.google.com/oracle/database/docs/overview' -base_url: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets' -immutable: true -self_link: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets/{{odb_subnet_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets?odbSubnetId={{odb_subnet_id}}' -id_format: 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets/{{odb_subnet_id}}' -import_format: - - 'projects/{{project}}/locations/{{location}}/odbNetworks/{{odbnetwork}}/odbSubnets/{{odb_subnet_id}}' -custom_code: - pre_delete: 'templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl' -examples: - - name: oracledatabase_odbsubnet - primary_resource_id: my-odbsubnet - vars: - project: my-project - odb_network_id: my-odbnetwork - odb_subnet_id: my-odbsubnet - deletion_protection: 'true' - ignore_read_extra: - - 'deletion_protection' - test_vars_overrides: - deletion_protection: 'false' - project: '"oci-terraform-testing-prod"' - odb_network_id: '"tf-test-permanent-odbnetwork"' - odb_subnet_id: 'fmt.Sprintf("tf-test-odbsubnet-%s", acctest.RandString(t, 10))' -virtual_fields: - - name: 'deletion_protection' - type: Boolean - default_value: true - description: 'Whether or not to allow Terraform to destroy the instance. - Unless this field is set to false in Terraform state, a terraform destroy - or terraform apply that would delete the instance will fail.' -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 90 - update_minutes: 90 - delete_minutes: 90 - base_url: '{{op_id}}' - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: T2RiU3VibmV0 -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. 
It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: odbnetwork - type: String - description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: odbSubnetId - type: String - description: |- - The ID of the OdbSubnet to create. This value is restricted - to (^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$) and must be a maximum of 63 - characters in length. The value must start with a letter and end with - a letter or a number. - immutable: true - url_param_only: true - required: true -properties: - - name: cidrRange - type: String - description: The CIDR range of the subnet. - required: true - - name: createTime - type: String - description: The date and time that the OdbNetwork was created. - output: true - - name: labels - type: KeyValueLabels - description: Labels or tags associated with the resource. - - name: name - type: String - description: |- - Identifier. The name of the OdbSubnet resource in the following format: - projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} - output: true - - name: purpose - type: String - description: |- - Purpose of the subnet. - Possible values: - CLIENT_SUBNET - BACKUP_SUBNET - required: true - - name: state - type: String - description: |- - State of the ODB Subnet. 
- Possible values: - PROVISIONING - AVAILABLE - TERMINATING - FAILED - output: true diff --git a/mmv1/products/osconfig/GuestPolicies.yaml b/mmv1/products/osconfig/GuestPolicies.yaml index bd37400aa888..1b2119faf71e 100644 --- a/mmv1/products/osconfig/GuestPolicies.yaml +++ b/mmv1/products/osconfig/GuestPolicies.yaml @@ -38,6 +38,8 @@ timeouts: delete_minutes: 20 identity: - guestPolicyId +custom_code: + post_create: 'templates/terraform/post_create/set_computed_name.tmpl' examples: - name: 'os_config_guest_policies_basic' primary_resource_id: 'guest_policies' @@ -254,7 +256,7 @@ properties: description: | An Apt Repository. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'archiveType' type: Enum @@ -296,7 +298,7 @@ properties: description: | A Yum Repository. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'id' type: String @@ -328,7 +330,7 @@ properties: description: | A Zypper Repository. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'id' type: String @@ -360,7 +362,7 @@ properties: description: | A Goo Repository. 
min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'name' type: String @@ -424,7 +426,7 @@ properties: description: | A generic remote artifact. min_version: 'beta' - # TODO: add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'uri' type: String @@ -443,7 +445,7 @@ properties: description: | A Google Cloud Storage artifact. min_version: 'beta' - # TODO: add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add `conflicts` when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'bucket' type: String @@ -477,7 +479,7 @@ properties: description: | Copies a file onto the instance. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -514,7 +516,7 @@ properties: description: | Extracts an archive into the specified directory. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -546,7 +548,7 @@ properties: description: | Installs an MSI file. 
min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -575,7 +577,7 @@ properties: description: | Installs a deb file via dpkg. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -588,7 +590,7 @@ properties: description: | Installs an rpm file via the rpm utility. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String @@ -601,7 +603,7 @@ properties: description: | Executes an artifact or local file. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'args' type: Array @@ -621,19 +623,19 @@ properties: description: | The id of the relevant artifact in the recipe. 
min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) - name: 'localPath' type: String description: | The absolute path of the file on the local filesystem. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) - name: 'scriptRun' type: NestedObject description: | Runs commands in a shell. min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'script' type: String @@ -672,7 +674,7 @@ properties: description: | Copies a file onto the instance. 
min_version: 'beta' - # TODO: add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + # TODO (mbang): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) properties: - name: 'artifactId' type: String diff --git a/mmv1/products/parallelstore/Instance.yaml b/mmv1/products/parallelstore/Instance.yaml index d073d8aa9f04..59ae8a9cf935 100644 --- a/mmv1/products/parallelstore/Instance.yaml +++ b/mmv1/products/parallelstore/Instance.yaml @@ -48,16 +48,12 @@ examples: name: 'instance' network_name: 'network' address_name: 'address' - ignore_read_extra: - - "update_time" - name: 'parallelstore_instance_basic' primary_resource_id: 'instance' vars: name: 'instance' network_name: 'network' address_name: 'address' - ignore_read_extra: - - "update_time" parameters: - name: 'location' type: String diff --git a/mmv1/products/parametermanager/ParameterVersion.yaml b/mmv1/products/parametermanager/ParameterVersion.yaml index 9fd89c41cb51..13a3ffa9708f 100644 --- a/mmv1/products/parametermanager/ParameterVersion.yaml +++ b/mmv1/products/parametermanager/ParameterVersion.yaml @@ -57,22 +57,6 @@ examples: vars: parameter_id: 'parameter' parameter_version_id: 'parameter_version' - - name: 'parameter_version_with_json_format_with_file' - primary_resource_id: 'parameter-version-with-json-format-with-file' - vars: - parameter_id: 'parameter' - parameter_version_id: 'parameter_version' - data: parameter-json-data.json - test_vars_overrides: - 'data': '"./test-fixtures/parameter_data_json_format.json"' - - name: 'parameter_version_with_yaml_format_with_file' - primary_resource_id: 'parameter-version-with-yaml-format-with-file' - vars: - parameter_id: 'parameter' - parameter_version_id: 'parameter_version' - data: parameter-yaml-data.yaml - test_vars_overrides: - 'data': '"./test-fixtures/parameter_data_yaml_format.yaml"' custom_code: custom_import: 
'templates/terraform/custom_import/parameter_manager_parameter_version.go.tmpl' parameters: diff --git a/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml b/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml index 13dee82c0960..ca9cae6ef3cf 100644 --- a/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml +++ b/mmv1/products/parametermanagerregional/RegionalParameterVersion.yaml @@ -58,22 +58,6 @@ examples: bootstrap_iam: - member: "serviceAccount:service-{project_number}@gcp-sa-pm.iam.gserviceaccount.com" role: "roles/cloudkms.cryptoKeyEncrypterDecrypter" - - name: 'regional_parameter_version_with_json_format_with_file' - primary_resource_id: 'regional-parameter-version-with-json-format-with-file' - vars: - parameter_id: 'regional_parameter' - parameter_version_id: 'regional_parameter_version' - data: regional-parameter-json-data.json - test_vars_overrides: - 'data': '"./test-fixtures/regional_parameter_data_json_format.json"' - - name: 'regional_parameter_version_with_yaml_format_with_file' - primary_resource_id: 'regional-parameter-version-with-yaml-format-with-file' - vars: - parameter_id: 'regional_parameter' - parameter_version_id: 'regional_parameter_version' - data: regional-parameter-yaml-data.yaml - test_vars_overrides: - 'data': '"./test-fixtures/regional_parameter_data_yaml_format.yaml"' custom_code: pre_create: 'templates/terraform/pre_create/parameter_manager_regional_parameter_version.go.tmpl' custom_import: 'templates/terraform/custom_import/parameter_manager_regional_parameter_version.go.tmpl' diff --git a/mmv1/products/privateca/CertificateAuthority.yaml b/mmv1/products/privateca/CertificateAuthority.yaml index 513ec47fe234..c6a7e80aa27e 100644 --- a/mmv1/products/privateca/CertificateAuthority.yaml +++ b/mmv1/products/privateca/CertificateAuthority.yaml @@ -70,21 +70,6 @@ examples: 'deletion_protection': 'false' ignore_read_extra: - 'deletion_protection' - - name: 
'privateca_certificate_authority_basic_no_org' - primary_resource_id: 'default' - vars: - certificate_authority_id: 'my-certificate-authority' - pool_name: 'ca-pool' - pool_location: 'us-central1' - deletion_protection: 'true' - test_vars_overrides: - 'pool_name': 'acctest.BootstrapSharedCaPoolInLocation(t, "us-central1")' - 'pool_location': '"us-central1"' - 'deletion_protection': 'false' - ignore_read_extra: - - 'deletion_protection' - # This example is meant to confirm that it's possible to make - # a CA without specifying the organization field - name: 'privateca_certificate_authority_subordinate' primary_resource_id: 'default' vars: @@ -580,6 +565,7 @@ properties: - name: 'organization' type: String description: The organization of the subject. + required: true immutable: true - name: 'organizationalUnit' type: String diff --git a/mmv1/products/privateca/CertificateTemplate.yaml b/mmv1/products/privateca/CertificateTemplate.yaml index 311c1bd9d6f1..2f440e386815 100644 --- a/mmv1/products/privateca/CertificateTemplate.yaml +++ b/mmv1/products/privateca/CertificateTemplate.yaml @@ -52,11 +52,6 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-template%s", context["random_suffix"])' vars: name: 'my-template' - - name: 'privateca_template_zero_max_issuer_path_length_null_ca' - primary_resource_id: 'default' - primary_resource_name: 'fmt.Sprintf("tf-test-my-template%s", context["random_suffix"])' - vars: - name: 'my-template' parameters: properties: - name: 'name' @@ -74,8 +69,6 @@ properties: - name: 'predefinedValues' type: NestedObject description: Optional. A set of X.509 values that will be applied to all issued certificates that use this template. If the certificate request includes conflicting values for the same properties, they will be overwritten by the values defined here. If the issuing CaPool's IssuancePolicy defines conflicting baseline_values for the same properties, the certificate issuance request will fail. 
- custom_flatten: 'templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl' - custom_expand: 'templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl' properties: - name: 'keyUsage' type: NestedObject @@ -185,29 +178,11 @@ properties: properties: - name: 'isCa' type: Boolean - description: | - Optional. Refers to the "CA" X.509 extension, which is a boolean value. When this value is true, the "CA" in Basic Constraints extension will be set to true. - send_empty_value: true - - name: 'nullCa' - type: Boolean - description: | - Optional. When true, the "CA" in Basic Constraints extension will be set to null and omitted from the CA certificate. - If both `is_ca` and `null_ca` are unset, the "CA" in Basic Constraints extension will be set to false. - Note that the behavior when `is_ca = false` for this resource is different from the behavior in the Certificate Authority, Certificate and CaPool resources. - url_param_only: true + description: Optional. Refers to the "CA" X.509 extension, which is a boolean value. When this value is missing, the extension will be omitted from the CA certificate. send_empty_value: true - name: 'maxIssuerPathLength' type: Integer - description: | - Optional. Refers to the "path length constraint" in Basic Constraints extension. For a CA certificate, this value describes the depth of - subordinate CA certificates that are allowed. If this value is less than 0, the request will fail. - - name: 'zeroMaxIssuerPathLength' - type: Boolean - description: | - Optional. When true, the "path length constraint" in Basic Constraints extension will be set to 0. - if both `max_issuer_path_length` and `zero_max_issuer_path_length` are unset, - the max path length will be omitted from the CA certificate. - url_param_only: true + description: Optional. Refers to the path length restriction X.509 extension. 
For a CA certificate, this value describes the depth of subordinate CA certificates that are allowed. If this value is less than 0, the request will fail. If this value is missing, the max path length will be omitted from the CA certificate. - name: 'policyIds' type: Array description: Optional. Describes the X.509 certificate policy object identifiers, per https://tools.ietf.org/html/rfc5280#section-4.2.1.4. @@ -252,89 +227,6 @@ properties: type: String description: Required. The value of this X.509 extension. required: true - - name: 'nameConstraints' - type: NestedObject - description: | - Describes the X.509 name constraints extension. - properties: - - name: 'critical' - type: Boolean - description: - Indicates whether or not the name constraints are marked - critical. - required: true - - name: 'permittedDnsNames' - type: Array - description: | - Contains permitted DNS names. Any DNS name that can be - constructed by simply adding zero or more labels to - the left-hand side of the name satisfies the name constraint. - For example, `example.com`, `www.example.com`, `www.sub.example.com` - would satisfy `example.com` while `example1.com` does not. - item_type: - type: String - - name: 'excludedDnsNames' - type: Array - description: | - Contains excluded DNS names. Any DNS name that can be - constructed by simply adding zero or more labels to - the left-hand side of the name satisfies the name constraint. - For example, `example.com`, `www.example.com`, `www.sub.example.com` - would satisfy `example.com` while `example1.com` does not. - item_type: - type: String - - name: 'permittedIpRanges' - type: Array - description: | - Contains the permitted IP ranges. For IPv4 addresses, the ranges - are expressed using CIDR notation as specified in RFC 4632. - For IPv6 addresses, the ranges are expressed in similar encoding as IPv4 - addresses. - item_type: - type: String - - name: 'excludedIpRanges' - type: Array - description: | - Contains the excluded IP ranges. 
For IPv4 addresses, the ranges - are expressed using CIDR notation as specified in RFC 4632. - For IPv6 addresses, the ranges are expressed in similar encoding as IPv4 - addresses. - item_type: - type: String - - name: 'permittedEmailAddresses' - type: Array - description: | - Contains the permitted email addresses. The value can be a particular - email address, a hostname to indicate all email addresses on that host or - a domain with a leading period (e.g. `.example.com`) to indicate - all email addresses in that domain. - item_type: - type: String - - name: 'excludedEmailAddresses' - type: Array - description: | - Contains the excluded email addresses. The value can be a particular - email address, a hostname to indicate all email addresses on that host or - a domain with a leading period (e.g. `.example.com`) to indicate - all email addresses in that domain. - item_type: - type: String - - name: 'permittedUris' - type: Array - description: | - Contains the permitted URIs that apply to the host part of the name. - The value can be a hostname or a domain with a - leading period (like `.example.com`) - item_type: - type: String - - name: 'excludedUris' - type: Array - description: | - Contains the excluded URIs that apply to the host part of the name. - The value can be a hostname or a domain with a - leading period (like `.example.com`) - item_type: - type: String - name: 'identityConstraints' type: NestedObject description: Optional. Describes constraints on identities that may be appear in Certificates issued using this template. If this is omitted, then this template will not add restrictions on a certificate's identity. 
diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index 1624f57df84e..9b3e7064839b 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -29,7 +29,6 @@ create_verb: 'PUT' update_url: 'projects/{{project}}/subscriptions/{{name}}' update_verb: 'PATCH' update_mask: true -include_in_tgc_next_DO_NOT_USE: true timeouts: insert_minutes: 20 update_minutes: 20 @@ -45,7 +44,6 @@ custom_code: constants: 'templates/terraform/constants/subscription.go.tmpl' encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_subscription.tmpl' -tgc_ignore_terraform_encoder: true examples: - name: 'pubsub_subscription_push' primary_resource_id: 'example' @@ -57,8 +55,6 @@ examples: vars: topic_name: 'example-topic' subscription_name: 'example-subscription' - tgc_test_ignore_extra: - - enable_message_ordering # ignore its false value in configuration - name: 'pubsub_subscription_pull_filter' primary_resource_id: 'example' vars: @@ -120,17 +116,6 @@ examples: subscription_name: 'example-subscription' bucket_name: 'example-bucket' service_account_id: 'example-stw' - - name: 'pubsub_subscription_single_smt' - primary_resource_id: 'example' - vars: - topic_name: 'example-topic' - subscription_name: 'example-subscription' - - name: 'pubsub_subscription_multiple_smts' - primary_resource_id: 'example' - vars: - topic_name: 'example-topic' - subscription_name: 'example-subscription' - tgc_skip_test: 'The dynamic block is in test configuration. The test takes time to fix.' parameters: properties: - name: 'name' @@ -171,7 +156,6 @@ properties: description: | The name of the table to which to write data, of the form {projectId}.{datasetId}.{tableId} required: true - is_missing_in_cai: true - name: 'useTopicSchema' type: Boolean description: | @@ -186,7 +170,6 @@ properties: must be published in JSON format. 
Only one of use_topic_schema and use_table_schema can be set. conflicts: - use_topic_schema - is_missing_in_cai: true - name: 'writeMetadata' type: Boolean description: | @@ -204,7 +187,6 @@ properties: The service account to use to write to BigQuery. If not specified, the Pub/Sub [service agent](https://cloud.google.com/iam/docs/service-agents), service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, is used. - is_missing_in_cai: true - name: 'cloudStorageConfig' type: NestedObject description: | @@ -214,7 +196,6 @@ properties: conflicts: - push_config - bigquery_config - is_missing_in_cai: true properties: - name: 'bucket' type: String @@ -348,7 +329,6 @@ properties: - v1beta1: uses the push format defined in the v1beta1 Pub/Sub API. - v1 or v1beta2: uses the push format defined in the v1 Pub/Sub API. diff_suppress_func: 'IgnoreMissingKeyInMap("x-goog-version")' - custom_tgc_expand: templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl - name: 'noWrapper' type: NestedObject description: | @@ -526,60 +506,3 @@ properties: Note that subscribers may still receive multiple copies of a message when `enable_exactly_once_delivery` is true if the message was published multiple times by a publisher client. These copies are considered distinct by Pub/Sub and have distinct messageId values - - name: messageTransforms - type: Array - description: | - Transforms to be applied to messages published to the topic. Transforms are applied in the - order specified. - item_type: - type: NestedObject - properties: - - name: javascriptUdf - type: NestedObject - description: | - Javascript User Defined Function. If multiple Javascript UDFs are specified on a resource, - each one must have a unique `function_name`. - properties: - - name: functionName - type: String - required: true - description: | - Name of the JavaScript function that should be applied to Pub/Sub messages. 
- - name: code - type: String - required: true - description: | - JavaScript code that contains a function `function_name` with the - following signature: - ``` - /** - * Transforms a Pub/Sub message. - * - * @return {(Object)>|null)} - To - * filter a message, return `null`. To transform a message return a map - * with the following keys: - * - (required) 'data' : {string} - * - (optional) 'attributes' : {Object} - * Returning empty `attributes` will remove all attributes from the - * message. - * - * @param {(Object)>} Pub/Sub - * message. Keys: - * - (required) 'data' : {string} - * - (required) 'attributes' : {Object} - * - * @param {Object} metadata - Pub/Sub message metadata. - * Keys: - * - (required) 'message_id' : {string} - * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format - * - (optional) 'ordering_key': {string} - */ - function (message, metadata) { - } - ``` - - name: disabled - type: Boolean - default_value: false - description: | - Controls whether or not to use this transform. If not set or `false`, - the transform will be applied to messages. Default: `true`. 
diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index a47c9f935ae5..439fdcdc387e 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -52,10 +52,9 @@ iam_policy: custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_topic.tmpl' -tgc_ignore_terraform_encoder: true error_retry_predicates: + - 'transport_tpg.PubsubTopicProjectNotReady' -include_in_tgc_next_DO_NOT_USE: true examples: - name: 'pubsub_topic_basic' primary_resource_id: 'example' @@ -100,15 +99,6 @@ examples: primary_resource_id: 'example' vars: topic_name: 'example-topic' - - name: 'pubsub_topic_single_smt' - primary_resource_id: 'example' - vars: - topic_name: 'example-topic' - - name: 'pubsub_topic_multiple_smts' - primary_resource_id: 'example' - vars: - topic_name: 'example-topic' - tgc_skip_test: 'The dynamic block is in test configuration. The test takes time to fix.' parameters: properties: - name: 'name' @@ -160,12 +150,10 @@ properties: operations on this topic and subscribe operations on any subscription attached to this topic in any region that is not in `allowedPersistenceRegions`. required: false - is_missing_in_cai: true - name: 'schemaSettings' type: NestedObject description: | Settings for validating messages published against a schema. - is_missing_in_cai: true properties: - name: 'schema' type: String @@ -194,7 +182,6 @@ properties: set, message retention is controlled by settings on individual subscriptions. The rotation period has the format of a decimal number, followed by the letter `s` (seconds). Cannot be more than 31 days or less than 10 minutes. - is_missing_in_cai: true - name: 'ingestionDataSourceSettings' type: NestedObject description: | @@ -451,60 +438,3 @@ properties: The GCP service account to be used for Federated Identity authentication with Confluent Cloud. 
required: true - - name: messageTransforms - type: Array - description: | - Transforms to be applied to messages published to the topic. Transforms are applied in the - order specified. - item_type: - type: NestedObject - properties: - - name: javascriptUdf - type: NestedObject - description: | - Javascript User Defined Function. If multiple Javascript UDFs are specified on a resource, - each one must have a unique `function_name`. - properties: - - name: functionName - type: String - required: true - description: | - Name of the JavaScript function that should be applied to Pub/Sub messages. - - name: code - type: String - required: true - description: | - JavaScript code that contains a function `function_name` with the - following signature: - ``` - /** - * Transforms a Pub/Sub message. - * - * @return {(Object)>|null)} - To - * filter a message, return `null`. To transform a message return a map - * with the following keys: - * - (required) 'data' : {string} - * - (optional) 'attributes' : {Object} - * Returning empty `attributes` will remove all attributes from the - * message. - * - * @param {(Object)>} Pub/Sub - * message. Keys: - * - (required) 'data' : {string} - * - (required) 'attributes' : {Object} - * - * @param {Object} metadata - Pub/Sub message metadata. - * Keys: - * - (required) 'message_id' : {string} - * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format - * - (optional) 'ordering_key': {string} - */ - function (message, metadata) { - } - ``` - - name: disabled - type: Boolean - default_value: false - description: | - Controls whether or not to use this transform. If not set or `false`, - the transform will be applied to messages. Default: `true`. 
diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 5b26b4639464..4f4b858093e9 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -117,7 +117,6 @@ async: resource_inside_response: false custom_code: encoder: 'templates/terraform/encoders/redis_cluster.go.tmpl' - decoder: 'templates/terraform/decoders/redis_cluster.go.tmpl' sweeper: ensure_value: field: deletionProtectionEnabled @@ -284,7 +283,8 @@ properties: - name: 'backup' type: String description: | - Example: `projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup}`. + Example: //redis.googleapis.com/projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup} A shorter version (without the prefix) of the backup name is also supported, + like projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backupId}. In this case, it assumes the backup is under redis.googleapis.com. required: true immutable: true - name: 'backupCollection' @@ -812,21 +812,3 @@ properties: - name: 'kmsKey' type: String description: The KMS key used to encrypt the at-rest data of the cluster. - - name: 'managedServerCa' - type: NestedObject - output: true - description: Cluster's Certificate Authority. 
This field will only be populated if Redis Cluster's transit_encryption_mode is TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION - properties: - - name: 'caCerts' - type: Array - output: true - description: The PEM encoded CA certificate chains for redis managed server authentication - item_type: - type: NestedObject - properties: - - name: 'certificates' - type: Array - output: true - description: The certificates that form the CA chain, from leaf to root order - item_type: - type: String diff --git a/mmv1/products/redis/ClusterUserCreatedConnections.yaml b/mmv1/products/redis/ClusterUserCreatedConnections.yaml index 1c07c1c5ed54..379d75834338 100644 --- a/mmv1/products/redis/ClusterUserCreatedConnections.yaml +++ b/mmv1/products/redis/ClusterUserCreatedConnections.yaml @@ -19,7 +19,7 @@ description: | docs: note: | Please ensure your connections meet the requirements outlined at - https://cloud.google.com/memorystore/docs/cluster/about-multiple-vpc-networking. + https://cloud.devsite.corp.google.com/memorystore/docs/cluster/about-multiple-vpc-networking#application_connection_requirements. If you remove a connections item from the resource, the corresponding forwarding rule will no longer be functioning. If the corresponding forwarding rule is represented in your terraform configuration it is recommended to delete that `google_compute_forwarding_rule` resource at the same time. diff --git a/mmv1/products/redis/Instance.yaml b/mmv1/products/redis/Instance.yaml index 59166df38db8..a86b63745bdd 100644 --- a/mmv1/products/redis/Instance.yaml +++ b/mmv1/products/redis/Instance.yaml @@ -445,16 +445,6 @@ properties: # but will be a subset of the range. ignore_read: true default_from_api: true - - name: 'effectiveReservedIpRange' - type: String - description: | - The CIDR range of internal addresses that are reserved for this - instance. If not provided, the service will choose an unused /29 - block, for example, 10.0.0.0/29 or 192.168.0.0/29. 
Ranges must be - unique and non-overlapping with existing subnets in an authorized - network. - output: true - api_name: reservedIpRange - name: 'tier' type: Enum description: | @@ -578,3 +568,11 @@ properties: Optional. The KMS key reference that you want to use to encrypt the data at rest for this Redis instance. If this is provided, CMEK is enabled. immutable: true + - name: 'tags' + type: KeyValuePairs + description: | + A map of resource manager tags. + Resource manager tag keys and values have the same definition as resource manager tags. + Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_key_value}. + immutable: true + ignore_read: true diff --git a/mmv1/products/resourcemanager/Lien.yaml b/mmv1/products/resourcemanager/Lien.yaml index 17b24e49ee45..8a1467851126 100644 --- a/mmv1/products/resourcemanager/Lien.yaml +++ b/mmv1/products/resourcemanager/Lien.yaml @@ -45,6 +45,7 @@ nested_query: modify_by_patch: false custom_code: decoder: 'templates/terraform/decoders/avoid_meaningless_project_update.tmpl' + post_create: 'templates/terraform/post_create/lien.tmpl' pre_delete: 'templates/terraform/pre_delete/modify_delete_url.tmpl' post_import: 'templates/terraform/post_import/lien_import.tmpl' examples: diff --git a/mmv1/products/resourcemanager3/Capability.yaml b/mmv1/products/resourcemanager3/Capability.yaml deleted file mode 100644 index 0ddee6611585..000000000000 --- a/mmv1/products/resourcemanager3/Capability.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Capability' -description: - An app-enabled folder is a folder within the Google Cloud resource hierarchy that has been configured for application management. - This folder lets you define and manage App Hub applications. These applications are functional groupings of services and workloads - that span multiple projects within that folder and its descendant projects. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/resource-manager/docs/manage-applications' - api: 'https://cloud.google.com/resource-manager/reference/rest' -min_version: beta - -import_format: - - '{{parent}}/capabilities/{{capability_name}}' - -self_link: '{{parent}}/capabilities/{{capability_name}}' -create_url: '{{parent}}/capabilities/{{capability_name}}?updateMask=value' -update_url: '{{parent}}/capabilities/{{capability_name}}' - -create_verb: 'PATCH' -update_verb: 'PATCH' - -update_mask: true - -exclude_delete: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - -autogen_async: true -async: - actions: ['create', 'update'] - operation: - base_url: '{{op_id}}' -custom_code: - custom_import: 'templates/terraform/custom_import/resource_manager_capability.go.tmpl' -examples: - - name: 'resource_manager_capability' - primary_resource_id: 'capability' - vars: - display_name: 'my-folder' - test_env_vars: - org_id: "ORG_ID" - min_version: beta - external_providers: ["time"] - -parameters: - -properties: - - name: 'parent' - type: String - description: | - Folder on which Capability needs to be updated in the format folders/folder_id. - required: true - url_param_only: true - - name: 'capability_name' - type: String - description: | - Capability name that should be updated on the folder. - required: true - url_param_only: true - - name: 'value' - type: Boolean - description: | - Capability Value. 
- required: true diff --git a/mmv1/products/saasservicemgmt/Saas.yaml b/mmv1/products/saasservicemgmt/Saas.yaml deleted file mode 100644 index 41ec4ec19b9e..000000000000 --- a/mmv1/products/saasservicemgmt/Saas.yaml +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Saas -description: A Saas resource is the top-level representation of a SaaS service managed by a producer. It contains a list of locations where the service is available, which is used by the Rollout system to generate a rollout plan. -base_url: projects/{{project}}/locations/{{location}}/saas -update_mask: true -self_link: projects/{{project}}/locations/{{location}}/saas/{{saas_id}} -create_url: projects/{{project}}/locations/{{location}}/saas?saasId={{saas_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/{{location}}/saas/{{saas_id}} -import_format: - - projects/{{project}}/locations/{{location}}/saas/{{saas_id}} -min_version: beta -examples: - - name: saas_runtime_saas_basic - primary_resource_id: "example" - min_version: 'beta' - vars: - saas_name: test-saas - bootstrap_iam: - - member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com" - role: "roles/saasservicemgmt.serviceAgent" -autogen_async: false -autogen_status: U2Fhcw== -parameters: - - name: location - type: String - description: Resource ID segment making up resource `name`. 
It identifies the resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true - - name: saasId - type: String - description: The ID value for the new saas. - immutable: true - url_param_only: true - required: true -properties: - - name: annotations - type: KeyValueAnnotations - description: |- - Annotations is an unstructured key-value map stored with a resource that - may be set by external tools to store and retrieve arbitrary metadata. - They are not queryable and should be preserved when modifying objects. - - More info: https://kubernetes.io/docs/user-guide/annotations - - name: createTime - type: String - description: The timestamp when the resource was created. - output: true - - name: etag - type: String - description: |- - An opaque value that uniquely identifies a version or - generation of a resource. It can be used to confirm that the client - and server agree on the ordering of a resource being written. - output: true - - name: labels - type: KeyValueLabels - description: |- - The labels on the resource, which can be used for categorization. - similar to Kubernetes resource labels. - - name: locations - type: Array - description: |- - List of locations that the service is available in. Rollout refers to the - list to generate a rollout plan. - item_type: - type: NestedObject - properties: - - name: name - type: String - description: Name of location. - - name: name - type: String - description: |- - Identifier. The resource name (full URI of the resource) following the standard naming - scheme: - - "projects/{project}/locations/{location}/saas/{saas}" - output: true - - name: uid - type: String - description: |- - The unique identifier of the resource. UID is unique in the time - and space for this resource within the scope of the service. It is - typically generated by the server on successful creation of a resource - and must not be changed. 
UID is used to uniquely identify resources - with resource name reuses. This should be a UUID4. - output: true - - name: updateTime - type: String - description: |- - The timestamp when the resource was last updated. Any - change to the resource made by users must refresh this value. - Changes to a resource made by the service should refresh this value. - output: true diff --git a/mmv1/products/saasservicemgmt/product.yaml b/mmv1/products/saasservicemgmt/product.yaml deleted file mode 100644 index dfd3e75f78f2..000000000000 --- a/mmv1/products/saasservicemgmt/product.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- ---- -name: SaasRuntime -display_name: SaaS Runtime -scopes: - - https://www.googleapis.com/auth/cloud-platform -versions: - - name: "beta" - base_url: https://saasservicemgmt.googleapis.com/v1beta1/ -caibaseurl: "" -resourceswithcaiassettype: {} diff --git a/mmv1/products/secretmanager/Secret.yaml b/mmv1/products/secretmanager/Secret.yaml index e54451d30967..6a7c3cac6683 100644 --- a/mmv1/products/secretmanager/Secret.yaml +++ b/mmv1/products/secretmanager/Secret.yaml @@ -42,7 +42,6 @@ iam_policy: custom_code: constants: 'templates/terraform/constants/secret_manager_secret.go.tmpl' pre_update: 'templates/terraform/pre_update/secret_manager_secret.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/secret_manager_secret.go.tmpl' custom_diff: - 'secretManagerSecretAutoCustomizeDiff' examples: @@ -51,8 +50,6 @@ examples: primary_resource_name: 'fmt.Sprintf("secret%s", context["random_suffix"])' vars: secret_id: 'secret' - ignore_read_extra: - - 'deletion_protection' - name: 'secret_with_annotations' primary_resource_id: 'secret-with-annotations' vars: @@ -258,14 +255,6 @@ properties: description: | A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. - Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_value_id}. + Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_key_value}. immutable: true ignore_read: true -virtual_fields: - - name: 'deletion_protection' - description: | - Whether Terraform will be prevented from destroying the secret. Defaults to false. - When the field is set to true in Terraform state, a `terraform apply` - or `terraform destroy` that would delete the secret will fail. 
- type: Boolean - default_value: false diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index ab26d83a4bda..a3f0691ae0c1 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -49,7 +49,7 @@ custom_code: custom_update: 'templates/terraform/custom_update/secret_version.go.tmpl' pre_delete: 'templates/terraform/pre_delete/secret_version_deletion_policy.go.tmpl' custom_import: 'templates/terraform/custom_import/secret_version.go.tmpl' - constants: 'templates/terraform/constants/secret_version.go.tmpl' + raw_resource_config_validation: 'templates/terraform/validation/secret_version.go.tmpl' # Sweeper skipped as this resource has customized deletion. exclude_sweeper: true examples: @@ -161,7 +161,7 @@ properties: description: The secret data. Must be no larger than 64KiB. api_name: data conflicts: - - 'payload.0.secret_data_wo' + - 'secretDataWo' immutable: true sensitive: true - name: 'secretDataWo' @@ -169,15 +169,13 @@ properties: description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) api_name: data required_with: - - 'payload.0.secret_data_wo_version' + - 'SecretDataWoVersion' conflicts: - - 'payload.0.secret_data' + - 'payload.0.secretData' write_only: true - - name: 'secretDataWoVersion' + - name: 'SecretDataWoVersion' type: Integer default_value: 0 url_param_only: true description: Triggers update of secret data write-only. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) immutable: true - required_with: - - 'payload.0.secret_data_wo' diff --git a/mmv1/products/secretmanagerregional/RegionalSecret.yaml b/mmv1/products/secretmanagerregional/RegionalSecret.yaml index 09641d016359..c767e53ec7df 100644 --- a/mmv1/products/secretmanagerregional/RegionalSecret.yaml +++ b/mmv1/products/secretmanagerregional/RegionalSecret.yaml @@ -43,15 +43,12 @@ iam_policy: - '{{secret_id}}' custom_code: pre_update: 'templates/terraform/pre_update/secret_manager_regional_secret.go.tmpl' - pre_delete: 'templates/terraform/pre_delete/regional_secret.go.tmpl' examples: - name: 'regional_secret_config_basic' primary_resource_id: 'regional-secret-basic' primary_resource_name: 'fmt.Sprintf("tf-test-tf-reg-secret%s", context["random_suffix"])' vars: secret_id: 'tf-reg-secret' - ignore_read_extra: - - 'deletion_protection' - name: 'regional_secret_with_cmek' primary_resource_id: 'regional-secret-with-cmek' vars: @@ -224,19 +221,3 @@ properties: For secret with versionDestroyTtl>0, version destruction doesn't happen immediately on calling destroy instead the version goes to a disabled state and the actual destruction happens after this TTL expires. It must be atleast 24h. - - name: 'tags' - type: KeyValuePairs - description: | - A map of resource manager tags. - Resource manager tag keys and values have the same definition as resource manager tags. - Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/{tag_value_id}. - immutable: true - ignore_read: true -virtual_fields: - - name: 'deletion_protection' - description: | - Whether Terraform will be prevented from destroying the regional secret. Defaults to false. - When the field is set to true in Terraform state, a `terraform apply` - or `terraform destroy` that would delete the federation will fail. 
- type: Boolean - default_value: false diff --git a/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml b/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml index 813119eacbf5..6521262d6dc3 100644 --- a/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml +++ b/mmv1/products/secretmanagerregional/RegionalSecretVersion.yaml @@ -49,7 +49,6 @@ custom_code: custom_update: 'templates/terraform/custom_update/regional_secret_version.go.tmpl' pre_delete: 'templates/terraform/pre_delete/regional_secret_version_deletion_policy.go.tmpl' custom_import: 'templates/terraform/custom_import/regional_secret_version.go.tmpl' - constants: 'templates/terraform/constants/regional_secret_version.go.tmpl' # Sweeper skipped as this resource has customized deletion. exclude_sweeper: true examples: diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml index 8f2f83fb9a2a..4462bae669e2 100644 --- a/mmv1/products/securesourcemanager/BranchRule.yaml +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -47,26 +47,22 @@ examples: branch_rule_id: 'my-basic-branchrule' repository_id: 'my-basic-repository' instance_id: 'my-basic-instance' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' - ignore_read_extra: - - 'deletion_policy' + 'prevent_destroy': 'false' - name: 'secure_source_manager_branch_rule_with_fields' primary_resource_id: 'default' vars: branch_rule_id: 'my-initial-branchrule' repository_id: 'my-initial-repository' instance_id: 'my-initial-instance' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' - ignore_read_extra: - - 'deletion_policy' + 'prevent_destroy': 'false' parameters: - name: 'branch_rule_id' type: 
String diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index 8a1531efa591..c49835376553 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -52,36 +52,33 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/instances/{{instance_id}}' - '{{instance_id}}' custom_code: - pre_delete: 'templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl' examples: - name: 'secure_source_manager_instance_basic' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' ignore_read_extra: - 'update_time' - - 'deletion_policy' - name: 'secure_source_manager_instance_cmek' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' kms_key_name: 'my-key' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' 'kms_key_name': 'acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-secure-source-manager-key1").CryptoKey.Name' oics_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' ignore_read_extra: - 'update_time' - - 'deletion_policy' - name: 'secure_source_manager_instance_private' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -89,15 +86,14 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 
'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' external_providers: ["time"] ignore_read_extra: - 'update_time' - - 'deletion_policy' - name: 'secure_source_manager_instance_private_psc_backend' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -113,15 +109,14 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' external_providers: ["time"] ignore_read_extra: - 'update_time' - - 'deletion_policy' - name: 'secure_source_manager_instance_private_psc_endpoint' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -134,28 +129,26 @@ examples: instance_id: 'my-instance' ca_pool_id: 'ca-pool' root_ca_id: 'root-ca' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' external_providers: ["time"] ignore_read_extra: - 'update_time' - - 'deletion_policy' - name: 'secure_source_manager_instance_workforce_identity_federation' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' vars: instance_id: 'my-instance' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' ignore_read_extra: - 'update_time' - - 'deletion_policy' parameters: - name: 'location' type: String @@ -171,19 +164,6 @@ parameters: url_param_only: 
true required: true immutable: true -virtual_fields: - - name: 'deletion_policy' - type: String - description: | - The deletion policy for the instance. Setting `ABANDON` allows the resource - to be abandoned, rather than deleted. Setting `DELETE` deletes the resource - and all its contents. Setting `PREVENT` prevents the resource from accidental - deletion by erroring out during plan. - Default is `DELETE`. Possible values are: - * DELETE - * PREVENT - * ABANDON - default_value: 'PREVENT' properties: - name: 'name' type: String @@ -268,6 +248,7 @@ properties: type: String description: | CA pool resource, resource must in the format of `projects/{project}/locations/{location}/caPools/{ca_pool}`. + required: true immutable: true - name: 'httpServiceAttachment' type: String diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index 49b481949da9..599baf8e11d1 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -21,8 +21,7 @@ references: docs: base_url: 'projects/{{project}}/locations/{{location}}/repositories?repository_id={{repository_id}}' self_link: 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}' -update_verb: 'PATCH' -update_mask: true +immutable: true import_format: - 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}' - '{{repository_id}}' @@ -32,7 +31,7 @@ timeouts: delete_minutes: 20 autogen_async: true async: - actions: ['create', 'update', 'delete'] + actions: ['create', 'delete'] type: 'OpAsync' operation: base_url: '{{op_id}}' @@ -47,7 +46,6 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}' - '{{repository_id}}' custom_code: - pre_delete: 'templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl' examples: - name: 'secure_source_manager_repository_basic' primary_resource_id: 'default' @@ -55,54 +53,35 @@ 
examples: vars: repository_id: 'my-repository' instance_id: 'my-instance' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' - ignore_read_extra: - - 'deletion_policy' + 'prevent_destroy': 'false' - name: 'secure_source_manager_repository_initial_config' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-repository%s", context["random_suffix"])' vars: repository_id: 'my-repository' instance_id: 'my-instance' - deletion_policy: 'PREVENT' + prevent_destroy: 'true' test_vars_overrides: - 'deletion_policy': '"DELETE"' + 'prevent_destroy': 'false' oics_vars_overrides: - 'deletion_policy': '"DELETE"' - ignore_read_extra: - - 'deletion_policy' + 'prevent_destroy': 'false' parameters: - name: 'location' type: String - immutable: true description: | The location for the Repository. url_param_only: true required: true - name: 'repository_id' type: String - immutable: true description: | The ID for the Repository. url_param_only: true required: true -virtual_fields: - - name: 'deletion_policy' - type: String - description: | - The deletion policy for the repository. Setting `ABANDON` allows the resource - to be abandoned, rather than deleted. Setting `DELETE` deletes the resource - and all its contents. Setting `PREVENT` prevents the resource from accidental deletion - by erroring out during plan. - Default is `DELETE`. Possible values are: - * DELETE - * PREVENT - * ABANDON - default_value: 'PREVENT' properties: - name: 'name' type: String @@ -118,7 +97,6 @@ properties: description: | The name of the instance in which the repository is hosted. required: true - immutable: true diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: 'uid' type: String @@ -161,16 +139,13 @@ properties: description: | Initial configurations for the repository. 
ignore_read: true - immutable: true properties: - name: 'defaultBranch' type: String - immutable: true description: | Default branch name of the repository. - name: 'gitignores' type: Array - immutable: true description: | List of gitignore template names user can choose from. Valid values can be viewed at https://cloud.google.com/secure-source-manager/docs/reference/rest/v1/projects.locations.repositories#initialconfig. @@ -178,13 +153,11 @@ properties: type: String - name: 'license' type: String - immutable: true description: | License template name user can choose from. Valid values can be viewed at https://cloud.google.com/secure-source-manager/docs/reference/rest/v1/projects.locations.repositories#initialconfig. - name: 'readme' type: String - immutable: true description: | README template name. Valid values can be viewed at https://cloud.google.com/secure-source-manager/docs/reference/rest/v1/projects.locations.repositories#initialconfig. diff --git a/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml b/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml index 5a68d4bc8a59..8a01eb3fa977 100644 --- a/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml +++ b/mmv1/products/securitycenter/OrganizationSccBigQueryExport.yaml @@ -45,7 +45,7 @@ examples: primary_resource_id: 'custom_big_query_export_config' vars: big_query_export_id: 'my-export' - dataset_id: 'my-dataset' + dataset: 'my-dataset' name: 'my-export' test_env_vars: org_id: 'ORG_ID' diff --git a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml index 8e415261d0ca..79e11daa14c3 100644 --- a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml +++ b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExport.yaml @@ -46,7 +46,7 @@ examples: primary_resource_id: 'custom_big_query_export_config' vars: big_query_export_id: 'my-export' - dataset_id: 'my-dataset' + dataset: 
'my-dataset' name: 'my-export' test_env_vars: org_id: 'ORG_ID' diff --git a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml index 2d3e382dec7b..ccfcbb0f6752 100644 --- a/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml +++ b/mmv1/products/securitycenterv2/OrganizationSccBigQueryExports.yaml @@ -47,7 +47,7 @@ examples: primary_resource_id: 'custom_big_query_export_config' vars: big_query_export_id: 'my-export' - dataset_id: 'my-dataset' + dataset: 'my-dataset' name: 'my-export' test_env_vars: org_id: 'ORG_ID' diff --git a/mmv1/products/spanner/BackupSchedule.yaml b/mmv1/products/spanner/BackupSchedule.yaml index e220711ac218..67d5eb3112ee 100644 --- a/mmv1/products/spanner/BackupSchedule.yaml +++ b/mmv1/products/spanner/BackupSchedule.yaml @@ -177,14 +177,3 @@ properties: description: | The resource name of the Cloud KMS key to use for encryption. Format: 'projects/{project}/locations/{location}/keyRings/{keyRing}/cryptoKeys/{cryptoKey}' - conflicts: - - encryption_config.0.kms_key_names - - name: 'kmsKeyNames' - type: Array - description: | - Fully qualified name of the KMS keys to use to encrypt this database. The keys must exist - in the same locations as the Spanner Database. - item_type: - type: String - conflicts: - - encryption_config.0.kms_key_name diff --git a/mmv1/products/spanner/Database.yaml b/mmv1/products/spanner/Database.yaml index a55393817cbb..6d4a7edf357e 100644 --- a/mmv1/products/spanner/Database.yaml +++ b/mmv1/products/spanner/Database.yaml @@ -68,7 +68,6 @@ examples: database_name: 'my-database' ignore_read_extra: - 'deletion_protection' - - 'default_time_zone' # Randomness due to spanner instance skip_vcr: true virtual_fields: @@ -82,11 +81,6 @@ virtual_fields: When the field is set to false, deleting the database is allowed. 
type: Boolean default_value: true - - name: 'default_time_zone' - description: | - The default time zone for the database. The default time zone must be a valid name - from the tz database. Default value is "America/Los_angeles". - type: String parameters: - name: 'instance' type: ResourceRef diff --git a/mmv1/products/spanner/Instance.yaml b/mmv1/products/spanner/Instance.yaml index 5b453628b69b..63520a1f091c 100644 --- a/mmv1/products/spanner/Instance.yaml +++ b/mmv1/products/spanner/Instance.yaml @@ -108,32 +108,24 @@ properties: - name: 'num_nodes' type: Integer description: | - The number of nodes allocated to this instance. Exactly one of either num_nodes, processing_units or - autoscaling_config must be present in terraform except when instance_type = FREE_INSTANCE. + The number of nodes allocated to this instance. Exactly one of either node_count or processing_units + must be present in terraform. api_name: nodeCount default_from_api: true - at_least_one_of: + exactly_one_of: - 'num_nodes' - 'processing_units' - 'autoscaling_config' - - 'instance_type' - conflicts: - - 'processing_units' - - 'autoscaling_config' - name: 'processingUnits' type: Integer description: | - The number of processing units allocated to this instance. Exactly one of either num_nodes, - processing_units or autoscaling_config must be present in terraform except when instance_type = FREE_INSTANCE. + The number of processing units allocated to this instance. Exactly one of processing_units + or node_count must be present in terraform. default_from_api: true - at_least_one_of: + exactly_one_of: - 'num_nodes' - 'processing_units' - 'autoscaling_config' - - 'instance_type' - conflicts: - - 'num_nodes' - - 'autoscaling_config' - name: 'labels' type: KeyValueLabels description: | @@ -151,19 +143,13 @@ properties: type: NestedObject description: | The autoscaling configuration. Autoscaling is enabled if this field is set. 
- Exactly one of either num_nodes, processing_units or autoscaling_config must be - present in terraform except when instance_type = FREE_INSTANCE. When autoscaling is enabled, num_nodes and processing_units are treated as, OUTPUT_ONLY fields and reflect the current compute capacity allocated to the instance. - at_least_one_of: + exactly_one_of: - 'num_nodes' - 'processing_units' - 'autoscaling_config' - - 'instance_type' - conflicts: - - 'num_nodes' - - 'processing_units' properties: - name: 'autoscalingLimits' type: NestedObject @@ -275,21 +261,6 @@ properties: - 'STANDARD' - 'ENTERPRISE' - 'ENTERPRISE_PLUS' - - name: 'instanceType' - type: Enum - description: | - The type of this instance. The type can be used to distinguish product variants, that can affect aspects like: - usage restrictions, quotas and billing. Currently this is used to distinguish FREE_INSTANCE vs PROVISIONED instances. - When configured as FREE_INSTANCE, the field `edition` should not be configured. - default_from_api: true - at_least_one_of: - - 'num_nodes' - - 'processing_units' - - 'autoscaling_config' - - 'instance_type' - enum_values: - - 'PROVISIONED' - - 'FREE_INSTANCE' - name: 'defaultBackupScheduleType' type: Enum description: | diff --git a/mmv1/products/sql/Database.yaml b/mmv1/products/sql/Database.yaml index 7a74376617a1..e523d433662f 100644 --- a/mmv1/products/sql/Database.yaml +++ b/mmv1/products/sql/Database.yaml @@ -26,6 +26,7 @@ import_format: - '{{project}}/{{instance}}/{{name}}' - 'instances/{{instance}}/databases/{{name}}' - '{{instance}}/{{name}}' + - '{{name}}' timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/products/storage/Bucket.yaml b/mmv1/products/storage/Bucket.yaml index d7496ddbfa44..f4bef03a27bc 100644 --- a/mmv1/products/storage/Bucket.yaml +++ b/mmv1/products/storage/Bucket.yaml @@ -392,6 +392,8 @@ properties: - name: 'name' type: String description: 'The name of the bucket' + validation: + function: 'verify.ValidateGCSName' - name: 'owner' 
type: NestedObject description: | diff --git a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml index ee9cb9e043a3..fd3d9d2f0d2c 100644 --- a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml @@ -49,10 +49,6 @@ import_format: # the resource. If false, that code is not generated. autogen_async: false -datasource: - generate: true - exclude_test: true - examples: - name: storage_control_folder_intelligence_config_basic primary_resource_id: example diff --git a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml index 547f726d0a54..5f4374dc969f 100644 --- a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml @@ -49,10 +49,6 @@ import_format: # the resource. If false, that code is not generated. autogen_async: false -datasource: - generate: true - exclude_test: true - examples: - name: storage_control_organization_intelligence_config_basic primary_resource_id: example diff --git a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml index 60962fef6c4d..e5eb0c840b33 100644 --- a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml @@ -50,10 +50,6 @@ import_format: # the resource. If false, that code is not generated. 
autogen_async: false -datasource: - generate: true - exclude_test: true - examples: - name: storage_control_project_intelligence_config_basic primary_resource_id: example diff --git a/mmv1/products/storageinsights/DatasetConfig.yaml b/mmv1/products/storageinsights/DatasetConfig.yaml deleted file mode 100644 index 382395fc32dd..000000000000 --- a/mmv1/products/storageinsights/DatasetConfig.yaml +++ /dev/null @@ -1,290 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'DatasetConfig' -description: | - Represents a Storage Insights DatasetConfig. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/storage/docs/insights/datasets' - api: 'https://cloud.google.com/storage/docs/insights/reference/rest/v1/projects.locations.datasetConfigs' - - -base_url: 'projects/{{project}}/locations/{{location}}/datasetConfigs' -self_link: 'projects/{{project}}/locations/{{location}}/datasetConfigs/{{dataset_config_id}}' -create_url: 'projects/{{project}}/locations/{{location}}/datasetConfigs?datasetConfigId={{dataset_config_id}}' - -custom_code: - post_create: templates/terraform/post_create/storage_insights_dataset_config.go.tmpl - pre_update: templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl - -update_verb: 'PATCH' - -# Constructing updateMask in pre_update due to API exceptions. -update_mask: false - -# If true, code for handling long-running operations is generated along with -# the resource. 
If false, that code is not generated. -# write operations of dataset config is an LRO. -autogen_async: true -async: !ruby/object:Api::OpAsync - operation: !ruby/object:Api::OpAsync::Operation - base_url: '{{op_id}}' - -import_format: - - 'projects/{{project}}/locations/{{location}}/datasetConfigs/{{dataset_config_id}}' - -examples: - - name: 'storage_insights_dataset_config_includes' - primary_resource_id: 'config_includes' - vars: - dataset_config_id: 'my_config_includes' - - name: 'storage_insights_dataset_config_excludes' - primary_resource_id: 'config_excludes' - vars: - dataset_config_id: 'my_config_excludes' - -virtual_fields: - - name: 'link_dataset' - type: Boolean - default_value: false - description: | - A boolean terraform only flag to link/unlink dataset. - - Setting this field to true while creation will automatically link the created dataset as an additional functionality. - -> **Note** A dataset config resource can only be destroyed once it is unlinked, - so users must set this field to false to unlink the dataset and destroy the dataset config resource. - -parameters: - - name: 'location' - type: String - required: true - immutable: true - url_param_only: true - description: | - The location of the DatasetConfig. - - name: 'datasetConfigId' - type: String - required: true - immutable: true - url_param_only: true - description: | - The user-defined ID of the DatasetConfig - -properties: - - name: 'name' - type: String - description: | - The full canonical resource name of the DatasetConfig (e.g., projects/P/locations/L/datasetConfigs/ID). - output: true - - name: 'createTime' - type: String - description: | - The UTC time at which the DatasetConfig was created. This is auto-populated. - output: true - - name: 'updateTime' - type: String - description: | - The UTC time at which the DatasetConfig was updated. This is auto-populated. - output: true - - name: 'uid' - type: String - description: | - System generated unique identifier for the resource. 
- output: true - - name: 'organizationNumber' - type: String - description: | - Organization resource ID that the source projects should belong to. - Projects that do not belong to the provided organization are not considered when creating the dataset. - default_from_api: true - immutable: true - - name: 'includeNewlyCreatedBuckets' - type: Boolean - description: | - If set to true, the request includes all the newly created buckets in the dataset that meet the inclusion and exclusion rules. - - name: 'retentionPeriodDays' - type: Integer - description: | - Number of days of history that must be retained. - required: true - - name: 'link' - type: NestedObject - description: | - Details of the linked DatasetConfig. - output: true - properties: - - name: 'dataset' - type: String - output: true - description: | - Dataset name for the linked DatasetConfig. - - name: 'linked' - type: Boolean - output: true - description: | - State of the linked DatasetConfig. - - name: 'identity' - type: NestedObject - description: | - Identity used by DatasetConfig. - immutable: true - required: true - properties: - - name: 'name' - type: String - output: true - description: | - Name of the identity. - - name: 'type' - type: Enum - required: true - description: | - Type of identity to use for the DatasetConfig. - enum_values: - - 'IDENTITY_TYPE_PER_CONFIG' - - 'IDENTITY_TYPE_PER_PROJECT' - - name: 'datasetConfigState' - type: Enum - description: | - State of the DatasetConfig. - output: true - enum_values: - - 'CONFIG_STATE_UNSPECIFIED' - - 'CONFIG_STATE_ACTIVE' - - 'CONFIG_STATE_VERIFICATION_IN_PROGRESS' - - 'CONFIG_STATE_CREATED' - - 'CONFIG_STATE_PROCESSING' - - name: 'description' - type: String - description: | - An optional user-provided description for the dataset configuration with a maximum length of 256 characters. - - name: 'sourceProjects' - type: NestedObject - description: | - Defines the options for providing source projects for the DatasetConfig. 
- properties: - - name: 'projectNumbers' - type: Array - description: | - The list of project numbers to include in the DatasetConfig. - item_type: - type: String - exactly_one_of: - - 'source_projects' - - 'source_folders' - - 'organization_scope' - - name: 'sourceFolders' - type: NestedObject - description: | - Defines the options for providing source folders for the DatasetConfig. - properties: - - name: 'folderNumbers' - type: Array - description: | - The list of folder numbers to include in the DatasetConfig. - item_type: - type: String - exactly_one_of: - - 'source_projects' - - 'source_folders' - - 'organization_scope' - - name: 'organizationScope' - type: Boolean - description: | - Defines the options for providing a source organization for the DatasetConfig. - exactly_one_of: - - 'source_projects' - - 'source_folders' - - 'organization_scope' - - name: 'includeCloudStorageLocations' - type: NestedObject - description: | - Defines the options for including cloud storage locations for the DatasetConfig. - properties: - - name: locations - type: Array - required: true - description: | - The list of cloud storage locations to include in the DatasetConfig. - item_type: - type: String - conflicts: - - 'exclude_cloud_storage_locations' - - name: 'excludeCloudStorageLocations' - type: NestedObject - description: | - Defines the options for excluding cloud storage locations for the DatasetConfig. - properties: - - name: locations - type: Array - required: true - description: | - The list of cloud storage locations to exclude in the DatasetConfig. - item_type: - type: String - conflicts: - - 'include_cloud_storage_locations' - - name: 'includeCloudStorageBuckets' - type: NestedObject - description: | - Defines the options for including cloud storage buckets for the DatasetConfig. - properties: - - name: cloudStorageBuckets - type: Array - required: true - description: | - The list of cloud storage buckets/bucket prefix regexes to include in the DatasetConfig. 
- item_type: - type: NestedObject - properties: - - name: bucketName - type: String - description: | - The list of cloud storage bucket names to include in the DatasetConfig. - Exactly one of the bucket_name and bucket_prefix_regex should be specified. - - name: bucketPrefixRegex - type: String - description: | - The list of regex patterns for bucket names matching the regex. - Regex should follow the syntax specified in google/re2 on GitHub. - Exactly one of the bucket_name and bucket_prefix_regex should be specified. - conflicts: - - 'exclude_cloud_storage_buckets' - - name: 'excludeCloudStorageBuckets' - type: NestedObject - description: | - Defined the options for excluding cloud storage buckets for the DatasetConfig. - properties: - - name: cloudStorageBuckets - type: Array - required: true - description: | - The list of cloud storage buckets/bucket prefix regexes to exclude in the DatasetConfig. - item_type: - type: NestedObject - properties: - - name: bucketName - type: String - description: | - The list of cloud storage bucket names to exclude in the DatasetConfig. - Exactly one of the bucket_name and bucket_prefix_regex should be specified. - - name: bucketPrefixRegex - type: String - description: | - The list of regex patterns for bucket names matching the regex. - Regex should follow the syntax specified in google/re2 on GitHub. - Exactly one of the bucket_name and bucket_prefix_regex should be specified. - conflicts: - - 'include_cloud_storage_buckets' diff --git a/mmv1/products/storagetransfer/AgentPool.yaml b/mmv1/products/storagetransfer/AgentPool.yaml index acb915902484..68d501a2ce90 100644 --- a/mmv1/products/storagetransfer/AgentPool.yaml +++ b/mmv1/products/storagetransfer/AgentPool.yaml @@ -14,7 +14,6 @@ --- name: 'AgentPool' api_resource_type_kind: agentPools -collection_url_key: agentPools description: 'Represents an On-Premises Agent pool.' 
references: guides: diff --git a/mmv1/products/tpu/Node.yaml b/mmv1/products/tpu/Node.yaml new file mode 100644 index 000000000000..6950ed83085a --- /dev/null +++ b/mmv1/products/tpu/Node.yaml @@ -0,0 +1,182 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'Node' +description: | + A Cloud TPU instance. +references: + guides: + 'Official Documentation': 'https://cloud.google.com/tpu/docs/' + api: 'https://cloud.google.com/tpu/docs/reference/rest/v1/projects.locations.nodes' +docs: +base_url: 'projects/{{project}}/locations/{{zone}}/nodes' +self_link: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}' +create_url: 'projects/{{project}}/locations/{{zone}}/nodes?nodeId={{name}}' +immutable: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: true +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: true +custom_code: + constants: 'templates/terraform/constants/tpu_node.tmpl' +custom_diff: + - 'tpuNodeCustomizeDiff' +sweeper: + url_substitutions: + - zone: "us-central1-b" +examples: + - name: 'tpu_node_basic' + primary_resource_id: 'tpu' + vars: + node_name: 'test-tpu' + - name: 'tpu_node_full' + primary_resource_id: 'tpu' + vars: + node_name: 'test-tpu' + global_address_name: 'my-global-address' + network_name: 'tpu-node-network' + exclude_test: true + - name: 'tpu_node_full_test' + 
primary_resource_id: 'tpu' + vars: + node_name: 'test-tpu' + network_name: 'tpu-node-network' + test_vars_overrides: + 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "vpc-network-1")' + exclude_docs: true +parameters: + # TODO: resourceref? + - name: 'zone' + type: String + description: | + The GCP location for the TPU. If it is not provided, the provider zone is used. + url_param_only: true + immutable: true + default_from_api: true +properties: + - name: 'name' + type: String + description: | + The immutable name of the TPU. + required: true + immutable: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' + - name: 'description' + type: String + description: | + The user-supplied description of the TPU. Maximum of 512 characters. + immutable: true + - name: 'acceleratorType' + type: String + description: | + The type of hardware accelerators associated with this node. + required: true + immutable: true + - name: 'tensorflowVersion' + type: String + description: | + The version of Tensorflow running in the Node. + required: true + update_url: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}:reimage' + update_verb: 'POST' + - name: 'network' + type: String + description: | + The name of a network to peer the TPU node to. It must be a + preexisting Compute Engine network inside of the project on which + this API has been activated. If none is provided, "default" will be + used. + immutable: true + default_from_api: true + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + - name: 'cidrBlock' + type: String + description: | + The CIDR block that the TPU node will use when selecting an IP + address. This CIDR block must be a /29 block; the Compute Engine + networks API forbids a smaller block, and using a larger block would + be wasteful (a node can only consume one IP address). 
+ + Errors will occur if the CIDR block has already been used for a + currently existing TPU node, the CIDR block conflicts with any + subnetworks in the user's provided network, or the provided network + is peered with another network that is using that CIDR block. + immutable: true + default_from_api: true + conflicts: + - use_service_networking + - name: 'serviceAccount' + type: String + description: | + The service account used to run the tensor flow services within the + node. To share resources, including Google Cloud Storage data, with + the Tensorflow job running in the Node, this account must have + permissions to that data. + output: true + - name: 'useServiceNetworking' + type: Boolean + description: | + Whether the VPC peering for the node is set up through Service Networking API. + The VPC Peering should be set up before provisioning the node. If this field is set, + cidr_block field should not be specified. If the network that you want to peer the + TPU Node to is a Shared VPC network, the node must be created with this this field enabled. + immutable: true + conflicts: + - cidr_block + default_value: false + - name: 'schedulingConfig' + type: NestedObject + description: | + Sets the scheduling options for this TPU instance. + immutable: true + diff_suppress_func: 'compareTpuNodeSchedulingConfig' + properties: + - name: 'preemptible' + type: Boolean + description: | + Defines whether the TPU instance is preemptible. + required: true + diff_suppress_func: 'compareTpuNodeSchedulingConfig' + - name: 'networkEndpoints' + type: Array + description: | + The network endpoints where TPU workers can be accessed and sent work. + It is recommended that Tensorflow clients of the node first reach out + to the first (index 0) entry. + output: true + item_type: + type: NestedObject + properties: + - name: 'ipAddress' + type: String + description: | + The IP address of this network endpoint. 
+ output: true + - name: 'port' + type: Integer + description: | + The port of this network endpoint. + output: true + - name: 'labels' + type: KeyValueLabels + description: Resource labels to represent user provided metadata. + immutable: true diff --git a/mmv1/products/resourcemanager3/product.yaml b/mmv1/products/tpu/product.yaml similarity index 80% rename from mmv1/products/resourcemanager3/product.yaml rename to mmv1/products/tpu/product.yaml index b5d3ffad64f5..a9302af0cd89 100644 --- a/mmv1/products/resourcemanager3/product.yaml +++ b/mmv1/products/tpu/product.yaml @@ -12,15 +12,16 @@ # limitations under the License. --- -name: 'ResourceManager3' -legacy_name: 'resource_manager' -display_name: 'Resource Manager' +name: 'TPU' +display_name: 'Cloud TPU' versions: - - name: 'beta' - base_url: 'https://cloudresourcemanager.googleapis.com/v3/' + - name: 'ga' + base_url: 'https://tpu.googleapis.com/v1/' scopes: - 'https://www.googleapis.com/auth/cloud-platform' async: type: "OpAsync" operation: base_url: '{{op_id}}' + result: + resource_inside_response: true diff --git a/mmv1/products/vertexai/Endpoint.yaml b/mmv1/products/vertexai/Endpoint.yaml index 7dd64bb13d28..dd9f87c030c2 100644 --- a/mmv1/products/vertexai/Endpoint.yaml +++ b/mmv1/products/vertexai/Endpoint.yaml @@ -21,7 +21,7 @@ description: references: guides: 'Official Documentation': 'https://cloud.google.com/vertex-ai/docs' - api: 'https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints' + api: 'https://cloud.google.com/vertex-ai/docs/reference/rest/v1beta1/projects.locations.endpoints' docs: base_url: 'projects/{{project}}/locations/{{location}}/endpoints' self_link: 'projects/{{project}}/locations/{{location}}/endpoints/{{name}}' @@ -139,7 +139,7 @@ properties: training](https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types). For DeployedModel this field is optional, and the default value is `n1-standard-2`. 
For BatchPredictionJob or as part - of WorkerPoolSpec this field is required. TODO: + of WorkerPoolSpec this field is required. TODO(rsurowka): Try to better unify the required vs optional.' output: true - name: 'acceleratorType' @@ -458,7 +458,6 @@ properties: description: 'A list of Projects from which the forwarding rule will target the service attachment.' - name: 'enableSecurePrivateServiceConnect' - min_version: 'beta' type: Boolean description: 'If set to true, enable secure private service connect with IAM authorization. Otherwise, private service connect will be done without authorization. Note latency will be slightly increased if authorization is enabled.' diff --git a/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml b/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml deleted file mode 100644 index 09a05c90c9d9..000000000000 --- a/mmv1/products/vertexai/EndpointWithModelGardenDeployment.yaml +++ /dev/null @@ -1,1034 +0,0 @@ -# Copyright 2025 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: EndpointWithModelGardenDeployment -api_resource_type_kind: Endpoint -description: | - Create an Endpoint and deploy a Model Garden model to it. 
-references: - guides: - "Use models in Model Garden": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/use-models" - "Overview of Model Garden": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/explore-models" - "Overview of self-deployed models": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-garden/self-deployed-models" - api: "https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations/deploy" -docs: -id_format: "projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}" -base_url: "projects/{{project}}/locations/{{location}}:deploy" -self_link: "projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}" -create_url: "projects/{{project}}/locations/{{location}}:deploy" -immutable: true -exclude_read: true -exclude_import: true # the resource does not support import -timeouts: - insert_minutes: 180 - delete_minutes: 20 -autogen_status: RW5kcG9pbnRXaXRoTW9kZWxHYXJkZW5EZXBsb3ltZW50 -async: - actions: ["create"] - type: "OpAsync" - operation: - timeouts: - insert_minutes: 180 - base_url: "{{op_id}}" - result: - resource_inside_response: true -custom_code: - post_create: "templates/terraform/post_create/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl" - custom_delete: "templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl" -examples: - - name: "vertex_ai_deploy_basic" - primary_resource_id: "deploy" - vars: - project: "vertex-ai" - publisher_model_name: "publisher_model_name" - ignore_read_extra: - - "project" - exclude_test: true # handwritten test required since resource does not support import - - name: "vertex_ai_deploy_huggingface_model" - primary_resource_id: "deploy" - vars: - project: "vertex-ai" - publisher_model_name: "publisher_model_name" - ignore_read_extra: - - "project" - exclude_test: true # handwritten test required since resource does not support import - - name: "vertex_ai_deploy_with_configs" - 
primary_resource_id: "deploy" - vars: - project: "vertex-ai" - publisher_model_name: "publisher_model_name" - ignore_read_extra: - - "project" - exclude_test: true # handwritten test required since resource does not support import - - name: "vertex_ai_deploy_multiple_models_in_parallel" - primary_resource_id: "deploy" - vars: - project: "vertex-ai" - publisher_model_name: "publisher_model_name" - ignore_read_extra: - - "project" - exclude_test: true # handwritten test required since resource does not support import - - name: "vertex_ai_deploy_multiple_models_in_sequence" - primary_resource_id: "deploy" - vars: - project: "vertex-ai" - publisher_model_name: "publisher_model_name" - ignore_read_extra: - - "project" - exclude_test: true # handwritten test required since resource does not support import -parameters: - - name: location - type: String - description: - Resource ID segment making up resource `location`. It identifies the - resource within its parent collection as described in https://google.aip.dev/122. - immutable: true - url_param_only: true - required: true -properties: - - name: endpoint - type: String - description: - Resource ID segment making up resource `endpoint`. It identifies the - resource within its parent collection as described in https://google.aip.dev/122. - url_param_only: true - output: true - - name: deployedModelId - type: String - description: | - Output only. The unique numeric ID that Vertex AI assigns to the model at the time it is deployed to the endpoint. - It is required to undeploy the model from the endpoint during resource deletion as described in - https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints/undeployModel. - output: true - - name: deployedModelDisplayName - type: String - description: | - Output only. The display name assigned to the model deployed to the endpoint. - This is not required to delete the resource but is used for debug logging. 
- output: true - - name: publisherModelName - type: String - description: |- - The Model Garden model to deploy. - Format: - `publishers/{publisher}/models/{publisher_model}@{version_id}`, or - `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`. - exactly_one_of: - - "publisher_model_name" - - "hugging_face_model_id" - - name: huggingFaceModelId - type: String - description: |- - The Hugging Face model to deploy. - Format: Hugging Face model ID like `google/gemma-2-2b-it`. - exactly_one_of: - - "publisher_model_name" - - "hugging_face_model_id" - - name: modelConfig - type: NestedObject - description: The model config to use for the deployment. - properties: - - name: huggingFaceCacheEnabled - type: Boolean - description: |- - If true, the model will deploy with a cached version instead of directly - downloading the model artifacts from Hugging Face. This is suitable for - VPC-SC users with limited internet access. - - name: modelDisplayName - type: String - description: |- - The user-specified display name of the uploaded model. If not - set, a default name will be used. - - name: containerSpec - type: NestedObject - description: |- - Specification of a container for serving predictions. Some fields in this - message correspond to fields in the [Kubernetes Container v1 core - specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). - properties: - - name: ports - type: Array - description: |- - List of ports to expose from the container. Vertex AI sends any - prediction requests that it receives to the first port on this list. Vertex - AI also sends - [liveness and health - checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#liveness) - to this port. - - If you do not specify this field, it defaults to following value: - - ```json - [ - { - "containerPort": 8080 - } - ] - ``` - - Vertex AI does not use ports other than the first one listed. 
This field - corresponds to the `ports` field of the Kubernetes Containers - [v1 core - API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). - immutable: true - item_type: - type: NestedObject - properties: - - name: containerPort - type: Integer - description: |- - The number of the port to expose on the pod's IP address. - Must be a valid port number, between 1 and 65535 inclusive. - - name: predictRoute - type: String - description: |- - HTTP path on the container to send prediction requests to. Vertex AI - forwards requests sent using - projects.locations.endpoints.predict to this - path on the container's IP address and port. Vertex AI then returns the - container's response in the API response. - - For example, if you set this field to `/foo`, then when Vertex AI - receives a prediction request, it forwards the request body in a POST - request to the `/foo` path on the port of your container specified by the - first value of this `ModelContainerSpec`'s - ports field. - - If you don't specify this field, it defaults to the following value when - you deploy this Model to an Endpoint:/v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict - The placeholders in this value are replaced as follows: - - * ENDPOINT: The last segment (following `endpoints/`)of the - Endpoint.name][] field of the Endpoint where this Model has been - deployed. (Vertex AI makes this value available to your container code - as the [`AIP_ENDPOINT_ID` environment - variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) - - * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. - (Vertex AI makes this value available to your container code - as the [`AIP_DEPLOYED_MODEL_ID` environment - variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) 
- immutable: true - - name: healthRoute - type: String - description: |- - HTTP path on the container to send health checks to. Vertex AI - intermittently sends GET requests to this path on the container's IP - address and port to check that the container is healthy. Read more about - [health - checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#health). - - For example, if you set this field to `/bar`, then Vertex AI - intermittently sends a GET request to the `/bar` path on the port of your - container specified by the first value of this `ModelContainerSpec`'s - ports field. - - If you don't specify this field, it defaults to the following value when - you deploy this Model to an Endpoint:/v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict - The placeholders in this value are replaced as follows: - - * ENDPOINT: The last segment (following `endpoints/`)of the - Endpoint.name][] field of the Endpoint where this Model has been - deployed. (Vertex AI makes this value available to your container code - as the [`AIP_ENDPOINT_ID` environment - variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) - - * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. - (Vertex AI makes this value available to your container code as the - [`AIP_DEPLOYED_MODEL_ID` environment - variable](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) - immutable: true - - name: deploymentTimeout - type: String - description: |- - Deployment timeout. - Limit for deployment timeout is 2 hours. - immutable: true - - name: startupProbe - type: NestedObject - description: |- - Probe describes a health check to be performed against a container to - determine whether it is alive or ready to receive traffic. - properties: - - name: exec - type: NestedObject - description: ExecAction specifies a command to execute. 
- properties: - - name: command - type: Array - description: |- - Command is the command line to execute inside the container, the working - directory for the command is root ('/') in the container's filesystem. - The command is simply exec'd, it is not run inside a shell, so - traditional shell instructions ('|', etc) won't work. To use a shell, you - need to explicitly call out to that shell. Exit status of 0 is treated as - live/healthy and non-zero is unhealthy. - item_type: - type: String - - name: httpGet - type: NestedObject - description: HttpGetAction describes an action based on HTTP Get requests. - properties: - - name: path - type: String - description: Path to access on the HTTP server. - - name: port - type: Integer - description: |- - Number of the port to access on the container. - Number must be in the range 1 to 65535. - - name: host - type: String - description: |- - Host name to connect to, defaults to the model serving container's IP. - You probably want to set "Host" in httpHeaders instead. - - name: scheme - type: String - description: |- - Scheme to use for connecting to the host. - Defaults to HTTP. Acceptable values are "HTTP" or "HTTPS". - - name: httpHeaders - type: Array - description: - Custom headers to set in the request. HTTP allows repeated - headers. - item_type: - type: NestedObject - properties: - - name: value - type: String - description: The header field value - - name: name - type: String - description: |- - The header field name. - This will be canonicalized upon output, so case-variant names will be - understood as the same header. - - name: grpc - type: NestedObject - description: GrpcAction checks the health of a container using a gRPC service. - properties: - - name: port - type: Integer - description: - Port number of the gRPC service. Number must be in the range - 1 to 65535. - - name: service - type: String - description: |- - Service is the name of the service to place in the gRPC - HealthCheckRequest. 
See - https://github.com/grpc/grpc/blob/master/doc/health-checking.md. - - If this is not specified, the default behavior is defined by gRPC. - - name: tcpSocket - type: NestedObject - description: |- - TcpSocketAction probes the health of a container by opening a TCP socket - connection. - properties: - - name: port - type: Integer - description: |- - Number of the port to access on the container. - Number must be in the range 1 to 65535. - - name: host - type: String - description: |- - Optional: Host name to connect to, defaults to the model serving - container's IP. - - name: timeoutSeconds - type: Integer - description: |- - Number of seconds after which the probe times out. Defaults to 1 second. - Minimum value is 1. Must be greater or equal to period_seconds. - - Maps to Kubernetes probe argument 'timeoutSeconds'. - - name: successThreshold - type: Integer - description: |- - Number of consecutive successes before the probe is considered successful. - Defaults to 1. Minimum value is 1. - - Maps to Kubernetes probe argument 'successThreshold'. - - name: initialDelaySeconds - type: Integer - description: |- - Number of seconds to wait before starting the probe. Defaults to 0. - Minimum value is 0. - - Maps to Kubernetes probe argument 'initialDelaySeconds'. - - name: periodSeconds - type: Integer - description: |- - How often (in seconds) to perform the probe. Default to 10 seconds. - Minimum value is 1. Must be less than timeout_seconds. - - Maps to Kubernetes probe argument 'periodSeconds'. - - name: failureThreshold - type: Integer - description: |- - Number of consecutive failures before the probe is considered failed. - Defaults to 3. Minimum value is 1. - - Maps to Kubernetes probe argument 'failureThreshold'. - - name: healthProbe - type: NestedObject - description: |- - Probe describes a health check to be performed against a container to - determine whether it is alive or ready to receive traffic. 
- properties: - - name: exec - type: NestedObject - description: ExecAction specifies a command to execute. - properties: - - name: command - type: Array - description: |- - Command is the command line to execute inside the container, the working - directory for the command is root ('/') in the container's filesystem. - The command is simply exec'd, it is not run inside a shell, so - traditional shell instructions ('|', etc) won't work. To use a shell, you - need to explicitly call out to that shell. Exit status of 0 is treated as - live/healthy and non-zero is unhealthy. - item_type: - type: String - - name: httpGet - type: NestedObject - description: HttpGetAction describes an action based on HTTP Get requests. - properties: - - name: path - type: String - description: Path to access on the HTTP server. - - name: port - type: Integer - description: |- - Number of the port to access on the container. - Number must be in the range 1 to 65535. - - name: host - type: String - description: |- - Host name to connect to, defaults to the model serving container's IP. - You probably want to set "Host" in httpHeaders instead. - - name: scheme - type: String - description: |- - Scheme to use for connecting to the host. - Defaults to HTTP. Acceptable values are "HTTP" or "HTTPS". - - name: httpHeaders - type: Array - description: - Custom headers to set in the request. HTTP allows repeated - headers. - item_type: - type: NestedObject - properties: - - name: name - type: String - description: |- - The header field name. - This will be canonicalized upon output, so case-variant names will be - understood as the same header. - - name: value - type: String - description: The header field value - - name: grpc - type: NestedObject - description: GrpcAction checks the health of a container using a gRPC service. - properties: - - name: port - type: Integer - description: - Port number of the gRPC service. Number must be in the range - 1 to 65535. 
- - name: service - type: String - description: |- - Service is the name of the service to place in the gRPC - HealthCheckRequest. See - https://github.com/grpc/grpc/blob/master/doc/health-checking.md. - - If this is not specified, the default behavior is defined by gRPC. - - name: tcpSocket - type: NestedObject - description: |- - TcpSocketAction probes the health of a container by opening a TCP socket - connection. - properties: - - name: port - type: Integer - description: |- - Number of the port to access on the container. - Number must be in the range 1 to 65535. - - name: host - type: String - description: |- - Optional: Host name to connect to, defaults to the model serving - container's IP. - - name: timeoutSeconds - type: Integer - description: |- - Number of seconds after which the probe times out. Defaults to 1 second. - Minimum value is 1. Must be greater or equal to period_seconds. - - Maps to Kubernetes probe argument 'timeoutSeconds'. - - name: successThreshold - type: Integer - description: |- - Number of consecutive successes before the probe is considered successful. - Defaults to 1. Minimum value is 1. - - Maps to Kubernetes probe argument 'successThreshold'. - - name: initialDelaySeconds - type: Integer - description: |- - Number of seconds to wait before starting the probe. Defaults to 0. - Minimum value is 0. - - Maps to Kubernetes probe argument 'initialDelaySeconds'. - - name: periodSeconds - type: Integer - description: |- - How often (in seconds) to perform the probe. Default to 10 seconds. - Minimum value is 1. Must be less than timeout_seconds. - - Maps to Kubernetes probe argument 'periodSeconds'. - - name: failureThreshold - type: Integer - description: |- - Number of consecutive failures before the probe is considered failed. - Defaults to 3. Minimum value is 1. - - Maps to Kubernetes probe argument 'failureThreshold'. 
- - name: imageUri - type: String - description: |- - URI of the Docker image to be used as the custom container for serving - predictions. This URI must identify an image in Artifact Registry or - Container Registry. Learn more about the [container publishing - requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#publishing), - including permissions requirements for the Vertex AI Service Agent. - - The container image is ingested upon ModelService.UploadModel, stored - internally, and this original path is afterwards not used. - - To learn about the requirements for the Docker image itself, see - [Custom container - requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#). - - You can use the URI to one of Vertex AI's [pre-built container images for - prediction](https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers) - in this field. - immutable: true - required: true - - name: command - type: Array - description: |- - Specifies the command that runs when the container starts. This overrides - the container's - [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint). - Specify this field as an array of executable and arguments, similar to a - Docker `ENTRYPOINT`'s "exec" form, not its "shell" form. - - If you do not specify this field, then the container's `ENTRYPOINT` runs, - in conjunction with the args field or the - container's [`CMD`](https://docs.docker.com/engine/reference/builder/#cmd), - if either exists. If this field is not specified and the container does not - have an `ENTRYPOINT`, then refer to the Docker documentation about [how - `CMD` and `ENTRYPOINT` - interact](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). - - If you specify this field, then you can also specify the `args` field to - provide additional arguments for this command. 
However, if you specify this - field, then the container's `CMD` is ignored. See the - [Kubernetes documentation about how the - `command` and `args` fields interact with a container's `ENTRYPOINT` and - `CMD`](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). - - In this field, you can reference [environment variables set by Vertex - AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) - and environment variables set in the env field. - You cannot reference environment variables set in the Docker image. In - order for environment variables to be expanded, reference them by using the - following syntax:$(VARIABLE_NAME) - Note that this differs from Bash variable expansion, which does not use - parentheses. If a variable cannot be resolved, the reference in the input - string is used unchanged. To avoid variable expansion, you can escape this - syntax with `$$`; for example:$$(VARIABLE_NAME) - This field corresponds to the `command` field of the Kubernetes Containers - [v1 core - API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). - immutable: true - item_type: - type: String - - name: args - type: Array - description: |- - Specifies arguments for the command that runs when the container starts. - This overrides the container's - [`CMD`](https://docs.docker.com/engine/reference/builder/#cmd). Specify - this field as an array of executable and arguments, similar to a Docker - `CMD`'s "default parameters" form. - - If you don't specify this field but do specify the - command field, then the command from the - `command` field runs without any additional arguments. See the - [Kubernetes documentation about how the - `command` and `args` fields interact with a container's `ENTRYPOINT` and - `CMD`](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). 
- - If you don't specify this field and don't specify the `command` field, - then the container's - [`ENTRYPOINT`](https://docs.docker.com/engine/reference/builder/#cmd) and - `CMD` determine what runs based on their default behavior. See the Docker - documentation about [how `CMD` and `ENTRYPOINT` - interact](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). - - In this field, you can reference [environment variables - set by Vertex - AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) - and environment variables set in the env field. - You cannot reference environment variables set in the Docker image. In - order for environment variables to be expanded, reference them by using the - following syntax:$(VARIABLE_NAME) - Note that this differs from Bash variable expansion, which does not use - parentheses. If a variable cannot be resolved, the reference in the input - string is used unchanged. To avoid variable expansion, you can escape this - syntax with `$$`; for example:$$(VARIABLE_NAME) - This field corresponds to the `args` field of the Kubernetes Containers - [v1 core - API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). - immutable: true - item_type: - type: String - - name: grpcPorts - type: Array - description: |- - List of ports to expose from the container. Vertex AI sends gRPC - prediction requests that it receives to the first port on this list. Vertex - AI also sends liveness and health checks to this port. - - If you do not specify this field, gRPC requests to the container will be - disabled. - - Vertex AI does not use ports other than the first one listed. This field - corresponds to the `ports` field of the Kubernetes Containers v1 core API. 
- immutable: true - item_type: - type: NestedObject - properties: - - name: containerPort - type: Integer - description: |- - The number of the port to expose on the pod's IP address. - Must be a valid port number, between 1 and 65535 inclusive. - - name: sharedMemorySizeMb - type: String - description: |- - The amount of the VM memory to reserve as the shared memory for the model - in megabytes. - immutable: true - - name: livenessProbe - type: NestedObject - description: |- - Probe describes a health check to be performed against a container to - determine whether it is alive or ready to receive traffic. - properties: - - name: exec - type: NestedObject - description: ExecAction specifies a command to execute. - properties: - - name: command - type: Array - description: |- - Command is the command line to execute inside the container, the working - directory for the command is root ('/') in the container's filesystem. - The command is simply exec'd, it is not run inside a shell, so - traditional shell instructions ('|', etc) won't work. To use a shell, you - need to explicitly call out to that shell. Exit status of 0 is treated as - live/healthy and non-zero is unhealthy. - item_type: - type: String - - name: httpGet - type: NestedObject - description: HttpGetAction describes an action based on HTTP Get requests. - properties: - - name: path - type: String - description: Path to access on the HTTP server. - - name: port - type: Integer - description: |- - Number of the port to access on the container. - Number must be in the range 1 to 65535. - - name: host - type: String - description: |- - Host name to connect to, defaults to the model serving container's IP. - You probably want to set "Host" in httpHeaders instead. - - name: scheme - type: String - description: |- - Scheme to use for connecting to the host. - Defaults to HTTP. Acceptable values are "HTTP" or "HTTPS". - - name: httpHeaders - type: Array - description: - Custom headers to set in the request. 
HTTP allows repeated - headers. - item_type: - type: NestedObject - properties: - - name: name - type: String - description: |- - The header field name. - This will be canonicalized upon output, so case-variant names will be - understood as the same header. - - name: value - type: String - description: The header field value - - name: grpc - type: NestedObject - description: GrpcAction checks the health of a container using a gRPC service. - properties: - - name: service - type: String - description: |- - Service is the name of the service to place in the gRPC - HealthCheckRequest. See - https://github.com/grpc/grpc/blob/master/doc/health-checking.md. - - If this is not specified, the default behavior is defined by gRPC. - - name: port - type: Integer - description: - Port number of the gRPC service. Number must be in the range - 1 to 65535. - - name: tcpSocket - type: NestedObject - description: |- - TcpSocketAction probes the health of a container by opening a TCP socket - connection. - properties: - - name: port - type: Integer - description: |- - Number of the port to access on the container. - Number must be in the range 1 to 65535. - - name: host - type: String - description: |- - Optional: Host name to connect to, defaults to the model serving - container's IP. - - name: timeoutSeconds - type: Integer - description: |- - Number of seconds after which the probe times out. Defaults to 1 second. - Minimum value is 1. Must be greater or equal to period_seconds. - - Maps to Kubernetes probe argument 'timeoutSeconds'. - - name: successThreshold - type: Integer - description: |- - Number of consecutive successes before the probe is considered successful. - Defaults to 1. Minimum value is 1. - - Maps to Kubernetes probe argument 'successThreshold'. - - name: initialDelaySeconds - type: Integer - description: |- - Number of seconds to wait before starting the probe. Defaults to 0. - Minimum value is 0. - - Maps to Kubernetes probe argument 'initialDelaySeconds'. 
- - name: periodSeconds - type: Integer - description: |- - How often (in seconds) to perform the probe. Default to 10 seconds. - Minimum value is 1. Must be less than timeout_seconds. - - Maps to Kubernetes probe argument 'periodSeconds'. - - name: failureThreshold - type: Integer - description: |- - Number of consecutive failures before the probe is considered failed. - Defaults to 3. Minimum value is 1. - - Maps to Kubernetes probe argument 'failureThreshold'. - - name: env - type: Array - description: |- - List of environment variables to set in the container. After the container - starts running, code running in the container can read these environment - variables. - - Additionally, the command and - args fields can reference these variables. Later - entries in this list can also reference earlier entries. For example, the - following example sets the variable `VAR_2` to have the value `foo bar`: - - ```json - [ - { - "name": "VAR_1", - "value": "foo" - }, - { - "name": "VAR_2", - "value": "$(VAR_1) bar" - } - ] - ``` - - If you switch the order of the variables in the example, then the expansion - does not occur. - - This field corresponds to the `env` field of the Kubernetes Containers - [v1 core - API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). - immutable: true - item_type: - type: NestedObject - properties: - - name: name - type: String - description: Name of the environment variable. Must be a valid C identifier. - required: true - - name: value - type: String - description: |- - Variables that reference a $(VAR_NAME) are expanded - using the previous defined environment variables in the container and - any service environment variables. If a variable cannot be resolved, - the reference in the input string will be unchanged. The $(VAR_NAME) - syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped - references will never be expanded, regardless of whether the variable - exists or not. 
- required: true - - name: acceptEula - type: Boolean - description: |- - Whether the user accepts the End User License Agreement (EULA) - for the model. - - name: huggingFaceAccessToken - type: String - description: |- - The Hugging Face read access token used to access the model - artifacts of gated models. - - name: endpointConfig - type: NestedObject - description: The endpoint config to use for the deployment. - properties: - - name: endpointDisplayName - type: String - description: |- - The user-specified display name of the endpoint. If not set, a - default name will be used. - - name: dedicatedEndpointEnabled - type: Boolean - description: |- - If true, the endpoint will be exposed through a dedicated - DNS [Endpoint.dedicated_endpoint_dns]. Your request to the dedicated DNS - will be isolated from other users' traffic and will have better - performance and reliability. Note: Once you enabled dedicated endpoint, - you won't be able to send request to the shared DNS - {region}-aiplatform.googleapis.com. The limitations will be removed soon. - - name: deployConfig - type: NestedObject - description: The deploy config to use for the deployment. - properties: - - name: systemLabels - type: KeyValuePairs - description: |- - System labels for Model Garden deployments. - These labels are managed by Google and for tracking purposes only. - - name: dedicatedResources - type: NestedObject - description: |- - A description of resources that are dedicated to a DeployedModel or - DeployedIndex, and that need a higher degree of manual configuration. - properties: - - name: machineSpec - type: NestedObject - description: Specification of a single machine. - required: true - properties: - - name: reservationAffinity - type: NestedObject - description: |- - A ReservationAffinity can be used to configure a Vertex AI resource (e.g., a - DeployedModel) to draw its Compute Engine resources from a Shared - Reservation, or exclusively from on-demand capacity. 
- properties: - - name: reservationAffinityType - type: String - description: |- - Specifies the reservation affinity type. - Possible values: - TYPE_UNSPECIFIED - NO_RESERVATION - ANY_RESERVATION - SPECIFIC_RESERVATION - required: true - - name: key - type: String - description: |- - Corresponds to the label key of a reservation resource. To target a - SPECIFIC_RESERVATION by name, use `compute.googleapis.com/reservation-name` - as the key and specify the name of your reservation as its value. - - name: values - type: Array - description: |- - Corresponds to the label values of a reservation resource. This must be the - full resource name of the reservation or reservation block. - item_type: - type: String - - name: machineType - type: String - description: |- - The type of the machine. - - See the [list of machine types supported for - prediction](https://cloud.google.com/vertex-ai/docs/predictions/configure-compute#machine-types) - - See the [list of machine types supported for custom - training](https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types). - - For DeployedModel this field is optional, and the default - value is `n1-standard-2`. For BatchPredictionJob or as part of - WorkerPoolSpec this field is required. - immutable: true - - name: acceleratorType - type: String - description: |2- - - Possible values: - ACCELERATOR_TYPE_UNSPECIFIED - NVIDIA_TESLA_K80 - NVIDIA_TESLA_P100 - NVIDIA_TESLA_V100 - NVIDIA_TESLA_P4 - NVIDIA_TESLA_T4 - NVIDIA_TESLA_A100 - NVIDIA_A100_80GB - NVIDIA_L4 - NVIDIA_H100_80GB - NVIDIA_H100_MEGA_80GB - NVIDIA_H200_141GB - NVIDIA_B200 - TPU_V2 - TPU_V3 - TPU_V4_POD - TPU_V5_LITEPOD - - name: acceleratorCount - type: Integer - description: The number of accelerators to attach to the machine. - - name: tpuTopology - type: String - description: |- - The topology of the TPUs. Corresponds to the TPU topologies available from - GKE. (Example: tpu_topology: "2x2x1"). 
- immutable: true - - name: multihostGpuNodeCount - type: Integer - description: The number of nodes per replica for multihost GPU deployments. - immutable: true - - name: minReplicaCount - type: Integer - description: |- - The minimum number of machine replicas that will be always deployed on. - This value must be greater than or equal to 1. - - If traffic increases, it may dynamically be deployed onto more replicas, - and as traffic decreases, some of these extra replicas may be freed. - immutable: true - required: true - - name: maxReplicaCount - type: Integer - description: |- - The maximum number of replicas that may be deployed on when the traffic - against it increases. If the requested value is too large, the deployment - will error, but if deployment succeeds then the ability to scale to that - many replicas is guaranteed (barring service outages). If traffic increases - beyond what its replicas at maximum may handle, a portion of the traffic - will be dropped. If this value is not provided, will use - min_replica_count as the default value. - - The value of this field impacts the charge against Vertex CPU and GPU - quotas. Specifically, you will be charged for (max_replica_count * - number of cores in the selected machine type) and (max_replica_count * - number of GPUs per replica in the selected machine type). - immutable: true - - name: requiredReplicaCount - type: Integer - description: |- - Number of required available replicas for the deployment to succeed. - This field is only needed when partial deployment/mutation is - desired. If set, the deploy/mutate operation will succeed once - available_replica_count reaches required_replica_count, and the rest of - the replicas will be retried. If not set, the default - required_replica_count will be min_replica_count. 
- - name: autoscalingMetricSpecs - type: Array - description: |- - The metric specifications that overrides a resource - utilization metric (CPU utilization, accelerator's duty cycle, and so on) - target value (default to 60 if not set). At most one entry is allowed per - metric. - - If machine_spec.accelerator_count is - above 0, the autoscaling will be based on both CPU utilization and - accelerator's duty cycle metrics and scale up when either metrics exceeds - its target value while scale down if both metrics are under their target - value. The default target value is 60 for both metrics. - - If machine_spec.accelerator_count is - 0, the autoscaling will be based on CPU utilization metric only with - default target value 60 if not explicitly set. - - For example, in the case of Online Prediction, if you want to override - target CPU utilization to 80, you should set - autoscaling_metric_specs.metric_name - to `aiplatform.googleapis.com/prediction/online/cpu/utilization` and - autoscaling_metric_specs.target to `80`. - immutable: true - item_type: - type: NestedObject - properties: - - name: metricName - type: String - description: |- - The resource metric name. - Supported metrics: - - * For Online Prediction: - * `aiplatform.googleapis.com/prediction/online/accelerator/duty_cycle` - * `aiplatform.googleapis.com/prediction/online/cpu/utilization` - required: true - - name: target - type: Integer - description: |- - The target resource utilization in percentage (1% - 100%) for the given - metric; once the real usage deviates from the target by a certain - percentage, the machine replicas change. The default value is 60 - (representing 60%) if not provided. - - name: spot - type: Boolean - description: |- - If true, schedule the deployment workload on [spot - VMs](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms). - - name: fastTryoutEnabled - type: Boolean - description: If true, enable the QMT fast tryout feature for this model if possible. 
diff --git a/mmv1/products/vertexai/Index.yaml b/mmv1/products/vertexai/Index.yaml index 7edadb67ef44..5ee3e4fa1170 100644 --- a/mmv1/products/vertexai/Index.yaml +++ b/mmv1/products/vertexai/Index.yaml @@ -84,10 +84,7 @@ properties: # https://cloud.google.com/vertex-ai/docs/matching-engine/configuring-indexes - name: 'metadata' type: NestedObject - description: |- - Additional information about the Index. - Although this field is not marked as required in the API specification, it is currently required when creating an Index and must be provided. - Attempts to create an Index without this field will result in an API error. + description: An additional information about the Index properties: - name: 'contentsDeltaUri' type: String @@ -109,7 +106,6 @@ properties: - name: 'config' type: NestedObject description: The configuration of the Matching Engine Index. - required: true immutable: true properties: - name: 'dimensions' @@ -154,7 +150,7 @@ properties: type: NestedObject description: The configuration with regard to the algorithms used for efficient - search. This field may be required based on your configuration. + search. properties: - name: 'treeAhConfig' type: NestedObject diff --git a/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml b/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml index 0018354b2e88..e2f4f9306576 100644 --- a/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml +++ b/mmv1/products/vertexai/IndexEndpointDeployedIndex.yaml @@ -108,11 +108,6 @@ parameters: immutable: true resource: 'IndexEndpoint' imports: 'name' - - name: 'region' - type: String - description: The region of the index endpoint deployment. 
eg us-central1 - url_param_only: true - immutable: true properties: - name: 'name' type: String diff --git a/mmv1/products/vertexai/RagEngineConfig.yaml b/mmv1/products/vertexai/RagEngineConfig.yaml deleted file mode 100644 index a6be4dbee80d..000000000000 --- a/mmv1/products/vertexai/RagEngineConfig.yaml +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'RagEngineConfig' -description: | - Vertex AI RAG Engine lets you scale your RagManagedDb instance based on your usage and performance requirements using a choice of two tiers, and optionally, lets you delete your Vertex AI RAG Engine data using a third tier. The tier is a project-level setting that's available in the RagEngineConfig resource that impacts all RAG corpora using RagManagedDb. The following tiers are available in RagEngineConfig: Basic, Scaled and Unprovisioned. 
-references: - guides: - 'Official Documentation': 'https://cloud.google.com/vertex-ai/generative-ai/docs/rag-engine/understanding-ragmanageddb' - api: 'https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/RagEngineConfig' -docs: -id_format: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -base_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -self_link: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -create_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -# This is a singleton resource that is already created, so create -# is really an update, and therefore should be PATCHed. -create_verb: 'PATCH' -update_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -update_verb: 'PATCH' -delete_url: 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -delete_verb: 'PATCH' -import_format: - - 'projects/{{project}}/locations/{{region}}/ragEngineConfig' -update_mask: false -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -autogen_async: true -async: - actions: ['create', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -custom_code: - custom_delete: "templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl" -examples: - - name: 'vertex_ai_rag_engine_config_basic' - exclude_test: true - - name: 'vertex_ai_rag_engine_config_scaled' - exclude_test: true - - name: 'vertex_ai_rag_engine_config_unprovisioned' - exclude_test: true -parameters: - - name: 'region' - type: String - description: The region of the RagEngineConfig. eg us-central1 - url_param_only: true - immutable: true - default_from_api: true -properties: - - name: 'ragManagedDbConfig' - type: NestedObject - description: | - Required. The config of the RagManagedDb used by RagEngine. 
- required: true - properties: - - name: 'scaled' - type: NestedObject - exactly_one_of: - - 'rag_managed_db_config.0.scaled' - - 'rag_managed_db_config.0.basic' - - 'rag_managed_db_config.0.unprovisioned' - description: | - Scaled tier offers production grade performance along with autoscaling functionality. It is suitable for customers with large amounts of data or performance sensitive workloads. - allow_empty_object: true - send_empty_value: true - properties: [] - - name: 'basic' - type: NestedObject - exactly_one_of: - - 'rag_managed_db_config.0.scaled' - - 'rag_managed_db_config.0.basic' - - 'rag_managed_db_config.0.unprovisioned' - description: | - Basic tier is a cost-effective and low compute tier suitable for the following cases: Experimenting with RagManagedDb, Small data size, Latency insensitive workload, Only using RAG Engine with external vector DBs. - NOTE: This is the default tier if not explicitly chosen. - allow_empty_object: true - send_empty_value: true - properties: [] - - name: 'unprovisioned' - type: NestedObject - exactly_one_of: - - 'rag_managed_db_config.0.scaled' - - 'rag_managed_db_config.0.basic' - - 'rag_managed_db_config.0.unprovisioned' - description: | - Disables the RAG Engine service and deletes all your data held within this service. This will halt the billing of the service. - NOTE: Once deleted the data cannot be recovered. To start using RAG Engine again, you will need to update the tier by calling the UpdateRagEngineConfig API. - allow_empty_object: true - send_empty_value: true - properties: [] - - name: 'name' - type: String - description: The resource name of the Dataset. This value is set by Google. 
- output: true diff --git a/mmv1/products/vmwareengine/ExternalAddress.yaml b/mmv1/products/vmwareengine/ExternalAddress.yaml index 5e51a5143d24..ee3aa777699b 100644 --- a/mmv1/products/vmwareengine/ExternalAddress.yaml +++ b/mmv1/products/vmwareengine/ExternalAddress.yaml @@ -62,8 +62,6 @@ examples: private_cloud_id: 'sample-pc' management_cluster_id: 'sample-mgmt-cluster' network_policy_id: 'sample-np' - test_env_vars: - region: 'REGION' # update tests will take care of all CRUD tests. Parent PC creation is expensive and node reservation is required. exclude_test: true parameters: diff --git a/mmv1/products/vmwareengine/NetworkPeering.yaml b/mmv1/products/vmwareengine/NetworkPeering.yaml index 7cc42633e210..3279552a69ca 100644 --- a/mmv1/products/vmwareengine/NetworkPeering.yaml +++ b/mmv1/products/vmwareengine/NetworkPeering.yaml @@ -138,7 +138,6 @@ properties: - 'NETAPP_CLOUD_VOLUMES' - 'THIRD_PARTY_SERVICE' - 'DELL_POWERSCALE' - - 'GOOGLE_CLOUD_NETAPP_VOLUMES' - name: 'uid' type: String description: | diff --git a/mmv1/products/vmwareengine/PrivateCloud.yaml b/mmv1/products/vmwareengine/PrivateCloud.yaml index 904a527b8efa..e1239d5e1d02 100644 --- a/mmv1/products/vmwareengine/PrivateCloud.yaml +++ b/mmv1/products/vmwareengine/PrivateCloud.yaml @@ -26,7 +26,7 @@ delete_url: 'projects/{{project}}/locations/{{location}}/privateClouds/{{name}}' import_format: - 'projects/{{project}}/locations/{{location}}/privateClouds/{{name}}' timeouts: - insert_minutes: 360 + insert_minutes: 240 update_minutes: 190 delete_minutes: 150 autogen_async: true @@ -36,7 +36,7 @@ async: operation: base_url: '{{op_id}}' timeouts: - insert_minutes: 360 + insert_minutes: 240 update_minutes: 190 delete_minutes: 150 result: diff --git a/mmv1/products/vpcaccess/Connector.yaml b/mmv1/products/vpcaccess/Connector.yaml index 2ec3d20f6cc6..7a881c119d53 100644 --- a/mmv1/products/vpcaccess/Connector.yaml +++ b/mmv1/products/vpcaccess/Connector.yaml @@ -22,9 +22,7 @@ references: docs: base_url: 
'projects/{{project}}/locations/{{region}}/connectors' create_url: 'projects/{{project}}/locations/{{region}}/connectors?connectorId={{name}}' -update_url: 'projects/{{project}}/locations/{{region}}/connectors/{{name}}' -update_verb: 'PATCH' -update_mask: true +immutable: true timeouts: insert_minutes: 20 update_minutes: 20 @@ -41,11 +39,6 @@ custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' decoder: 'templates/terraform/decoders/long_name_to_self_link.go.tmpl' post_create: 'templates/terraform/post_create/sleep.go.tmpl' - constants: 'templates/terraform/constants/vpc_access_connector.go.tmpl' - pre_update: 'templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl' -custom_diff: - - 'customdiff.ForceNewIfChange("min_instances", isInstanceShrinkage)' - - 'customdiff.ForceNewIfChange("max_instances", isInstanceShrinkage)' examples: - name: 'vpc_access_connector' primary_resource_id: 'connector' @@ -76,7 +69,6 @@ properties: The name of the resource (Max 25 characters). required: true custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - immutable: true - name: 'network' type: String description: | @@ -88,14 +80,12 @@ properties: diff_suppress_func: 'tpgresource.CompareResourceNames' custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' custom_expand: 'templates/terraform/custom_expand/resource_from_self_link.go.tmpl' - immutable: true - name: 'ipCidrRange' type: String description: | The range of internal addresses that follows RFC 4632 notation. Example: `10.132.0.0/28`. 
required_with: - 'network' - immutable: true - name: 'state' type: Enum description: | @@ -107,7 +97,6 @@ properties: - 'DELETING' - 'ERROR' - 'UPDATING' - immutable: true - name: 'machineType' type: String description: | @@ -124,12 +113,11 @@ properties: - min_instances validation: function: 'validation.IntBetween(200, 1000)' - immutable: true - name: 'minInstances' type: Integer description: | Minimum value of instances in autoscaling group underlying the connector. Value must be between 2 and 9, inclusive. Must be - lower than the value specified by max_instances. Required alongside `max_instances` if not using `min_throughput`/`max_throughput`. + lower than the value specified by max_instances. default_from_api: true conflicts: - min_throughput @@ -137,7 +125,7 @@ properties: type: Integer description: | Maximum value of instances in autoscaling group underlying the connector. Value must be between 3 and 10, inclusive. Must be - higher than the value specified by min_instances. Required alongside `min_instances` if not using `min_throughput`/`max_throughput`. + higher than the value specified by min_instances. 
default_from_api: true conflicts: - max_throughput @@ -152,7 +140,6 @@ properties: - max_instances validation: function: 'validation.IntBetween(200, 1000)' - immutable: true - name: 'selfLink' type: String description: | diff --git a/mmv1/products/workbench/Instance.yaml b/mmv1/products/workbench/Instance.yaml index bfdf2ff10181..0f4693a69381 100644 --- a/mmv1/products/workbench/Instance.yaml +++ b/mmv1/products/workbench/Instance.yaml @@ -56,8 +56,6 @@ sweeper: url_substitutions: - region: "us-central1-a" - region: "us-west1-a" -custom_diff: - - 'workbenchMetadataCustomizeDiff' examples: - name: 'workbench_instance_basic' primary_resource_id: 'instance' @@ -77,7 +75,6 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'workbench-instance' - reservation_name: 'wbi-reservation' ignore_read_extra: - 'gce_setup.0.vm_image' - name: 'workbench_instance_labels_stopped' @@ -98,7 +95,6 @@ examples: vars: instance_name: 'workbench-instance' network_name: 'wbi-test-default' - reservation_name: 'wbi-reservation' key_name: 'my-crypto-key' test_env_vars: project_id: 'PROJECT_NAME' @@ -115,15 +111,6 @@ examples: region_override: 'us-west1-a' vars: instance_name: 'workbench-instance' - - name: 'workbench_instance_euc' - primary_resource_id: 'instance' - primary_resource_name: 'fmt.Sprintf("tf-test-workbench-instance%s", context["random_suffix"])' - region_override: 'us-west1-a' - vars: - instance_name: 'workbench-instance' - test_env_vars: - project_id: 'PROJECT_NAME' - project_number: 'PROJECT_NUMBER' virtual_fields: - name: 'desired_state' description: | @@ -472,40 +459,6 @@ properties: Defines the type of technology used by the confidential instance. enum_values: - 'SEV' - - name: 'reservationAffinity' - type: NestedObject - immutable: true - default_from_api: true - description: | - Reservations that this instance can consume from. 
- properties: - - name: 'consumeReservationType' - type: Enum - immutable: true - default_from_api: true - description: | - Specifies the type of reservation from which this instance can consume resources: - RESERVATION_ANY (default), RESERVATION_SPECIFIC, or RESERVATION_NONE. - enum_values: - - 'RESERVATION_NONE' - - 'RESERVATION_ANY' - - 'RESERVATION_SPECIFIC' - - name: 'key' - immutable: true - description: | - Corresponds to the label key of a reservation resource. To target a - RESERVATION_SPECIFIC by name, use compute.googleapis.com/reservation-name - as the key and specify the name of your reservation as its value. - - name: 'values' - type: Array - immutable: true - item_type: - type: String - description: | - Corresponds to the label values of a reservation resource. This can be - either a name to a reservation in the same project or - "projects/different-project/reservations/some-reservation-name" - to target a shared reservation in the same zone but in a different project. - name: 'proxyUri' type: String description: | @@ -621,7 +574,3 @@ properties: description: | Flag that specifies that a notebook can be accessed with third party identity provider. - - name: 'enableManagedEuc' - type: Boolean - description: | - Flag to enable managed end user credentials for the instance. 
diff --git a/mmv1/products/workbench/product.yaml b/mmv1/products/workbench/product.yaml index 1502414d84d3..f453551ea553 100644 --- a/mmv1/products/workbench/product.yaml +++ b/mmv1/products/workbench/product.yaml @@ -15,8 +15,6 @@ name: 'Workbench' display_name: 'Vertex AI Workbench' versions: - - name: 'beta' - base_url: 'https://notebooks.googleapis.com/v2/' - name: 'ga' base_url: 'https://notebooks.googleapis.com/v2/' scopes: diff --git a/mmv1/products/workstations/WorkstationCluster.yaml b/mmv1/products/workstations/WorkstationCluster.yaml index 4cd7b9dbdefa..bc8f1a43429f 100644 --- a/mmv1/products/workstations/WorkstationCluster.yaml +++ b/mmv1/products/workstations/WorkstationCluster.yaml @@ -85,7 +85,7 @@ parameters: The location where the workstation cluster should reside. min_version: 'beta' url_param_only: true - # TODO: Change to required, as it's not possible for this field to be omitted on the API side. + # TODO(esu): Change to required, as it's not possible for this field to be omitted on the API side. immutable: true properties: - name: 'name' diff --git a/mmv1/products/workstations/WorkstationConfig.yaml b/mmv1/products/workstations/WorkstationConfig.yaml index da390ecea1e5..33371b537c7d 100644 --- a/mmv1/products/workstations/WorkstationConfig.yaml +++ b/mmv1/products/workstations/WorkstationConfig.yaml @@ -340,7 +340,7 @@ properties: properties: - name: 'enableConfidentialCompute' type: Boolean - # TODO: Change this to required in next breaking release. + # TODO(esu): Change this to required in next breaking release. description: | Whether the instance has confidential compute enabled. min_version: 'beta' @@ -490,7 +490,7 @@ properties: description: | Name of the snapshot to use as the source for the disk. This can be the snapshot's `self_link`, `id`, or a string in the format of `projects/{project}/global/snapshots/{snapshot}`. If set, `sizeGb` and `fsType` must be empty. Can only be updated if it has an existing value. 
min_version: 'beta' - # TODO: Add conflicting fields once complex lists are supported. + # TODO(esu): Add conflicting fields once complex lists are supported. - name: 'ephemeralDirectories' type: Array description: | diff --git a/mmv1/provider/provider.go b/mmv1/provider/provider.go index 1f32008fff85..fab8e0a52161 100644 --- a/mmv1/provider/provider.go +++ b/mmv1/provider/provider.go @@ -17,12 +17,12 @@ type Provider interface { const TERRAFORM_PROVIDER_GA = "github.com/hashicorp/terraform-provider-google" const TERRAFORM_PROVIDER_BETA = "github.com/hashicorp/terraform-provider-google-beta" -const TGC_PROVIDER = "github.com/GoogleCloudPlatform/terraform-google-conversion/v6" +const TERRAFORM_PROVIDER_PRIVATE = "internal/terraform-next" const RESOURCE_DIRECTORY_GA = "google" const RESOURCE_DIRECTORY_BETA = "google-beta" -const RESOURCE_DIRECTORY_TGC = "pkg" +const RESOURCE_DIRECTORY_PRIVATE = "google-private" -// # TODO: Review all object interfaces and move to private methods +// # TODO(nelsonjr): Review all object interfaces and move to private methods // # that should not be exposed outside the object hierarchy. 
func ProviderName(t Provider) string { return reflect.TypeOf(t).Name() @@ -38,8 +38,8 @@ func ImportPathFromVersion(v string) string { tpg = TERRAFORM_PROVIDER_BETA dir = RESOURCE_DIRECTORY_BETA default: - tpg = "github.com/hashicorp/terraform-provider-google-" + v - dir = "google-" + v + tpg = TERRAFORM_PROVIDER_PRIVATE + dir = RESOURCE_DIRECTORY_PRIVATE } return fmt.Sprintf("%s/%s", tpg, dir) } diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 8946f9238e31..5db15f6e51fc 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -24,6 +24,7 @@ import ( "path/filepath" "strings" "sync" + "text/template" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" @@ -35,6 +36,9 @@ type TemplateData struct { OutputFolder string VersionName string + TerraformResourceDirectory string + TerraformProviderModule string + // TODO rewrite: is this needed? // # Information about the local environment // # (which formatters are enabled, start-time) @@ -50,6 +54,18 @@ var goimportFiles sync.Map func NewTemplateData(outputFolder string, versionName string) *TemplateData { td := TemplateData{OutputFolder: outputFolder, VersionName: versionName} + + if versionName == GA_VERSION { + td.TerraformResourceDirectory = "google" + td.TerraformProviderModule = "github.com/hashicorp/terraform-provider-google" + } else if versionName == ALPHA_VERSION || versionName == PRIVATE_VERSION { + td.TerraformResourceDirectory = "google-private" + td.TerraformProviderModule = "internal/terraform-next" + } else { + td.TerraformResourceDirectory = "google-beta" + td.TerraformProviderModule = "github.com/hashicorp/terraform-provider-google-beta" + } + return &td } @@ -70,15 +86,6 @@ func (td *TemplateData) GenerateResourceFile(filePath string, resource api.Resou td.GenerateFile(filePath, templatePath, resource, true, templates...) 
} -func (td *TemplateData) GenerateFWResourceFile(filePath string, resource api.Resource) { - templatePath := "templates/terraform/resource_fw.go.tmpl" - templates := []string{ - templatePath, - "templates/terraform/schema_property_fw.go.tmpl", - } - td.GenerateFile(filePath, templatePath, resource, true, templates...) -} - func (td *TemplateData) GenerateMetadataFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/metadata.yaml.tmpl" templates := []string{ @@ -87,22 +94,6 @@ func (td *TemplateData) GenerateMetadataFile(filePath string, resource api.Resou td.GenerateFile(filePath, templatePath, resource, false, templates...) } -func (td *TemplateData) GenerateDataSourceFile(filePath string, resource api.Resource) { - templatePath := "templates/terraform/datasource.go.tmpl" - templates := []string{ - templatePath, - } - td.GenerateFile(filePath, templatePath, resource, true, templates...) -} - -func (td *TemplateData) GenerateProductFile(filePath string, product api.Product) { - templatePath := "templates/terraform/product.go.tmpl" - templates := []string{ - templatePath, - } - td.GenerateFile(filePath, templatePath, product, true, templates...) 
-} - func (td *TemplateData) GenerateOperationFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/operation.go.tmpl" templates := []string{ @@ -129,36 +120,7 @@ func (td *TemplateData) GenerateTestFile(filePath string, resource api.Resource) } tmplInput := TestInput{ Res: resource, - ImportPath: resource.ImportPath, - PROJECT_NAME: "my-project-name", - CREDENTIALS: "my/credentials/filename.json", - REGION: "us-west1", - ORG_ID: "123456789", - ORG_DOMAIN: "example.com", - ORG_TARGET: "123456789", - PROJECT_NUMBER: "1111111111111", - BILLING_ACCT: "000000-0000000-0000000-000000", - MASTER_BILLING_ACCT: "000000-0000000-0000000-000000", - SERVICE_ACCT: "my@service-account.com", - CUST_ID: "A01b123xz", - IDENTITY_USER: "cloud_identity_user", - PAP_DESCRIPTION: "description", - CHRONICLE_ID: "00000000-0000-0000-0000-000000000000", - VMWAREENGINE_PROJECT: "my-vmwareengine-project", - } - - td.GenerateFile(filePath, templatePath, tmplInput, true, templates...) -} - -func (td *TemplateData) GenerateDataSourceTestFile(filePath string, resource api.Resource) { - templatePath := "templates/terraform/examples/base_configs/datasource_test_file.go.tmpl" - templates := []string{ - "templates/terraform/env_var_context.go.tmpl", - templatePath, - } - tmplInput := TestInput{ - Res: resource, - ImportPath: resource.ImportPath, + ImportPath: td.ImportPath(), PROJECT_NAME: "my-project-name", CREDENTIALS: "my/credentials/filename.json", REGION: "us-west1", @@ -221,17 +183,11 @@ func (td *TemplateData) GenerateSweeperFile(filePath string, resource api.Resour td.GenerateFile(filePath, templatePath, resource, false, templates...) 
} -func (td *TemplateData) GenerateTGCResourceFile(templatePath, filePath string, resource api.Resource) { +func (td *TemplateData) GenerateTGCResourceFile(filePath string, resource api.Resource) { + templatePath := "templates/tgc/resource_converter.go.tmpl" templates := []string{ templatePath, "templates/terraform/expand_property_method.go.tmpl", - "templates/terraform/expand_resource_ref.tmpl", - "templates/terraform/schema_property.go.tmpl", - "templates/terraform/schema_subresource.go.tmpl", - "templates/terraform/flatten_property_method.go.tmpl", - "templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl", - "templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl", - "templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl", } td.GenerateFile(filePath, templatePath, resource, true, templates...) } @@ -244,14 +200,6 @@ func (td *TemplateData) GenerateTGCIamResourceFile(filePath string, resource api td.GenerateFile(filePath, templatePath, resource, true, templates...) } -func (td *TemplateData) GenerateTGCNextTestFile(filePath string, resource api.Resource) { - templatePath := "templates/tgc_next/test/test_file.go.tmpl" - templates := []string{ - templatePath, - } - td.GenerateFile(filePath, templatePath, resource, true, templates...) 
-} - func (td *TemplateData) GenerateFile(filePath, templatePath string, input any, goFormat bool, templates ...string) { templateFileName := filepath.Base(templatePath) @@ -295,6 +243,15 @@ func (td *TemplateData) GenerateFile(filePath, templatePath string, input any, g } } +func (td *TemplateData) ImportPath() string { + if td.VersionName == GA_VERSION { + return "github.com/hashicorp/terraform-provider-google/google" + } else if td.VersionName == ALPHA_VERSION || td.VersionName == PRIVATE_VERSION { + return "internal/terraform-next/google-private" + } + return "github.com/hashicorp/terraform-provider-google-beta/google-beta" +} + func FixImports(outputPath string, dumpDiffs bool) { log.Printf("Fixing go import paths") diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index cecb60ec909d..75a077105bc4 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -61,7 +61,6 @@ func NewTerraform(product *api.Product, versionName string, startTime time.Time) } t.Product.SetPropertiesBasedOnVersion(&t.Version) - t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) @@ -78,7 +77,6 @@ func (t Terraform) Generate(outputFolder, productPath, resourceToGenerate string t.GenerateObjects(outputFolder, resourceToGenerate, generateCode, generateDocs) if generateCode { - t.GenerateProduct(outputFolder) t.GenerateOperation(outputFolder) } } @@ -107,8 +105,6 @@ func (t *Terraform) GenerateObject(object api.Resource, outputFolder, productPat // log.Printf("Generating %s tests", object.Name) t.GenerateResourceTests(object, *templateData, outputFolder) t.GenerateResourceSweeper(object, *templateData, outputFolder) - t.GenerateSingularDataSource(object, *templateData, outputFolder) - t.GenerateSingularDataSourceTests(object, *templateData, outputFolder) // log.Printf("Generating %s metadata", object.Name) t.GenerateResourceMetadata(object, 
*templateData, outputFolder) } @@ -129,13 +125,8 @@ func (t *Terraform) GenerateResource(object api.Resource, templateData TemplateD if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } - if object.FrameworkResource { - targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_fw_%s.go", t.ResourceGoFilename(object))) - templateData.GenerateFWResourceFile(targetFilePath, object) - } else { - targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.ResourceGoFilename(object))) - templateData.GenerateResourceFile(targetFilePath, object) - } + targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.ResourceGoFilename(object))) + templateData.GenerateResourceFile(targetFilePath, object) } if generateDocs { @@ -195,49 +186,6 @@ func (t *Terraform) GenerateResourceSweeper(object api.Resource, templateData Te templateData.GenerateSweeperFile(targetFilePath, object) } -func (t *Terraform) GenerateSingularDataSource(object api.Resource, templateData TemplateData, outputFolder string) { - if !object.ShouldGenerateSingularDataSource() { - return - } - - productName := t.Product.ApiName - targetFolder := path.Join(outputFolder, t.FolderName(), "services", productName) - if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { - log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) - } - targetFilePath := path.Join(targetFolder, fmt.Sprintf("data_source_%s.go", t.ResourceGoFilename(object))) - templateData.GenerateDataSourceFile(targetFilePath, object) -} - -func (t *Terraform) GenerateSingularDataSourceTests(object api.Resource, templateData TemplateData, outputFolder string) { - if !object.ShouldGenerateSingularDataSourceTests() { - return - } - - productName := t.Product.ApiName - targetFolder := path.Join(outputFolder, t.FolderName(), "services", productName) - if err := os.MkdirAll(targetFolder, 
os.ModePerm); err != nil { - log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) - } - targetFilePath := path.Join(targetFolder, fmt.Sprintf("data_source_%s_test.go", t.ResourceGoFilename(object))) - templateData.GenerateDataSourceTestFile(targetFilePath, object) - -} - -// GenerateProduct creates the product.go file for a given service directory. -// This will be used to seed the directory and add a package-level comment -// specific to the product. -func (t *Terraform) GenerateProduct(outputFolder string) { - targetFolder := path.Join(outputFolder, t.FolderName(), "services", t.Product.ApiName) - if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { - log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) - } - - targetFilePath := path.Join(targetFolder, "product.go") - templateData := NewTemplateData(outputFolder, t.TargetVersionName) - templateData.GenerateProductFile(targetFilePath, *t.Product) -} - func (t *Terraform) GenerateOperation(outputFolder string) { asyncObjects := google.Select(t.Product.Objects, func(o *api.Resource) bool { return o.AutogenAsync @@ -302,8 +250,10 @@ func (t *Terraform) GenerateIamDocumentation(object api.Resource, templateData T func (t *Terraform) FolderName() string { if t.TargetVersionName == "ga" { return "google" + } else if t.TargetVersionName == "beta" { + return "google-beta" } - return "google-" + t.TargetVersionName + return "google-private" } // Similar to FullResourceName, but override-aware to prevent things like ending in _test. 
@@ -394,7 +344,7 @@ func (t Terraform) getCommonCopyFiles(versionName string, generateCode, generate "go.sum": "third_party/terraform/go.sum", "go.mod": "third_party/terraform/go.mod", ".go-version": "third_party/terraform/.go-version", - "terraform-registry-manifest.json": "third_party/terraform/terraform-registry-manifest.json", + "terraform-registry-manifest.json": "third_party/terraform/terraform-registry-manifest.json.tmpl", } maps.Copy(commonCopyFiles, singleFiles) @@ -457,9 +407,7 @@ func (t Terraform) CopyFileList(outputFolder string, files map[string]string, ge if filepath.Ext(target) == ".go" || (filepath.Ext(target) == ".mod" && generateCode) { t.replaceImportPath(outputFolder, target) } - if filepath.Ext(target) == ".go" || filepath.Ext(target) == ".markdown" { - t.addCopyfileHeader(source, outputFolder, target) - } + if filepath.Ext(target) == ".go" { t.addHashicorpCopyRightHeader(outputFolder, target) } @@ -559,70 +507,10 @@ func (t Terraform) CompileFileList(outputFolder string, files map[string]string, continue } t.replaceImportPath(outputFolder, target) - if filepath.Ext(targetFile) == ".go" || filepath.Ext(targetFile) == ".markdown" { - t.addCopyfileHeader(source, outputFolder, target) - } t.addHashicorpCopyRightHeader(outputFolder, target) } } -func (t Terraform) addCopyfileHeader(srcpath, outputFolder, target string) { - githubPrefix := "https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/" - if !strings.HasPrefix(srcpath, githubPrefix) { - srcpath = githubPrefix + srcpath - } - - targetFile := filepath.Join(outputFolder, target) - sourceByte, err := os.ReadFile(targetFile) - if err != nil { - log.Fatalf("Cannot read file %s to add copy file header: %s", targetFile, err) - } - - srcStr := string(sourceByte) - if strings.Contains(srcStr, "*** AUTO GENERATED CODE *** Type: Handwritten ***") { - return - } - - templateFormat := `// ---------------------------------------------------------------------------- -// -// *** AUTO 
GENERATED CODE *** Type: Handwritten *** -// -// ---------------------------------------------------------------------------- -// -// This code is generated by Magic Modules using the following: -// -// Source file: %s -// -// DO NOT EDIT this file directly. Any changes made to this file will be -// overwritten during the next generation cycle. -// -// ---------------------------------------------------------------------------- -%s` - content := srcStr - if filepath.Ext(target) == ".markdown" { - // insert the header after --- - templateFormat = "---\n" + strings.Replace(templateFormat, "//", "#", -1) - content = strings.TrimPrefix(srcStr, "---\n") - } - - fileStr := fmt.Sprintf(templateFormat, srcpath, content) - - sourceByte = []byte(fileStr) - // format go file - if filepath.Ext(targetFile) == ".go" { - sourceByte, err = format.Source(sourceByte) - if err != nil { - log.Printf("error formatting %s: %s\n", targetFile, err) - return - } - } - - err = os.WriteFile(targetFile, sourceByte, 0644) - if err != nil { - log.Fatalf("Cannot write file %s to add copy file header: %s", target, err) - } -} - func (t Terraform) addHashicorpCopyRightHeader(outputFolder, target string) { if !expectedOutputFolder(outputFolder) { log.Printf("Unexpected output folder (%s) detected "+ @@ -730,8 +618,9 @@ func (t Terraform) replaceImportPath(outputFolder, target string) { tpg = TERRAFORM_PROVIDER_BETA dir = RESOURCE_DIRECTORY_BETA default: - tpg = "github.com/hashicorp/terraform-provider-google-" + t.TargetVersionName - dir = "google-" + t.TargetVersionName + tpg = TERRAFORM_PROVIDER_PRIVATE + dir = RESOURCE_DIRECTORY_PRIVATE + } sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(tpg+"/"+dir), -1) @@ -761,7 +650,7 @@ func (t Terraform) ProviderFromVersion() string { case "beta": dir = RESOURCE_DIRECTORY_BETA default: - dir = "google-" + t.TargetVersionName + dir = RESOURCE_DIRECTORY_PRIVATE } return dir } @@ -912,6 +801,5 @@ func (t Terraform) 
SupportedProviderVersions() []string { type ProviderWithProducts struct { Terraform - Compiler string Products []*api.Product } diff --git a/mmv1/provider/terraform_tgc.go b/mmv1/provider/terraform_tgc.go index 2a0bf7a9d7e2..2b6a38e9dba4 100644 --- a/mmv1/provider/terraform_tgc.go +++ b/mmv1/provider/terraform_tgc.go @@ -60,7 +60,6 @@ func NewTerraformGoogleConversion(product *api.Product, versionName string, star } t.Product.SetPropertiesBasedOnVersion(&t.Version) - t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) @@ -124,9 +123,8 @@ func (tgc TerraformGoogleConversion) GenerateResource(object api.Resource, templ log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } - templatePath := "templates/tgc/resource_converter.go.tmpl" targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s.go", productName, google.Underscore(object.Name))) - templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) + templateData.GenerateTGCResourceFile(targetFilePath, object) } // Generate the IAM policy for this object. 
This is used to query and test @@ -354,7 +352,6 @@ func (tgc TerraformGoogleConversion) CopyCommonFiles(outputFolder string, genera tgc.CopyFileList(outputFolder, retrieveTestSourceCodeWithLocation(".go")) resourceConverters := map[string]string{ - "../caiasset/asset.go": "third_party/tgc/caiasset/asset.go", "converters/google/resources/cai/constants.go": "third_party/tgc/cai/constants.go", "converters/google/resources/constants.go": "third_party/tgc/constants.go", "converters/google/resources/cai.go": "third_party/tgc/cai.go", diff --git a/mmv1/provider/terraform_tgc_cai2hcl.go b/mmv1/provider/terraform_tgc_cai2hcl.go index 6a0da4c838b0..91fd12498cbc 100644 --- a/mmv1/provider/terraform_tgc_cai2hcl.go +++ b/mmv1/provider/terraform_tgc_cai2hcl.go @@ -44,7 +44,6 @@ func NewCaiToTerraformConversion(product *api.Product, versionName string, start } t.Product.SetPropertiesBasedOnVersion(&t.Version) - t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) @@ -63,7 +62,7 @@ func (cai2hcl CaiToTerraformConversion) CopyCommonFiles(outputFolder string, gen if !generateCode { return } - log.Print("Copying cai2hcl common files") + log.Printf("Coping cai2hcl common files") if err := os.MkdirAll(outputFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating output directory %v: %v", outputFolder, err)) diff --git a/mmv1/provider/terraform_tgc_next.go b/mmv1/provider/terraform_tgc_next.go index 8fd165bfd61a..59ead87d5698 100644 --- a/mmv1/provider/terraform_tgc_next.go +++ b/mmv1/provider/terraform_tgc_next.go @@ -21,28 +21,17 @@ import ( "fmt" "log" "os" - "path" "path/filepath" - "strings" "time" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" - "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" "github.com/otiai10/copy" ) -// TerraformGoogleConversionNext is for both tfplan2cai 
and cai2hcl conversions +// This proivder is for both tfplan2cai and cai2hcl conversions, // and copying other files, such as transport.go type TerraformGoogleConversionNext struct { - ResourceCount int - - ResourcesForVersion []ResourceIdentifier - - // Multiple Terraform resources can share the same CAI resource type. - // For example, "google_compute_region_autoscaler" and "google_region_autoscaler" - ResourcesByCaiResourceType map[string][]ResourceIdentifier - TargetVersionName string Version product.Version @@ -52,24 +41,15 @@ type TerraformGoogleConversionNext struct { StartTime time.Time } -type ResourceIdentifier struct { - ServiceName string - TerraformName string - ResourceName string - AliasName string // It can be "Default" or the same with ResourceName -} - func NewTerraformGoogleConversionNext(product *api.Product, versionName string, startTime time.Time) TerraformGoogleConversionNext { t := TerraformGoogleConversionNext{ - Product: product, - TargetVersionName: versionName, - Version: *product.VersionObjOrClosest(versionName), - StartTime: startTime, - ResourcesByCaiResourceType: make(map[string][]ResourceIdentifier), + Product: product, + TargetVersionName: versionName, + Version: *product.VersionObjOrClosest(versionName), + StartTime: startTime, } t.Product.SetPropertiesBasedOnVersion(&t.Version) - t.Product.SetCompiler(ProviderName(t)) for _, r := range t.Product.Objects { r.SetCompiler(ProviderName(t)) r.ImportPath = ImportPathFromVersion(versionName) @@ -79,110 +59,44 @@ func NewTerraformGoogleConversionNext(product *api.Product, versionName string, } func (tgc TerraformGoogleConversionNext) Generate(outputFolder, productPath, resourceToGenerate string, generateCode, generateDocs bool) { - for _, object := range tgc.Product.Objects { - object.ExcludeIfNotInVersion(&tgc.Version) - - if resourceToGenerate != "" && object.Name != resourceToGenerate { - log.Printf("Excluding %s per user request", object.Name) - continue - } - - 
tgc.GenerateObject(*object, outputFolder, tgc.TargetVersionName, generateCode, generateDocs) - } + tgc.GenerateTfToCaiObjects(outputFolder, resourceToGenerate, generateCode, generateDocs) + tgc.GenerateCaiToHclObjects(outputFolder, resourceToGenerate, generateCode, generateDocs) } -func (tgc TerraformGoogleConversionNext) GenerateObject(object api.Resource, outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { - if !object.IncludeInTGCNext { - return - } - - templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) - - if !object.IsExcluded() { - tgc.GenerateResource(object, *templateData, outputFolder, generateCode, generateDocs) - tgc.GenerateResourceTests(object, *templateData, outputFolder) - } +func (tgc TerraformGoogleConversionNext) GenerateTfToCaiObjects(outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { } -func (tgc TerraformGoogleConversionNext) GenerateResource(object api.Resource, templateData TemplateData, outputFolder string, generateCode, generateDocs bool) { - productName := tgc.Product.ApiName - targetFolder := path.Join(outputFolder, "pkg/services", productName) - if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { - log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) - } - - converters := []string{"tfplan2cai", "cai2hcl"} - for _, converter := range converters { - templatePath := fmt.Sprintf("templates/tgc_next/%s/resource_converter.go.tmpl", converter) - targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s_%s.go", productName, google.Underscore(object.Name), converter)) - templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) - } - - templatePath := "templates/tgc_next/services/resource.go.tmpl" - targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s.go", productName, google.Underscore(object.Name))) - templateData.GenerateTGCResourceFile(templatePath, targetFilePath, object) +func (tgc 
TerraformGoogleConversionNext) GenerateCaiToHclObjects(outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { } -func (tgc TerraformGoogleConversionNext) GenerateCaiToHclObjects(outputFolder, resourceToGenerate string, generateCode, generateDocs bool) { +func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { + tgc.CompileTfToCaiCommonFiles(outputFolder, products) + tgc.CompileCaiToHclCommonFiles(outputFolder, products) } -func (tgc *TerraformGoogleConversionNext) GenerateResourceTests(object api.Resource, templateData TemplateData, outputFolder string) { - eligibleExample := false - for _, example := range object.Examples { - if !example.ExcludeTest { - if object.ProductMetadata.VersionObjOrClosest(tgc.Version.Name).CompareTo(object.ProductMetadata.VersionObjOrClosest(example.MinVersion)) >= 0 { - eligibleExample = true - break - } - } - } - if !eligibleExample { - return - } +func (tgc TerraformGoogleConversionNext) CompileTfToCaiCommonFiles(outputFolder string, products []*api.Product) { + log.Printf("Compiling common files for tgc tfplan2cai.") - productName := tgc.Product.ApiName - targetFolder := path.Join(outputFolder, "test", "services", productName) - if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { - log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + resourceConverters := map[string]string{ + "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", + "pkg/tfplan2cai/converters/services/compute/compute_instance_helpers.go": "third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", + "pkg/tfplan2cai/converters/services/compute/metadata.go": "third_party/terraform/services/compute/metadata.go.tmpl", } - targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s_%s_generated_test.go", productName, google.Underscore(object.Name))) - 
templateData.GenerateTGCNextTestFile(targetFilePath, object) + templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) + tgc.CompileFileList(outputFolder, resourceConverters, *templateData, products) } -func (tgc TerraformGoogleConversionNext) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { - tgc.generateResourcesForVersion(products) +func (tgc TerraformGoogleConversionNext) CompileCaiToHclCommonFiles(outputFolder string, products []*api.Product) { + log.Printf("Compiling common files for tgc tfplan2cai.") resourceConverters := map[string]string{ - // common - "pkg/transport/config.go": "third_party/terraform/transport/config.go.tmpl", - "pkg/transport/provider_handwritten_endpoint.go": "third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl", - "pkg/tpgresource/common_diff_suppress.go": "third_party/terraform/tpgresource/common_diff_suppress.go", - "pkg/provider/provider.go": "third_party/terraform/provider/provider.go.tmpl", - "pkg/provider/provider_validators.go": "third_party/terraform/provider/provider_validators.go", - "pkg/provider/provider_mmv1_resources.go": "templates/tgc_next/provider/provider_mmv1_resources.go.tmpl", - - // services - "pkg/services/compute/compute_instance_helpers.go": "third_party/terraform/services/compute/compute_instance_helpers.go.tmpl", - "pkg/services/compute/metadata.go": "third_party/terraform/services/compute/metadata.go.tmpl", - - // tfplan2cai - "pkg/tfplan2cai/converters/resource_converters.go": "templates/tgc_next/tfplan2cai/resource_converters.go.tmpl", - - // cai2hcl "pkg/cai2hcl/converters/resource_converters.go": "templates/tgc_next/cai2hcl/resource_converters.go.tmpl", } - templateData := NewTemplateData(outputFolder, tgc.TargetVersionName) tgc.CompileFileList(outputFolder, resourceConverters, *templateData, products) } func (tgc TerraformGoogleConversionNext) CompileFileList(outputFolder string, files map[string]string, fileTemplate TemplateData, 
products []*api.Product) { - providerWithProducts := TgcWithProducts{ - TerraformGoogleConversionNext: tgc, - Compiler: "terraformgoogleconversion-codegen", - Products: products, - } - if err := os.MkdirAll(outputFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating output directory %v: %v", outputFolder, err)) } @@ -200,7 +114,7 @@ func (tgc TerraformGoogleConversionNext) CompileFileList(outputFolder string, fi formatFile := filepath.Ext(targetFile) == ".go" - fileTemplate.GenerateFile(targetFile, source, providerWithProducts, formatFile, templates...) + fileTemplate.GenerateFile(targetFile, source, tgc, formatFile, templates...) tgc.replaceImportPath(outputFolder, target) } } @@ -220,31 +134,8 @@ func (tgc TerraformGoogleConversionNext) CopyCommonFiles(outputFolder string, ge log.Println(fmt.Errorf("error copying directory %v: %v", outputFolder, err)) } - resourceConverters := map[string]string{ - // common - "pkg/transport/batcher.go": "third_party/terraform/transport/batcher.go", - "pkg/transport/retry_transport.go": "third_party/terraform/transport/retry_transport.go", - "pkg/transport/retry_utils.go": "third_party/terraform/transport/retry_utils.go", - "pkg/transport/header_transport.go": "third_party/terraform/transport/header_transport.go", - "pkg/transport/error_retry_predicates.go": "third_party/terraform/transport/error_retry_predicates.go", - "pkg/transport/bigtable_client_factory.go": "third_party/terraform/transport/bigtable_client_factory.go", - "pkg/transport/transport.go": "third_party/terraform/transport/transport.go", - "pkg/tpgresource/utils.go": "third_party/terraform/tpgresource/utils.go", - "pkg/tpgresource/self_link_helpers.go": "third_party/terraform/tpgresource/self_link_helpers.go", - "pkg/tpgresource/hashcode.go": "third_party/terraform/tpgresource/hashcode.go", - "pkg/tpgresource/regional_utils.go": "third_party/terraform/tpgresource/regional_utils.go", - "pkg/tpgresource/field_helpers.go": 
"third_party/terraform/tpgresource/field_helpers.go", - "pkg/tpgresource/service_scope.go": "third_party/terraform/tpgresource/service_scope.go", - "pkg/provider/mtls_util.go": "third_party/terraform/provider/mtls_util.go", - "pkg/verify/validation.go": "third_party/terraform/verify/validation.go", - "pkg/verify/path_or_contents.go": "third_party/terraform/verify/path_or_contents.go", - "pkg/version/version.go": "third_party/terraform/version/version.go", - - // services - "pkg/services/compute/image.go": "third_party/terraform/services/compute/image.go", - "pkg/services/compute/disk_type.go": "third_party/terraform/services/compute/disk_type.go", - } - tgc.CopyFileList(outputFolder, resourceConverters) + tgc.CopyTfToCaiCommonFiles(outputFolder) + tgc.CopyCaiToHclCommonFiles(outputFolder) } func (tgc TerraformGoogleConversionNext) CopyTfToCaiCommonFiles(outputFolder string) { @@ -301,72 +192,9 @@ func (tgc TerraformGoogleConversionNext) replaceImportPath(outputFolder, target // replace google to google-beta gaImportPath := ImportPathFromVersion("ga") - sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(TGC_PROVIDER+"/"+RESOURCE_DIRECTORY_TGC), -1) - sourceByte = bytes.Replace(sourceByte, []byte(TERRAFORM_PROVIDER_GA+"/version"), []byte(TGC_PROVIDER+"/"+RESOURCE_DIRECTORY_TGC+"/version"), -1) - + sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(TERRAFORM_PROVIDER_BETA+"/"+RESOURCE_DIRECTORY_BETA), -1) err = os.WriteFile(targetFile, sourceByte, 0644) if err != nil { log.Fatalf("Cannot write file %s to replace import path: %s", target, err) } } - -// Generates the list of resources, and gets the count of resources. 
-// The resource object has the format -// -// { -// terraform_name: -// resource_name: -// } -// -// The variable resources_for_version is used to generate resources in file -// mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl -func (tgc *TerraformGoogleConversionNext) generateResourcesForVersion(products []*api.Product) { - resourcesByCaiResourceType := make(map[string][]ResourceIdentifier) - - for _, productDefinition := range products { - service := strings.ToLower(productDefinition.Name) - for _, object := range productDefinition.Objects { - if object.Exclude || object.NotInVersion(productDefinition.VersionObjOrClosest(tgc.TargetVersionName)) { - continue - } - - if !object.IncludeInTGCNext { - continue - } - - tgc.ResourceCount++ - - resourceIdentifier := ResourceIdentifier{ - ServiceName: service, - TerraformName: object.TerraformName(), - ResourceName: object.ResourceName(), - AliasName: object.ResourceName(), - } - tgc.ResourcesForVersion = append(tgc.ResourcesForVersion, resourceIdentifier) - - caiResourceType := fmt.Sprintf("%s.%s", service, object.CaiResourceType()) - if _, ok := resourcesByCaiResourceType[caiResourceType]; !ok { - resourcesByCaiResourceType[caiResourceType] = make([]ResourceIdentifier, 0) - } - resourcesByCaiResourceType[caiResourceType] = append(resourcesByCaiResourceType[caiResourceType], resourceIdentifier) - } - } - - for caiResourceType, resources := range resourcesByCaiResourceType { - // If no other Terraform resources share the API resource type, override the alias name as "Default" - if len(resources) == 1 { - for _, resourceIdentifier := range resources { - resourceIdentifier.AliasName = "Default" - tgc.ResourcesByCaiResourceType[caiResourceType] = []ResourceIdentifier{resourceIdentifier} - } - } else { - tgc.ResourcesByCaiResourceType[caiResourceType] = resources - } - } -} - -type TgcWithProducts struct { - TerraformGoogleConversionNext - Compiler string - Products []*api.Product -} diff --git 
a/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl b/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl index cd0c2dfc157e..d4e0e3d0a716 100644 --- a/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl +++ b/mmv1/templates/terraform/constants/artifact_registry_repository.go.tmpl @@ -68,11 +68,23 @@ func parseDurationAsSeconds(v string) (int, bool) { // Like tpgresource.DurationDiffSuppress, but supports 'd' func durationDiffSuppress(k, oldr, newr string, d *schema.ResourceData) bool { - oldSeconds, ok := parseDurationAsSeconds(oldr) + o, n := d.GetChange(k) + old, ok := o.(string) if !ok { return false } - newSeconds, ok := parseDurationAsSeconds(newr) + new, ok := n.(string) + if !ok { + return false + } + if old == new { + return true + } + oldSeconds, ok := parseDurationAsSeconds(old) + if !ok { + return false + } + newSeconds, ok := parseDurationAsSeconds(new) if !ok { return false } @@ -80,59 +92,15 @@ func durationDiffSuppress(k, oldr, newr string, d *schema.ResourceData) bool { } func mapHashID(v any) int { - replaceNestedValue(v, []string{"condition", "older_than"}, expandDuration) - replaceNestedValue(v, []string{"condition", "newer_than"}, expandDuration) - return schema.HashString(fmt.Sprintf("%v", v)) -} - -func expandDuration(v any) (any, bool) { - if val, ok := v.(string); ok { - if secs, ok := parseDurationAsSeconds(val); ok { - return fmt.Sprintf("%ds", secs), true - } - } - return nil, false - -} - -// Replace a value in a schema object, if it exists. 
-// Nested maps follow the pattern map[string]any -> [1]any -> map[string]any -func replaceNestedValue(obj any, keys []string, replaceFunc func(any) (any, bool)) { - if len(keys) == 0 { - return - } - next := obj - for _, key := range keys[:len(keys)-1] { - nextMap, ok := next.(map[string]any) - if !ok { - return - } - arrObj, ok := nextMap[key] - if !ok { - return - } - arr, ok := arrObj.([]any) - if !ok { - return - } - if len(arr) != 1 { - return - } - next = arr[0] - } - lastMap, ok := next.(map[string]any) + obj, ok := v.(map[string]any) if !ok { - return + return 0 } - lastKey := keys[len(keys)-1] - last, ok := lastMap[lastKey] + s, ok := obj["id"].(string) if !ok { - return - } - result, ok := replaceFunc(last) - if ok { - lastMap[lastKey] = result + return 0 } + return schema.HashString(s) } func isDefaultEnum(val any) bool { diff --git a/mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl b/mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl deleted file mode 100644 index 0274329133d3..000000000000 --- a/mmv1/templates/terraform/constants/beyondcorp_security_gateway.go.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -func beyondcorpSecurityGatewayHubsHash(v interface{}) int { - if v == nil { - return 0 - } - - var buf bytes.Buffer - m := v.(map[string]interface{}) - - buf.WriteString(fmt.Sprintf("%s-", m["region"].(string))) - - return tpgresource.Hashcode(buf.String()) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl b/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl index 8a1bc0702c0c..3a5f4b930334 100644 --- a/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl +++ b/mmv1/templates/terraform/constants/bigquery_dataset.go.tmpl @@ -1,11 +1,5 @@ const datasetIdRegexp = `^[0-9A-Za-z_]+$` -var bigqueryDatasetAccessPrimitiveToRoleMap = map[string]string{ - "OWNER": "roles/bigquery.dataOwner", - "WRITER": "roles/bigquery.dataEditor", - "READER": "roles/bigquery.dataViewer", -} - func validateDatasetId(v interface{}, k string) (ws []string, errors []error) { value := v.(string) if !regexp.MustCompile(datasetIdRegexp).MatchString(value) { @@ -29,30 +23,3 @@ func validateDefaultTableExpirationMs(v interface{}, k string) (ws []string, err return } - -{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} -// bigqueryDatasetAccessHash is a custom hash function for the access block. -// It normalizes the 'role' field before hashing, treating legacy roles -// and their modern IAM equivalents as the same. -func resourceBigqueryDatasetAccessHash(v interface{}) int { - m, ok := v.(map[string]interface{}) - if !ok { - return 0 - } - // Make a copy of the map to avoid modifying the underlying data. - copy := make(map[string]interface{}, len(m)) - for k, val := range m { - copy[k] = val - } - - // Normalize the role if it exists and matches a legacy role. - if role, ok := copy["role"].(string); ok { - if newRole, ok := bigqueryDatasetAccessPrimitiveToRoleMap[role]; ok { - copy["role"] = newRole - } - } - - // Use the default HashResource function on the (potentially modified) copy. 
- return schema.HashResource(bigqueryDatasetAccessSchema())(copy) -} -{{- end }} diff --git a/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl b/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl deleted file mode 100644 index e09c9265a78d..000000000000 --- a/mmv1/templates/terraform/constants/dataplex_entry.go.tmpl +++ /dev/null @@ -1,193 +0,0 @@ -// GetEntry supports up to 100 aspects. Therefore we set a threshold at 99. -const maxAspectNumber = 99 - -// NumberOfAspectsValidation checks if the number of aspects on an entry exceeds certain threshold. -func NumberOfAspectsValidation(i interface{}, k string) (warnings []string, errors []error) { - s, isSlice := i.([]interface{}) - m, isMap := i.(map[string]interface{}) - - if !isSlice && !isMap { - errors = append(errors, fmt.Errorf("expected type of field %q to be array, but got %T", k, i)) - return warnings, errors - } - - if len(s)+len(m) > maxAspectNumber { - errors = append(errors, fmt.Errorf( - "field %q has an invalid content: %q. The maximal number of aspects is 99.", - k, i, - )) - } - - return warnings, errors -} - -// ProjectNumberValidation checks if the input string conforms to the pattern: -// "projects//" -func ProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - - if !ok { - errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) - return warnings, errors - } - - var projectNumberRegex = regexp.MustCompile(`^projects\/[1-9]\d*\/.+$`) - if !projectNumberRegex.MatchString(v) { - errors = append(errors, fmt.Errorf( - "field %q has an invalid format: %q. Expected format: 'projects//'. 
Please note that project IDs are not supported.", - k, v, - )) - } - - return warnings, errors -} - -// ProjectNumberValidation checks if the input string conforms to the pattern: -// "projects//" -func AspectProjectNumberValidation(i interface{}, k string) (warnings []string, errors []error) { - v, ok := i.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected type of field %q to be string, but got %T", k, i)) - return warnings, errors - } - - var numberDotAnythingRegex = regexp.MustCompile(`^[1-9]\d*\..+$`) - - if !numberDotAnythingRegex.MatchString(v) { - errors = append(errors, fmt.Errorf( - "field %q has an invalid format: %q. Expected format: '.anything'. Please note that project IDs are not supported.", - k, v, - )) - } - - return warnings, errors -} - -// FilterAspects filters the aspects in res based on aspectKeySet. -// It returns an error if type assertions fail. -func FilterAspects(aspectKeySet map[string]struct{}, res map[string]interface{}) error { - aspectsRaw, ok := res["aspects"] - if !ok || aspectsRaw == nil { - return nil - } - - aspectsMap, ok := aspectsRaw.(map[string]interface{}) - if !ok { - return fmt.Errorf("FilterAspects: 'aspects' field is not a map[string]interface{}, got %T", aspectsRaw) - } - - for key := range aspectsMap { - if _, keep := aspectKeySet[key]; !keep { - delete(aspectsMap, key) - } - } - return nil -} - -// AddAspectsToSet adds aspect keys from the aspects interface to the aspectKeySet. -// It returns an error if type assertions fail or expected keys are missing. 
-func AddAspectsToSet(aspectKeySet map[string]struct{}, aspects interface{}) error { - if aspects == nil { - return nil - } - aspectsSlice, ok := aspects.([]interface{}) - if !ok { - return fmt.Errorf("AddAspectsToSet: input 'aspects' is not a []interface{}, got %T", aspects) - } - - for i, aspectItemRaw := range aspectsSlice { - aspectMap, ok := aspectItemRaw.(map[string]interface{}) - if !ok { - return fmt.Errorf("AddAspectsToSet: item at index %d is not a map[string]interface{}, got %T", i, aspectItemRaw) - } - - keyRaw, keyExists := aspectMap["aspect_key"] - if !keyExists { - return fmt.Errorf("AddAspectsToSet: 'aspect_key' not found in aspect item at index %d", i) - } - - keyString, ok := keyRaw.(string) - if !ok { - return fmt.Errorf("AddAspectsToSet: 'aspect_key' in item at index %d is not a string, got %T", i, keyRaw) - } - aspectKeySet[keyString] = struct{}{} - } - return nil -} - -// InverseTransformAspects converts the "aspects" map back to a slice of maps, -// re-inserting the "aspectKey". Modifies obj in-place. -// It returns an error if type assertions fail. -func InverseTransformAspects(res map[string]interface{}) error { - aspectsRaw, ok := res["aspects"] - if !ok || aspectsRaw == nil { - return nil - } - - originalMap, ok := aspectsRaw.(map[string]interface{}) - if !ok { - return fmt.Errorf("InverseTransformAspects: 'aspects' field is not a map[string]interface{}, got %T", aspectsRaw) - } - - newSlice := make([]interface{}, 0, len(originalMap)) - - for key, value := range originalMap { - innerMap, ok := value.(map[string]interface{}) - if !ok { - return fmt.Errorf("InverseTransformAspects: value for key '%s' is not a map[string]interface{}, got %T", key, value) - } - box := make(map[string]interface{}, 2) - box["aspectKey"] = key - box["aspect"] = innerMap - newSlice = append(newSlice, box) - } - res["aspects"] = newSlice - return nil -} - -// TransformAspects concisely transforms the "aspects" slice within obj into a map. 
-// Modifies obj in-place. -// It returns an error if type assertions fail or expected keys are missing. -func TransformAspects(obj map[string]interface{}) error { - aspectsRaw, ok := obj["aspects"] - if !ok || aspectsRaw == nil { - return nil - } - - originalSlice, ok := aspectsRaw.([]interface{}) - if !ok { - return fmt.Errorf("TransformAspects: 'aspects' field is not a []interface{}, got %T", aspectsRaw) - } - - newMap := make(map[string]interface{}, len(originalSlice)) - for i, item := range originalSlice { - aspectMap, ok := item.(map[string]interface{}) - if !ok { - return fmt.Errorf("TransformAspects: item in 'aspects' slice at index %d is not a map[string]interface{}, got %T", i, item) - } - - keyRaw, keyExists := aspectMap["aspectKey"] - if !keyExists { - return fmt.Errorf("TransformAspects: 'aspectKey' not found in aspect item at index %d", i) - } - key, ok := keyRaw.(string) - if !ok { - return fmt.Errorf("TransformAspects: 'aspectKey' in item at index %d is not a string, got %T", i, keyRaw) - } - - valueRaw, valueExists := aspectMap["aspect"] - if !valueExists { - newMap[key] = map[string]interface{}{"data": map[string]interface{}{}} - continue - } - - value, ok := valueRaw.(map[string]interface{}) - if ok { - newMap[key] = value - } else { - newMap[key] = map[string]interface{}{"data": map[string]interface{}{}} - } - } - obj["aspects"] = newMap - return nil -} diff --git a/mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl b/mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl deleted file mode 100644 index e28149a702c4..000000000000 --- a/mmv1/templates/terraform/constants/datastream_connection_profile.go.tmpl +++ /dev/null @@ -1,23 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} - -func resourceDataStreamStreamCreateWithoutValidationDiffSuppress(k, old, new string, d *schema.ResourceData) bool { - // If the old value was "false" and the new value is now unset (empty string), - // return true to suppress the diff. - if (old == "" && new == "false") || (old == "false" && new == "") { - return true - } - - // Otherwise, do not suppress the diff. - return false -} \ No newline at end of file diff --git a/mmv1/templates/terraform/constants/disk.tmpl b/mmv1/templates/terraform/constants/disk.tmpl index 36a4041ce642..e73ff28c98bb 100644 --- a/mmv1/templates/terraform/constants/disk.tmpl +++ b/mmv1/templates/terraform/constants/disk.tmpl @@ -43,20 +43,6 @@ func IsDiskShrinkage(_ context.Context, old, new, _ interface{}) bool { return new.(int) < old.(int) } -func matchImageLink(old string) (string, string, bool) { - // 'old' is read from the API. - // In GCP It has the format 'https://www.googleapis.com/compute/v1/projects/(%s)/global/images/(%s)' - matches := resolveImageLink.FindStringSubmatch(old) - if matches == nil { - // In alternate universes, it has the format https://compute.%s/compute/[a-z0-9]+/projects/(%s)/global/images/(%s) - matches = resolveImageUniverseLink.FindStringSubmatch(old) - if matches == nil { - return "", "", false - } - } - return matches[1], matches[2], true -} - // We cannot suppress the diff for the case when family name is not part of the image name since we can't // make a network call in a DiffSuppressFunc. 
func DiskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { @@ -66,11 +52,16 @@ func DiskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { // you are probably looking for the diskImageFamilyEquals function and its subfunctions. // In order to keep this maintainable, we need to ensure that the positive and negative examples // in resource_compute_disk_test.go are as complete as possible. - - oldProject, oldName, matched := matchImageLink(old) - if matched == false { + + // 'old' is read from the API. + // It always has the format 'https://www.googleapis.com/compute/v1/projects/(%s)/global/images/(%s)' + matches := resolveImageLink.FindStringSubmatch(old) + if matches == nil { + // Image read from the API doesn't have the expected format. In practice, it should never happen return false } + oldProject := matches[1] + oldName := matches[2] // Partial or full self link family if resolveImageProjectFamily.MatchString(new) { diff --git a/mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl b/mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl deleted file mode 100644 index 82d8e6be96f6..000000000000 --- a/mmv1/templates/terraform/constants/iam_workforce_pool_provider_key.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -const workforcePoolProviderKeyIdRegexp = `^[a-z0-9-]{4,32}$` - -func ValidateWorkforcePoolProviderKeyId(v interface{}, k string) (ws []string, errors []error) { - value := v.(string) - - if strings.HasPrefix(value, "gcp-") { - errors = append(errors, fmt.Errorf( - "%q (%q) can not start with \"gcp-\". 
" + - "The prefix `gcp-` is reserved for use by Google, and may not be specified.", k, value)) - } - - if !regexp.MustCompile(workforcePoolProviderKeyIdRegexp).MatchString(value) { - errors = append(errors, fmt.Errorf( - "%q (%q) must be 4-32 characters, and may contain the characters [a-z0-9-].", k, value)) - } - - return -} diff --git a/mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl deleted file mode 100644 index c00e6c226f8b..000000000000 --- a/mmv1/templates/terraform/constants/iam_workload_identity_pool_managed_identity.go.tmpl +++ /dev/null @@ -1,44 +0,0 @@ -const workloadIdentityPoolManagedIdentityIdRegexp = `^[0-9a-z-]+$` - -func ValidateWorkloadIdentityPoolManagedIdentityId(v interface{}, k string) (ws []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(workloadIdentityPoolManagedIdentityIdRegexp).MatchString(value) { - errors = append(errors, fmt.Errorf( - "%q must contain only lowercase letters (a-z), numbers (0-9), or dashes (-)", k)) - } - - if len(value) < 2 { - errors = append(errors, fmt.Errorf( - "%q cannot be less than 2 characters", k)) - return - } - - if len(value) > 63 { - errors = append(errors, fmt.Errorf( - "%q cannot be greater than 63 characters", k)) - } - - isLowerAlphaNumeric := func(r byte) bool { - return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'z') - } - - firstChar := value[0] - if !isLowerAlphaNumeric(firstChar) { - errors = append(errors, fmt.Errorf( - "%q must start with an alphanumeric character", k)) - } - - lastChar := value[len(value) - 1] - if !isLowerAlphaNumeric(lastChar) { - errors = append(errors, fmt.Errorf( - "%q must end with an alphanumeric character", k)) - } - - if strings.HasPrefix(value, "gcp-") { - errors = append(errors, fmt.Errorf( - "%q (%q) can not start with \"gcp-\"", k, value)) - } - - return -} diff --git 
a/mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl b/mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl deleted file mode 100644 index d811c515103b..000000000000 --- a/mmv1/templates/terraform/constants/iam_workload_identity_pool_namespace.go.tmpl +++ /dev/null @@ -1,44 +0,0 @@ -const workloadIdentityPoolNamespaceIdRegexp = `^[0-9a-z-]+$` - -func ValidateWorkloadIdentityPoolNamespaceId(v interface{}, k string) (ws []string, errors []error) { - value := v.(string) - - if !regexp.MustCompile(workloadIdentityPoolNamespaceIdRegexp).MatchString(value) { - errors = append(errors, fmt.Errorf( - "%q must contain only lowercase letters (a-z), numbers (0-9), or dashes (-)", k)) - } - - if len(value) < 2 { - errors = append(errors, fmt.Errorf( - "%q cannot be less than 2 characters", k)) - return - } - - if len(value) > 63 { - errors = append(errors, fmt.Errorf( - "%q cannot be greater than 63 characters", k)) - } - - isLowerAlphaNumeric := func(r byte) bool { - return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'z') - } - - firstChar := value[0] - if !isLowerAlphaNumeric(firstChar) { - errors = append(errors, fmt.Errorf( - "%q must start with an alphanumeric character", k)) - } - - lastChar := value[len(value) - 1] - if !isLowerAlphaNumeric(lastChar) { - errors = append(errors, fmt.Errorf( - "%q must end with an alphanumeric character", k)) - } - - if strings.HasPrefix(value, "gcp-") { - errors = append(errors, fmt.Errorf( - "%q (%q) can not start with \"gcp-\"", k, value)) - } - - return -} diff --git a/mmv1/templates/terraform/constants/region_security_policy.go.tmpl b/mmv1/templates/terraform/constants/region_security_policy.go.tmpl deleted file mode 100644 index 8c988835677e..000000000000 --- a/mmv1/templates/terraform/constants/region_security_policy.go.tmpl +++ /dev/null @@ -1,31 +0,0 @@ -{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} -func 
resourceComputeRegionSecurityPolicySpecRulesDiffSuppress(k, o, n string, d *schema.ResourceData) bool { - oldCount, newCount := d.GetChange("rules.#") - var count int - // There could be duplicates - worth continuing even if the counts are unequal. - if oldCount.(int) < newCount.(int) { - count = newCount.(int) - } else { - count = oldCount.(int) - } - - old := make([]interface{}, 0, count) - new := make([]interface{}, 0, count) - for i := 0; i < count; i++ { - o, n := d.GetChange(fmt.Sprintf("rules.%d", i)) - - if o != nil { - old = append(old, o) - } - if n != nil { - new = append(new, n) - } - } - - oldSet := schema.NewSet(schema.HashResource(ResourceComputeRegionSecurityPolicy().Schema["rules"].Elem.(*schema.Resource)), old) - newSet := schema.NewSet(schema.HashResource(ResourceComputeRegionSecurityPolicy().Schema["rules"].Elem.(*schema.Resource)), new) - - return oldSet.Equal(newSet) -} - -{{- end }} \ No newline at end of file diff --git a/mmv1/templates/terraform/constants/regional_secret_version.go.tmpl b/mmv1/templates/terraform/constants/regional_secret_version.go.tmpl deleted file mode 100644 index 9bbc39f51dc9..000000000000 --- a/mmv1/templates/terraform/constants/regional_secret_version.go.tmpl +++ /dev/null @@ -1,35 +0,0 @@ -{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} -func setEnabled(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) error { - name := d.Get("name").(string) - if name == "" { - return nil - } - - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerRegionalBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") - if err != nil { - return err - } - if v == true { - url = fmt.Sprintf("%s:enable", url) - } else { - url = fmt.Sprintf("%s:disable", url) - } - - parts := strings.Split(name, "/") - project := parts[1] - - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - _, err = 
transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - return err -} -{{- end }} diff --git a/mmv1/templates/terraform/constants/router.go.tmpl b/mmv1/templates/terraform/constants/router.go.tmpl index c4c387b890d6..f4f93dfb5b2f 100644 --- a/mmv1/templates/terraform/constants/router.go.tmpl +++ b/mmv1/templates/terraform/constants/router.go.tmpl @@ -16,7 +16,7 @@ func resourceComputeRouterCustomDiff(_ context.Context, diff *schema.ResourceDif block := diff.Get("bgp.0").(map[string]interface{}) advertiseMode := block["advertise_mode"] advertisedGroups := block["advertised_groups"].([]interface{}) - advertisedIPRanges := block["advertised_ip_ranges"].(*schema.Set).List() + advertisedIPRanges := block["advertised_ip_ranges"].([]interface{}) if advertiseMode == "DEFAULT" && len(advertisedGroups) != 0 { return fmt.Errorf("Error in bgp: advertised_groups cannot be specified when using advertise_mode DEFAULT") diff --git a/mmv1/templates/terraform/constants/router_nat.go.tmpl b/mmv1/templates/terraform/constants/router_nat.go.tmpl index ee99d52dc06f..78947b6c0d0f 100644 --- a/mmv1/templates/terraform/constants/router_nat.go.tmpl +++ b/mmv1/templates/terraform/constants/router_nat.go.tmpl @@ -84,7 +84,7 @@ func computeRouterNatSubnetworkHash(v interface{}) int { } } - return schema.HashString(tpgresource.GetResourceNameFromSelfLink(name.(string))) + sourceIpRangesHash + secondaryIpRangeHash + return schema.HashString(tpgresource.NameFromSelfLinkStateFunc(name)) + sourceIpRangesHash + secondaryIpRangeHash } func computeRouterNatIPsHash(v interface{}) int { diff --git a/mmv1/templates/terraform/constants/secret_version.go.tmpl b/mmv1/templates/terraform/constants/secret_version.go.tmpl deleted file mode 100644 index a82f573aac02..000000000000 --- a/mmv1/templates/terraform/constants/secret_version.go.tmpl +++ /dev/null @@ -1,35 +0,0 @@ -{{- if ne $.Compiler 
"terraformgoogleconversion-codegen" }} -func setEnabled(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) error { - name := d.Get("name").(string) - if name == "" { - return nil - } - - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") - if err != nil { - return err - } - if v == true { - url = fmt.Sprintf("%s:enable", url) - } else { - url = fmt.Sprintf("%s:disable", url) - } - - parts := strings.Split(name, "/") - project := parts[1] - - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - return err -} -{{- end }} diff --git a/mmv1/templates/terraform/constants/subnetwork.tmpl b/mmv1/templates/terraform/constants/subnetwork.tmpl index 9969642edc58..78697330c8e6 100644 --- a/mmv1/templates/terraform/constants/subnetwork.tmpl +++ b/mmv1/templates/terraform/constants/subnetwork.tmpl @@ -48,3 +48,15 @@ func sendSecondaryIpRangeIfEmptyDiff(_ context.Context, diff *schema.ResourceDif return nil } + +// DiffSuppressFunc for `log_config`. 
+func subnetworkLogConfigDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + // If enable_flow_logs is enabled and log_config is not set, ignore the diff + if enable_flow_logs := d.Get("enable_flow_logs"); enable_flow_logs.(bool) { + logConfig := d.GetRawConfig().GetAttr("log_config") + logConfigIsEmpty := logConfig.IsNull() || logConfig.LengthInt() == 0 + return logConfigIsEmpty + } + + return false +} diff --git a/mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl b/mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl deleted file mode 100644 index c8bb1c45beaf..000000000000 --- a/mmv1/templates/terraform/constants/vpc_access_connector.go.tmpl +++ /dev/null @@ -1,8 +0,0 @@ -func isInstanceShrinkage(_ context.Context, old, new, _ interface{}) bool { - // max and min instances can only increase in-place, - // so we must create a new resource if it is decreased. - if old == nil || new == nil { - return false - } - return new.(int) < old.(int) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index 817474e89de7..c7356aa02072 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -23,26 +23,6 @@ func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceD } -var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ - "install-monitoring-agent", - "serial-port-logging-enable", -} - -var WorkbenchInstanceEUCProvidedAdditionalMetadata = []string{ - "enable-oslogin", - "disable-ssh", - "ssh-keys", - "block-project-ssh-keys", - "post-startup-script", - "post-startup-script-behavior", - "startup-script", - "startup-script-url", - "gce-container-declaration", - "gce-software-declaration", - "serial-port-enable", - "euc-enabled", -} - var WorkbenchInstanceProvidedMetadata = []string{ 
"agent-health-check-interval-seconds", "agent-health-check-path", @@ -61,16 +41,15 @@ var WorkbenchInstanceProvidedMetadata = []string{ "dataproc-region", "dataproc-service-account", "disable-check-xsrf", - "enable-euc", "framework", "generate-diagnostics-bucket", "generate-diagnostics-file", "generate-diagnostics-options", "google-logging-enabled", "image-url", + "install-monitoring-agent", "install-nvidia-driver", "installed-extensions", - "instance-region", "last_updated_diagnostics", "notebooks-api", "notebooks-api-version", @@ -89,6 +68,7 @@ var WorkbenchInstanceProvidedMetadata = []string{ "report-system-status", "resource-url", "restriction", + "serial-port-logging-enable", "service-account-mode", "shutdown-script", "title", @@ -103,26 +83,9 @@ var WorkbenchInstanceProvidedMetadata = []string{ } func WorkbenchInstanceMetadataDiffSuppress(k, old, new string, d *schema.ResourceData) bool { - // Extract the actual metadata key from the full key path - parts := strings.Split(k, ".") - key := parts[len(parts)-1] - + // Suppress diffs for the Metadata for _, metadata := range WorkbenchInstanceProvidedMetadata { - if key == metadata { - return true - } - } - - if d.Get("enable_managed_euc").(bool){ - for _, metadata := range WorkbenchInstanceEUCProvidedAdditionalMetadata { - if key == metadata { - return true - } - } - } - - for _, metadata := range WorkbenchInstanceSettableUnmodifiableDefaultMetadata { - if strings.Contains(k, metadata) && new == "" { + if strings.Contains(k, metadata) { return true } } @@ -299,32 +262,3 @@ func mergeMaps(oldMap, newMap map[string]interface{}) map[string]string { return modifiedMap } {{- end }} - - -func workbenchMetadataCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, meta interface{}) error { - if diff.HasChange("gce_setup.0.metadata") { - o, n := diff.GetChange("gce_setup.0.metadata") - oldMetadata := o.(map[string]interface{}) - newMetadata := n.(map[string]interface{}) - - for _, key := range 
WorkbenchInstanceSettableUnmodifiableDefaultMetadata { - oldValue, oldOk := oldMetadata[key] - newValue, newOk := newMetadata[key] - - // Condition to force new: - // 1. The key exists in both old and new metadata AND their values differ. - // 2. The key exists in new but not in old (meaning it was added). - // - // The key exists in old but not in new (meaning it was removed) is ignored. - if (oldOk && newOk && oldValue != newValue) || - (!oldOk && newOk) { - // If a change is detected for this specific key, force a new resource and stop checking. - if err := diff.ForceNew("gce_setup.0.metadata"); err != nil { - return err - } - return nil // Return nil immediately after forcing new - } - } - } - return nil -} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl deleted file mode 100644 index 3f413ed9596e..000000000000 --- a/mmv1/templates/terraform/custom_check_destroy/firebasehosting_default_site.go.tmpl +++ /dev/null @@ -1,23 +0,0 @@ -config := acctest.GoogleProviderConfig(t) - -url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}FirebaseHostingBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/sites/{{"{{"}}site_id{{"}}"}}") -if err != nil { - return err -} - -billingProject := "" - -if config.BillingProject != "" { - billingProject = config.BillingProject -} - -resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, -}) -if err == nil && resp["type"].(string) != "DEFAULT_SITE" { - return fmt.Errorf("Firebase Hosting Site still exists at %s", url) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl 
b/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl deleted file mode 100644 index 87ad9345666d..000000000000 --- a/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_managed_identity.go.tmpl +++ /dev/null @@ -1,22 +0,0 @@ -config := acctest.GoogleProviderConfig(t) - -url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}IAMBetaBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/global/workloadIdentityPools/{{"{{"}}workload_identity_pool_id{{"}}"}}/namespaces/{{"{{"}}workload_identity_pool_namespace_id{{"}}"}}/managedIdentities/{{"{{"}}workload_identity_pool_managed_identity_id{{"}}"}}") -if err != nil { - return err -} - -res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: url, - UserAgent: config.UserAgent, -}) -if err != nil { - return nil -} - -if v := res["state"]; v == "DELETED" { - return nil -} - -return fmt.Errorf("IAMBetaWorkloadIdentityPoolManagedIdentity still exists at %s", url) diff --git a/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl b/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl deleted file mode 100644 index 7207b1e052af..000000000000 --- a/mmv1/templates/terraform/custom_check_destroy/iam_workload_identity_pool_namespace.go.tmpl +++ /dev/null @@ -1,22 +0,0 @@ -config := acctest.GoogleProviderConfig(t) - -url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}IAMBetaBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/global/workloadIdentityPools/{{"{{"}}workload_identity_pool_id{{"}}"}}/namespaces/{{"{{"}}workload_identity_pool_namespace_id{{"}}"}}") -if err != nil { - return err -} - -res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: url, - UserAgent: config.UserAgent, -}) -if err != nil { - return nil -} - -if v := 
res["state"]; v == "DELETED" { - return nil -} - -return fmt.Errorf("IAMBetaWorkloadIdentityPoolNamespace still exists at %s", url) diff --git a/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl b/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl index 937c6a9e1429..fd49e8f716df 100644 --- a/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl +++ b/mmv1/templates/terraform/custom_delete/clear_iap_settings.go.tmpl @@ -3,9 +3,9 @@ if err != nil { return err } -billingProject := "" -if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp +project, err := tpgresource.GetProject(d, config) +if err != nil { + return fmt.Errorf("Error fetching project for Settings: %s", err) } headers := make(http.Header) @@ -17,7 +17,7 @@ log.Printf("[DEBUG] Updating Settings %q: %#v", d.Id(), obj) res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "PATCH", - Project: billingProject, + Project: project, RawURL: url, UserAgent: userAgent, Body: obj, diff --git a/mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl b/mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl deleted file mode 100644 index 9c64d6982892..000000000000 --- a/mmv1/templates/terraform/custom_delete/resource_vertexai_endpoint_with_model_garden_deployment.go.tmpl +++ /dev/null @@ -1,102 +0,0 @@ -log.Printf("[DEBUG] Beginning custom_delete for Vertex AI Endpoint with Model Garden Deployment") - -// Log resource ID for debugging purposes -log.Printf("[DEBUG] Resource ID: %s", d.Id()) - -billingProject := "" - -project, err := tpgresource.GetProject(d, config) -if err != nil { - return fmt.Errorf("Error fetching project for EndpointWithModelGardenDeployment: %s", err) -} -billingProject = project - -// err == nil indicates that the billing_project value was found -if bp, err := 
tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp -} - -// Retrieve deployed model ID and display name from Terraform fields -deployedModelId, ok := d.Get("deployed_model_id").(string) -if !ok { - return fmt.Errorf("wrong type for deployedModelId field (%T), expected string", d.Get("deployedModelId")) -} - - -deployedModelDisplayName, ok := d.Get("deployed_model_display_name").(string) -if !ok { - return fmt.Errorf("wrong type for deployedModelDisplayName field (%T), expected string", d.Get("deployedModelDisplayName")) -} - -// Undeploy the model -undeployUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/endpoints/{{"{{"}}endpoint{{"}}"}}:undeployModel") -if err != nil { - return err -} -undeployHeaders := make(http.Header) - -undeployBody := map[string]interface{}{ - "deployedModelId": deployedModelId, -} - -log.Printf("[DEBUG] Undeploying model %s from EndpointWithModelGardenDeployment %q", deployedModelDisplayName, d.Id()) - -undeployRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: undeployUrl, - UserAgent: userAgent, - Body: undeployBody, - Timeout: d.Timeout(schema.TimeoutDelete), - Headers: undeployHeaders, -}) -if err != nil { - return fmt.Errorf("Error undeploying model from EndpointWithModelGardenDeployment: %s", err) -} - -err = VertexAIOperationWaitTime( - config, undeployRes, project, fmt.Sprintf("Undeploying model %s from EndpointWithModelGardenDeployment", deployedModelDisplayName), userAgent, - d.Timeout(schema.TimeoutDelete)) - -if err != nil { - // The model could not be undeployed - return fmt.Errorf("Error waiting to undeploy model %s from EndpointWithModelGardenDeployment: %s", deployedModelDisplayName, err) -} - -log.Printf("[DEBUG] Finished undeploying model %s from EndpointWithModelGardenDeployment %q: %#v", 
deployedModelDisplayName, d.Id(), undeployRes) - - -// Delete Endpoint -deleteUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/endpoints/{{"{{"}}endpoint{{"}}"}}") -if err != nil { - return err -} -deleteHeaders := make(http.Header) -var deleteBody map[string]interface{} - -log.Printf("[DEBUG] Deleting EndpointWithModelGardenDeployment %q", d.Id()) -deleteRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: billingProject, - RawURL: deleteUrl, - UserAgent: userAgent, - Body: deleteBody, - Timeout: d.Timeout(schema.TimeoutDelete), - Headers: deleteHeaders, -}) -if err != nil { - return transport_tpg.HandleNotFoundError(err, d, "EndpointWithModelGardenDeployment") -} - -err = VertexAIOperationWaitTime( - config, deleteRes, project, "Deleting EndpointWithModelGardenDeployment", userAgent, - d.Timeout(schema.TimeoutDelete)) - -if err != nil { - return err -} - -log.Printf("[DEBUG] Finished deleting EndpointWithModelGardenDeployment %q: %#v", d.Id(), deleteRes) -return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl b/mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl deleted file mode 100644 index a26444c3cd78..000000000000 --- a/mmv1/templates/terraform/custom_delete/vertex_ai_rag_engine_config.go.tmpl +++ /dev/null @@ -1,44 +0,0 @@ -log.Printf("[DEBUG] Beginning custom_delete for Vertex AI RagEngineConfig") - -project, err := tpgresource.GetProject(d, config) -if err != nil { - return fmt.Errorf("Error fetching project for RagEngineConfig: %s", err) -} - - -// Update RagEngineConfig tier to Unprovisioned -deleteUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}VertexAIBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}region{{"}}"}}/ragEngineConfig") -if err != nil { - return err -} 
-deleteHeaders := make(http.Header) -deleteBody := map[string]interface{}{ - "ragManagedDbConfig": map[string]interface{}{ - "unprovisioned": map[string]interface{}{}, - }, -} -log.Printf("[DEBUG] Updating RagEngineConfig tier to Unprovisioned") -deleteRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "PATCH", - Project: project, - RawURL: deleteUrl, - UserAgent: userAgent, - Body: deleteBody, - Timeout: d.Timeout(schema.TimeoutDelete), - Headers: deleteHeaders, -}) -if err != nil { - return transport_tpg.HandleNotFoundError(err, d, "RagEngineConfig") -} - -err = VertexAIOperationWaitTime( - config, deleteRes, project, "Updating RagEngineConfig tier to Unprovisioned", userAgent, - d.Timeout(schema.TimeoutDelete)) - -if err != nil { - return err -} - -log.Printf("[DEBUG] Finished Updating RagEngineConfig tier to Unprovisioned: %#v", deleteRes) -return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl b/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl index 5d85d48d779d..3493454ce2e5 100644 --- a/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/array_resourceref_with_validation.go.tmpl @@ -24,12 +24,7 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T if err != nil { return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) } - {{- if $.ResourceMetadata.IsTgcCompiler }} - url := tgcresource.GetFullUrl(config, f.RelativeLink(), "{{$.ResourceMetadata.CaiProductLegacyBaseUrl}}") - req = append(req, url) - {{- else }} req = append(req, f.RelativeLink()) - {{- end }} } return req, nil } diff --git a/mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl b/mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl deleted file mode 100644 index 
56c8bbc3cdf8..000000000000 --- a/mmv1/templates/terraform/custom_expand/bigquery_row_access_policy_ref.go.tmpl +++ /dev/null @@ -1,11 +0,0 @@ -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - transformed := make(map[string]interface{}) - - project, _ := tpgresource.GetProject(d, config) - transformed["projectId"] = project - transformed["datasetId"] = d.Get("dataset_id") - transformed["tableId"] = d.Get("table_id") - transformed["policyId"] = d.Get("policy_id") - - return transformed, nil -} diff --git a/mmv1/templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl b/mmv1/templates/terraform/custom_expand/bool_to_object.go.tmpl similarity index 92% rename from mmv1/templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl rename to mmv1/templates/terraform/custom_expand/bool_to_object.go.tmpl index dc5e6c13022f..ce0c8bfd5895 100644 --- a/mmv1/templates/terraform/custom_expand/deprecated_bool_to_object.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/bool_to_object.go.tmpl @@ -10,9 +10,6 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} - -{{/* THIS TEMPLATE IS DEPRECATED, DO NOT USE FOR NEW FIELDS */}} - func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { if v == nil || !v.(bool) { return nil, nil diff --git a/mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl b/mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl new file mode 100644 index 000000000000..f98f54248e63 --- /dev/null +++ b/mmv1/templates/terraform/custom_expand/compute_instance_settings_fingerprint.tmpl @@ -0,0 +1,30 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + project, err := tpgresource.GetProject(d, config) + if err != nil { + return nil, err + } + + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return nil, err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/zones/{{"{{"}}zone{{"}}"}}/instanceSettings/{{"{{"}}name{{"}}"}}") + if err != nil { + return nil, err + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + + if err != nil { + return nil, err + } + + return res["fingerprint"], nil +} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl b/mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl deleted file mode 100644 index 5cd89e071a77..000000000000 --- a/mmv1/templates/terraform/custom_expand/privateca_certificate_template_509_config.go.tmpl +++ /dev/null @@ -1,53 +0,0 @@ -{{/* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} -func 
expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - if v == nil { - return v, nil - } - l := v.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil, nil - } - raw := l[0] - original := raw.(map[string]interface{}) - if len(original) == 0 { - return nil, nil - } - transformed := make(map[string]interface{}) - - caOptions, err := expandPrivatecaCertificateTemplateConfigX509ConfigCaOptions(original["ca_options"], d, config) - if err != nil { - return nil, err - } - transformed["caOptions"] = caOptions - - keyUsage, err := expandPrivatecaCertificateConfigX509ConfigKeyUsage(original["key_usage"], d, config) - if err != nil { - return nil, err - } - transformed["keyUsage"] = keyUsage - - policyIds, err := expandPrivatecaCertificateConfigX509ConfigPolicyIds(original["policy_ids"], d, config) - if err != nil { - return nil, err - } - transformed["policyIds"] = policyIds - - aiaOcspServers, err := expandPrivatecaCertificateConfigX509ConfigAiaOcspServers(original["aia_ocsp_servers"], d, config) - if err != nil { - return nil, err - } - transformed["aiaOcspServers"] = aiaOcspServers - - addExts, err := expandPrivatecaCertificateConfigX509ConfigAdditionalExtensions(original["additional_extensions"], d, config) - if err != nil { - return nil, err - } - transformed["additionalExtensions"] = addExts - - nameConstraints, err := expandPrivatecaCertificateConfigX509ConfigNameConstraints(original["name_constraints"], d, config) - if err != nil { - return nil, err - } - transformed["nameConstraints"] = nameConstraints - return transformed, nil -} diff --git a/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl b/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl index 8c4712454ff5..a3a2c9effc47 100644 --- a/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl +++ 
b/mmv1/templates/terraform/custom_expand/regional_secret_version_enable.go.tmpl @@ -10,6 +10,41 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(_ interface{}, _ tpgresource.TerraformResourceData, _ *transport_tpg.Config) (interface{}, error) { +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + name := d.Get("name").(string) + if name == "" { + return "", nil + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerRegionalBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return nil, err + } + + if v == true { + url = fmt.Sprintf("%s:enable", url) + } else { + url = fmt.Sprintf("%s:disable", url) + } + + parts := strings.Split(name, "/") + project := parts[1] + + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return nil, err + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return nil, err + } + return nil, nil } diff --git a/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl b/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl index 718548e9eead..973e737d8467 100644 --- a/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/resourceref_with_validation.go.tmpl @@ -15,10 +15,5 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.T if err != nil { return nil, fmt.Errorf("Invalid value for {{underscore $.Name}}: %s", err) } - {{- if $.ResourceMetadata.IsTgcCompiler }} - url := tgcresource.GetFullUrl(config, f.RelativeLink(), "{{$.ResourceMetadata.CaiProductLegacyBaseUrl}}") - return url, nil - 
{{- else }} return f.RelativeLink(), nil - {{- end }} } diff --git a/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl b/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl index 8c4712454ff5..b8d494a88f89 100644 --- a/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/secret_version_enable.go.tmpl @@ -10,6 +10,41 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(_ interface{}, _ tpgresource.TerraformResourceData, _ *transport_tpg.Config) (interface{}, error) { +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + name := d.Get("name").(string) + if name == "" { + return "", nil + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return nil, err + } + + if v == true { + url = fmt.Sprintf("%s:enable", url) + } else { + url = fmt.Sprintf("%s:disable", url) + } + + parts := strings.Split(name, "/") + project := parts[1] + + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return nil, err + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return nil, err + } + return nil, nil -} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl b/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl index 5679a4e6e2fb..9d8928ae5e0e 100644 --- a/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl +++ 
b/mmv1/templates/terraform/custom_expand/service_attachment_target_service.go.tmpl @@ -1,8 +1,15 @@ func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { resource := strings.Split(v.(string), "/") - if len(resource) < 4 { + resourceKind := resource[len(resource)-2] + resourceBound := resource[len(resource)-4] + if len(resource) < 4 { return nil, fmt.Errorf("invalid value for target_service") } - return v, nil + _, err := tpgresource.ParseRegionalFieldValue(resourceKind, v.(string), "project", resourceBound, "zone", d, config, true) + if err != nil { + return nil, fmt.Errorf("invalid value for target_service: %w", err) + } + + return v, nil } diff --git a/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl b/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl new file mode 100644 index 000000000000..a4afe3f63d8a --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl @@ -0,0 +1,42 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + apiData := make([]map[string]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + apiData = append(apiData, map[string]interface{}{ + "description": original["description"], + "range": original["range"], + }) + } + configData := []map[string]interface{}{} + if v, ok := d.GetOk("bgp.0.advertised_ip_ranges"); ok { + for _, item := range v.([]interface{}) { + configData = append(configData, item.(map[string]interface{})) + } + } + sorted, err := tpgresource.SortMapsByConfigOrder(configData, apiData, "range") + if err != nil { + log.Printf("[ERROR] Could not support API response for advertisedIpRanges.0.range: %s", err) + return apiData + } + return sorted +} diff --git a/mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl b/mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl deleted file mode 100644 index bbe17a79ac1e..000000000000 --- a/mmv1/templates/terraform/custom_flatten/conversation_profile_context_filter_settings.go.tmpl +++ /dev/null @@ -1,26 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. -*/ -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - transformed := make(map[string]interface{}) - if v == nil { - transformed["drop_handoff_messages"] = false - transformed["drop_ivr_messages"] = false - transformed["drop_virtual_agent_messages"] = false - return []interface{}{transformed} - } - original := v.(map[string]interface{}) - transformed["drop_handoff_messages"] = original["dropHandoffMessages"] - transformed["drop_ivr_messages"] = original["dropIvrMessages"] - transformed["drop_virtual_agent_messages"] = original["dropVirtualAgentMessages"] - return []interface{}{transformed} -} diff --git a/mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl b/mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl deleted file mode 100644 index 52fcce31b2b4..000000000000 --- a/mmv1/templates/terraform/custom_flatten/conversation_profile_suggestion_trigger_settings.go.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - transformed := make(map[string]interface{}) - if v == nil { - transformed["no_small_talk"] = false - transformed["only_end_user"] = false - return []interface{}{transformed} - } - original := v.(map[string]interface{}) - transformed["no_small_talk"] = original["noSmallTalk"] - transformed["only_end_user"] = original["onlyEndUser"] - return []interface{}{transformed} -} diff --git a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl deleted file mode 100644 index fcd51234760c..000000000000 --- a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl +++ /dev/null @@ -1,44 +0,0 @@ -// This file is a transposition of mmv1/templates/terraform/flatten_property_method.go.tmpl -// Most of the code is copied from there, with the exception of sorting logic. 
-func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - transformed := make([]map[string]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - transformed = append(transformed, map[string]interface{}{ - - {{- range $prop := $.ItemType.UserProperties }} - {{- if not (or $prop.IgnoreRead $prop.WriteOnlyLegacy) }} - "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), - {{- end }} - {{- end }} - }) - } - - configData := []map[string]interface{}{} - - for _, item := range d.Get("aspects").([]interface{}) { - configData = append(configData, item.(map[string]interface{})) - } - - sorted, err := tpgresource.SortMapsByConfigOrder(configData, transformed, "aspect_key") - if err != nil { - log.Printf("[ERROR] Could not sort API response value: %s", err) - return v - } - - return sorted -} - -{{- if $.NestedProperties }} - {{- range $prop := $.NestedProperties }} - {{ template "flattenPropertyMethod" $prop -}} - {{- end }} -{{- end }} diff --git a/mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl b/mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl deleted file mode 100644 index 6ef741cc1780..000000000000 --- a/mmv1/templates/terraform/custom_flatten/modelarmor_floorsetting_multilanguage_detection.go.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return []interface{}{map[string]interface{}{"enable_multi_language_detection": false}} - } - original, ok := 
v.(map[string]interface{}) - if !ok { - return nil // Should not happen if API is consistent - } - // Populating the field even if the returned block is empty. - transformed := make(map[string]interface{}) - - if val, ok := original["enableMultiLanguageDetection"]; ok { - transformed["enable_multi_language_detection"] = val - } else { - // Since the field is REQUIRED in the schema and the block exists, default to false if the key is missing from the API response. - transformed["enable_multi_language_detection"] = false - } - return []interface{}{transformed} -} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl b/mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl deleted file mode 100644 index 85abb1979e7a..000000000000 --- a/mmv1/templates/terraform/custom_flatten/modelarmor_template_multilanguage_detection.go.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return nil // The whole multi_language_detection block is absent - } - original, ok := v.(map[string]interface{}) - if !ok { - return nil // Should not happen if API is consistent - } - // Populating the field even if the returned block is empty. - transformed := make(map[string]interface{}) - - if val, ok := original["enableMultiLanguageDetection"]; ok { - transformed["enable_multi_language_detection"] = val - } else { - // Since the field is REQUIRED in the schema and the block exists, default to false if the key is missing from the API response. 
- transformed["enable_multi_language_detection"] = false - } - return []interface{}{transformed} -} \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl b/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl index 9093852372a5..24b2086d705e 100644 --- a/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl +++ b/mmv1/templates/terraform/custom_flatten/name_from_self_link.tmpl @@ -14,5 +14,5 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso if v == nil { return v } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) + return tpgresource.NameFromSelfLinkStateFunc(v) } diff --git a/mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl b/mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl deleted file mode 100644 index 97703b6c7264..000000000000 --- a/mmv1/templates/terraform/custom_flatten/privateca_certificate_template_509_config.go.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -{{/* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - v = make(map[string]interface{}) - } - original := v.(map[string]interface{}) - transformed := make(map[string]interface{}) - transformed["additional_extensions"] = - flattenPrivatecaCertificateConfigX509ConfigAdditionalExtensions(original["additionalExtensions"], d, config) - transformed["policy_ids"] = - flattenPrivatecaCertificateConfigX509ConfigPolicyIds(original["policyIds"], d, config) - transformed["aia_ocsp_servers"] = flattenPrivatecaCertificateConfigX509ConfigAiaOcspServers(original["aiaOcspServers"], d, config) - transformed["ca_options"] = - flattenPrivatecaCertificateTemplateConfigX509ConfigCaOptions(original["caOptions"], d, config) - 
transformed["key_usage"] = - flattenPrivatecaCertificateConfigX509ConfigKeyUsage(original["keyUsage"], d, config) - transformed["name_constraints"] = - flattenPrivatecaCertificateConfigX509ConfigNameConstraints(original["nameConstraints"], d, config) - return []interface{}{transformed} -} diff --git a/mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl b/mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl deleted file mode 100644 index ff9c4d39157e..000000000000 --- a/mmv1/templates/terraform/custom_import/apigee_api_product.go.tmpl +++ /dev/null @@ -1,42 +0,0 @@ -config := meta.(*transport_tpg.Config) - -// current import_formats cannot import fields with forward slashes in their value -if err := tpgresource.ParseImportId([]string{"(?P.+)"}, d, config); err != nil { - return nil, err -} - -nameParts := strings.Split(d.Get("name").(string), "/") -if len(nameParts) == 4 { - // `organizations/{{"{{"}}org_name{{"}}"}}/apiproducts/{{"{{"}}name{{"}}"}}` - orgId := fmt.Sprintf("organizations/%s", nameParts[1]) - if err := d.Set("org_id", orgId); err != nil { - return nil, fmt.Errorf("Error setting org_id: %s", err) - } - if err := d.Set("name", nameParts[3]); err != nil { - return nil, fmt.Errorf("Error setting name: %s", err) - } -} else if len(nameParts) == 3 { - // `organizations/{{"{{"}}org_name{{"}}"}}/{{"{{"}}name{{"}}"}}` - orgId := fmt.Sprintf("organizations/%s", nameParts[1]) - if err := d.Set("org_id", orgId); err != nil { - return nil, fmt.Errorf("Error setting org_id: %s", err) - } - if err := d.Set("name", nameParts[2]); err != nil { - return nil, fmt.Errorf("Error setting name: %s", err) - } -} else { - return nil, fmt.Errorf( - "Saw %s when the name is expected to have shape %s or %s", - d.Get("name"), - "organizations/{{"{{"}}org_name{{"}}"}}/apiproducts/{{"{{"}}name{{"}}"}}", - "organizations/{{"{{"}}org_name{{"}}"}}/{{"{{"}}name{{"}}"}}") -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, 
config, "{{"{{"}}org_id{{"}}"}}/apiproducts/{{"{{"}}name{{"}}"}}") -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) - -return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl b/mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl deleted file mode 100644 index 2e9861432527..000000000000 --- a/mmv1/templates/terraform/custom_import/apigee_security_monitoring_condition.go.tmpl +++ /dev/null @@ -1,32 +0,0 @@ -config := meta.(*transport_tpg.Config) - -// current import_formats cannot import fields with forward slashes in their value -if err := tpgresource.ParseImportId([]string{"(?P.+)"}, d, config); err != nil { - return nil, err -} - -nameParts := strings.Split(d.Get("name").(string), "/") -if len(nameParts) == 4 { - // `organizations/{{"{{"}}org_name{{"}}"}}/securityMonitoringConditions/{{"{{"}}condition_id{{"}}"}}` - orgId := fmt.Sprintf("organizations/%s", nameParts[1]) - if err := d.Set("org_id", orgId); err != nil { - return nil, fmt.Errorf("Error setting org_id: %s", err) - } - if err := d.Set("condition_id", nameParts[3]); err != nil { - return nil, fmt.Errorf("Error setting condition_id: %s", err) - } -} else { - return nil, fmt.Errorf( - "Saw %s when the name is expected to have shape %s", - d.Get("name"), - "organizations/{{"{{"}}org_name{{"}}"}}/securityMonitoringConditions/{{"{{"}}name{{"}}"}}") -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}org_id{{"}}"}}/securityMonitoringConditions/{{"{{"}}condition_id{{"}}"}}") -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) - -return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl b/mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl deleted file mode 100644 index 0b54654ae66a..000000000000 --- 
a/mmv1/templates/terraform/custom_import/dataplex_entry.go.tmpl +++ /dev/null @@ -1,20 +0,0 @@ - config := meta.(*transport_tpg.Config) - if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/locations/(?P[^/]+)/entryGroups/(?P[^/]+)/entries/(?P.+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P.+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P.+)$", - }, d, config); err != nil { - return nil, err - } - - // Double curly braces are the templating language's special marker. - // We need them literally here, so apply a simple trick to force it. - template := "projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/entryGroups/{{"{{"}}entry_group_id{{"}}"}}/entries/{{"{{"}}entry_id{{"}}"}}" - - // Replace import id for the resource id - id, err := tpgresource.ReplaceVars(d, config, template) - if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl deleted file mode 100644 index ffc597df1387..000000000000 --- a/mmv1/templates/terraform/custom_import/dialogflowcx_generative_settings.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -config := meta.(*transport_tpg.Config) - -// current import_formats can't import fields with forward slashes in their value and parent contains slashes -if err := tpgresource.ParseImportId([]string{ - "(?P.+)/generativeSettings\\?languageCode=(?P[a-zA-Z-]+)", - "(?P.+)/generativeSettings", -}, d, config); err != nil { - return nil, err -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/generativeSettings?languageCode={{"{{"}}language_code{{"}}"}}") -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) - -return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git 
a/mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl deleted file mode 100644 index bcdaa7503d69..000000000000 --- a/mmv1/templates/terraform/custom_import/dialogflowcx_generator.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -config := meta.(*transport_tpg.Config) - -// current import_formats can't import fields with forward slashes in their value and parent contains slashes -if err := tpgresource.ParseImportId([]string{ - "(?P.+)/generators/(?P[^/]+)", - "(?P.+)/(?P[^/]+)", -}, d, config); err != nil { - return nil, err -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/generators/{{"{{"}}name{{"}}"}}") -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) - -return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl deleted file mode 100644 index 522f50f48ebf..000000000000 --- a/mmv1/templates/terraform/custom_import/dialogflowcx_playbook.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -config := meta.(*transport_tpg.Config) - -// current import_formats can't import fields with forward slashes in their value and parent contains slashes -if err := tpgresource.ParseImportId([]string{ - "(?P.+)/playbooks/(?P[^/]+)", - "(?P.+)/(?P[^/]+)", -}, d, config); err != nil { - return nil, err -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/playbooks/{{"{{"}}name{{"}}"}}") -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) - -return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl b/mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl deleted file mode 100644 
index ce7d263d3815..000000000000 --- a/mmv1/templates/terraform/custom_import/dialogflowcx_tool.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -config := meta.(*transport_tpg.Config) - -// current import_formats can't import fields with forward slashes in their value and parent contains slashes -if err := tpgresource.ParseImportId([]string{ - "(?P.+)/tools/(?P[^/]+)", - "(?P.+)/(?P[^/]+)", -}, d, config); err != nil { - return nil, err -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/tools/{{"{{"}}name{{"}}"}}") -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} -d.SetId(id) - -return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl index 17d7a76b1bfb..c99cf546f939 100644 --- a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl +++ b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl @@ -1,7 +1,7 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)$"}, d, config); err != nil { + "(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)"}, d, config); err != nil { return nil, err } diff --git a/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl b/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl index 3776775b66d7..db6e302250d3 100644 --- a/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl +++ b/mmv1/templates/terraform/custom_import/kms_crypto_key.go.tmpl @@ -13,6 +13,10 @@ return nil, fmt.Errorf("Error setting name: %s", err) } + if err := d.Set("skip_initial_version_creation", false); err != nil { + return nil, fmt.Errorf("Error setting skip_initial_version_creation: %s", err) + } + id, err := tpgresource.ReplaceVars(d, config, 
"{{$.GetIdFormat}}") if err != nil { return nil, fmt.Errorf("Error constructing id: %s", err) diff --git a/mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl b/mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl deleted file mode 100644 index c3990c07374c..000000000000 --- a/mmv1/templates/terraform/custom_import/modelarmorglobal_floorsetting.go.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -config := meta.(*transport_tpg.Config) - -if err := tpgresource.ParseImportId([]string{ - "^(?P.+)/locations/(?P[^/]+)/floorSetting$", - "^(?P.+)/(?P[^/]+)$", -}, d, config); err != nil { - return nil, err -} - -// Replace import id for the resource id -id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/floorSetting") - -if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) -} - -d.SetId(id) - -return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl b/mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl deleted file mode 100644 index de9cd27e8566..000000000000 --- a/mmv1/templates/terraform/custom_import/resource_manager_capability.go.tmpl +++ /dev/null @@ -1,16 +0,0 @@ - config := meta.(*transport_tpg.Config) - - // current import_formats can't import fields with forward slashes in their value - if err := tpgresource.ParseImportId([]string{ - "(?P.+)/capabilities/(?P.+)", - }, d, config); err != nil { - return nil, err - } - - id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}parent{{"}}"}}/capabilities/{{"{{"}}capability_name{{"}}"}}") - if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl index 
7d834ceeba51..be6cd588a588 100644 --- a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl +++ b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl @@ -1,9 +1,9 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl deleted file mode 100644 index 3faa515f48a0..000000000000 --- a/mmv1/templates/terraform/custom_update/bigqueryanalyticshub_data_exchange_subscription.go.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -//If a mutable field is added later in the subscription resource, an update API endpoint will be created -//and this custom_update will have to be changed and will call a Update API as well as done by mutable resources. -// all other fields are immutable for now, don't do anything else - -_ = config - -// We can get here if 'refresh_policy' was updated in the HCL config. -// Since 'refresh_policy' has a default, d.Get("refresh_policy") will always return a string. -// We check if its value has actually changed from the prior state. -if d.HasChange("refresh_policy") { - // If 'refresh_policy' was changed by the user, ensure its new value is set in the state. - // For an Optional+Computed-false field like this, Terraform usually handles this, - // but this explicit Set operation mirrors the previous boolean field handling. 
- if err := d.Set("refresh_policy", d.Get("refresh_policy")); err != nil { - return fmt.Errorf("Error updating refresh_policy: %s", err) - } -} - -return nil \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl b/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl index c26d065d4b11..7cb6e95cdcff 100644 --- a/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl +++ b/mmv1/templates/terraform/custom_update/regional_secret_version.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -err := setEnabled(d.Get("enabled"), d, config) +_, err := expandSecretManagerRegionalRegionalSecretVersionEnabled(d.Get("enabled"), d, config) if err != nil { return err } diff --git a/mmv1/templates/terraform/custom_update/secret_version.go.tmpl b/mmv1/templates/terraform/custom_update/secret_version.go.tmpl index 0f0a05981061..4f5dce983088 100644 --- a/mmv1/templates/terraform/custom_update/secret_version.go.tmpl +++ b/mmv1/templates/terraform/custom_update/secret_version.go.tmpl @@ -10,9 +10,9 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -err := setEnabled(d.Get("enabled"), d, config) +_, err := expandSecretManagerSecretVersionEnabled(d.Get("enabled"), d, config) if err != nil { return err } -return resourceSecretManagerSecretVersionRead(d, meta) +return resourceSecretManagerSecretVersionRead(d, meta) \ No newline at end of file diff --git a/mmv1/templates/terraform/datasource.go.tmpl b/mmv1/templates/terraform/datasource.go.tmpl deleted file mode 100644 index edd29d3c1dd5..000000000000 --- a/mmv1/templates/terraform/datasource.go.tmpl +++ /dev/null @@ -1,107 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2024 Google LLC. All Rights Reserved. 
- - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. */ -}} -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -{{$.CodeHeader TemplatePath}} -package {{ lower $.ProductMetadata.Name }} - -import ( - - "fmt" - "log" - "net/http" - "reflect" -{{- if $.SupportsIndirectUserProjectOverride }} - "regexp" -{{- end }} -{{- if or (and (not $.Immutable) ($.UpdateMask)) $.LegacyLongFormProject }} - "strings" -{{- end }} - "time" - -{{/* # We list all the v2 imports here, because we run 'goimports' to guess the correct */}} -{{/* # set of imports, which will never guess the major version correctly. 
*/}} - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" - "github.com/hashicorp/go-cty/cty" - - "{{ $.ImportPath }}/tpgresource" - transport_tpg "{{ $.ImportPath }}/transport" - "{{ $.ImportPath }}/verify" - -{{ if $.FlattenedProperties }} - "google.golang.org/api/googleapi" -{{- end}} -) - -func DataSource{{ .ResourceName -}}() *schema.Resource { - rs := Resource{{ .ResourceName -}}().Schema - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(rs) - - {{if .DatasourceRequiredFields}} - tpgresource.AddRequiredFieldsToSchema(dsSchema, {{range $index, $field := .DatasourceRequiredFields}}{{if gt $index 0}}, {{end}}{{printf "%q" $field}}{{end}}) - {{end}} - - {{if .DatasourceOptionalFields}} - tpgresource.AddOptionalFieldsToSchema(dsSchema, {{range $index, $field := .DatasourceOptionalFields}}{{if gt $index 0}}, {{end}}{{printf "%q" $field}}{{end}}) - {{end}} - - return &schema.Resource{ - Read: dataSource{{ $.ResourceName -}}Read, - Schema: dsSchema, - } -} - -func dataSource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{ $.IdFormat -}}") - if err != nil { - return err - } - - d.SetId(id) - - err = resource{{ $.ResourceName -}}Read(d, meta) - if err != nil { - return err - } - - {{if $.ShouldDatasourceSetLabels}} - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - {{end}} - - {{if $.ShouldDatasourceSetAnnotations}} - 
if err := tpgresource.SetDataSourceAnnotations(d); err != nil { - return err - } - {{end}} - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} \ No newline at end of file diff --git a/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl b/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl index 0e39d4cc3135..88296a56262d 100644 --- a/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/datasource_iam.html.markdown.tmpl @@ -43,9 +43,6 @@ description: |- # {{ $.IamTerraformName }}_policy -{{- if $.IamPolicy.DeprecationMessage }} -~> **Warning:** {{$.IamPolicy.DeprecationMessage}} -{{- end }} Retrieves the current IAM policy data for {{ lower $.Name }} {{- if or (eq $.MinVersionObj.Name "beta") (eq $.IamPolicy.MinVersion "beta") }} @@ -69,16 +66,15 @@ data "{{ $.IamTerraformName }}_policy" "policy" { ## Argument Reference The following arguments are supported: -{{ range $param := $.IamResourceProperties }} - {{- $n := underscore $param.Name }} -{{- if eq $n $.IamParentResourceName }} -* `{{ $n }}` - (Required) Used to find the parent resource to bind the IAM policy to -{{- else if or (or (eq $n "region") (eq $n "zone")) (eq $n "location") }} -* `{{ $n }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, - the value will be parsed from the identifier of the parent resource. If no {{ $n }} is provided in the parent identifier and no - {{ $n }} is specified, it is taken from the provider configuration. 
+{{ range $param := $.IamSelfLinkProperties }} +{{- if eq $param.Name "name" }} +* `{{if $.IamPolicy.ParentResourceAttribute}}{{$.IamPolicy.ParentResourceAttribute}}{{else}}{{underscore $.Name}}{{end}}` - (Required) Used to find the parent resource to bind the IAM policy to +{{- else if or (or (eq (underscore $param.Name) "region") (eq (underscore $param.Name) "zone")) (eq (underscore $param.Name) "location") }} +* `{{ underscore $param.Name }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, + the value will be parsed from the identifier of the parent resource. If no {{ underscore $param.Name }} is provided in the parent identifier and no + {{ underscore $param.Name }} is specified, it is taken from the provider configuration. {{- else }} -* `{{ $n }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to +* `{{ underscore $param.Name }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to {{- end }} {{- end }} {{- if $.IamPolicy.BaseUrl }} diff --git a/mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl deleted file mode 100644 index 2848a08eef05..000000000000 --- a/mmv1/templates/terraform/decoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -if v, ok := res["name"]; ok && v != nil { - name := v.(string) - parts := strings.Split(name, "/") - if len(parts) > 0 { - // The last part of the resource name is the subscription ID. 
- d.Set("subscription_id", parts[len(parts)-1]) - } -} - -if v, ok := res["subscriberContact"]; ok && v != nil { - d.Set("subscriber_contact", v.(string)) -} - -return res, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl deleted file mode 100644 index ecaec6094965..000000000000 --- a/mmv1/templates/terraform/decoders/dataplex_entry.go.tmpl +++ /dev/null @@ -1,41 +0,0 @@ -aspects := res["aspects"] -if aspects != nil { - _, errors := NumberOfAspectsValidation(aspects, "aspects") - if len(errors) > 0 { - return nil, errors[0] - } -} - -aspectKeysOfInterest := make(map[string]struct{}) -var err error - -if d.HasChange("aspects") { - currentAspects, futureAspects := d.GetChange("aspects") - err = AddAspectsToSet(aspectKeysOfInterest, currentAspects) - if err != nil { - return nil, err - } - err = AddAspectsToSet(aspectKeysOfInterest, futureAspects) - if err != nil { - return nil, err - } -} else { - err = AddAspectsToSet(aspectKeysOfInterest, d.Get("aspects")) - if err != nil { - return nil, err - } -} - -err = FilterAspects(aspectKeysOfInterest, res) - -if err != nil { - return nil, err -} - -err = InverseTransformAspects(res) - -if err != nil { - return nil, err -} - -return res, nil diff --git a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl index 3918e6238c31..f82edc817dc7 100644 --- a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl @@ -1,159 +1,45 @@ - // Retrieve endpoints.connections.pscAutoConnection from API response - v, ok := res["pscAutoConnections"] - if ok { - - connections, ok := v.([]interface{}) - if !ok { - return nil, fmt.Errorf("pscAutoConnections is not an array") - } - - transformed := make([]interface{}, 0, len(connections)) - uniqueConnections := make(map[string]bool) // 
Track unique project+network combos - - for _, raw := range connections { - connectionData, ok := raw.(map[string]interface{}) - if !ok || len(connectionData) < 1 { - return nil, fmt.Errorf("Invalid or empty psc connection data: %v", raw) - } - - projectID, ok := connectionData["projectId"].(string) - if !ok { - return nil, fmt.Errorf("invalid project ID in psc connection: %v", connectionData) - } - - networkID, ok := connectionData["network"].(string) - if !ok { - return nil, fmt.Errorf("invalid network ID in psc connection: %v", connectionData) - } - - uniqueKey := projectID + networkID - if !uniqueConnections[uniqueKey] { // Check for uniqueness - uniqueConnections[uniqueKey] = true - transformed = append(transformed, map[string]interface{}{ - "project_id": projectID, - "network": networkID, - }) - } - } - d.Set("desired_psc_auto_connections", transformed) - log.Printf("[DEBUG] You are setting desired_psc_auto_connections in decoder %#v", transformed) - - // Retrieve pscAutoConnections from API response - } else if v, ok := res["endpoints"]; ok { - - endpointsArray, ok := v.([]interface{}) - if !ok || len(endpointsArray) == 0 { - // No endpoints or empty array, nothing to process - } else { - transformed := make([]interface{}, 0) - uniqueEndpoints := make(map[string]bool) // Track unique project+network combos - - for _, endpoint := range endpointsArray { - endpointData, ok := endpoint.(map[string]interface{}) - if !ok { - continue - } - - connections, ok := endpointData["connections"].([]interface{}) - if !ok { - continue - } - - for _, connection := range connections { - connectionData, ok := connection.(map[string]interface{}) - if !ok { - continue - } - - pscAutoConnection, ok := connectionData["pscAutoConnection"].(map[string]interface{}) - if !ok { - continue - } +// Retrieve pscAutoConnections from API response +v, ok := res["pscAutoConnections"] +if !ok { + if _, endpointsFound := res["endpoints"]; endpointsFound { + return res, nil // For Cluster 
Disabled instances, we would have 'endpoints' instead of 'pscAutoConnections' + } + return res, nil +} - projectID, projectOk := pscAutoConnection["projectId"].(string) - networkID, networkOk := pscAutoConnection["network"].(string) +connections, ok := v.([]interface{}) +if !ok { + return nil, fmt.Errorf("pscAutoConnections is not an array") +} - if projectOk && networkOk { - uniqueKey := projectID + networkID - if !uniqueEndpoints[uniqueKey] { // Check for uniqueness - uniqueEndpoints[uniqueKey] = true - transformed = append(transformed, map[string]interface{}{ - "project_id": projectID, - "network": networkID, - }) - } - } - } - } - // We want to make these fields detect API-side drift, so if the API returns a value for them and they're set in config, we set them in state. - // On import, we only set `desired_auto_created_endpoints` because that's the non-deprecated field. - if len(transformed) > 0 { - _, okEndpoint := d.GetOk("desired_auto_created_endpoints") - _, okPsc := d.GetOk("desired_psc_auto_connections") - if okEndpoint { - d.Set("desired_auto_created_endpoints", transformed) - log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder within endpoints for %#v", transformed) - } else if okPsc { - d.Set("desired_auto_created_endpoints", []interface{}{}) - } - if okPsc { - d.Set("desired_psc_auto_connections", transformed) - log.Printf("[DEBUG] Setting desired_psc_auto_connections in decoder within endpoints for %#v", transformed) - } else if okEndpoint { - d.Set("desired_psc_auto_connections", []interface{}{}) - } - // Set preferred field on import - if !okPsc && !okEndpoint { - d.Set("desired_auto_created_endpoints", transformed) - log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder within endpoints for %#v", transformed) - } - } - } +transformed := make([]interface{}, 0, len(connections)) +uniqueConnections := make(map[string]bool) // Track unique project+network combos +for _, raw := range connections { + connectionData, ok 
:= raw.(map[string]interface{}) + if !ok || len(connectionData) < 1 { + return nil, fmt.Errorf("Invalid or empty psc connection data: %v", raw) } - // Such custom code is necessary as the instance's certificate authority has to be retrieved via a dedicated - // getCertificateAuthority API. - // See https://cloud.google.com/memorystore/docs/valkey/reference/rest/v1/projects.locations.instances/getCertificateAuthority - // for details about this API. - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return nil, err + projectID, ok := connectionData["projectId"].(string) + if !ok { + return nil, fmt.Errorf("invalid project ID in psc connection: %v", connectionData) } - // Only instances with SERVER_AUTHENTICATION mode have certificate authority set - if v, ok := res["transitEncryptionMode"].(string); ok && v=="SERVER_AUTHENTICATION" { - url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}MemorystoreBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/instances/{{"{{"}}instance_id{{"}}"}}/certificateAuthority") - if err != nil { - return nil, err - } - - billingProject := "" - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return nil, fmt.Errorf("Error fetching project for instance: %s", err) - } - - billingProject = project - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } + networkID, ok := connectionData["network"].(string) + if !ok { + return nil, fmt.Errorf("invalid network ID in psc connection: %v", connectionData) + } - certificateAuthority, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, + uniqueKey := projectID + networkID + if !uniqueConnections[uniqueKey] { // Check for uniqueness 
+ uniqueConnections[uniqueKey] = true + transformed = append(transformed, map[string]interface{}{ + "project_id": projectID, + "network": networkID, }) - if err != nil { - return nil, fmt.Errorf("Error reading certificateAuthority: %s", err) - } - - res["managedServerCa"] = certificateAuthority["managedServerCa"] } - return res, nil \ No newline at end of file +} + +d.Set("desired_psc_auto_connections", transformed) +return res, nil diff --git a/mmv1/templates/terraform/decoders/redis_cluster.go.tmpl b/mmv1/templates/terraform/decoders/redis_cluster.go.tmpl deleted file mode 100644 index 2076004090c6..000000000000 --- a/mmv1/templates/terraform/decoders/redis_cluster.go.tmpl +++ /dev/null @@ -1,48 +0,0 @@ -// Such custom code is necessary as the Cluster's certificate authority has to be retrieved via a dedicated -// getCertificateAuthority API. -// See https://cloud.google.com/memorystore/docs/cluster/reference/rest/v1/projects.locations.clusters/getCertificateAuthority#http-request -// for details about this API. 
-config := meta.(*transport_tpg.Config) - -userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) -if err != nil { - return nil, err -} - -// Only clusters with TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION mode have certificate authority set -if v, ok := res["transitEncryptionMode"].(string); !ok || v!="TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION" { - return res, nil -} - -url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}RedisBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}region{{"}}"}}/clusters/{{"{{"}}name{{"}}"}}/certificateAuthority") -if err != nil { - return nil, err -} - -billingProject := "" - -project, err := tpgresource.GetProject(d, config) -if err != nil { - return nil, fmt.Errorf("Error fetching project for Cluster: %s", err) -} - -billingProject = project - -// err == nil indicates that the billing_project value was found -if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp -} - -certificateAuthority, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, -}) -if err != nil { - return nil, fmt.Errorf("Error reading certificateAuthority: %s", err) -} - -res["managedServerCa"] = certificateAuthority["managedServerCa"] -return res, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl deleted file mode 100644 index 106f0bbdb4c6..000000000000 --- a/mmv1/templates/terraform/encoders/bigqueryanalyticshub_data_exchange_subscription.go.tmpl +++ /dev/null @@ -1,36 +0,0 @@ -config := meta.(*transport_tpg.Config) -if v, ok := d.GetOk("subscription_id"); ok { - obj["subscription"] = v.(string) - // Remove the auto-generated "subscriptionId" if it was added by MM, as it conflicts. 
- delete(obj, "subscriptionId") -} - -// The API expects a 'destination' field in the request body for the subscriber's -// project and location, e.g., "projects/my-project/locations/us-central1". -// This is derived from the 'project' and 'location' fields of the resource. -project := d.Get("project").(string) -location := d.Get("location").(string) -obj["destination"] = fmt.Sprintf("projects/%s/locations/%s", project, location) - -if v, ok := d.GetOk("subscriber_contact"); ok { - obj["subscriberContact"] = v.(string) -} - -if v, ok := d.GetOk("destination_dataset"); ok && v != nil { - expandedDataset, err := expandBigqueryAnalyticsHubDataExchangeSubscriptionDestinationDataset(v, d, config) - if err != nil { - return nil, fmt.Errorf("error expanding destination_dataset: %w", err) - } - // Ensure the expanded dataset is not empty before assigning, to avoid sending empty objects. - if expandedDataset != nil && !tpgresource.IsEmptyValue(reflect.ValueOf(expandedDataset)) { - obj["destinationDataset"] = expandedDataset - } else { - // If the expanded dataset is empty, remove it from the payload to avoid API errors. - delete(obj, "destinationDataset") - } -} else { - // If destination_dataset is not provided by the user, ensure it's not in the payload. 
- delete(obj, "destinationDataset") -} - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl b/mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl deleted file mode 100644 index ee3175eb5ede..000000000000 --- a/mmv1/templates/terraform/encoders/compute_service_attachment.go.tmpl +++ /dev/null @@ -1,8 +0,0 @@ -propagatedConnectionLimitProp := d.Get("propagated_connection_limit") -if sv, ok := d.GetOk("send_propagated_connection_limit_if_zero"); ok && sv.(bool) { - if v, ok := d.GetOkExists("propagated_connection_limit"); ok || !reflect.DeepEqual(v, propagatedConnectionLimitProp) { - obj["propagatedConnectionLimit"] = propagatedConnectionLimitProp - } -} - -return obj, nil diff --git a/mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl b/mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl deleted file mode 100644 index 66c120878064..000000000000 --- a/mmv1/templates/terraform/encoders/compute_snapshot_settings.go.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -if storageLocation, ok := obj["storageLocation"].(map[string]interface{}); ok { - if _, ok := storageLocation["locations"]; !ok { - storageLocation["locations"] = nil - } -} - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl b/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl deleted file mode 100644 index 61def5ca7e99..000000000000 --- a/mmv1/templates/terraform/encoders/dataplex_entry.go.tmpl +++ /dev/null @@ -1,17 +0,0 @@ -// The yaml file does not allow validation for Array fields. -// Therefore we add validation as a part of the encoding proecess. 
-aspects := obj["aspects"] -if aspects != nil { - _, errors := NumberOfAspectsValidation(aspects, "aspects") - if len(errors) > 0 { - return nil, errors[0] - } -} - -err := TransformAspects(obj) - -if err != nil { - return nil, err -} - -return obj, nil diff --git a/mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl b/mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl deleted file mode 100644 index 84bb8d3e49b6..000000000000 --- a/mmv1/templates/terraform/encoders/discovery_engine_recommendation_engine_hardcode_solution_type.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -// hard code solutionType to "SOLUTION_TYPE_RECOMMENDATION" for recommendation engine resource -obj["solutionType"] = "SOLUTION_TYPE_RECOMMENDATION" -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/health_check_type.tmpl b/mmv1/templates/terraform/encoders/health_check_type.tmpl index 143364463b9f..e5d7cdae36b6 100644 --- a/mmv1/templates/terraform/encoders/health_check_type.tmpl +++ b/mmv1/templates/terraform/encoders/health_check_type.tmpl @@ -96,20 +96,4 @@ if _, ok := d.GetOk("grpc_health_check"); ok { return obj, nil } -{{ if ne $.TargetVersionName `ga` -}} -if _, ok := d.GetOk("grpc_tls_health_check"); ok { - hc := d.Get("grpc_tls_health_check").([]interface{})[0] - ps := hc.(map[string]interface{})["port_specification"] - - if ps == "USE_FIXED_PORT" || ps == "" { - m := obj["grpcTlsHealthCheck"].(map[string]interface{}) - if m["port"] == nil { - return nil, fmt.Errorf("error in HealthCheck %s: `port` must be set for GRPC with TLS health checks`.", d.Get("name").(string)) - } - } - obj["type"] = "GRPC_WITH_TLS" - return obj, nil -} -{{- end }} - return nil, fmt.Errorf("error in HealthCheck %s: No health check block specified.", d.Get("name").(string)) diff --git a/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl 
b/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl index acefdc55491c..e85836d19da1 100644 --- a/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl +++ b/mmv1/templates/terraform/encoders/memorystore_instance.go.tmpl @@ -1,73 +1,34 @@ - // Handles desired_auto_created_endpoints virtual field - v, ok := d.GetOk("desired_auto_created_endpoints") - if ok { - l := v.([]interface{}) - if len(l) > 0 { - endpoints := make([]interface{}, 1) - endpointObj := make(map[string]interface{}) - connections := make([]interface{}, 0, len(l)) - - for _, raw := range l { - if raw == nil { - continue - } - desiredEndpoint := raw.(map[string]interface{}) - connectionObj := make(map[string]interface{}) - pscAutoConnection := make(map[string]interface{}) - - projectId := desiredEndpoint["project_id"] - if val := reflect.ValueOf(projectId); val.IsValid() && !tpgresource.IsEmptyValue(val) { - pscAutoConnection["projectId"] = projectId - } - - network := desiredEndpoint["network"] - if val := reflect.ValueOf(network); val.IsValid() && !tpgresource.IsEmptyValue(val) { - pscAutoConnection["network"] = network - } - - connectionObj["pscAutoConnection"] = pscAutoConnection - connections = append(connections, connectionObj) - } - - endpointObj["connections"] = connections - endpoints[0] = endpointObj - obj["endpoints"] = endpoints - log.Printf("[DEBUG] You are setting desired_auto_created_endpoints in encoder %#v", endpoints) - - } - // Handles desired_auto_created_endpoints virtual field - } else if v, ok := d.GetOk("desired_psc_auto_connections"); ok { - l := v.([]interface{}) - req := make([]interface{}, 0, len(l)) - for _, raw := range l { - if raw == nil { - continue - } - desiredConnection := raw.(map[string]interface{}) - connectionReq := make(map[string]interface{}) - - projectId := desiredConnection["project_id"] - if val := reflect.ValueOf(projectId); val.IsValid() && !tpgresource.IsEmptyValue(val) { - connectionReq["projectId"] = projectId - } - - network := 
desiredConnection["network"] - if val := reflect.ValueOf(network); val.IsValid() && !tpgresource.IsEmptyValue(val) { - connectionReq["network"] = network - } - - req = append(req, connectionReq) - } - - obj["pscAutoConnections"] = req - log.Printf("[DEBUG] You are setting desired_psc_auto_connections in encoder %#v", req) - - } - - // If the automated_backup_config is not defined, automatedBackupMode needs to be passed and set to DISABLED in the expand - if obj["automatedBackupConfig"] == nil { - config := meta.(*transport_tpg.Config) - automatedBackupConfigProp, _ := expandMemorystoreInstanceAutomatedBackupConfig(d.Get("automated_backup_config"), d, config) - obj["automatedBackupConfig"] = automatedBackupConfigProp - } - return obj, nil \ No newline at end of file +v, ok := d.GetOk("desired_psc_auto_connections") +if !ok { + return obj, nil // No desired connections, nothing to update +} +l := v.([]interface{}) +req := make([]interface{}, 0, len(l)) +for _, raw := range l { + if raw == nil { + continue + } + desiredConnection := raw.(map[string]interface{}) + connectionReq := make(map[string]interface{}) + + projectId := desiredConnection["project_id"] + if val := reflect.ValueOf(projectId); val.IsValid() && !tpgresource.IsEmptyValue(val) { + connectionReq["projectId"] = projectId + } + + network := desiredConnection["network"] + if val := reflect.ValueOf(network); val.IsValid() && !tpgresource.IsEmptyValue(val) { + connectionReq["network"] = network + } + + req = append(req, connectionReq) +} + +obj["pscAutoConnections"] = req +// if the automated_backup_config is not defined, automatedBackupMode needs to be passed and set to DISABLED in the expand +if obj["automatedBackupConfig"] == nil { + config := meta.(*transport_tpg.Config) + automatedBackupConfigProp, _ := expandMemorystoreInstanceAutomatedBackupConfig(d.Get("automated_backup_config"), d, config) + obj["automatedBackupConfig"] = automatedBackupConfigProp +} +return obj, nil diff --git 
a/mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl b/mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl deleted file mode 100644 index a668418ea16c..000000000000 --- a/mmv1/templates/terraform/encoders/network_services_edge_cache_service.go.tmpl +++ /dev/null @@ -1,53 +0,0 @@ -// This encoder ensures TTL fields are handled correctly based on cache mode -routing, ok := obj["routing"].(map[string]interface{}) -if !ok { - return obj, nil -} - -pathMatchers, ok := routing["pathMatchers"].([]interface{}) -if !ok || len(pathMatchers) == 0 { - return obj, nil -} - -for _, pm := range pathMatchers { - pathMatcher, ok := pm.(map[string]interface{}) - if !ok { - continue - } - - routeRules, ok := pathMatcher["routeRules"].([]interface{}) - if !ok { - continue - } - - for _, rr := range routeRules { - routeRule, ok := rr.(map[string]interface{}) - if !ok { - continue - } - - routeAction, ok := routeRule["routeAction"].(map[string]interface{}) - if !ok { - continue - } - - cdnPolicy, ok := routeAction["cdnPolicy"].(map[string]interface{}) - if !ok { - continue - } - - // Handle TTL fields based on cache mode - if cacheMode, ok := cdnPolicy["cacheMode"].(string); ok { - switch cacheMode { - case "USE_ORIGIN_HEADERS", "BYPASS_CACHE": - delete(cdnPolicy, "clientTtl") - delete(cdnPolicy, "defaultTtl") - delete(cdnPolicy, "maxTtl") - case "FORCE_CACHE_ALL": - delete(cdnPolicy, "maxTtl") - } - } - } -} - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl b/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl index 25d1463ee8c9..fdda9c430a47 100644 --- a/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl +++ b/mmv1/templates/terraform/encoders/spanner_instance.go.tmpl @@ -1,15 +1,7 @@ -if obj["instanceType"] == "FREE_INSTANCE" { - // when provisioning a FREE_INSTANCE, the following fields cannot be specified - if obj["nodeCount"] != nil || 
obj["processingUnits"] != nil || obj["autoscalingConfig"] != nil { - return nil, fmt.Errorf("`num_nodes`, `processing_units`, and `autoscaling_config` cannot be specified when instance_type is FREE_INSTANCE") - } -} else { - // Temp Logic to accommodate autoscaling_config, processing_units and num_nodes - if obj["processingUnits"] == nil && obj["nodeCount"] == nil && obj["autoscalingConfig"] == nil && obj["instanceType"] != "FREE_INSTANCE" { - obj["nodeCount"] = 1 - } +// Temp Logic to accommodate autoscaling_config, processing_units and num_nodes +if obj["processingUnits"] == nil && obj["nodeCount"] == nil && obj["autoscalingConfig"] == nil { + obj["nodeCount"] = 1 } - newObj := make(map[string]interface{}) newObj["instance"] = obj if obj["name"] == nil { diff --git a/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl b/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl index d358ac895c99..1d219ce86ea6 100644 --- a/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl +++ b/mmv1/templates/terraform/examples/Storage_pool_create.tf.tmpl @@ -1,7 +1,36 @@ -data "google_compute_network" "default" { +# Create a network or use datasource to reference existing network +resource "google_compute_network" "peering_network" { name = "{{index $.Vars "network_name"}}" } +# Reserve a CIDR for NetApp Volumes to use +# When using shared-VPCs, this resource needs to be created in host project +resource "google_compute_global_address" "private_ip_alloc" { + name = "{{index $.Vars "global_name"}}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.peering_network.id +} + +# Create a Private Service Access connection +# When using shared-VPCs, this resource needs to be created in host project +resource "google_service_networking_connection" "default" { + network = google_compute_network.peering_network.id + service = "netapp.servicenetworking.goog" + reserved_peering_ranges = 
[google_compute_global_address.private_ip_alloc.name] +} + +# Modify the PSA Connection to allow import/export of custom routes +# When using shared-VPCs, this resource needs to be created in host project +resource "google_compute_network_peering_routes_config" "route_updates" { + peering = google_service_networking_connection.default.peering + network = google_compute_network.peering_network.name + + import_custom_routes = true + export_custom_routes = true +} + # Create a storage pool # Create this resource in the project which is expected to own the volumes resource "google_netapp_storage_pool" "{{$.PrimaryResourceId}}" { @@ -10,5 +39,5 @@ resource "google_netapp_storage_pool" "{{$.PrimaryResourceId}}" { location = "us-central1" service_level = "PREMIUM" capacity_gib = "2048" - network = data.google_compute_network.default.id + network = google_compute_network.peering_network.id } diff --git a/mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl b/mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl deleted file mode 100644 index 1d219ce86ea6..000000000000 --- a/mmv1/templates/terraform/examples/Storage_pool_create_doc.tf.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -# Create a network or use datasource to reference existing network -resource "google_compute_network" "peering_network" { - name = "{{index $.Vars "network_name"}}" -} - -# Reserve a CIDR for NetApp Volumes to use -# When using shared-VPCs, this resource needs to be created in host project -resource "google_compute_global_address" "private_ip_alloc" { - name = "{{index $.Vars "global_name"}}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.peering_network.id -} - -# Create a Private Service Access connection -# When using shared-VPCs, this resource needs to be created in host project -resource "google_service_networking_connection" "default" { - network = google_compute_network.peering_network.id - service = 
"netapp.servicenetworking.goog" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] -} - -# Modify the PSA Connection to allow import/export of custom routes -# When using shared-VPCs, this resource needs to be created in host project -resource "google_compute_network_peering_routes_config" "route_updates" { - peering = google_service_networking_connection.default.peering - network = google_compute_network.peering_network.name - - import_custom_routes = true - export_custom_routes = true -} - -# Create a storage pool -# Create this resource in the project which is expected to own the volumes -resource "google_netapp_storage_pool" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "pool_name"}}" - # project = - location = "us-central1" - service_level = "PREMIUM" - capacity_gib = "2048" - network = google_compute_network.peering_network.id -} diff --git a/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl index 933c5ec28e04..a78ce7463f6a 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl @@ -12,8 +12,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl index aedf0a53d9ab..b09d5a4be7ef 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl @@ -12,8 +12,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource 
"google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl index 6b4f0385861e..563fdcbac298 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl @@ -17,8 +17,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl index abb4e8618b2e..218bc7ee3bed 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl @@ -17,8 +17,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl index cb829f13414f..941f111db17e 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl @@ -7,23 +7,38 @@ resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { cpu_count = 2 } + depends_on = [google_service_networking_connection.vpc_connection] } resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { cluster_id = "{{index $.Vars "alloydb_cluster_name"}}" location = "us-central1" network_config { - network = data.google_compute_network.default.id + network = google_compute_network.default.id } database_version = 
"POSTGRES_15" initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } - - deletion_protection = false } -data "google_compute_network" "default" { +data "google_project" "project" {} + +resource "google_compute_network" "default" { name = "{{index $.Vars "network_name"}}" -} \ No newline at end of file +} + +resource "google_compute_global_address" "private_ip_alloc" { + name = "{{index $.Vars "alloydb_cluster_name"}}" + address_type = "INTERNAL" + purpose = "VPC_PEERING" + prefix_length = 16 + network = google_compute_network.default.id +} + +resource "google_service_networking_connection" "vpc_connection" { + network = google_compute_network.default.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl index 86f8d92fe2f9..c9bab8098b43 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl @@ -4,8 +4,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl index 319e346fd914..c9339314e80f 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl @@ -7,23 +7,38 @@ resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { cpu_count = 2 } + depends_on = [google_service_networking_connection.vpc_connection] } resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { cluster_id = "{{index $.Vars "alloydb_cluster_name"}}" location = 
"us-central1" network_config { - network = data.google_compute_network.default.id + network = google_compute_network.default.id } database_version = "POSTGRES_14" initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } - - deletion_protection = false } -data "google_compute_network" "default" { +data "google_project" "project" {} + +resource "google_compute_network" "default" { name = "{{index $.Vars "network_name"}}" -} \ No newline at end of file +} + +resource "google_compute_global_address" "private_ip_alloc" { + name = "{{index $.Vars "alloydb_cluster_name"}}" + address_type = "INTERNAL" + purpose = "VPC_PEERING" + prefix_length = 16 + network = google_compute_network.default.id +} + +resource "google_service_networking_connection" "vpc_connection" { + network = google_compute_network.default.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] +} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl index 44f8e9faea1f..2b0c9e4c9f63 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl @@ -44,8 +44,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { labels = { test = "{{index $.Vars "alloydb_cluster_name"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl index bd1ff8d2668d..7713276440e6 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl @@ -6,8 +6,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } - - deletion_protection = false 
} resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { @@ -39,8 +37,6 @@ resource "google_alloydb_cluster" "restored_from_backup" { restore_backup_source { backup_name = google_alloydb_backup.{{$.PrimaryResourceId}}.name } - - deletion_protection = false } resource "google_alloydb_cluster" "restored_via_pitr" { @@ -53,8 +49,6 @@ resource "google_alloydb_cluster" "restored_via_pitr" { cluster = google_alloydb_cluster.{{$.PrimaryResourceId}}.name point_in_time = "2023-08-03T19:19:00.094Z" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl index 31b0d9e40a0b..575c2a6ed790 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl @@ -20,8 +20,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl index dd6c485736fe..0ca5146f0ac8 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl @@ -17,8 +17,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl index 78b6cdf4d91e..d2d4712d0ae7 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl +++ 
b/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl @@ -18,6 +18,4 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { psc_config { psc_enabled = true } - - deletion_protection = false } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl index 6d48cfa10e94..6911d955d778 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl @@ -4,8 +4,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -36,7 +34,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl index 87fa2ba37399..9b04de5953d4 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl @@ -4,8 +4,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -34,7 +32,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl index 
a10d35521051..8e2eeb44e88d 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl @@ -4,8 +4,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -45,7 +43,6 @@ resource "google_alloydb_cluster" "secondary" { ignore_changes = [instance_type] } - deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl index f3c959d2baf0..432fd4d91e80 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl @@ -4,8 +4,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -36,7 +34,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl index 5e3480f68a38..b91e92abddce 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl @@ -15,8 +15,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl 
b/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl index 2ccac4ee4918..01d0f92dd77c 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl @@ -13,8 +13,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl index 62a938a57ee8..107cdaf25b3f 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl @@ -16,8 +16,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl index 1db1157c4b9f..eebb03454761 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl @@ -13,8 +13,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl deleted file mode 100644 index ecd11e31de36..000000000000 --- a/mmv1/templates/terraform/examples/apigee_api_product_basic.tf.tmpl +++ /dev/null @@ -1,45 +0,0 @@ -data "google_client_config" "current" {} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" -} - -resource "google_compute_global_address" "apigee_range" { - name 
= "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = data.google_client_config.current.project - authorized_network = google_compute_network.apigee_network.id - depends_on = [google_service_networking_connection.apigee_vpc_connection] -} - -resource "google_apigee_instance" "apigee_instance" { - name = "{{index $.Vars "instance_name"}}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} - -resource "google_apigee_api_product" "basic_api_product" { - org_id = google_apigee_organization.apigee_org.id - name = "{{index $.Vars "product_name"}}" - display_name = "My Basic API Product" - - approval_type = "auto" - - depends_on = [ - google_apigee_instance.apigee_instance - ] -} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl deleted file mode 100644 index fec8e03f9587..000000000000 --- a/mmv1/templates/terraform/examples/apigee_api_product_basic_test.tf.tmpl +++ /dev/null @@ -1,86 +0,0 @@ -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "{{index $.TestEnvVars "org_id"}}" - billing_account = "{{index $.TestEnvVars "billing_account"}}" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} - -resource "google_project_service" "apigee" { - project = google_project.project.project_id - 
service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.apigee] -} - -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.compute] -} - -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.servicenetworking] -} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = google_project.project.project_id - depends_on = [time_sleep.wait_120_seconds] -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} - -resource "google_apigee_instance" "apigee_instance" { - name = "tf-test%{random_suffix}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} - -resource "google_apigee_api_product" "{{$.PrimaryResourceId}}" { - org_id = 
google_apigee_organization.apigee_org.id - name = "basic-api-product" - display_name = "My Basic API Product" - - approval_type = "auto" - - depends_on = [ - google_apigee_instance.apigee_instance - ] -} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl deleted file mode 100644 index 74b64f402560..000000000000 --- a/mmv1/templates/terraform/examples/apigee_api_product_with_attributes.tf.tmpl +++ /dev/null @@ -1,193 +0,0 @@ -data "google_client_config" "current" {} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = data.google_client_config.current.project - authorized_network = google_compute_network.apigee_network.id - depends_on = [google_service_networking_connection.apigee_vpc_connection] -} - -resource "google_apigee_instance" "apigee_instance" { - name = "{{index $.Vars "instance_name"}}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} - -resource "google_apigee_api_product" "full_api_product" { - org_id = google_apigee_organization.apigee_org.id - name = "{{index $.Vars "product_name"}}" - display_name = "My full API Product" - - approval_type = "auto" - - description = "This is a sample API Product created with Terraform." 
- - quota = "10000" - quota_interval = "1" - quota_time_unit = "day" - quota_counter_scope = "PROXY" - - environments = ["dev", "hom"] - scopes = [ - "read:weather", - "write:reports" - ] - - attributes { - name = "access" - value = "private" - } - - attributes { - name = "custom" - value = "value" - } - - operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "anoter-proxy" - - operations { - resource = "/" - methods = ["POST", "GET"] - } - - quota { - limit = "1000" - interval = "5" - time_unit = "minute" - } - - attributes { - name = "custom" - value = "value" - } - } - - operation_configs { - api_source = "hello-world" - - operations { - resource = "/test" - methods = ["POST", "GET"] - } - - quota { - limit = "10" - interval = "30" - time_unit = "second" - } - - attributes { - name = "custom" - value = "value" - } - } - } - - graphql_operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "hello-world" - - quota { - limit = "30" - interval = "50" - time_unit = "second" - } - - operations { - operation_types = ["QUERY"] - operation = "test" - } - - attributes { - name = "custom" - value = "value" - } - } - - operation_configs { - api_source = "another-proxy" - - quota { - limit = "50000" - interval = "12" - time_unit = "hour" - } - - operations { - operation_types = ["MUTATION"] - operation = "test" - } - - attributes { - name = "custom" - value = "value" - } - } - } - - grpc_operation_group { - - operation_configs { - api_source = "another-proxy" - service = "grpc another test" - methods = ["method3", "method4"] - - quota { - limit = "1000000" - interval = "1" - time_unit = "month" - } - - attributes { - name = "graph" - value = "value" - } - } - - operation_configs { - api_source = "hello-world" - service = "grpc test" - methods = ["method1", "method2"] - - quota { - limit = "5" - interval = "1" - time_unit = "second" - } - - attributes { - name = "graph" - value = "value" - } - } - } - - 
depends_on = [ - google_apigee_instance.apigee_instance - ] -} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl deleted file mode 100644 index ac91919ad5eb..000000000000 --- a/mmv1/templates/terraform/examples/apigee_api_product_with_attributes_test.tf.tmpl +++ /dev/null @@ -1,234 +0,0 @@ -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "{{index $.TestEnvVars "org_id"}}" - billing_account = "{{index $.TestEnvVars "billing_account"}}" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} - -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.apigee] -} - -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.compute] -} - -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.servicenetworking] -} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = google_project.project.project_id - depends_on = [time_sleep.wait_120_seconds] -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" 
{ - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} - -resource "google_apigee_instance" "apigee_instance" { - name = "tf-test%{random_suffix}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} - -resource "google_apigee_api_product" "{{$.PrimaryResourceId}}" { - org_id = google_apigee_organization.apigee_org.id - name = "full-api-product" - display_name = "My full API Product" - - approval_type = "auto" - - description = "This is a sample API Product created with Terraform." 
- - quota = "10000" - quota_interval = "1" - quota_time_unit = "day" - quota_counter_scope = "PROXY" - - environments = ["dev", "hom"] - scopes = [ - "read:weather", - "write:reports" - ] - - attributes { - name = "access" - value = "private" - } - - attributes { - name = "custom" - value = "value" - } - - operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "anoter-proxy" - - operations { - resource = "/" - methods = ["POST", "GET"] - } - - quota { - limit = "1000" - interval = "5" - time_unit = "minute" - } - - attributes { - name = "custom" - value = "value" - } - } - - operation_configs { - api_source = "hello-world" - - operations { - resource = "/test" - methods = ["POST", "GET"] - } - - quota { - limit = "10" - interval = "30" - time_unit = "second" - } - - attributes { - name = "custom" - value = "value" - } - } - } - - graphql_operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "hello-world" - - quota { - limit = "30" - interval = "50" - time_unit = "second" - } - - operations { - operation_types = ["QUERY"] - operation = "test" - } - - attributes { - name = "custom" - value = "value" - } - } - - operation_configs { - api_source = "another-proxy" - - quota { - limit = "50000" - interval = "12" - time_unit = "hour" - } - - operations { - operation_types = ["MUTATION"] - operation = "test" - } - - attributes { - name = "custom" - value = "value" - } - } - } - - grpc_operation_group { - - operation_configs { - api_source = "another-proxy" - service = "grpc another test" - methods = ["method3", "method4"] - - quota { - limit = "1000000" - interval = "1" - time_unit = "month" - } - - attributes { - name = "graph" - value = "value" - } - } - - operation_configs { - api_source = "hello-world" - service = "grpc test" - methods = ["method1", "method2"] - - quota { - limit = "5" - interval = "1" - time_unit = "second" - } - - attributes { - name = "graph" - value = "value" - } - } - } - - 
depends_on = [ - google_apigee_instance.apigee_instance - ] -} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl deleted file mode 100644 index ba85ea349890..000000000000 --- a/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation.tf.tmpl +++ /dev/null @@ -1,68 +0,0 @@ -data "google_client_config" "current" {} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = data.google_client_config.current.project - authorized_network = google_compute_network.apigee_network.id - depends_on = [google_service_networking_connection.apigee_vpc_connection] -} - -resource "google_apigee_instance" "apigee_instance" { - name = "{{index $.Vars "instance_name"}}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} - -resource "google_apigee_api_product" "full_api_product" { - org_id = google_apigee_organization.apigee_org.id - name = "{{index $.Vars "product_name"}}" - display_name = "My full API Product" - - approval_type = "auto" - - description = "This is a sample API Product created with Terraform." 
- - attributes { - name = "access" - value = "private" - } - - environments = ["dev", "hom"] - proxies = ["hello-world"] - api_resources = [ - "/", - "/weather/**" - ] - scopes = [ - "read:weather", - "write:reports" - ] - - quota = "10000" - quota_interval = "1" - quota_time_unit = "day" - quota_counter_scope = "PROXY" - - depends_on = [ - google_apigee_instance.apigee_instance - ] -} diff --git a/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl deleted file mode 100644 index 4948c768becb..000000000000 --- a/mmv1/templates/terraform/examples/apigee_api_product_with_legacy_operation_test.tf.tmpl +++ /dev/null @@ -1,109 +0,0 @@ -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "{{index $.TestEnvVars "org_id"}}" - billing_account = "{{index $.TestEnvVars "billing_account"}}" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} - -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.apigee] -} - -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.compute] -} - -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.servicenetworking] -} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = google_project.project.project_id - depends_on = 
[time_sleep.wait_120_seconds] -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} - -resource "google_apigee_instance" "apigee_instance" { - name = "tf-test%{random_suffix}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} - -resource "google_apigee_api_product" "{{$.PrimaryResourceId}}" { - org_id = google_apigee_organization.apigee_org.id - name = "legacy-operation-api-product" - display_name = "My legacy operation API Product" - - approval_type = "auto" - - description = "This is a sample API Product created with Terraform." 
- - attributes { - name = "access" - value = "private" - } - - environments = ["dev", "hom"] - proxies = ["hello-world"] - api_resources = [ - "/", - "/weather/**" - ] - scopes = [ - "read:weather", - "write:reports" - ] - - quota = "10000" - quota_interval = "1" - quota_time_unit = "day" - quota_counter_scope = "PROXY" - - depends_on = [ - google_apigee_instance.apigee_instance - ] -} diff --git a/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl index b202417439b0..df9c2ea7b320 100644 --- a/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_environment_basic.tf.tmpl @@ -30,11 +30,5 @@ resource "google_apigee_environment" "env" { description = "Apigee Environment" display_name = "environment-1" org_id = google_apigee_organization.apigee_org.id - client_ip_resolution_config { - header_index_algorithm { - ip_header_name = "X-Forwarded-For" - ip_header_index = 1 - } - } } diff --git a/mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl deleted file mode 100644 index 49e3cbf526bf..000000000000 --- a/mmv1/templates/terraform/examples/apigee_environment_client_ip_resolution_config_test.tf.tmpl +++ /dev/null @@ -1,81 +0,0 @@ -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "{{index $.TestEnvVars "org_id"}}" - billing_account = "{{index $.TestEnvVars "billing_account"}}" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} - -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} - -resource 
"google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.apigee] -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.servicenetworking] -} - -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.compute] -} - - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = google_project.project.project_id - depends_on = [time_sleep.wait_120_seconds] -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} - -resource "google_apigee_environment" "{{$.PrimaryResourceId}}" { - org_id = google_apigee_organization.apigee_org.id - name = "tf-test%{random_suffix}" - description = "Apigee Environment" - display_name = "environment-1" - client_ip_resolution_config { - header_index_algorithm { - ip_header_name = "X-Forwarded-For" - ip_header_index = 1 - } - } -} diff --git 
a/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl index 0c97983bd2f6..8b83461f0d9f 100644 --- a/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_instance_full_test.tf.tmpl @@ -135,9 +135,4 @@ resource "google_apigee_instance" "{{$.PrimaryResourceId}}" { display_name = "tf-test%{random_suffix}" org_id = google_apigee_organization.apigee_org.id disk_encryption_key_name = google_kms_crypto_key.apigee_key.id - - access_logging_config { - enabled = true - filter = "status_code >= 200 && status_code < 300" - } } diff --git a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl index 64aa8f4cdb8a..148874ab1633 100644 --- a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency.tf.tmpl @@ -5,10 +5,8 @@ provider "google" { data "google_client_config" "current" {} resource "google_apigee_organization" "org" { - description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." - project_id = data.google_client_config.current.project - api_consumer_data_location = "europe-west1" - billing_type = "PAYG" - disable_vpc_peering = true + description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." 
+ project_id = data.google_client_config.current.project + disable_vpc_peering = true } diff --git a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl index 399ece85e1b1..22996438f7b8 100644 --- a/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/apigee_organization_cloud_basic_data_residency_test.tf.tmpl @@ -16,12 +16,10 @@ resource "google_project_service" "apigee" { } resource "google_apigee_organization" "{{$.PrimaryResourceId}}" { - description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." - project_id = google_project.project.project_id - api_consumer_data_location = "europe-west1" - billing_type = "PAYG" - disable_vpc_peering = true - depends_on = [ + description = "Terraform-provisioned basic Apigee Org under European Union hosting jurisdiction." 
+ project_id = google_project.project.project_id + disable_vpc_peering = true + depends_on = [ google_project_service.apigee, ] } diff --git a/mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl deleted file mode 100644 index 842ffa7f4da8..000000000000 --- a/mmv1/templates/terraform/examples/apigee_security_action_basic.tf.tmpl +++ /dev/null @@ -1,70 +0,0 @@ -data "google_client_config" "current" {} - -resource "google_compute_network" "apigee_network" { - name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_global_address" "apigee_range" { - name ="{{index $.Vars "global_address_name"}}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = data.google_client_config.current.project - authorized_network = google_compute_network.apigee_network.id - depends_on = [google_service_networking_connection.apigee_vpc_connection] -} - -resource "google_apigee_environment" "env" { - name = "{{index $.Vars "environment_name"}}" - description = "Apigee Environment" - display_name = "environment-1" - org_id = google_apigee_organization.apigee_org.id -} - -resource "google_apigee_addons_config" "apigee_org_security_addons_config" { - org = google_apigee_organization.apigee_org.name - addons_config { - api_security_config { - enabled = true - } - } -} - -resource "google_apigee_security_action" "{{$.PrimaryResourceId}}" { - security_action_id = "{{index $.Vars "security_action_id"}}" - org_id = google_apigee_organization.apigee_org.name - 
env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - ip_address_ranges = [ - "100.0.220.1", - "200.0.0.1", - ] - - bot_reasons = [ - "Flooder", - "Public Cloud Azure", - "Public Cloud AWS", - ] - } - - allow {} - - expire_time = "2025-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl b/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl deleted file mode 100644 index 4b45055c0ed1..000000000000 --- a/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic.tf.tmpl +++ /dev/null @@ -1,53 +0,0 @@ -data "google_client_config" "current" {} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = data.google_client_config.current.project - authorized_network = google_compute_network.apigee_network.id - depends_on = [google_service_networking_connection.apigee_vpc_connection] -} - -resource "google_apigee_environment" "env" { - name = "{{index $.Vars "environment_name"}}" - description = "Apigee Environment" - display_name = "environment-1" - org_id = google_apigee_organization.apigee_org.id -} - -resource "google_apigee_addons_config" "apigee_org_security_addons_config" 
{ - org = google_apigee_organization.apigee_org.name - addons_config { - api_security_config { - enabled = true - } - } -} - -resource "google_apigee_security_monitoring_condition" "security_monitoring_condition" { - condition_id = "{{index $.Vars "security_monitoring_condition_id"}}" - org_id = google_apigee_organization.apigee_org.id - profile = "google-default" - scope = "{{index $.Vars "environment_name"}}" - include_all_resources {} - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl deleted file mode 100644 index cf2d2b86f208..000000000000 --- a/mmv1/templates/terraform/examples/apigee_security_monitoring_condition_basic_test.tf.tmpl +++ /dev/null @@ -1,89 +0,0 @@ -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "{{index $.TestEnvVars "org_id"}}" - billing_account = "{{index $.TestEnvVars "billing_account"}}" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} - -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} - -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.apigee] -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.servicenetworking] -} - -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = 
google_project.project.project_id - depends_on = [google_project_service.compute] -} - -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} - -resource "google_apigee_environment" "apigee_environment" { - org_id = google_apigee_organization.apigee_org.id - name = "tf-test-env-%{random_suffix}" - description = "Apigee Environment" - display_name = "environment-1" -} - -resource "google_apigee_addons_config" "apigee_org_security_addons_config" { - org = google_apigee_organization.apigee_org.name - addons_config { - api_security_config { - enabled = true - } - } -} - -resource "google_apigee_security_monitoring_condition" "{{$.PrimaryResourceId}}" { - org_id = google_apigee_organization.apigee_org.id - condition_id = "tf-test-conditoin" - profile = "google-default" - scope = google_apigee_environment.apigee_environment.name - include_all_resources {} - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} diff --git a/mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl b/mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl deleted file mode 100644 index 00ce0ec9d696..000000000000 --- 
a/mmv1/templates/terraform/examples/apihub_curation_basic.tf.tmpl +++ /dev/null @@ -1,17 +0,0 @@ -resource "google_apihub_curation" "{{$.PrimaryResourceId}}" { - location = "us-central1" - curation_id = "{{index $.Vars "curation_id"}}" - project = "apihub-terraform" - display_name = "Test Curation" - description = "This is a sample curation resource managed by Terraform." - endpoint { - application_integration_endpoint_details { - trigger_id = "api_trigger/curation_API_1" - uri = "https://integrations.googleapis.com/v1/projects/1082615593856/locations/us-central1/integrations/curation:execute" - } - } - -} - - - diff --git a/mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl b/mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl deleted file mode 100644 index 6cf228865d10..000000000000 --- a/mmv1/templates/terraform/examples/apihub_plugin_full.tf.tmpl +++ /dev/null @@ -1,65 +0,0 @@ -resource "google_apihub_plugin" "{{$.PrimaryResourceId}}" { - location = "us-central1" - display_name = "Test Plugin" - description="Test description" - plugin_id = "{{index $.Vars "plugin_id"}}" - plugin_category = "API_GATEWAY" - actions_config { - id = "sync-metadata" - display_name = "Sync Metadata" - description = "Syncs API metadata." - trigger_mode = "API_HUB_SCHEDULE_TRIGGER" - } - documentation { - external_uri = "https://example.com/plugin-documentation" - } - hosting_service { - service_uri = "https://your-plugin-service.example.com/api" - } - config_template { - auth_config_template { - supported_auth_types = ["NO_AUTH", "USER_PASSWORD"] - service_account { - service_account = "test@developer.gserviceaccount.com" - } - } - additional_config_template { - id = "string-val" - description = "API key for the service." - value_type = "STRING" - required = false - validation_regex = "^[a-zA-Z0-9]{5,20}$" - } - additional_config_template { - id = "integer-val" - description = "API key for the service." 
- value_type = "INT" - required = true - validation_regex = "" - } - additional_config_template { - id = "bool-val" - description = "API key for the service." - value_type = "BOOL" - required = false - validation_regex = "" - } - additional_config_template { - id = "enum-val" - description = "API key for the service." - value_type = "ENUM" - enum_options { - id = "Option1" - display_name = "Option1" - description = "Description for Option1" - } - enum_options { - id = "Option2" - display_name = "Option2" - description = "Description for Option2" - } - required = false - validation_regex = "" - } - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl deleted file mode 100644 index c2b19c82a6cc..000000000000 --- a/mmv1/templates/terraform/examples/apihub_plugin_instance_basic.tf.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -resource "google_apihub_plugin_instance" "{{$.PrimaryResourceId}}" { - location = "us-central1" - plugin = "existing-plugin-id" - plugin_instance_id = "{{index $.Vars "curation_id"}}" - display_name = "Sample Plugin Instance Display Name" - disable = false - actions { - action_id = "existing-action-id" - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl b/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl index 0494ba3feaaa..ddffa4d557c1 100644 --- a/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl +++ b/mmv1/templates/terraform/examples/artifact_registry_repository_remote_apt.tf.tmpl @@ -5,11 +5,11 @@ resource "google_artifact_registry_repository" "{{$.PrimaryResourceId}}" { format = "APT" mode = "REMOTE_REPOSITORY" remote_repository_config { - description = "Debian stable remote repository" + description = "Debian buster remote repository" apt_repository { public_repository { 
repository_base = "DEBIAN" - repository_path = "debian/dists/stable" + repository_path = "debian/dists/buster" } } } diff --git a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl deleted file mode 100644 index 01d3d96fc795..000000000000 --- a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl +++ /dev/null @@ -1,36 +0,0 @@ -# Note: This example must be run in a project without Cloud Armor tier configured, -# as it may cause conflicts with the INTERNAL_MANAGED load balancing scheme. -# This test is skipped in VCR mode due to non-determinism in project creation and resource management. - -resource "google_project" "unarmored" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "{{index $.TestEnvVars "org_id"}}" - billing_account = "{{index $.TestEnvVars "billing_account"}}" - deletion_policy = "DELETE" -} - -resource "google_project_service" "project" { - project = google_project.unarmored.number - service = "compute.googleapis.com" - disable_on_destroy = true -} - -resource "google_compute_backend_bucket" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "backend_bucket_name"}}" - project = google_project.unarmored.number - bucket_name = google_storage_bucket.{{$.PrimaryResourceId}}.name - load_balancing_scheme = "INTERNAL_MANAGED" - - depends_on = [google_project_service.project] -} - -resource "google_storage_bucket" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "bucket_name"}}" - project = google_project.unarmored.number - location = "US-CENTRAL1" - force_destroy = true - uniform_bucket_level_access = true - - depends_on = [google_project_service.project] -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl index 4ebba9fe80d6..262892bb9faa 100644 --- 
a/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_service_custom_metrics.tf.tmpl @@ -37,11 +37,6 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { dry_run = false } } - log_config { - enable = true - optional_mode = "CUSTOM" - optional_fields = [ "orca_load_report", "tls.protocol" ] - } } resource "google_compute_health_check" "default" { diff --git a/mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl deleted file mode 100644 index f9c5ba1ad962..000000000000 --- a/mmv1/templates/terraform/examples/backend_service_dynamic_forwarding.tf.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { - provider = google-beta - name = "{{index $.Vars "backend_service_name"}}" - load_balancing_scheme = "INTERNAL_MANAGED" - dynamic_forwarding { - ip_port_selection { - enabled = true - } - } -} diff --git a/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl index 9d5de7a167e0..800fcf72bb71 100644 --- a/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_service_external_managed.tf.tmpl @@ -2,7 +2,6 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "backend_service_name"}}" health_checks = [google_compute_health_check.default.id] load_balancing_scheme = "EXTERNAL_MANAGED" - protocol = "H2C" } resource "google_compute_health_check" "default" { diff --git a/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl b/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl index f9fd2e28ce8b..2fc850f1e11b 100644 --- a/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl +++ 
b/mmv1/templates/terraform/examples/backend_service_tls_settings.tf.tmpl @@ -1,4 +1,5 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { + provider = google-beta name = "{{index $.Vars "backend_service_name"}}" health_checks = [google_compute_health_check.default.id] load_balancing_scheme = "EXTERNAL_MANAGED" @@ -16,6 +17,7 @@ resource "google_compute_backend_service" "{{$.PrimaryResourceId}}" { } resource "google_compute_health_check" "default" { + provider = google-beta name = "{{index $.Vars "health_check_name"}}" http_health_check { port = 80 @@ -23,6 +25,7 @@ resource "google_compute_health_check" "default" { } resource "google_network_security_backend_authentication_config" "default" { + provider = google-beta name = "{{index $.Vars "authentication_name"}}" well_known_roots = "PUBLIC_ROOTS" } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl deleted file mode 100644 index dee347472956..000000000000 --- a/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_csql_resource.tf.tmpl +++ /dev/null @@ -1,29 +0,0 @@ -resource "google_backup_dr_backup_vault" "my_backup_vault" { - location = "us-central1" - backup_vault_id = "{{index $.Vars "backup_vault_id"}}" - backup_minimum_enforced_retention_duration = "100000s" -} - -resource "google_backup_dr_backup_plan" "{{$.PrimaryResourceId}}" { - location = "us-central1" - backup_plan_id = "{{index $.Vars "backup_plan_id"}}" - resource_type = "sqladmin.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my_backup_vault.id - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 5 - - standard_schedule { - recurrence_type = "HOURLY" - hourly_frequency = 6 - time_zone = "UTC" - - backup_window { - start_hour_of_day = 0 - end_hour_of_day = 6 - } - } - } - log_retention_days = 4 -} diff --git 
a/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl deleted file mode 100644 index f4185401e77d..000000000000 --- a/mmv1/templates/terraform/examples/backup_dr_backup_plan_for_disk_resource.tf.tmpl +++ /dev/null @@ -1,31 +0,0 @@ -resource "google_backup_dr_backup_vault" "my_backup_vault" { - provider = google-beta - location = "us-central1" - backup_vault_id = "{{index $.Vars "backup_vault_id"}}" - backup_minimum_enforced_retention_duration = "100000s" -} - -resource "google_backup_dr_backup_plan" "{{$.PrimaryResourceId}}" { - provider = google-beta - location = "us-central1" - backup_plan_id = "{{index $.Vars "backup_plan_id"}}" - resource_type = "compute.googleapis.com/Disk" - backup_vault = google_backup_dr_backup_vault.my_backup_vault.id - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 5 - - standard_schedule { - recurrence_type = "HOURLY" - hourly_frequency = 1 - time_zone = "UTC" - - backup_window { - start_hour_of_day = 0 - end_hour_of_day = 6 - } - } - } -} - diff --git a/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl index 4b51780d77d4..f527cf9fa2f9 100644 --- a/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/backup_dr_backup_vault_full.tf.tmpl @@ -13,7 +13,6 @@ resource "google_backup_dr_backup_vault" "{{$.PrimaryResourceId}}" { } force_update = "true" access_restriction = "WITHIN_ORGANIZATION" - backup_retention_inheritance = "INHERIT_VAULT_RETENTION" ignore_inactive_datasources = "true" ignore_backup_plan_references = "true" allow_missing = "true" diff --git a/mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl b/mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl deleted file mode 100644 index 076400311210..000000000000 --- 
a/mmv1/templates/terraform/examples/backup_dr_service_config.tf.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -resource "google_backup_dr_service_config" "bpa1" { - location = "us-central1" - resource_type= "compute.googleapis.com/Instance" -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl b/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl deleted file mode 100644 index 84421bf40166..000000000000 --- a/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: MMv1 *** -// -// ---------------------------------------------------------------------------- -// -// This file is automatically generated by Magic Modules and manual -// changes will be clobbered when the file is regenerated. -// -// Please read more about how to change this file in -// .github/CONTRIBUTING.md. 
-// -// ---------------------------------------------------------------------------- - -package {{ $.Res.PackageName }}_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "{{ $.ImportPath }}/acctest" - "{{ $.ImportPath }}/envvar" - "{{ $.ImportPath }}/tpgresource" -) - -{{ if $.Res.TestExamples }} -{{ $e := index $.Res.TestExamples 0 }} -func TestAccDataSource{{ $.Res.ResourceName }}_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - {{- template "EnvVarContext" dict "TestEnvVars" $e.TestEnvVars "HasNewLine" false}} - {{- range $varKey, $varVal := $e.TestVarsOverrides }} - "{{$varKey}}": {{$varVal}}, - {{- end }} - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - {{- if $.Res.VersionedProvider $e.MinVersion }} - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - {{- else }} - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - {{- end }} - {{- if $e.ExternalProviders }} - ExternalProviders: map[string]resource.ExternalProvider{ - {{- range $provider := $e.ExternalProviders }} - "{{$provider}}": {}, - {{- end }} - }, - {{- end }} - {{- if not $.Res.ExcludeDelete }} - CheckDestroy: testAccCheck{{ $.Res.ResourceName }}DestroyProducer(t), -{{- end }} - Steps: []resource.TestStep{ - { - Config: testAcc{{ $e.TestSlug $.Res.ProductMetadata.Name $.Res.Name }}DataSource(context), - Check: resource.ComposeTestCheckFunc( - {{- if gt (len ($.Res.IgnoreReadProperties $e)) 0 }} - acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( - "data.{{ $e.ResourceType $.Res.TerraformName }}.default", - "{{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}", - map[string]struct{}{ - {{- range $prop := $.Res.IgnoreReadProperties $e }} - "{{ $prop }}": {}, - {{- end }} - }, - ), - {{- else }} - acctest.CheckDataSourceStateMatchesResourceState("data.{{ 
$e.ResourceType $.Res.TerraformName }}.default", "{{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}"), - {{- end }} - ), - }, - }, - }) -} - -func testAcc{{ $e.TestSlug $.Res.ProductMetadata.Name $.Res.Name }}DataSource(context map[string]interface{}) string { - return acctest.Nprintf(` -{{ $e.TestHCLText }} - -data "{{ $.Res.TerraformName }}" "default" { -{{- range $fieldName := $.Res.DatasourceRequiredFields }} - {{ $fieldName }} = {{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}.{{ $fieldName }} -{{- end }} -}`, - context, - ) -} -{{ end }} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl b/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl index 4bb4d9d3a9a6..d0060704b8f8 100644 --- a/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl +++ b/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.tmpl @@ -353,7 +353,7 @@ func TestAcc{{ $.ResourceName }}IamPolicyGenerated_withCondition(t *testing.T) { { Config: testAcc{{ $.ResourceName }}IamPolicy_withConditionGenerated(context), Check: resource.ComposeAggregateTestCheckFunc( - // TODO - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged + // TODO(SarahFrench) - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged // resource.TestCheckResourceAttr("data.google_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttr("{{ $.IamTerraformName }}_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttrWith("data.google_iam_policy.foo", "policy_data", tpgresource.CheckGoogleIamPolicy), diff --git a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl index 6fe3273fde60..264aadfd924d 100644 --- 
a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_basic.tf.tmpl @@ -4,8 +4,8 @@ resource "google_beyondcorp_security_gateway" "default" { hubs { region = "us-central1" } } -resource "google_beyondcorp_security_gateway_application" "{{$.PrimaryResourceId}}" { - security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id +resource "google_beyondcorp_application" "{{$.PrimaryResourceId}}" { + security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id application_id = "{{index $.Vars "application_name"}}" endpoint_matchers { hostname = "google.com" diff --git a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl index 155c7ea37088..a54a36989bf5 100644 --- a/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl +++ b/mmv1/templates/terraform/examples/beyondcorp_security_gateway_application_vpc.tf.tmpl @@ -6,8 +6,8 @@ resource "google_beyondcorp_security_gateway" "default" { hubs { region = "us-central1" } } -resource "google_beyondcorp_security_gateway_application" "{{$.PrimaryResourceId}}" { - security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id +resource "google_beyondcorp_application" "{{$.PrimaryResourceId}}" { + security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id application_id = "{{index $.Vars "application_name"}}" endpoint_matchers { hostname = "my-vm-service.com" diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl deleted file mode 100644 index a1d37d294881..000000000000 --- 
a/mmv1/templates/terraform/examples/bigquery_analyticshub_data_exchange_log_linked_dataset_query_user.tf.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { - location = "US" - data_exchange_id = "{{index $.Vars "data_exchange_id"}}" - display_name = "{{index $.Vars "data_exchange_id"}}" - description = "{{index $.Vars "description"}}" - log_linked_dataset_query_user_email = true -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl deleted file mode 100644 index c50103d3cb2f..000000000000 --- a/mmv1/templates/terraform/examples/bigquery_analyticshub_dataexchange_subscription_basic.tf.tmpl +++ /dev/null @@ -1,94 +0,0 @@ -resource "google_bigquery_analytics_hub_data_exchange" "{{$.PrimaryResourceId}}" { - provider = google-beta - location = "us" - data_exchange_id = "{{index $.Vars "data_exchange_id"}}" - display_name = "{{index $.Vars "data_exchange_id"}}" - description = "Test Data Exchange" - sharing_environment_config { - dcr_exchange_config {} - } -} - -resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { - provider = google-beta - dataset_id = "{{index $.Vars "listing_dataset_id"}}" - friendly_name = "{{index $.Vars "listing_dataset_id"}}" - description = "Dataset for Listing" - location = "us" -} - -resource "google_bigquery_table" "{{$.PrimaryResourceId}}" { - provider = google-beta - deletion_protection = false - table_id = "{{index $.Vars "listing_table_id"}}" - dataset_id = google_bigquery_dataset.{{$.PrimaryResourceId}}.dataset_id - schema = <Some example SSML XML - EOF - } - } - messages { - live_agent_handoff { - metadata = <Some example SSML XML - EOF - } - } - messages { - live_agent_handoff { - metadata = < 
MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" - } -} - -resource "google_iam_workforce_pool_provider_key" "{{$.PrimaryResourceId}}" { - workforce_pool_id = google_iam_workforce_pool.pool.workforce_pool_id - location = google_iam_workforce_pool.pool.location - provider_id = google_iam_workforce_pool_provider.provider.provider_id - key_id = "{{index $.Vars "key_id"}}" - - key_data { - key_spec = "RSA_2048" - } - use = "ENCRYPTION" -} diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl new file mode 100644 index 000000000000..a46cc332b260 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full.tf.tmpl @@ -0,0 +1,6 @@ +resource 
"google_iam_workload_identity_pool" "{{$.PrimaryResourceId}}" { + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" + display_name = "Name of pool" + description = "Identity pool for automated test" + disabled = true +} diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl deleted file mode 100644 index bccc8eec0b14..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_federation_only_mode.tf.tmpl +++ /dev/null @@ -1,9 +0,0 @@ -resource "google_iam_workload_identity_pool" "{{$.PrimaryResourceId}}" { - provider = google-beta - - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - display_name = "Name of the pool" - description = "Identity pool operates in FEDERATION_ONLY mode" - disabled = true - mode = "FEDERATION_ONLY" -} diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl deleted file mode 100644 index 8480d68712da..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_full_trust_domain_mode.tf.tmpl +++ /dev/null @@ -1,38 +0,0 @@ -resource "google_iam_workload_identity_pool" "{{$.PrimaryResourceId}}" { - provider = google-beta - - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - display_name = "Name of the pool" - description = "Identity pool operates in TRUST_DOMAIN mode" - disabled = true - mode = "TRUST_DOMAIN" - inline_certificate_issuance_config { - ca_pools = { - "us-central1" : "projects/project-bar/locations/us-central1/caPools/ca-pool-bar" - "asia-east2" : "projects/project-foo/locations/asia-east2/caPools/ca-pool-foo" - } - lifetime = "86400s" - rotation_window_percentage = 50 - key_algorithm = "ECDSA_P256" - } - 
inline_trust_config { - additional_trust_bundles { - trust_domain = "example.com" - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_1.pem") - } - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_2.pem") - } - } - additional_trust_bundles { - trust_domain = "example.net" - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_3.pem") - } - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_4.pem") - } - } - } -} diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl deleted file mode 100644 index 42803e035e1b..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_basic.tf.tmpl +++ /dev/null @@ -1,21 +0,0 @@ -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "ns" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" -} - -resource "google_iam_workload_identity_pool_managed_identity" "{{$.PrimaryResourceId}}" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id - workload_identity_pool_managed_identity_id = "{{index $.Vars "workload_identity_pool_managed_identity_id"}}" -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl 
b/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl deleted file mode 100644 index 4943111b3592..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_managed_identity_full.tf.tmpl +++ /dev/null @@ -1,29 +0,0 @@ -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "ns" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" -} - -resource "google_iam_workload_identity_pool_managed_identity" "{{$.PrimaryResourceId}}" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id - workload_identity_pool_managed_identity_id = "{{index $.Vars "workload_identity_pool_managed_identity_id"}}" - description = "Example Managed Identity in a Workload Identity Pool Namespace" - disabled = true - attestation_rules { - google_cloud_resource = "//compute.googleapis.com/projects/{{index $.TestEnvVars "project"}}/uid/zones/us-central1-a/instances/12345678" - } - attestation_rules { - google_cloud_resource = "//run.googleapis.com/projects/{{index $.TestEnvVars "project"}}/name/locations/us-east1/services/my-service" - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl deleted file mode 100644 index 76f9bdb23d27..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_basic.tf.tmpl +++ 
/dev/null @@ -1,13 +0,0 @@ -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "{{$.PrimaryResourceId}}" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl deleted file mode 100644 index 6a8760508a29..000000000000 --- a/mmv1/templates/terraform/examples/iam_workload_identity_pool_namespace_full.tf.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "{{$.PrimaryResourceId}}" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "{{index $.Vars "workload_identity_pool_namespace_id"}}" - description = "Example Namespace in a Workload Identity Pool" - disabled = true -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/instance_basic.tf.tmpl index 10f12c1b45a4..ecbc1172ad0c 100644 --- a/mmv1/templates/terraform/examples/instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/instance_basic.tf.tmpl @@ -1,6 +1,6 @@ resource "google_compute_instance" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "instance_name"}}" - zone = "us-central1-a" + zone = "{{index $.Vars "zone_name"}}" 
machine_type = "e2-medium" boot_disk { diff --git a/mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl deleted file mode 100644 index 0a1b83d2f043..000000000000 --- a/mmv1/templates/terraform/examples/interconnect_attachment_group_basic.tf.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -resource "google_compute_interconnect_attachment_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "interconnect_attachment_group_name"}}" - intent { - availability_sla = "NO_SLA" - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl deleted file mode 100644 index 3ca679114a46..000000000000 --- a/mmv1/templates/terraform/examples/interconnect_group_basic.tf.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -resource "google_compute_interconnect_group" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "interconnect_group_name"}}" - intent { - topology_capability = "NO_SLA" - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl b/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl index fb2d32eb7aa9..aa3ad3661cc6 100644 --- a/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl +++ b/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl @@ -22,6 +22,7 @@ resource "google_project_service" "kms_api_service" { provider = google-beta service = "cloudkms.googleapis.com" project = google_project.key_project.project_id + disable_on_destroy = false disable_dependent_services = true depends_on = [google_project.key_project] } diff --git a/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl b/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl index 20988805fc79..b67930b88d11 100644 --- a/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl +++ 
b/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl @@ -33,6 +33,7 @@ resource "google_project_service" "kms_api_service" { provider = google-beta service = "cloudkms.googleapis.com" project = google_project.key_project.project_id + disable_on_destroy = false disable_dependent_services = true depends_on = [google_project.key_project] } diff --git a/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl index d9b68469417c..3381a3b7a57b 100644 --- a/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/lustre_instance_basic.tf.tmpl @@ -1,12 +1,11 @@ resource "google_lustre_instance" "{{$.PrimaryResourceId}}" { - instance_id = "{{index $.Vars "name"}}" - location = "us-central1-a" - description = "test lustre instance" - filesystem = "testfs" - capacity_gib = 18000 - network = data.google_compute_network.lustre-network.id - per_unit_storage_throughput = 1000 - labels = { + instance_id = "{{index $.Vars "name"}}" + location = "us-central1-a" + description = "test lustre instance" + filesystem = "testfs" + capacity_gib = 18000 + network = data.google_compute_network.lustre-network.id + labels = { test = "value" } timeouts { diff --git a/mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl b/mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl deleted file mode 100644 index 2d38dcab8ba4..000000000000 --- a/mmv1/templates/terraform/examples/managedkafka_acl_basic.tf.tmpl +++ /dev/null @@ -1,37 +0,0 @@ -resource "google_managed_kafka_cluster" "cluster" { - cluster_id = "{{index $.Vars "cluster_id"}}" - location = "us-central1" - capacity_config { - vcpu_count = 3 - memory_bytes = 3221225472 - } - gcp_config { - access_config { - network_configs { - subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" - } - } - } -} - -resource "google_managed_kafka_acl" 
"{{$.PrimaryResourceId}}" { - acl_id = "{{index $.Vars "acl_id"}}" - cluster = google_managed_kafka_cluster.cluster.cluster_id - location = "us-central1" - acl_entries { - principal = "User:admin@my-project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "ALL" - host = "*" - } - acl_entries { - principal = "User:producer-client@my-project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "WRITE" - host = "*" - } -} - -data "google_project" "project" { -} - diff --git a/mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl b/mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl deleted file mode 100644 index b40d254da964..000000000000 --- a/mmv1/templates/terraform/examples/managedkafka_cluster_mtls.tf.tmpl +++ /dev/null @@ -1,36 +0,0 @@ -resource "google_managed_kafka_cluster" "{{$.PrimaryResourceId}}" { - cluster_id = "{{index $.Vars "cluster_id"}}" - location = "us-central1" - capacity_config { - vcpu_count = 3 - memory_bytes = 3221225472 - } - gcp_config { - access_config { - network_configs { - subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" - } - } - } - tls_config { - trust_config { - cas_configs { - ca_pool = google_privateca_ca_pool.ca_pool.id - } - } - ssl_principal_mapping_rules = "RULE:pattern/replacement/L,DEFAULT" - } -} - -resource "google_privateca_ca_pool" "ca_pool" { - name = "{{index $.Vars "ca_pool_id"}}" - location = "us-central1" - tier = "ENTERPRISE" - publishing_options { - publish_ca_cert = true - publish_crl = true - } -} - -data "google_project" "project" { -} diff --git a/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl index 27a7491a5481..7204c9fc9f0e 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_basic.tf.tmpl @@ -1,7 +1,7 @@ resource 
"google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 1 - desired_auto_created_endpoints { + shard_count = 3 + desired_psc_auto_connections { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl index 8533b2b5bb8c..ae8016449e57 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_desired_user_and_auto_created_endpoints.tf.tmpl @@ -73,7 +73,7 @@ resource "google_compute_network" "network2" { resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" shard_count = 1 - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.network1.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl index 742450575aaf..caa6555c8ab3 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl @@ -1,39 +1,38 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { - instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 1 - desired_auto_created_endpoints { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - location = "us-central1" - replica_count = 1 - node_type = "SHARED_CORE_NANO" - transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" - authorization_mode = "AUTH_DISABLED" - kms_key = "{{index $.Vars 
"kms_key_name"}}" - engine_configs = { - maxmemory-policy = "volatile-ttl" + instance_id = "{{index $.Vars "instance_name"}}" + shard_count = 3 + desired_psc_auto_connections { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "us-central1" + replica_count = 2 + node_type = "SHARED_CORE_NANO" + transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" + authorization_mode = "AUTH_DISABLED" + engine_configs = { + maxmemory-policy = "volatile-ttl" } zone_distribution_config { - mode = "SINGLE_ZONE" - zone = "us-central1-b" + mode = "SINGLE_ZONE" + zone = "us-central1-b" } maintenance_policy { weekly_maintenance_window { - day = "MONDAY" + day = "MONDAY" start_time { - hours = 1 - minutes = 0 - seconds = 0 - nanos = 0 + hours = 1 + minutes = 0 + seconds = 0 + nanos = 0 } } } engine_version = "VALKEY_7_2" deletion_protection_enabled = false - mode = "CLUSTER" + mode = "CLUSTER" persistence_config { - mode = "RDB" + mode = "RDB" rdb_config { rdb_snapshot_period = "ONE_HOUR" rdb_snapshot_start_time = "2024-10-02T15:01:23Z" diff --git a/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl index c3bb54bc7bc4..d4ec704a76dd 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_persistence_aof.tf.tmpl @@ -1,7 +1,7 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { instance_id = "{{index $.Vars "instance_name"}}" - shard_count = 1 - desired_auto_created_endpoints { + shard_count = 3 + desired_psc_auto_connections { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl 
b/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl index a40e5c126383..c6b950035efa 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_secondary_instance.tf.tmpl @@ -2,7 +2,7 @@ resource "google_memorystore_instance" "primary_instance" { instance_id = "{{index $.Vars "primary_instance_name"}}" shard_count = 1 - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.primary_producer_net.id project_id = data.google_project.project.project_id } @@ -63,7 +63,7 @@ resource "google_compute_network" "primary_producer_net" { resource "google_memorystore_instance" "secondary_instance" { instance_id = "{{index $.Vars "secondary_instance_name"}}" shard_count = 1 - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.secondary_producer_net.id project_id = data.google_project.project.project_id } diff --git a/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl index 4fa5b0fa76f4..8c5b5853e982 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_standalone_full.tf.tmpl @@ -2,12 +2,12 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { provider = google-beta instance_id = "{{index $.Vars "instance_name"}}" shard_count = 1 - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } location = "us-central1" - replica_count = 1 + replica_count = 2 node_type = "SHARED_CORE_NANO" transit_encryption_mode = "TRANSIT_ENCRYPTION_DISABLED" authorization_mode = "AUTH_DISABLED" diff --git 
a/mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl deleted file mode 100644 index fd46a2f384aa..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_floorsetting_ai_platform_metadata.tf.tmpl +++ /dev/null @@ -1,21 +0,0 @@ -resource "google_model_armor_floorsetting" "floorsetting-integrated-metadata" { - location = "global" - parent = "projects/{{index $.TestEnvVars "project_id"}}" - - filter_config { - - } - - enable_floor_setting_enforcement = false - - ai_platform_floor_setting { - inspect_only = true - enable_cloud_logging = true - } - - floor_setting_metadata { - multi_language_detection { - enable_multi_language_detection = false - } - } -} diff --git a/mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl deleted file mode 100644 index cfdef5d108d2..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_floorsetting_basic.tf.tmpl +++ /dev/null @@ -1,8 +0,0 @@ -resource "google_model_armor_floorsetting" "floorsetting-basic" { - parent = "projects/{{index $.TestEnvVars "project_id"}}" - location = "global" - - filter_config { - - } -} diff --git a/mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl deleted file mode 100644 index 3e352a661994..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_floorsetting_filter_config.tf.tmpl +++ /dev/null @@ -1,27 +0,0 @@ -resource "google_model_armor_floorsetting" "floorsetting-filter-config" { - location = "global" - parent = "project/{{index $.TestEnvVars "project_id"}}" - - filter_config { - rai_settings { - rai_filters { - filter_type = "DANGEROUS" - confidence_level = "MEDIUM_AND_ABOVE" - } - } - sdp_settings { - basic_config { - filter_enforcement = "ENABLED" - } 
- } - pi_and_jailbreak_filter_settings { - filter_enforcement = "ENABLED" - confidence_level = "HIGH" - } - malicious_uri_filter_settings { - filter_enforcement = "ENABLED" - } - } - - enable_floor_setting_enforcement = true -} diff --git a/mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl deleted file mode 100644 index 24e22a5df56d..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_template_basic.tf.tmpl +++ /dev/null @@ -1,12 +0,0 @@ -resource "google_model_armor_template" "template-basic" { - location = "{{.Vars.location}}" - template_id = "{{.Vars.templateId}}" - - filter_config { - - } - - template_metadata { - - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl deleted file mode 100644 index 5e3940918a6f..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_template_filter_config.tf.tmpl +++ /dev/null @@ -1,30 +0,0 @@ -resource "google_model_armor_template" "template-filter-config" { - location = "{{.Vars.location}}" - template_id = "{{.Vars.templateId}}" - - filter_config { - rai_settings { - rai_filters { - filter_type = "{{.Vars.filter_config_rai_settings_rai_filters_0_filter_type}}" - confidence_level = "{{.Vars.filter_config_rai_settings_rai_filters_0_confidence_level}}" - } - } - sdp_settings { - basic_config { - filter_enforcement = "{{.Vars.filter_config_sdp_settings_basic_config_filter_enforcement}}" - } - } - pi_and_jailbreak_filter_settings { - filter_enforcement = "{{.Vars.filter_config_pi_and_jailbreak_filter_settings_filter_enforcement}}" - confidence_level = "{{.Vars.filter_config_pi_and_jailbreak_filter_settings_confidence_level}}" - } - malicious_uri_filter_settings { - filter_enforcement = "{{.Vars.filter_config_malicious_uri_filter_settings_filter_enforcement}}" - } - } - 
template_metadata { - multi_language_detection { - enable_multi_language_detection = {{.Vars.template_metadata_multi_language_detection_enable_multi_language_detection}} - } - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl deleted file mode 100644 index a826014251bb..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_template_label.tf.tmpl +++ /dev/null @@ -1,28 +0,0 @@ -resource "google_model_armor_template" "template-label-advanced-config" { - location = "{{.Vars.location}}" - template_id = "{{.Vars.templateId}}" - - labels = { - "test-label" = "{{.Vars.label_test_label}}" - } - - filter_config { - rai_settings { - rai_filters { - filter_type = "{{.Vars.filter_config_rai_settings_rai_filters_0_filter_type}}" - confidence_level = "{{.Vars.filter_config_rai_settings_rai_filters_0_confidence_level}}" - } - } - sdp_settings { - advanced_config { - inspect_template = "{{.Vars.filter_config_sdp_settings_advanced_config_inspect_template}}" - deidentify_template = "{{.Vars.filter_config_sdp_settings_advanced_config_deidentify_template}}" - } - } - } - template_metadata { - multi_language_detection { - enable_multi_language_detection = {{.Vars.template_metadata_multi_language_detection_enable_multi_language_detection}} - } - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl b/mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl deleted file mode 100644 index 4942290448d6..000000000000 --- a/mmv1/templates/terraform/examples/modelarmor_template_template_metadata.tf.tmpl +++ /dev/null @@ -1,26 +0,0 @@ -resource "google_model_armor_template" "template-template-metadata" { - location = "{{.Vars.location}}" - template_id = "{{.Vars.templateId}}" - - filter_config { - rai_settings { - rai_filters { - filter_type = 
"{{.Vars.filter_config_rai_settings_rai_filters_0_filter_type}}" - confidence_level = "{{.Vars.filter_config_rai_settings_rai_filters_0_confidence_level}}" - } - } - } - template_metadata { - custom_llm_response_safety_error_message = "{{.Vars.template_metadata_custom_llm_response_safety_error_message}}" - log_sanitize_operations = {{.Vars.template_metadata_log_sanitize_operations}} - log_template_operations = {{.Vars.template_metadata_log_template_operations}} - multi_language_detection { - enable_multi_language_detection = {{.Vars.template_metadata_multi_language_detection_enable_multi_language_detection}} - } - ignore_partial_invocation_failures = {{.Vars.template_metadata_ignore_partial_invocation_failures}} - custom_prompt_safety_error_code = {{.Vars.template_metadata_custom_prompt_safety_error_code}} - custom_prompt_safety_error_message = "{{.Vars.template_metadata_custom_prompt_safety_error_message}}" - custom_llm_response_safety_error_code = {{.Vars.template_metadata_custom_llm_response_safety_error_code}} - enforcement_type = "{{.Vars.template_metadata_enforcement_type}}" - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl deleted file mode 100644 index d5229d6fc50a..000000000000 --- a/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms.tf.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -resource "google_network_connectivity_internal_range" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "internal_range_name"}}" - network = google_compute_network.default.id - usage = "FOR_VPC" - peering = "FOR_SELF" - prefix_length = 24 - target_cidr_range = [ - "192.16.0.0/16" - ] - allocation_options { - allocation_strategy = "FIRST_SMALLEST_FITTING" - } -} - -resource "google_compute_network" "default" { - name = "{{index $.Vars 
"network_name"}}" - auto_create_subnetworks = false -} diff --git a/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl deleted file mode 100644 index 65312d4ad6f6..000000000000 --- a/mmv1/templates/terraform/examples/network_connectivity_internal_ranges_allocation_algoritms_random_first_n.tf.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -resource "google_network_connectivity_internal_range" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "internal_range_name"}}" - network = google_compute_network.default.id - usage = "FOR_VPC" - peering = "FOR_SELF" - prefix_length = 24 - target_cidr_range = [ - "192.16.0.0/16" - ] - allocation_options { - allocation_strategy = "RANDOM_FIRST_N_AVAILABLE" - first_available_ranges_lookup_size = 20 - } -} - -resource "google_compute_network" "default" { - name = "{{index $.Vars "network_name"}}" - auto_create_subnetworks = false -} diff --git a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl index d9e9d78f916f..0e9d1a71b7e9 100644 --- a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_global_access.tf.tmpl @@ -13,7 +13,7 @@ resource "google_compute_subnetwork" "my_subnetwork" { resource "google_network_connectivity_regional_endpoint" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "rep_name"}}" location = "us-central1" - target_google_api = "storage.us-central1.rep.googleapis.com" + target_google_api = "storage.us-central1.p.rep.googleapis.com" access_type = "GLOBAL" address = "192.168.0.4" network = google_compute_network.my_network.id diff --git 
a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl index d9c6b4200a38..a39621ba2ccb 100644 --- a/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_connectivity_regional_endpoint_regional_access.tf.tmpl @@ -13,11 +13,11 @@ resource "google_compute_subnetwork" "my_subnetwork" { resource "google_network_connectivity_regional_endpoint" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "rep_name"}}" location = "us-central1" - target_google_api = "storage.us-central1.rep.googleapis.com" + target_google_api = "storage.us-central1.p.rep.googleapis.com" access_type = "REGIONAL" address = "192.168.0.5" network = google_compute_network.my_network.id subnetwork = google_compute_subnetwork.my_subnetwork.id - description = "My RegionalEndpoint targeting Google API storage.us-central1.rep.googleapis.com" + description = "My RegionalEndpoint targeting Google API storage.us-central1.p.rep.googleapis.com" labels = {env = "default"} } diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl new file mode 100644 index 000000000000..bbe7a268db3c --- /dev/null +++ b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_interconnect_full.tf.tmpl @@ -0,0 +1,33 @@ +data "google_project" "project" { +} + +resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { + vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" + location = "global" + interconnect_attachment = "projects/${data.google_project.project.number}/regions/us-east4/interconnectAttachments/${google_compute_interconnect_attachment.attachment.name}" + state = "ENABLED" + 
aggregation_interval = "INTERVAL_5_SEC" + description = "VPC Flow Logs over a VPN Gateway." + flow_sampling = 0.5 + metadata = "INCLUDE_ALL_METADATA" +} + +resource "google_compute_network" "network" { + name = "{{index $.Vars "network_name"}}" +} + +resource "google_compute_router" "router" { + name = "{{index $.Vars "router_name"}}" + network = google_compute_network.network.name + bgp { + asn = 16550 + } +} + +resource "google_compute_interconnect_attachment" "attachment" { + name = "{{index $.Vars "interconnect_attachment_name"}}" + edge_availability_domain = "AVAILABILITY_DOMAIN_1" + type = "PARTNER" + router = google_compute_router.router.id + mtu = 1500 +} diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl deleted file mode 100644 index 7fa3502839b5..000000000000 --- a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_network_basic.tf.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -data "google_project" "project" { - provider = google-beta -} - -resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { - provider = google-beta - vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" - location = "global" - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.network.name}" -} - -resource "google_compute_network" "network" { - provider = google-beta - name = "{{index $.Vars "network_name"}}" -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl deleted file mode 100644 index 1e59a36cbfe7..000000000000 --- a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_subnet_basic.tf.tmpl +++ /dev/null @@ -1,24 
+0,0 @@ -data "google_project" "project" { - provider = google-beta -} - -resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { - provider = google-beta - vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" - location = "global" - subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/${google_compute_subnetwork.subnetwork.name}" -} - -resource "google_compute_network" "network" { - provider = google-beta - name = "{{index $.Vars "network_name"}}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "subnetwork" { - provider = google-beta - name = "{{index $.Vars "subnetwork_name"}}" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.network.id -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl new file mode 100644 index 000000000000..d3a8d777d2af --- /dev/null +++ b/mmv1/templates/terraform/examples/network_management_vpc_flow_logs_config_vpn_full.tf.tmpl @@ -0,0 +1,70 @@ +data "google_project" "project" { +} + +resource "google_network_management_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { + vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" + location = "global" + vpn_tunnel = "projects/${data.google_project.project.number}/regions/us-central1/vpnTunnels/${google_compute_vpn_tunnel.tunnel.name}" + state = "ENABLED" + aggregation_interval = "INTERVAL_5_SEC" + description = "VPC Flow Logs over a VPN Gateway." 
+ flow_sampling = 0.5 + metadata = "INCLUDE_ALL_METADATA" +} + +resource "google_compute_vpn_tunnel" "tunnel" { + name = "{{index $.Vars "vpn_tunnel_name"}}" + peer_ip = "15.0.0.120" + shared_secret = "a secret message" + target_vpn_gateway = google_compute_vpn_gateway.target_gateway.id + + depends_on = [ + google_compute_forwarding_rule.fr_esp, + google_compute_forwarding_rule.fr_udp500, + google_compute_forwarding_rule.fr_udp4500, + ] +} + +resource "google_compute_vpn_gateway" "target_gateway" { + name = "{{index $.Vars "target_vpn_gateway_name"}}" + network = google_compute_network.network.id +} + +resource "google_compute_network" "network" { + name = "{{index $.Vars "network_name"}}" +} + +resource "google_compute_address" "vpn_static_ip" { + name = "{{index $.Vars "address_name"}}" +} + +resource "google_compute_forwarding_rule" "fr_esp" { + name = "{{index $.Vars "esp_forwarding_rule_name"}}" + ip_protocol = "ESP" + ip_address = google_compute_address.vpn_static_ip.address + target = google_compute_vpn_gateway.target_gateway.id +} + +resource "google_compute_forwarding_rule" "fr_udp500" { + name = "{{index $.Vars "udp500_forwarding_rule_name"}}" + ip_protocol = "UDP" + port_range = "500" + ip_address = google_compute_address.vpn_static_ip.address + target = google_compute_vpn_gateway.target_gateway.id +} + +resource "google_compute_forwarding_rule" "fr_udp4500" { + name = "{{index $.Vars "udp4500_forwarding_rule_name"}}" + ip_protocol = "UDP" + port_range = "4500" + ip_address = google_compute_address.vpn_static_ip.address + target = google_compute_vpn_gateway.target_gateway.id +} + +resource "google_compute_route" "route" { + name = "{{index $.Vars "route_name"}}" + network = google_compute_network.network.name + dest_range = "15.0.0.0/24" + priority = 1000 + next_hop_vpn_tunnel = google_compute_vpn_tunnel.tunnel.id +} diff --git a/mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl 
b/mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl new file mode 100644 index 000000000000..787b79ac44f6 --- /dev/null +++ b/mmv1/templates/terraform/examples/network_peering_routes_config_gke.tf.tmpl @@ -0,0 +1,55 @@ +resource "google_compute_network_peering_routes_config" "{{$.PrimaryResourceId}}" { + peering = google_container_cluster.private_cluster.private_cluster_config[0].peering_name + network = google_compute_network.container_network.name + + import_custom_routes = true + export_custom_routes = true + import_subnet_routes_with_public_ip = true + export_subnet_routes_with_public_ip = true +} + +resource "google_compute_network" "container_network" { + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "{{index $.Vars "subnetwork_name"}}" + region = "us-central1" + network = google_compute_network.container_network.name + ip_cidr_range = "10.0.36.0/24" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "private_cluster" { + name = "{{index $.Vars "gke_cluster_name"}}" + location = "us-central1-a" + initial_node_count = 1 + + network = google_compute_network.container_network.name + subnetwork = google_compute_subnetwork.container_subnetwork.name + + private_cluster_config { + enable_private_endpoint = true + enable_private_nodes = true + master_ipv4_cidr_block = "10.42.0.0/28" + } + + master_authorized_networks_config {} + + ip_allocation_policy { + cluster_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[0].range_name + services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name + } + deletion_protection = {{index $.Vars "deletion_protection"}} +} diff 
--git a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl index 81cc4daf64ad..fa24aefb84fc 100644 --- a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_basic.tf.tmpl @@ -1,4 +1,5 @@ resource "google_network_security_backend_authentication_config" "{{$.PrimaryResourceId}}" { + provider = google-beta name = "{{index $.Vars "resource_name"}}" labels = { foo = "bar" diff --git a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl index 7e9cbb321dd4..e39905e9345d 100644 --- a/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_backend_authentication_config_full.tf.tmpl @@ -1,4 +1,5 @@ resource "google_certificate_manager_certificate" "certificate" { + provider = google-beta name = "{{index $.Vars "certificate_name"}}" labels = { foo = "bar" @@ -12,6 +13,7 @@ resource "google_certificate_manager_certificate" "certificate" { } resource "google_certificate_manager_trust_config" "trust_config" { + provider = google-beta name = "{{index $.Vars "trust_config_name"}}" description = "sample description for the trust config" location = "global" @@ -31,6 +33,7 @@ resource "google_certificate_manager_trust_config" "trust_config" { } resource "google_network_security_backend_authentication_config" "default" { + provider = google-beta name = "{{index $.Vars "resource_name"}}" labels = { bar = "foo" diff --git a/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl 
b/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl index dfc952ded06a..3affebab005c 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_deployment_basic.tf.tmpl @@ -1,9 +1,11 @@ resource "google_compute_network" "network" { + provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "subnetwork" { + provider = google-beta name = "{{index $.Vars "subnetwork_name"}}" region = "us-central1" ip_cidr_range = "10.1.0.0/16" @@ -11,14 +13,16 @@ resource "google_compute_subnetwork" "subnetwork" { } resource "google_compute_region_health_check" "health_check" { - name = "{{index $.Vars "health_check_name"}}" - region = "us-central1" + provider = google-beta + name = "{{index $.Vars "health_check_name"}}" + region = "us-central1" http_health_check { port = 80 } } resource "google_compute_region_backend_service" "backend_service" { + provider = google-beta name = "{{index $.Vars "backend_service_name"}}" region = "us-central1" health_checks = [google_compute_region_health_check.health_check.id] @@ -27,23 +31,26 @@ resource "google_compute_region_backend_service" "backend_service" { } resource "google_compute_forwarding_rule" "forwarding_rule" { - name = "{{index $.Vars "forwarding_rule_name"}}" - region = "us-central1" - network = google_compute_network.network.name - subnetwork = google_compute_subnetwork.subnetwork.name - backend_service = google_compute_region_backend_service.backend_service.id - load_balancing_scheme = "INTERNAL" - ports = [6081] - ip_protocol = "UDP" + provider = google-beta + name = "{{index $.Vars "forwarding_rule_name"}}" + region = "us-central1" + network = google_compute_network.network.name + subnetwork = google_compute_subnetwork.subnetwork.name + backend_service = google_compute_region_backend_service.backend_service.id + 
load_balancing_scheme = "INTERNAL" + ports = [6081] + ip_protocol = "UDP" } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_deployment" "{{$.PrimaryResourceId}}" { + provider = google-beta intercept_deployment_id = "{{index $.Vars "deployment_id"}}" location = "us-central1-a" forwarding_rule = google_compute_forwarding_rule.forwarding_rule.id diff --git a/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl index cb8946439004..e09f9d5d3ccd 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_deployment_group_basic.tf.tmpl @@ -1,9 +1,11 @@ resource "google_compute_network" "network" { + provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "{{$.PrimaryResourceId}}" { + provider = google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.network.id diff --git a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl index 69971ffdebe2..7f17b1c8ba47 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_association_basic.tf.tmpl @@ -1,26 +1,31 @@ resource "google_compute_network" "producer_network" { + provider = 
google-beta name = "{{index $.Vars "producer_network_name"}}" auto_create_subnetworks = false } resource "google_compute_network" "consumer_network" { + provider = google-beta name = "{{index $.Vars "consumer_network_name"}}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.producer_network.id } resource "google_network_security_intercept_endpoint_group" "endpoint_group" { - intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + provider = google-beta + intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id } resource "google_network_security_intercept_endpoint_group_association" "{{$.PrimaryResourceId}}" { + provider = google-beta intercept_endpoint_group_association_id = "{{index $.Vars "endpoint_group_association_id"}}" location = "global" network = google_compute_network.consumer_network.id diff --git a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl index 7027ab023e9f..4facb880e499 100644 --- a/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/network_security_intercept_endpoint_group_basic.tf.tmpl @@ -1,19 +1,22 @@ resource "google_compute_network" "network" { + provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = 
google-beta intercept_deployment_group_id = "{{index $.Vars "deployment_group_id"}}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_endpoint_group" "{{$.PrimaryResourceId}}" { - intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id - description = "some description" + provider = google-beta + intercept_endpoint_group_id = "{{index $.Vars "endpoint_group_id"}}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + description = "some description" labels = { foo = "bar" } diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl index c9d9073769ff..c3a4e1e94acf 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic.tf.tmpl @@ -3,7 +3,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } } diff --git a/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl index 9670dd3d90bd..00be07fddb1d 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic_gpu.tf.tmpl @@ -9,7 +9,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { core_count = 1 } vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-gpu" } } diff 
--git a/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl index 9ad592402e6a..45b14362e670 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_basic_stopped.tf.tmpl @@ -3,8 +3,8 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } desired_state = "STOPPED" } diff --git a/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl index 55817aff86bf..930acc42a524 100644 --- a/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/notebook_instance_full.tf.tmpl @@ -4,8 +4,8 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } instance_owners = [ "{{index $.TestEnvVars "service_account"}}"] @@ -13,7 +13,7 @@ resource "google_notebooks_instance" "{{$.PrimaryResourceId}}" { install_gpu_driver = true boot_disk_type = "PD_SSD" - boot_disk_size_gb = 150 + boot_disk_size_gb = 110 no_public_ip = true no_proxy_access = true diff --git a/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl deleted file mode 100644 index 0dbb6742838a..000000000000 --- a/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_odbnetwork.tf.tmpl +++ /dev/null @@ -1,17 +0,0 @@ -resource 
"google_oracle_database_autonomous_database" "{{$.PrimaryResourceId}}"{ - autonomous_database_id = "{{index $.Vars "autonomous_database_id"}}" - location = "europe-west2" - project = "{{index $.Vars "project"}}" - database = "{{index $.Vars "database_name"}}" - admin_password = "123Abpassword" - odb_network = "{{index $.Vars "odb_network"}}" - odb_subnet = "{{index $.Vars "odb_subnet"}}" - properties { - compute_count = "2" - data_storage_size_tb="1" - db_version = "19c" - db_workload = "OLTP" - license_type = "LICENSE_INCLUDED" - } - deletion_protection = "{{index $.Vars "deletion_protection"}}" -} diff --git a/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl deleted file mode 100644 index a40d91c11b19..000000000000 --- a/mmv1/templates/terraform/examples/oracledatabase_autonomous_database_publicip.tf.tmpl +++ /dev/null @@ -1,16 +0,0 @@ -resource "google_oracle_database_autonomous_database" "{{$.PrimaryResourceId}}"{ - autonomous_database_id = "{{index $.Vars "autonomous_database_id"}}" - location = "europe-west2" - project = "{{index $.Vars "project"}}" - database = "{{index $.Vars "database_name"}}" - admin_password = "123Abpassword" - properties { - compute_count = "2" - data_storage_size_tb="1" - db_version = "19c" - db_workload = "OLTP" - license_type = "LICENSE_INCLUDED" - mtls_connection_required = "true" - } - deletion_protection = "{{index $.Vars "deletion_protection"}}" -} diff --git a/mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl deleted file mode 100644 index 15685ba5c365..000000000000 --- a/mmv1/templates/terraform/examples/oracledatabase_cloud_vmcluster_odbnetwork.tf.tmpl +++ /dev/null @@ -1,34 +0,0 @@ -resource "google_oracle_database_cloud_vm_cluster" "{{$.PrimaryResourceId}}"{ - cloud_vm_cluster_id = 
"{{index $.Vars "cloud_vm_cluster_id"}}" - display_name = "{{index $.Vars "cloud_vm_cluster_id"}} displayname" - location = "europe-west2" - project = "{{index $.Vars "project"}}" - exadata_infrastructure = google_oracle_database_cloud_exadata_infrastructure.cloudExadataInfrastructures.id - odb_network = "{{index $.Vars "odb_network"}}" - odb_subnet = "{{index $.Vars "odb_subnet"}}" - backup_odb_subnet = "{{index $.Vars "backup_odb_subnet"}}" - properties { - license_type = "LICENSE_INCLUDED" - ssh_public_keys = ["ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCz1X2744t+6vRLmE5u6nHi6/QWh8bQDgHmd+OIxRQIGA/IWUtCs2FnaCNZcqvZkaeyjk5v0lTA/n+9jvO42Ipib53athrfVG8gRt8fzPL66C6ZqHq+6zZophhrCdfJh/0G4x9xJh5gdMprlaCR1P8yAaVvhBQSKGc4SiIkyMNBcHJ5YTtMQMTfxaB4G1sHZ6SDAY9a6Cq/zNjDwfPapWLsiP4mRhE5SSjJX6l6EYbkm0JeLQg+AbJiNEPvrvDp1wtTxzlPJtIivthmLMThFxK7+DkrYFuLvN5AHUdo9KTDLvHtDCvV70r8v0gafsrKkM/OE9Jtzoo0e1N/5K/ZdyFRbAkFT4QSF3nwpbmBWLf2Evg//YyEuxnz4CwPqFST2mucnrCCGCVWp1vnHZ0y30nM35njLOmWdRDFy5l27pKUTwLp02y3UYiiZyP7d3/u5pKiN4vC27VuvzprSdJxWoAvluOiDeRh+/oeQDowxoT/Oop8DzB9uJmjktXw8jyMW2+Rpg+ENQqeNgF1OGlEzypaWiRskEFlkpLb4v/s3ZDYkL1oW0Nv/J8LTjTOTEaYt2Udjoe9x2xWiGnQixhdChWuG+MaoWffzUgx1tsVj/DBXijR5DjkPkrA1GA98zd3q8GKEaAdcDenJjHhNYSd4+rE9pIsnYn7fo5X/tFfcQH1XQ== nobody@google.com"] - cpu_core_count = "4" - gi_version = "19.0.0.0" - hostname_prefix = "hostname1" - } - - deletion_protection = "{{index $.Vars "deletion_protection"}}" -} - -resource "google_oracle_database_cloud_exadata_infrastructure" "cloudExadataInfrastructures"{ - cloud_exadata_infrastructure_id = "{{index $.Vars "cloud_exadata_infrastructure_id"}}" - display_name = "{{index $.Vars "cloud_exadata_infrastructure_id"}} displayname" - location = "europe-west2" - project = "{{index $.Vars "project"}}" - properties { - shape = "Exadata.X9M" - compute_count= "2" - storage_count= "3" - } - - deletion_protection = "{{index $.Vars "deletion_protection"}}" -} - diff --git a/mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl 
b/mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl deleted file mode 100644 index 86d8150f5ba7..000000000000 --- a/mmv1/templates/terraform/examples/oracledatabase_odbnetwork.tf.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -resource "google_oracle_database_odb_network" "{{$.PrimaryResourceId}}"{ - odb_network_id = "{{index $.Vars "odb_network_id"}}" - location = "us-west3" - project = "{{index $.Vars "project"}}" - network = data.google_compute_network.default.id - labels = { - terraform_created = "true" - } - deletion_protection = "{{index $.Vars "deletion_protection"}}" -} - -data "google_compute_network" "default" { - name = "new" - project = "{{index $.Vars "project"}}" -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl deleted file mode 100644 index 4a7fe1751189..000000000000 --- a/mmv1/templates/terraform/examples/oracledatabase_odbsubnet.tf.tmpl +++ /dev/null @@ -1,12 +0,0 @@ -resource "google_oracle_database_odb_subnet" "{{$.PrimaryResourceId}}"{ - odb_subnet_id = "{{index $.Vars "odb_subnet_id"}}" - location = "europe-west2" - project = "{{index $.Vars "project"}}" - odbnetwork = "{{index $.Vars "odb_network_id"}}" - cidr_range = "10.1.1.0/24" - purpose = "CLIENT_SUBNET" - labels = { - terraform_created = "true" - } - deletion_protection = "{{index $.Vars "deletion_protection"}}" -} diff --git a/mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl b/mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl deleted file mode 100644 index b10091516eca..000000000000 --- a/mmv1/templates/terraform/examples/parameter_version_with_json_format_with_file.tf.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -resource "google_parameter_manager_parameter" "parameter-basic" { - parameter_id = "{{index $.Vars "parameter_id"}}" - format = "JSON" -} - -resource 
"google_parameter_manager_parameter_version" "{{$.PrimaryResourceId}}" { - parameter = google_parameter_manager_parameter.parameter-basic.id - parameter_version_id = "{{index $.Vars "parameter_version_id"}}" - parameter_data = file("{{index $.Vars "data"}}") -} diff --git a/mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl b/mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl deleted file mode 100644 index a1d6af0bbd0c..000000000000 --- a/mmv1/templates/terraform/examples/parameter_version_with_yaml_format_with_file.tf.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -resource "google_parameter_manager_parameter" "parameter-basic" { - parameter_id = "{{index $.Vars "parameter_id"}}" - format = "YAML" -} - -resource "google_parameter_manager_parameter_version" "{{$.PrimaryResourceId}}" { - parameter = google_parameter_manager_parameter.parameter-basic.id - parameter_version_id = "{{index $.Vars "parameter_version_id"}}" - parameter_data = file("{{index $.Vars "data"}}") -} diff --git a/mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl b/mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl deleted file mode 100644 index 3ff908fcddeb..000000000000 --- a/mmv1/templates/terraform/examples/preview_feature_basic.tf.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -resource "google_compute_preview_feature" "{{ .PrimaryResourceId }}" { - provider = google-beta - name = "alpha-api-access" - activation_status = "DISABLED" - rollout_operation { - rollout_input { - predefined_rollout_plan = "ROLLOUT_PLAN_FAST_ROLLOUT" - } - } -} diff --git a/mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl b/mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl deleted file mode 100644 index 593e303c4913..000000000000 --- a/mmv1/templates/terraform/examples/privateca_certificate_authority_basic_no_org.tf.tmpl +++ /dev/null @@ -1,35 +0,0 @@ -resource 
"google_privateca_certificate_authority" "{{$.PrimaryResourceId}}" { - // This example assumes this pool already exists. - // Pools cannot be deleted in normal test circumstances, so we depend on static pools - pool = "{{index $.Vars "pool_name"}}" - certificate_authority_id = "{{index $.Vars "certificate_authority_id"}}" - location = "{{index $.Vars "pool_location"}}" - deletion_protection = {{index $.Vars "deletion_protection"}} - config { - subject_config { - subject { - common_name = "my-certificate-authority" - } - } - x509_config { - ca_options { - # is_ca *MUST* be true for certificate authorities - is_ca = true - } - key_usage { - base_key_usage { - # cert_sign and crl_sign *MUST* be true for certificate authorities - cert_sign = true - crl_sign = true - } - extended_key_usage { - } - } - } - } - # valid for 10 years - lifetime = "${10 * 365 * 24 * 3600}s" - key_spec { - algorithm = "RSA_PKCS1_4096_SHA256" - } -} diff --git a/mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl b/mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl deleted file mode 100644 index 09bd036806b5..000000000000 --- a/mmv1/templates/terraform/examples/privateca_template_zero_max_issuer_path_length_null_ca.tf.tmpl +++ /dev/null @@ -1,85 +0,0 @@ -resource "google_privateca_certificate_template" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "name"}}" - location = "us-central1" - description = "A sample certificate template" - - identity_constraints { - allow_subject_alt_names_passthrough = true - allow_subject_passthrough = true - - cel_expression { - description = "Always true" - expression = "true" - location = "any.file.anywhere" - title = "Sample expression" - } - } - - maximum_lifetime = "86400s" - - passthrough_extensions { - additional_extensions { - object_id_path = [1, 6] - } - known_extensions = ["EXTENDED_KEY_USAGE"] - } - - predefined_values { - additional_extensions { - 
object_id { - object_id_path = [1, 6] - } - value = "c3RyaW5nCg==" - critical = true - } - aia_ocsp_servers = ["string"] - ca_options { - is_ca = false - null_ca = true - zero_max_issuer_path_length = true - max_issuer_path_length = 0 - } - key_usage { - base_key_usage { - cert_sign = false - content_commitment = true - crl_sign = false - data_encipherment = true - decipher_only = true - digital_signature = true - encipher_only = true - key_agreement = true - key_encipherment = true - } - extended_key_usage { - client_auth = true - code_signing = true - email_protection = true - ocsp_signing = true - server_auth = true - time_stamping = true - } - unknown_extended_key_usages { - object_id_path = [1, 6] - } - } - policy_ids { - object_id_path = [1, 6] - } - name_constraints { - critical = true - permitted_dns_names = ["*.example1.com", "*.example2.com"] - excluded_dns_names = ["*.deny.example1.com", "*.deny.example2.com"] - permitted_ip_ranges = ["10.0.0.0/8", "11.0.0.0/8"] - excluded_ip_ranges = ["10.1.1.0/24", "11.1.1.0/24"] - permitted_email_addresses = [".example1.com", ".example2.com"] - excluded_email_addresses = [".deny.example1.com", ".deny.example2.com"] - permitted_uris = [".example1.com", ".example2.com"] - excluded_uris = [".deny.example1.com", ".deny.example2.com"] - } - } - - labels = { - label-one = "value-one" - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl b/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl deleted file mode 100644 index 3528bd015fc7..000000000000 --- a/mmv1/templates/terraform/examples/pubsub_subscription_multiple_smts.tf.tmpl +++ /dev/null @@ -1,41 +0,0 @@ -resource "google_pubsub_topic" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "topic_name"}}" -} - -resource "google_pubsub_subscription" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "subscription_name"}}" - topic = google_pubsub_topic.{{$.PrimaryResourceId}}.id - 
- message_transforms { - javascript_udf { - function_name = "redactSSN" - code = <) is run post-create, by calling the `updateDdl` endpoint -defaultTimeZoneObj, defaultTimeZoneOk := d.GetOk("default_time_zone") -defaultTimeZone := defaultTimeZoneObj.(string) retention, retentionPeriodOk := d.GetOk("version_retention_period") retentionPeriod := retention.(string) ddl, ddlOk := d.GetOk("ddl") ddlStatements := ddl.([]interface{}) -if defaultTimeZoneOk || retentionPeriodOk || ddlOk { +if retentionPeriodOk || ddlOk { obj := make(map[string]interface{}) updateDdls := []string{} - // We need to put setting default time zone as first because it requires an empty - // database where tables do not exist. - if defaultTimeZoneOk { - dbName := d.Get("name") - timeZoneDdl := fmt.Sprintf("ALTER DATABASE `%s` SET OPTIONS (default_time_zone=\"%s\")", dbName, defaultTimeZone) - if dialect, ok := d.GetOk("database_dialect"); ok && dialect == "POSTGRESQL" { - timeZoneDdl = fmt.Sprintf("ALTER DATABASE \"%s\" SET spanner.default_time_zone TO \"%s\"", dbName, defaultTimeZone) - } - updateDdls = append(updateDdls, timeZoneDdl) - } - if ddlOk { for i := 0; i < len(ddlStatements); i++ { if ddlStatements[i] != nil { diff --git a/mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl b/mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl deleted file mode 100644 index 2570b9e6df72..000000000000 --- a/mmv1/templates/terraform/post_create/storage_insights_dataset_config.go.tmpl +++ /dev/null @@ -1,33 +0,0 @@ -if d.Get("link_dataset") == true { - - linkUrl := strings.Replace(url, "?datasetConfigId=", "/", 1) + ":linkDataset" - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: linkUrl, - 
UserAgent: userAgent, - Timeout: d.Timeout(schema.TimeoutUpdate), - Headers: headers, - }) - - if err != nil { - return fmt.Errorf("Error Linking DatasetConfig %q: %s", d.Id(), err) - } else { - log.Printf("[DEBUG] Finished Linking DatasetConfig %q: %#v", d.Id(), res) - } - - err = StorageInsightsOperationWaitTime( - config, res, project, "Linking DatasetConfig", userAgent, - d.Timeout(schema.TimeoutUpdate)) - - if err != nil { - return err - } -} diff --git a/mmv1/templates/terraform/post_delete/dialogflowcx_agent.go.tmpl b/mmv1/templates/terraform/post_delete/dialogflowcx_agent.go.tmpl deleted file mode 100644 index ebfd6eaaa1b6..000000000000 --- a/mmv1/templates/terraform/post_delete/dialogflowcx_agent.go.tmpl +++ /dev/null @@ -1,34 +0,0 @@ -if d.Get("delete_chat_engine_on_destroy").(bool) && engineID != "" { - // Check if the engine exist. - baseUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}DiscoveryEngineBasePath{{"}}"}}") - if err != nil { - return err - } - engineUrl := baseUrl + engineID - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: project, - RawURL: engineUrl, - UserAgent: userAgent, - }) - if err != nil { - log.Printf("[DEBUG] engine %s doesn't exist. 
No need to delete", engineID) - return nil - } - - // delete the engine - log.Printf("[DEBUG] Deleting engine %v", engineID) - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: project, - RawURL: engineUrl, - UserAgent: userAgent, - Timeout: d.Timeout(schema.TimeoutDelete), - }) - if err != nil { - return fmt.Errorf("Error deleting engine %s: %s", engineID, err) - } - log.Printf("[DEBUG] Finished deleting engine %s", engineID) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl b/mmv1/templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl deleted file mode 100644 index 131f601be2af..000000000000 --- a/mmv1/templates/terraform/post_read/bigqueryanalyticshub_data_exchange_subscription.go.tmpl +++ /dev/null @@ -1,51 +0,0 @@ - var currentState string - if stateVal, ok := d.GetOk("state"); ok { - if s, isString := stateVal.(string); isString { - currentState = s - } else { - log.Printf("[WARN] BigQuery Analytics Hub Subscription (ID: %s): 'state' attribute is present but not a string. Value: %#v. Assuming empty state for refresh logic.", d.Id(), stateVal) - currentState = "" // Treat as empty if type assertion fails - } - } else { - // 'state' attribute is missing entirely from resource data - log.Printf("[WARN] BigQuery Analytics Hub Subscription (ID: %s): 'state' attribute is missing from resource data. 
Assuming empty state for refresh logic.", d.Id()) - currentState = "" // Treat as empty if missing - } - - // Get the value of the 'refresh_policy' enum field - refreshPolicy := d.Get("refresh_policy").(string) - - // Determine if the refresh API call should be triggered based on the new policy - shouldTriggerRefreshAPI := false - if refreshPolicy == "ON_READ" { - shouldTriggerRefreshAPI = true - } else if refreshPolicy == "ON_STALE" { - shouldTriggerRefreshAPI = (currentState == "STATE_STALE") - } // If refreshPolicy is "NEVER", shouldTriggerRefreshAPI remains false. - - if (shouldTriggerRefreshAPI) { - log.Printf("[DEBUG] Attempting to refresh BigQuery Analytics Hub Subscription %q (ID: %s)", d.Get("name").(string), d.Id()) - - refreshUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}BigqueryAnalyticsHubBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/subscriptions/{{"{{"}}subscription_id{{"}}"}}:refresh") - if err != nil { - return fmt.Errorf("Error constructing refresh URL for DataExchangeSubscription %q: %s", d.Id(), err) - } - - refreshBodyMap := map[string]any{} - refreshHeaders := make(http.Header) - refreshHeaders.Set("Content-Type", "application/json") - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: refreshUrl, - UserAgent: userAgent, - Headers: refreshHeaders, - Body: refreshBodyMap, - }) - - if err != nil { - return fmt.Errorf("Error performing refresh for BigQuery Analytics Hub Subscription %q : %s", d.Id(), err) - } - } \ No newline at end of file diff --git a/mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl deleted file mode 100644 index ec5cf603b518..000000000000 --- a/mmv1/templates/terraform/post_read/iam_workload_identity_pool_managed_identity.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ - 
// list attestation_rules - ruleUrl := url + ":listAttestationRules" - - ruleRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: ruleUrl, - UserAgent: userAgent, - Headers: headers, - }) - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("IAMBetaWorkloadIdentityPoolManagedIdentity %q", d.Id())) - } - - for k, v := range ruleRes { - res[k] = v - } \ No newline at end of file diff --git a/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl b/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl deleted file mode 100644 index 59e65255432c..000000000000 --- a/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl +++ /dev/null @@ -1,35 +0,0 @@ -// Handle desired state changes -if d.HasChange("desired_state") { - old, new := d.GetChange("desired_state") - - if old.(string) != new.(string) { - var action string - - actionUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}CloudTasksBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/queues/{{"{{"}}name{{"}}"}}") - if err != nil { - return err - } - - if new.(string) == "PAUSED" { - actionUrl = fmt.Sprintf("%s:pause", actionUrl) - action = "pausing" - } else if new.(string) == "RUNNING" { - actionUrl = fmt.Sprintf("%s:resume", actionUrl) - action = "resuming" - } - - if actionUrl != "" { - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: actionUrl, - UserAgent: userAgent, - }) - - if err != nil { - return fmt.Errorf("Error %s queue %q: %s", action, d.Id(), err) - } - } - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl b/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl new file mode 100644 index 000000000000..bad5d0a9599b 
--- /dev/null +++ b/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl @@ -0,0 +1,18 @@ +// This file should be deleted in the next major terraform release, alongside +// the default values for 'publicRepository'. + +// deletePublicRepoIfCustom deletes the publicRepository key for a given +// pkg type from the remote repository config if customRepository is set. +deletePublicRepoIfCustom := func(pkgType string) { + if _, ok := d.GetOk(fmt.Sprintf("remote_repository_config.0.%s_repository.0.custom_repository", pkgType)); ok { + rrcfg := obj["remoteRepositoryConfig"].(map[string]interface{}) + repo := rrcfg[fmt.Sprintf("%sRepository", pkgType)].(map[string]interface{}) + delete(repo, "publicRepository") + } +} + +// Call above func for all pkg types that support custom remote repos. +deletePublicRepoIfCustom("docker") +deletePublicRepoIfCustom("maven") +deletePublicRepoIfCustom("npm") +deletePublicRepoIfCustom("python") \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl b/mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl deleted file mode 100644 index 5e8ca2a772d5..000000000000 --- a/mmv1/templates/terraform/pre_create/cloud_identity_group_membership.go.tmpl +++ /dev/null @@ -1,69 +0,0 @@ -if d.Get("create_ignore_already_exists").(bool) { - log.Printf("[DEBUG] Calling get GroupMembership to check if membership already exists") - preferredMemberKeyPropTyped := tpgresource.CheckStringMap(preferredMemberKeyProp) - - params := map[string]string{ - "memberKey.id": preferredMemberKeyPropTyped["id"], - } - if ns, ok := preferredMemberKeyPropTyped["namespace"]; ok && ns != "" { - params["memberKey.namespace"] = ns - } - getUrl, err := transport_tpg.AddQueryParams(url+":lookup", params) - if err != nil { - return err - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, 
- RawURL: getUrl, - UserAgent: userAgent, - Headers: headers, - }) - // Do normal create if membership does not exist - - if err != nil { - gerr, ok := err.(*googleapi.Error) - notFound := ok && gerr.Code == 404 - // If group membership is not found, we can proceed with the create operation. - if !notFound { - return fmt.Errorf("Error checking if GroupMembership exists: %s", err) - } - } else { - // Set computed resource properties from create API response so that they're available on the subsequent Read call. - err = resourceCloudIdentityGroupMembershipPostCreateSetComputedFields(d, meta, res) - if err != nil { - return fmt.Errorf("setting computed ID format fields: %w", err) - } - - // Store the ID now - id, err := tpgresource.ReplaceVars(d, config, "{{"{{name}}"}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - // `name` is autogenerated from the api so needs to be set post-create - name, ok := res["name"] - if !ok { - respBody, ok := res["response"] - if !ok { - return fmt.Errorf("Create response didn't contain critical fields. Create may not have succeeded.") - } - - name, ok = respBody.(map[string]interface{})["name"] - if !ok { - return fmt.Errorf("Create response didn't contain critical fields. 
Create may not have succeeded.") - } - } - if err := d.Set("name", name.(string)); err != nil { - return fmt.Errorf("Error setting name: %s", err) - } - d.SetId(name.(string)) - - log.Printf("[DEBUG] Finished creating GroupMembership %q: %#v", d.Id(), res) - - return resourceCloudIdentityGroupMembershipRead(d, meta) - } -} diff --git a/mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl b/mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl deleted file mode 100644 index f260b3cb2573..000000000000 --- a/mmv1/templates/terraform/pre_create/compute_instance_settings_fingerprint.go.tmpl +++ /dev/null @@ -1,21 +0,0 @@ - -fingerprintUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/zones/{{"{{"}}zone{{"}}"}}/instanceSettings/{{"{{"}}name{{"}}"}}") -if err != nil { - return err -} - -fingerPrintRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: project, - RawURL: fingerprintUrl, - UserAgent: userAgent, -}) -if err != nil { - return err -} - -fingerprintProp = fingerPrintRes["fingerprint"] -if v, ok := d.GetOkExists("fingerprint"); !tpgresource.IsEmptyValue(reflect.ValueOf(fingerprintProp)) && (ok || !reflect.DeepEqual(v, fingerprintProp)) { - obj["fingerprint"] = fingerprintProp -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl b/mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl deleted file mode 100644 index dee1facac030..000000000000 --- a/mmv1/templates/terraform/pre_create/datastream_connection_profile.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} - -if d.Get("create_without_validation").(bool) { - url, err = transport_tpg.AddQueryParams(url, map[string]string{"force": "true"}) -} else { - url, err = transport_tpg.AddQueryParams(url, map[string]string{"force": "false"}) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl deleted file mode 100644 index 87dc02e5c6a1..000000000000 --- a/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -location := d.Get("location").(string) - -// insert location into url for a different endpoint. 
-if strings.HasPrefix(url, "https://dialogflow.googleapis.com/v2/") { - url = strings.Replace(url, "https://dialogflow", fmt.Sprintf("https://%s-dialogflow", location), 1) -} diff --git a/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl index a1ca9cf70185..68e83e1317e0 100644 --- a/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl +++ b/mmv1/templates/terraform/pre_create/dialogflow_set_location.go.tmpl @@ -11,7 +11,4 @@ if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(d.Get ) } -// only insert location into url if the base_url in products/dialogflowcx/product.yaml is used -if strings.HasPrefix(url, "https://-dialogflow.googleapis.com/v3/") { - url = strings.Replace(url, "-dialogflow", fmt.Sprintf("%s-dialogflow", location), 1) -} +url = strings.Replace(url, "-dialogflow", fmt.Sprintf("%s-dialogflow", location), 1) diff --git a/mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl deleted file mode 100644 index d822da017eaf..000000000000 --- a/mmv1/templates/terraform/pre_create/dialogflowcx_generator.go.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -// extract location from the parent -location := "" - -if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(d.Get("parent").(string)); parts != nil { - location = parts[1] -} else { - return fmt.Errorf( - "Saw %s when the parent is expected to contains location %s", - d.Get("parent"), - "projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/...", - ) -} - -// only insert location into url if the base_url in products/dialogflowcx/product.yaml is used -if strings.HasPrefix(url, "https://-dialogflow.googleapis.com/v3/") { - url = strings.Replace(url,"-dialogflow",fmt.Sprintf("%s-dialogflow",location),1) -} - -if v, ok := d.GetOk("language_code") ; ok { - url, err = 
transport_tpg.AddQueryParams(url, map[string]string{"languageCode": fmt.Sprintf("%v", v)}) - if err != nil { - return err - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl index 19699a95dd86..79be0a450d28 100644 --- a/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl +++ b/mmv1/templates/terraform/pre_create/dialogflowcx_set_location_skip_default_obj.go.tmpl @@ -11,10 +11,7 @@ if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(d.Get ) } -// only insert location into url if the base_url in products/dialogflowcx/product.yaml is used -if strings.HasPrefix(url, "https://-dialogflow.googleapis.com/v3/") { - url = strings.Replace(url,"-dialogflow",fmt.Sprintf("%s-dialogflow",location),1) -} +url = strings.Replace(url,"-dialogflow",fmt.Sprintf("%s-dialogflow",location),1) // if it's a default object Dialogflow creates for you, "Update" instead of "Create" // Note: below we try to access fields that aren't present in the resource, because this custom code is reused across multiple Dialogflow resources that contain different fields. When the field isn't present, we deliberately ignore the error and the boolean is false. diff --git a/mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl b/mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl deleted file mode 100644 index ccc95ea82c46..000000000000 --- a/mmv1/templates/terraform/pre_create/gkehub_existing_feature.go.tmpl +++ /dev/null @@ -1,25 +0,0 @@ -// Check if the fleet feature already exists. Do an update if so. 
- -getUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/features/{{"{{"}}name{{"}}"}}") -if err != nil { - return err -} -_, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: getUrl, - UserAgent: userAgent, - Headers: headers, -}) - -if err == nil { - // Fleet feature already exists - log.Printf("[DEBUG] Fleet feature already exists %s", d.Get("name")) - id, err := tpgresource.ReplaceVars(d, config, "{{$.GetIdFormat}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - return resourceGKEHub2FeatureUpdate(d, meta) -} diff --git a/mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl b/mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl deleted file mode 100644 index 8f11cde4c552..000000000000 --- a/mmv1/templates/terraform/pre_create/iam_workload_identity_pool_managed_identity.go.tmpl +++ /dev/null @@ -1,7 +0,0 @@ - // see if we need to create attestation_rules - _, hasRule := d.GetOk("attestation_rules") - ruleObj := make(map[string]interface{}) - if hasRule { - ruleObj["attestationRules"] = attestationRulesProp - delete(obj, "attestationRules") - } \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl b/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl index b1543d5758c4..743d7e857912 100644 --- a/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl +++ b/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl @@ -1,7 +1,3 @@ -if d.Get("deletion_protection").(bool) { - return fmt.Errorf("cannot destroy cluster without setting deletion_protection=false and running `terraform apply`") -} - // Forcefully delete the secondary cluster and the dependent instances because deletion of secondary instance is not 
supported. if deletionPolicy := d.Get("deletion_policy"); deletionPolicy == "FORCE" { url = url + "?force=true" diff --git a/mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl b/mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl deleted file mode 100644 index 8da26b393417..000000000000 --- a/mmv1/templates/terraform/pre_delete/bigquery_analytics_hub_listing.go.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -deleteCommercial := d.Get("delete_commercial") -if deleteCommercial != nil { - url = url + "?deleteCommercial=" + fmt.Sprintf("%v", deleteCommercial) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl b/mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl deleted file mode 100644 index ff0401b721fb..000000000000 --- a/mmv1/templates/terraform/pre_delete/bigquery_row_access_policy.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -// Always force-delete. -obj = make(map[string]interface{}) -obj["force"] = true diff --git a/mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl b/mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl deleted file mode 100644 index c7ad64e13099..000000000000 --- a/mmv1/templates/terraform/pre_delete/cloudrunv2_worker_pool_deletion_policy.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -if d.Get("deletion_protection").(bool) { - return fmt.Errorf("cannot destroy WorkerPool without setting deletion_protection=false and running `terraform apply`") -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl b/mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl new file mode 100644 index 000000000000..86a1aa3742dd --- /dev/null +++ b/mmv1/templates/terraform/pre_delete/code_repository_index_force_delete.go.tmpl @@ -0,0 +1,6 @@ +obj = make(map[string]interface{}) +if v, ok := 
d.GetOk("force_destroy"); ok { + if v == true { + obj["force"] = true + } +} diff --git a/mmv1/templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl b/mmv1/templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl deleted file mode 100644 index 9ed850eabb05..000000000000 --- a/mmv1/templates/terraform/pre_delete/dialogflowcx_agent.go.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -// Extract engine ID from the gen_app_builder_settings field of the Agent -s := d.Get("gen_app_builder_settings") -log.Printf("[DEBUG] gen_app_builder_settings: %v", s) -settings, ok := s.([]interface{}) -if !ok { - return fmt.Errorf("Error converting gen_app_builder_settings %s to []interface{}", s) -} - -engineID := "" -if len(settings) > 0 { - // An engine is linked to the Agent. Delete it. - engineIDIntf, ok := settings[0].(map[string]interface{})["engine"] - if !ok { - return fmt.Errorf("Expected key 'engine' in map %+v", settings[0]) - } - engineID, ok = engineIDIntf.(string) - if !ok { - return fmt.Errorf("Can convert engine ID %s to string", engineIDIntf) - } -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl b/mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl deleted file mode 100644 index f7e91ee2ceba..000000000000 --- a/mmv1/templates/terraform/pre_delete/firebasehosting_site.go.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -if siteType := d.Get("type"); siteType == "DEFAULT_SITE" { - log.Printf("[WARN] Skip deleting default hosting side: %q", d.Get("name").(string)) - return nil -} diff --git a/mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl b/mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl deleted file mode 100644 index be59a11edc2b..000000000000 --- a/mmv1/templates/terraform/pre_delete/gkehub_existing_feature.go.tmpl +++ /dev/null @@ -1,55 +0,0 @@ -// Special handling for the mandatory 'rbacrolebindingactuation' feature. 
-// Instead of deleting it, we reset it to a default state by sending a PATCH request. -if d.Get("name").(string) == "rbacrolebindingactuation" { - log.Printf("[DEBUG] Mandatory feature 'rbacrolebindingactuation' detected. Resetting instead of deleting.") - - patchUrl, err := tpgresource.ReplaceVarsForId(d, config, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/features/{{"{{"}}name{{"}}"}}") - if err != nil { - return err - } - - // Construct the request body to clear the desired field. - obj := map[string]interface{}{ - "spec": map[string]interface{}{ - "rbacrolebindingactuation": map[string]interface{}{ - "allowedCustomRoles": []string{}, - }, - }, - } - - // A specific updateMask is required for a PATCH request. - updateMask := "spec.rbacrolebindingactuation.allowedCustomRoles" - url, err := transport_tpg.AddQueryParams(patchUrl, map[string]string{"updateMask": updateMask}) - if err != nil { - return err - } - - log.Printf("[DEBUG] Sending PATCH to reset Feature %q: %#v", d.Id(), obj) - - // Send the raw PATCH request. - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "PATCH", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: d.Timeout(schema.TimeoutDelete), // Use the delete timeout for this reset operation. - Headers: headers, - }) - if err != nil { - return fmt.Errorf("error resetting Feature %q: %s", d.Id(), err) - } - - // Wait for the long-running operation to complete. 
- err = GKEHub2OperationWaitTime( - config, res, tpgresource.GetResourceNameFromSelfLink(project), "Resetting Feature", userAgent, - d.Timeout(schema.TimeoutDelete)) - - if err != nil { - return fmt.Errorf("error waiting to reset Feature %q: %s", d.Id(), err) - } - - log.Printf("[DEBUG] Finished resetting Feature %q", d.Id()) - return nil -} diff --git a/mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl b/mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl deleted file mode 100644 index 3d3d974e8b47..000000000000 --- a/mmv1/templates/terraform/pre_delete/oracledatabase_odbnetwork.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -if d.Get("deletion_protection").(bool) { - return fmt.Errorf("cannot destroy google_oracle_database_odb_network resource with id : %q without setting deletion_protection=false and running `terraform apply`", d.Id()) -} diff --git a/mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl b/mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl deleted file mode 100644 index b81a3712b592..000000000000 --- a/mmv1/templates/terraform/pre_delete/oracledatabase_odbsubnet.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -if d.Get("deletion_protection").(bool) { - return fmt.Errorf("cannot destroy google_oracle_database_odb_subnet resource with id : %q without setting deletion_protection=false and running `terraform apply`", d.Id()) -} diff --git a/mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl b/mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl deleted file mode 100644 index 0ac2035f9173..000000000000 --- a/mmv1/templates/terraform/pre_delete/regional_secret.go.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -if d.Get("deletion_protection").(bool) { - return fmt.Errorf("cannot destroy secretmanager regional secret without setting deletion_protection=false and running `terraform apply`") -} diff --git a/mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl 
b/mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl deleted file mode 100644 index 0c588d6cd9eb..000000000000 --- a/mmv1/templates/terraform/pre_delete/secret_manager_secret.go.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -if d.Get("deletion_protection").(bool) { - return fmt.Errorf("cannot destroy secret manager secret without setting deletion_protection=false and running `terraform apply`") -} - diff --git a/mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl b/mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl deleted file mode 100644 index 26f84c8f949b..000000000000 --- a/mmv1/templates/terraform/pre_delete/securesourcemanager_deletion_policy.go.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -deletionPolicy := d.Get("deletion_policy"); - -if deletionPolicy == "ABANDON" { - return nil; -} else if deletionPolicy == "PREVENT" { - return fmt.Errorf(`cannot destroy resource without setting deletion_policy="DELETE"`) -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl b/mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl deleted file mode 100644 index f4f4fb81b2f4..000000000000 --- a/mmv1/templates/terraform/pre_read/dataplex_entry.go.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -url, err = transport_tpg.AddQueryParams(url, map[string]string{"view": "ALL" }) -if err != nil { - return err -} diff --git a/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl b/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl index 72e5edbe2e77..582504e94e11 100644 --- a/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl +++ b/mmv1/templates/terraform/pre_read/sql_database_activation_policy.tmpl @@ -1,8 +1,8 @@ instance := d.Get("instance").(string) databaseInstance, err := config.NewSqlAdminClient(userAgent).Instances.Get(project, instance).Do() -if err != nil { - return err -} -if databaseInstance.Settings != nil && 
databaseInstance.Settings.ActivationPolicy != "ALWAYS" { - return nil -} \ No newline at end of file + if err != nil { + return err + } + if databaseInstance.Settings.ActivationPolicy != "ALWAYS" { + return nil + } \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl b/mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl deleted file mode 100644 index de4b967c57d4..000000000000 --- a/mmv1/templates/terraform/pre_update/bigqueryanalyticshub_listing.go.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -if d.HasChange("pubsub_topic.0.data_affinity_regions"){ - // Split URL into base and query parts - parts := strings.SplitN(url, "?", 2) - if len(parts) == 2 { - base := parts[0] - query := parts[1] - - query = strings.ReplaceAll(query, "%2C", ",") - query = strings.ReplaceAll(query, "%2c", ",") - - // Replace "pubsubTopic" with "pubsubTopic.dataAffinityRegions" - query = strings.ReplaceAll(query, "pubsubTopic", "pubsubTopic.dataAffinityRegions") - - // Re-encode commas back - query = strings.ReplaceAll(query, ",", "%2C") - - url = base + "?" + query - } -} - diff --git a/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl b/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl index 7c4fb0f8a0e9..e51ba35af4c3 100644 --- a/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl +++ b/mmv1/templates/terraform/pre_update/bigquerydatatransfer_config.tmpl @@ -50,12 +50,3 @@ url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": str if err != nil { return err } - -// Primarily added to fix b/421406404 -// This field is immutable, so it should be safe to set it. 
-dataSourceIdProp, err := expandBigqueryDataTransferConfigDataSourceId(d.Get("data_source_id"), d, config) -if err != nil { - return err -} else if v, ok := d.GetOkExists("data_source_id"); !tpgresource.IsEmptyValue(reflect.ValueOf(dataSourceIdProp)) && (ok || !reflect.DeepEqual(v, dataSourceIdProp)) { - obj["dataSourceId"] = dataSourceIdProp -} diff --git a/mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl b/mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl deleted file mode 100644 index ff2b3ff90915..000000000000 --- a/mmv1/templates/terraform/pre_update/dataplex_entry.go.tmpl +++ /dev/null @@ -1,39 +0,0 @@ -if d.HasChange("aspects") { - url, err = transport_tpg.AddQueryParams(url, map[string]string{"deleteMissingAspects": "true"}) - if err != nil { - return err - } - - aspectKeysOfInterest := make(map[string]struct{}) - currentAspects, futureAspects := d.GetChange("aspects") - - err = AddAspectsToSet(aspectKeysOfInterest, currentAspects) - - if err != nil { - return err - } - - err = AddAspectsToSet(aspectKeysOfInterest, futureAspects) - - if err != nil { - return err - } - - aspectKeysArray := make([]string, 0, len(aspectKeysOfInterest)) - - for key := range aspectKeysOfInterest { - aspectKeysArray = append(aspectKeysArray, key) - } - - sort.Strings(aspectKeysArray) - - genericAspectKeysArray := make([]interface{}, len(aspectKeysArray)) - for i, key := range aspectKeysArray { - genericAspectKeysArray[i] = key - } - - url, err = transport_tpg.AddArrayQueryParams(url, "aspectKeys", genericAspectKeysArray) - if err != nil { - return err - } -} diff --git a/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl b/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl deleted file mode 100644 index 8b3b701a19b7..000000000000 --- a/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl +++ /dev/null @@ -1,35 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 
Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} -updateMask := []string{} - -if !plan.EnableStackdriverLogging.Equal(state.EnableStackdriverLogging) { - updateMask = append(updateMask, "enableStackdriverLogging") -} - -if !plan.EnableStackdriverMonitoring.Equal(state.EnableStackdriverMonitoring) { - updateMask = append(updateMask, "enableStackdriverMonitoring") -} - -if !plan.EnableRbac.Equal(state.EnableRbac) { - updateMask = append(updateMask, "enableRbac") -} - - -// updateMask is a URL parameter but not present in the schema, so ReplaceVars -// won't set it - -url, err := transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) -if err != nil { - resp.Diagnostics.AddError("Error, failure building update mask query parameters in {{ $.Name -}}", err.Error()) - return -} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl b/mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl deleted file mode 100644 index 456ef222d86d..000000000000 --- a/mmv1/templates/terraform/pre_update/storage_insights_dataset_config.go.tmpl +++ /dev/null @@ -1,116 +0,0 @@ -updateMask := []string{} - -if d.HasChange("include_newly_created_buckets") { - updateMask = append(updateMask, "includeNewlyCreatedBuckets") -} - -if d.HasChange("retention_period_days") { - updateMask = append(updateMask, "retentionPeriodDays") -} - -if d.HasChange("description") { - updateMask = 
append(updateMask, "description") -} - -if d.HasChange("include_cloud_storage_locations") { - _, new_storage_locations := d.GetChange("include_cloud_storage_locations") - if new_locations, ok := new_storage_locations.([]interface{}); ok && len(new_locations) > 0 { - updateMask = append(updateMask, "includeCloudStorageLocations") - } -} - -if d.HasChange("exclude_cloud_storage_locations") { - _, new_storage_locations := d.GetChange("exclude_cloud_storage_locations") - if new_locations, ok := new_storage_locations.([]interface{}); ok && len(new_locations) > 0 { - updateMask = append(updateMask, "excludeCloudStorageLocations") - } -} - -if d.HasChange("include_cloud_storage_buckets") { - _, new_storage_buckets := d.GetChange("include_cloud_storage_buckets") - if new_buckets, ok := new_storage_buckets.([]interface{}); ok && len(new_buckets) > 0 { - updateMask = append(updateMask, "includeCloudStorageBuckets") - } -} - -if d.HasChange("exclude_cloud_storage_buckets") { - _, new_storage_buckets := d.GetChange("exclude_cloud_storage_buckets") - if new_buckets, ok := new_storage_buckets.([]interface{}); ok && len(new_buckets) > 0 { - updateMask = append(updateMask, "excludeCloudStorageBuckets") - } -} - -if d.HasChange("source_projects") { - _, new_source_projects := d.GetChange("source_projects") - if new_project_numbers, ok := new_source_projects.([]interface{}); ok && len(new_project_numbers) > 0 { - updateMask = append(updateMask, "sourceProjects") - } -} - -if d.HasChange("source_folders") { - _, new_source_folders := d.GetChange("source_folders") - if new_folder_numbers, ok := new_source_folders.([]interface{}); ok && len(new_folder_numbers) > 0 { - updateMask = append(updateMask, "sourceFolders") - } -} - -if d.HasChange("organization_scope") { - _, new_organization_scope := d.GetChange("organization_scope") - if new_organization_scope == true { - updateMask = append(updateMask, "organizationScope") - } -} - - -// Link or Unlink a dataset if required -if 
d.HasChange("link_dataset") { - _, new_link_dataset := d.GetChange("link_dataset") - linkAPIEndPoint := "linkDataset" - if new_link_dataset == false { - linkAPIEndPoint = "unlinkDataset" - } - - linkUrl := fmt.Sprintf("%s:%s", url, linkAPIEndPoint) - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: linkUrl, - UserAgent: userAgent, - Timeout: d.Timeout(schema.TimeoutUpdate), - Headers: headers, - }) - - if err != nil { - return fmt.Errorf("Error %v DatasetConfig %q: %s", linkAPIEndPoint, d.Id(), err) - } else { - log.Printf("[DEBUG] Finished %s DatasetConfig %q: %#v", linkAPIEndPoint, d.Id(), res) - } - - err = StorageInsightsOperationWaitTime( - config, res, project, "Linking/Unlinking DatasetConfig", userAgent, - d.Timeout(schema.TimeoutUpdate)) - - if err != nil { - return err - } -} - - -// if updateMask is empty we are not updating anything so skip the post -if len(updateMask) == 0 { - return resourceStorageInsightsDatasetConfigRead(d, meta) -} - -// updateMask is a URL parameter but not present in the schema, so ReplaceVars -// won't set it -url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) -if err != nil { - return err -} diff --git a/mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl b/mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl deleted file mode 100644 index 89a2fc22ea98..000000000000 --- a/mmv1/templates/terraform/pre_update/vpc_access_connector_instances.go.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -if d.HasChange("min_instances") && !d.HasChange("max_instances") { - obj["maxInstances"] = d.Get("max_instances").(int) - updateMask = append(updateMask, "maxInstances", "minInstances") -} - -if 
d.HasChange("max_instances") && !d.HasChange("min_instances") { - obj["minInstances"] = d.Get("min_instances").(int) - updateMask = append(updateMask, "maxInstances", "minInstances") -} - -// Overwrite the previously set mask. -url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) -if err != nil { - return err -} diff --git a/mmv1/templates/terraform/product.go.tmpl b/mmv1/templates/terraform/product.go.tmpl deleted file mode 100644 index 98f1033ace91..000000000000 --- a/mmv1/templates/terraform/product.go.tmpl +++ /dev/null @@ -1,22 +0,0 @@ -{{- if ne $.Compiler "terraformgoogleconversion-codegen" }} -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 -{{ end }} -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: MMv1 *** -// -// ---------------------------------------------------------------------------- -// -// This file is automatically generated by Magic Modules and manual -// changes will be clobbered when the file is regenerated. -// -// Please read more about how to change this file in -// .github/CONTRIBUTING.md. -// -// ---------------------------------------------------------------------------- - -// Package {{ lower $.Name }} contains resources, datasources, etc. for the {{ lower $.DisplayName }} service. -package {{ lower $.Name }} - -const ProductName = "{{ lower $.Name }}" diff --git a/mmv1/templates/terraform/property_documentation.html.markdown.tmpl b/mmv1/templates/terraform/property_documentation.html.markdown.tmpl index ee1399b228ef..a6f89d8bb946 100644 --- a/mmv1/templates/terraform/property_documentation.html.markdown.tmpl +++ b/mmv1/templates/terraform/property_documentation.html.markdown.tmpl @@ -36,7 +36,7 @@ {{- if $.Sensitive }} **Note**: This property is sensitive and will not be displayed in the plan. 
{{- end }} - {{- if $.WriteOnlyLegacy }} + {{- if $.WriteOnly }} **Note**: This property is write-only and will not be read from the API. {{- end }} {{- if and (not $.FlattenObject) $.NestedProperties }} diff --git a/mmv1/templates/terraform/resource.go.tmpl b/mmv1/templates/terraform/resource.go.tmpl index 6bed9ba5b917..e573ad4523a6 100644 --- a/mmv1/templates/terraform/resource.go.tmpl +++ b/mmv1/templates/terraform/resource.go.tmpl @@ -186,19 +186,19 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} - {{ $prop.CamelizeProperty -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"){{ end }}, d, config) + {{ $prop.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"){{ end }}, d, config) if err != nil { return err - {{- if $prop.SendEmptyValue -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { - {{- else if $prop.FlattenObject -}} - } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) { - {{- else -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { - {{- end}} - obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop +{{- if $prop.SendEmptyValue -}} + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { +{{- else if $prop.FlattenObject -}} + } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName 
-}}Prop)) { +{{- else -}} + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { +{{- end}} + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop } - {{- end}} +{{- end}} {{if $.CustomCode.Encoder -}} obj, err = resource{{ $.ResourceName -}}Encoder(d, meta, obj) @@ -291,7 +291,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{- /* Set computed resource properties required for building the ID from create API response (as long as Create doesn't use an async operation) */}} {{- /* This is necessary so that the ID is set correctly (and so that the following Read can succeed.) */}} {{- /* Technically this should possibly use the read URL explicitly, since id_format could differ - but that might need to be in addition to id_format anyway. */}} -{{- if and $.HasPostCreateComputedFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} +{{- if and $.HasComputedIdFormatFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} // Set computed resource properties from create API response so that they're available on the subsequent Read // call. 
err = resource{{ $.ResourceName }}PostCreateSetComputedFields(d, meta, res) @@ -309,7 +309,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{if and $.GetAsync ($.GetAsync.Allow "Create") -}} {{ if ($.GetAsync.IsA "OpAsync") -}} -{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasPostCreateComputedFields -}} +{{ if and $.GetAsync.Result.ResourceInsideResponse (or $.GetIdentity $.HasComputedIdFormatFields) -}} // Use the resource in the operation response to populate // identity fields and d.Id() before read var opRes map[string]interface{} @@ -352,11 +352,11 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ } {{- end}} {{- end}} - {{- if $.HasPostCreateComputedFields}} + {{- if $.HasComputedIdFormatFields}} {{- $renderedIdFromName := "false" }} {{- range $prop := $.GettableProperties }} {{- /* Check if prop is potentially computed */}} - {{- if and ($.InPostCreateComputed $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} + {{- if and ($.InIdFormat $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} {{- if and (eq $prop.CustomFlatten "templates/terraform/custom_flatten/id_from_name.tmpl") (eq $renderedIdFromName "false") }} // Setting `name` field so that `id_from_name` flattener will work properly. if err := d.Set("name", flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}Name(opRes["name"], d, config)); err != nil { @@ -378,6 +378,17 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{- end }} {{- end }}{{/* prop is potentially computed */}} {{- end }}{{/* range */}} + {{- else}} +{{- /* + Temporarily keeping these resources the same - but setting properties here should be unnecessary because the impacted fields aren't expected to change as a result of the API request. Will remove in a separate step for clarity. 
+ */}} +{{- range $prop := $.GettableProperties }} +{{- if $.IsInIdentity $prop }} + if err := d.Set("{{ underscore $prop.Name -}}", flatten{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(opRes["{{ $prop.ApiName -}}"], d, config)); err != nil { + return err + } +{{- end}} +{{- end}} {{- end}} // This may have caused the ID to update - update it if so. @@ -723,17 +734,17 @@ func resource{{ $.ResourceName -}}Update(d *schema.ResourceData, meta interface{ obj := make(map[string]interface{}) {{- range $prop := $.UpdateBodyProperties }} {{/* flattened $s won't have something stored in state so instead nil is passed to the next expander. */}} - {{- $prop.CamelizeProperty -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{else}}d.Get("{{underscore $prop.Name}}"){{ end }}, d, config) + {{- $prop.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{else}}d.Get("{{underscore $prop.Name}}"){{ end }}, d, config) if err != nil { return err {{- if $prop.SendEmptyValue -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { {{- else if $prop.FlattenObject -}} - } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) { + } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) { {{- else -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && 
(ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { {{- end}} - obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop } {{- end}} @@ -1222,7 +1233,7 @@ func resource{{ $.ResourceName -}}PostCreateFailure(d *schema.ResourceData, meta {{ $.CustomTemplate $.StateMigrationFile false -}} {{- end }} -{{- if and $.HasPostCreateComputedFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} +{{- if and $.HasComputedIdFormatFields (or (or (not $.GetAsync) (not ($.GetAsync.Allow "Create"))) (and $.GetAsync (and ($.GetAsync.IsA "PollAsync") ($.GetAsync.Allow "Create"))))}} func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.ResourceData, meta interface{}, res map[string]interface{}) error { config := meta.(*transport_tpg.Config) {{- /* Don't render decoder for PollAsync resources - their decoders are expected to return `nil` until the resource completion completes, but we need to set their computed fields in order to call PollRead - so there can never be a dependency on the decoder. */}} @@ -1252,7 +1263,7 @@ func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.Resource {{- $renderedIdFromName := "false" }} {{- range $prop := $.GettableProperties }} {{- /* Check if prop is potentially computed */}} - {{- if and ($.InPostCreateComputed $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} + {{- if and ($.InIdFormat $prop) (and (or $prop.Output $prop.DefaultFromApi) (not $prop.IgnoreRead)) }} {{- if and (eq $prop.CustomFlatten "templates/terraform/custom_flatten/id_from_name.tmpl") (eq $renderedIdFromName "false") }} // Setting `name` field so that `id_from_name` flattener will work properly. 
if err := d.Set("name", flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}Name(res["name"], d, config)); err != nil { @@ -1276,4 +1287,4 @@ func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.Resource {{- end }}{{/* range */}} return nil } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/templates/terraform/resource.html.markdown.tmpl b/mmv1/templates/terraform/resource.html.markdown.tmpl index b848ff6b1680..add27d89046c 100644 --- a/mmv1/templates/terraform/resource.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource.html.markdown.tmpl @@ -88,13 +88,21 @@ The following arguments are supported: {{ "" }} {{ "" }} {{- range $p := $.RootProperties }} - {{- if and $p.Required (not $p.WriteOnlyLegacy) }} + {{- if and $p.Required (not $p.WriteOnly) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} {{- end }} {{- end }} {{ "" }} +{{- range $p := $.AllUserProperties }} + {{- if $p.Required }} +{{- trimTemplate "nested_property_documentation.html.markdown.tmpl" $p -}} + {{- end}} +{{- end }} +- - - +{{ "" }} +{{ "" }} {{- range $p := $.RootProperties }} - {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnlyLegacy) }} + {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnly) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} {{- end }} {{- end }} @@ -112,12 +120,6 @@ The following arguments are supported: {{- if $.Docs.OptionalProperties }} {{ $.Docs.OptionalProperties }} {{- end }} -{{ "" }} -{{- range $p := $.AllUserProperties }} - {{- if $p.Required }} -{{- trimTemplate "nested_property_documentation.html.markdown.tmpl" $p -}} - {{- end}} -{{- end }} {{- range $p := $.AllUserProperties }} {{- if and (not $p.Required) (not $p.Output) }} {{- trimTemplate "nested_property_documentation.html.markdown.tmpl" $p -}} @@ -129,7 +131,7 @@ The following arguments are supported: The following write-only attributes are supported: {{ range $p := $.RootProperties }} - 
{{- if $p.WriteOnlyLegacy }} + {{- if $p.WriteOnly }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p }} {{- end}} {{- end }} diff --git a/mmv1/templates/terraform/resource_fw.go.tmpl b/mmv1/templates/terraform/resource_fw.go.tmpl deleted file mode 100644 index c73454a0e4b3..000000000000 --- a/mmv1/templates/terraform/resource_fw.go.tmpl +++ /dev/null @@ -1,764 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2025 Google LLC. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. */ -}} -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -{{/*{{$.CodeHeader TemplatePath}}*/}} - -package {{ lower $.ProductMetadata.Name }} - -import ( - - "fmt" - "log" - "net/http" - "reflect" -{{- if $.SupportsIndirectUserProjectOverride }} - "regexp" -{{- end }} -{{- if or (and (not $.Immutable) ($.UpdateMask)) $.LegacyLongFormProject }} - "strings" -{{- end }} - "time" - -{{/* # We list all the v2 imports here, because we run 'goimports' to guess the correct */}} -{{/* # set of imports, which will never guess the major version correctly. 
*/}} -{{/* - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" - */}} - "github.com/hashicorp/go-cty/cty" - - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-log/tflog" - - "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - "{{ $.ImportPath }}/fwmodels" - "{{ $.ImportPath }}/fwresource" - "{{ $.ImportPath }}/fwtransport" - - "{{ $.ImportPath }}/tpgresource" - transport_tpg "{{ $.ImportPath }}/transport" - "{{ $.ImportPath }}/verify" - -{{ if $.FlattenedProperties }} - "google.golang.org/api/googleapi" -{{- end}} -) - -{{if $.CustomCode.Constants -}} - {{- $.CustomTemplate $.CustomCode.Constants true -}} -{{- end}} - -var ( - _ resource.Resource = &{{$.ResourceName}}FWResource{} - _ resource.ResourceWithConfigure = &{{$.ResourceName}}FWResource{} -) - -func New{{$.ResourceName}}FWResource() resource.Resource { - return &{{$.ResourceName}}FWResource{} -} - -type {{$.ResourceName}}FWResource struct { - {{/*client *sqladmin.Service*/}} - providerConfig *transport_tpg.Config -} - -type {{$.ResourceName}}FWModel struct { - {{- range $prop := $.OrderProperties $.AllUserProperties }} - {{camelize $prop.Name 
"upper"}} types.{{$prop.GetFWType}} `tfsdk:"{{underscore $prop.Name}}"` - {{- end }} - {{ if $.HasProject -}} - Project types.String `tfsdk:"project"` - {{- end }} - - Id types.String `tfsdk:"id"`{{/* TODO should this be gated behind a greenfield/brownfield flag? */}} - Timeouts timeouts.Value `tfsdk:"timeouts"` -} - -// Metadata returns the resource type name. -func (d *{{$.ResourceName}}FWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_fw_{{ underscore $.ResourceName}}" -} - -func (r *{{$.ResourceName}}FWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - p, ok := req.ProviderData.(*transport_tpg.Config) - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - return - } - - {{/* TODO non-client equivalent? 
*/}} - {{/* - r.client = p.NewSqlAdminClient(p.UserAgent) - if resp.Diagnostics.HasError() { - return - }*/}} - r.providerConfig = p -} - -func (d *{{$.ResourceName}}FWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "A resource to represent a SQL User object.", - - Attributes: map[string]schema.Attribute{ -{{- range $prop := $.OrderProperties $.AllUserProperties }} - {{template "SchemaFieldsFW" $prop -}} -{{- end }} -{{- range $prop := $.VirtualFields }} - {{template "SchemaFieldsFW" $prop -}} -{{- end }} -{{- if $.CustomCode.ExtraSchemaEntry }} - {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} -{{- end}} -{{ if $.HasProject -}} - "project": schema.StringAttribute{ - Optional: true, - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - }, -{{- end}} -{{- if $.HasSelfLink }} - "self_link": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, -{{- end}} - // This is included for backwards compatibility with the original, SDK-implemented resource. - "id": schema.StringAttribute{ - Description: "Project identifier", - MarkdownDescription: "Project identifier", - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, - }, - } -} - -func (r *{{$.ResourceName}}FWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data {{$.ResourceName}}FWModel - var metaData *fwmodels.ProviderMetaModel -{{ if $.CustomCode.CustomCreate -}} - {{ $.CustomTemplate $.CustomCode.CustomCreate false -}} -{{ else -}} - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
- if resp.Diagnostics.HasError() { - return - } - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - var project, billingProject types.String -{{ if $.HasProject -}} - project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{ if $.LegacyLongFormProject -}} - billingProject = strings.TrimPrefix(project, "projects/") -{{ else -}} - billingProject = project -{{- end }} -{{- end }} -{{ if $.HasRegion -}} - region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} -{{ if $.HasZone -}} - zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} - - var schemaDefaultVals fwtransport.DefaultVars -{{ if $.HasProject -}} - schemaDefaultVals.Project = project -{{- end }} -{{ if $.HasRegion -}} - schemaDefaultVals.Region = region -{{- end }} -{{ if $.HasZone -}} - schemaDefaultVals.Zone = zone -{{- end }} - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - obj := make(map[string]interface{}) - -{{- range $prop := $.OrderProperties $.AllUserProperties }} - {{$prop.ApiName}}Prop, diags := data.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop -{{- end }} - - - {{/* TODO default timeouts */}} - createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.CreateUri}}") - if resp.Diagnostics.HasError() { - return - } - - log.Printf("[DEBUG] Creating new {{ $.Name -}}: %#v", obj) - - {{/* Nested Query block */}} - - headers := make(http.Header) -{{- if $.CustomCode.PreCreate }} - {{ $.CustomTemplate $.CustomCode.PreCreate false -}} -{{- end}} - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: r.providerConfig, - Method: "{{ upper $.CreateVerb -}}", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: createTimeout, - Headers: headers, -{{- if $.ErrorRetryPredicates }} - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, -{{- end}} -{{- if $.ErrorAbortPredicates }} - ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, -{{- end}} - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { -{{- if and ($.CustomCode.PostCreateFailure) (not $.GetAsync) -}} - resource{{ $.ResourceName -}}PostCreateFailure(d, meta) -{{- end}} - return - } - - tflog.Trace(ctx, "created {{$.Name}} resource") - - data.Id = types.StringValue("{{ $.IdFormat -}}") -{{ if $.HasProject -}} - data.Project = project -{{- end }} -{{ if $.HasRegion -}} - data.Region = region -{{- end }} -{{ if $.HasZone -}} - data.Zone = zone -{{- end }} - -{{if and $.GetAsync ($.GetAsync.Allow "Create") -}} -{{ if ($.GetAsync.IsA "OpAsync") -}} -{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasPostCreateComputedFields -}} - // Use the resource in the operation response to populate - // identity fields and d.Id() before read - var opRes map[string]interface{} - err = {{ $.ClientNamePascal -}}OperationWaitTimeWithResponse( - r.providerConfig, res, &opRes, {{if or 
$.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Creating {{ $.Name -}}", userAgent, - createTimeout) - if err != nil { - {{/* Postcreate Failure */}} -{{- if not $.TaintResourceOnFailedCreate -}} - // The resource didn't actually create - resp.State.RemoveResource(ctx){{/* TODO verify this works */}} -{{ end -}} - resp.Diagnostics.AddError("Error, failure waiting to create {{ $.Name -}}", err.Error()) - return - } - - {{/* CustomCode.Decoder */}} - {{/* NestedQuery */}} - {{/* if $.HasPostCreateComputedFields */}} - {{/* This may have caused the ID to update - update it if so. */}} -{{ else -}}{{/* $.GetAsync.Result.ResourceInsideResponse */}} - err := {{ $.ClientNamePascal -}}OperationWaitTime( - r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Creating {{ $.Name -}}", userAgent, - createTimeout) - - if err != nil { - - {{/* Postcreate Failure */}} -{{- if not $.TaintResourceOnFailedCreate -}} - // The resource didn't actually create - resp.State.RemoveResource(ctx){{/* TODO verify this works */}} -{{ end -}} - resp.Diagnostics.AddError("Error, failure waiting to create {{ $.Name -}}", err.Error()) - return - } - -{{ end -}}{{/* $.GetAsync.Result.ResourceInsideResponse */}} -{{ end -}}{{/*if ($.GetAsync.IsA "OpAsync")*/}} -{{end -}}{{/*if and $.GetAsync ($.GetAsync.Allow "Create")*/}} -{{if $.CustomCode.PostCreate -}} - {{- $.CustomTemplate $.CustomCode.PostCreate false -}} -{{- end}} - - - // read back {{$.Name}} - r.{{$.ResourceName}}FWRefresh(ctx, &data, &resp.State, req, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
- - log.Printf("[DEBUG] Finished creating {{ $.Name }} %q: %#v", data.Id.ValueString(), res) -{{ end }} {{/* if CustomCreate */}} -} - - -func (r *{{$.ResourceName}}FWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data {{$.ResourceName}}FWModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, "read {{$.Name}} resource") - - // read back {{$.Name}} - r.{{$.ResourceName}}FWRefresh(ctx, &data, &resp.State, req, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - - -func (r *{{$.ResourceName}}FWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var state, plan {{$.ResourceName}}FWModel - var metaData *fwmodels.ProviderMetaModel - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(req.State.Get(ctx, &state)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) 
- if resp.Diagnostics.HasError() { - return - } - - var project, billingProject types.String -{{ if $.HasProject -}} - project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{ if $.LegacyLongFormProject -}} - billingProject = strings.TrimPrefix(project, "projects/") -{{ else -}} - billingProject = project -{{- end }} -{{- end }} -{{ if $.HasRegion -}} - region := fwresource.GetRegionFramework(plan.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} -{{ if $.HasZone -}} - zone := fwresource.GetZoneFramework(plan.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} - - var schemaDefaultVals fwtransport.DefaultVars -{{ if $.HasProject -}} - schemaDefaultVals.Project = project -{{- end }} -{{ if $.HasRegion -}} - schemaDefaultVals.Region = region -{{- end }} -{{ if $.HasZone -}} - schemaDefaultVals.Zone = zone -{{- end }} - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - obj := make(map[string]interface{}) - -{{- range $prop := $.OrderProperties $.UpdateBodyProperties }} - if !plan.{{camelize $prop.Name "upper"}}.Equal(state.{{camelize $prop.Name "upper"}}) { - {{$prop.ApiName}}Prop, diags := plan.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop - } -{{- end }} - - {{/* TODO default timeouts */}} - updateTimeout, diags := data.Timeouts.Update(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.CreateUri}}") - if resp.Diagnostics.HasError() { - return - } - - log.Printf("[DEBUG] Updating {{ $.Name -}}: %#v", obj) - - headers := make(http.Header) - -{{- if $.UpdateMask }} -{{ $.CustomTemplate "templates/terraform/update_mask_fw.go.tmpl" false -}} -{{ end}} - -{{- if $.CustomCode.PreUpdate }} - {{ $.CustomTemplate $.CustomCode.PreUpdate false -}} -{{- end}} - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: r.providerConfig, - Method: "{{ upper $.UpdateVerb -}}", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: updateTimeout, - Headers: headers, -{{- if $.ErrorRetryPredicates }} - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, -{{- end}} -{{- if $.ErrorAbortPredicates }} - ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, -{{- end}} - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - -{{if and ($.GetAsync) ($.GetAsync.Allow "update") -}} -{{ if $.GetAsync.IsA "OpAsync" -}} - err = {{ $.ClientNamePascal -}}OperationWaitTime( - r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Updating {{ $.Name -}}", userAgent, - updateTimeout) - if err != nil { - resp.Diagnostics.AddError("Error, failure waiting to update {{ $.Name -}}", err.Error()) - return - } -{{- else if $.GetAsync.IsA "PollAsync" -}} - err = transport_tpg.PollingWaitTime(resource{{ $.ResourceName -}}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncExistence -}}, "Updating {{ $.Name -}}", 
d.Timeout(schema.TimeoutUpdate), {{ $.GetAsync.TargetOccurrences -}}) - if err != nil { -{{- if $.GetAsync.SuppressError -}} - log.Printf("[ERROR] Unable to confirm eventually consistent {{ $.Name -}} %q finished updating: %q", data.Id.ValueString(), err) -{{- else -}} - resp.Diagnostics.AddError("Error, failure polling for update in {{ $.Name -}}", err.Error()) - return -{{- end}} - } -{{- end}}{{/* if $.GetAsync.IsA "OpAsync" */}} -{{- end}}{{/* if and ($.GetAsync) ($.GetAsync.Allow "update") */}} - - tflog.Trace(ctx, "updated {{$.Name}} resource") - - // read back {{$.Name}} - r.{{$.ResourceName}}FWRefresh(ctx, &plan, &resp.State, req, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) -} - - -func (r *{{$.ResourceName}}FWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data {{$.ResourceName}}FWModel - var metaData *fwmodels.ProviderMetaModel - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } -{{- if $.ExcludeDelete }} - log.Printf("[WARNING] {{ $.ProductMetadata.Name }}{{" "}}{{ $.Name }} resources" + - " cannot be deleted from Google Cloud. The resource %s will be removed from Terraform" + - " state, but will still be present on Google Cloud.", data.Id.ValueString()) - r.SetId("") - - return nil -{{- else }} - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
- if resp.Diagnostics.HasError() { - return - } - - var project, billingProject types.String -{{ if $.HasProject -}} - project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{ if $.LegacyLongFormProject -}} - billingProject = strings.TrimPrefix(project, "projects/") -{{ else -}} - billingProject = project -{{- end }} -{{- end }} -{{ if $.HasRegion -}} - region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} -{{ if $.HasZone -}} - zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} - - var schemaDefaultVals fwtransport.DefaultVars -{{ if $.HasProject -}} - schemaDefaultVals.Project = project -{{- end }} -{{ if $.HasRegion -}} - schemaDefaultVals.Region = region -{{- end }} -{{ if $.HasZone -}} - schemaDefaultVals.Zone = zone -{{- end }} - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - obj := make(map[string]interface{}) - - deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.DeleteUri}}") - if resp.Diagnostics.HasError() { - return - } - -{{ if $.CustomCode.CustomDelete }} -{{ $.CustomTemplate $.CustomCode.CustomDelete false -}} -{{- else }} - headers := make(http.Header) - {{- if $.CustomCode.PreDelete }} - {{ $.CustomTemplate $.CustomCode.PreDelete false -}} - {{- end }} - - log.Printf("[DEBUG] Deleting {{ $.Name }} %q", data.Id.ValueString()) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: r.providerConfig, - Method: "{{ upper $.DeleteVerb -}}", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: deleteTimeout, - Headers: headers, -{{- if $.ErrorRetryPredicates }} - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, -{{- end}} -{{- if $.ErrorAbortPredicates }} - ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, -{{- end}} - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - diags.AddError(fmt.Sprintf("Error deleting {{ $.Name -}}: %s", data.Id.ValueString()), err.Error()) - return - } -{{if and $.GetAsync ($.GetAsync.Allow "Delete") -}} -{{ if $.GetAsync.IsA "PollAsync" }} - err := transport_tpg.PollingWaitTime(resource{{ $.ResourceName }}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncAbsence }}, "Deleting {{ $.Name }}", d.Timeout(schema.TimeoutCreate), {{ $.Async.TargetOccurrences }}) - if err != nil { -{{- if $.Async.SuppressError }} - log.Printf("[ERROR] Unable to confirm eventually consistent {{ $.Name -}} %q finished updating: %q", data.Id.ValueString(), err) -{{- else }} - resp.Diagnostics.AddError("Error, failure polling for delete in {{ $.Name -}}", err.Error()) - return -{{- end }} - } -{{- else }} - err := 
{{ $.ClientNamePascal }}OperationWaitTime( - r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Deleting {{ $.Name -}}", userAgent, - deleteTimeout) - - if err != nil { - resp.Diagnostics.AddError("Error, failure waiting to delete {{ $.Name -}}", err.Error()) - return - } -{{- end }}{{/* if $.GetAsync.IsA "PollAsync" */}} -{{- end }}{{/* if and $.GetAsync ($.GetAsync.Allow "Delete") */}} - -{{- if $.CustomCode.PostDelete }} - {{ $.CustomTemplate $.CustomCode.PostDelete false -}} -{{- end }} - - log.Printf("[DEBUG] Finished deleting {{ $.Name }} %q: %#v", data.Id.ValueString(), res) - - -{{- end }}{{/* if CustomCode.CustomDelete */}} -{{- end }}{{/* if ExcludeDelete */}} -} - -func (r *{{$.ResourceName}}FWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) -} - -func (r *{{$.ResourceName}}FWResource) {{$.ResourceName}}FWRefresh(ctx context.Context, data *{{$.ResourceName}}FWModel, state *tfsdk.State, req interface{}, diag *diag.Diagnostics) { - var metaData *fwmodels.ProviderMetaModel - //load default values -{{ if $.HasProject -}} - project := fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} -{{ if $.HasRegion -}} - region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} -{{ if $.HasZone -}} - zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } -{{- end }} - - var schemaDefaultVals fwtransport.DefaultVars -{{ if $.HasProject -}} - 
schemaDefaultVals.Project = project -{{- end }} -{{ if $.HasRegion -}} - schemaDefaultVals.Region = region -{{- end }} -{{ if $.HasZone -}} - schemaDefaultVals.Zone = zone -{{- end }} - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.SelfLinkUri}}{{$.ReadQueryParams}}") - if resp.Diagnostics.HasError() { - return - } - - log.Printf("[DEBUG] Refreshing {{ $.Name -}} data: %s", data.Id.ValueString()) - - headers := make(http.Header) -{{- if $.CustomCode.PreRead }} - {{ $.CustomTemplate $.CustomCode.PreRead false -}} -{{- end}} - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: r.providerConfig, - Method: "{{ upper $.ReadVerb -}}", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Timeout: createTimeout, - Headers: headers, -{{- if $.ErrorRetryPredicates }} - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, -{{- end}} -{{- if $.ErrorAbortPredicates }} - ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, -{{- end}} - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("{{ $.ResourceName }} %s", data.Id.ValueString()), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - } - -{{ range $prop := $.OrderProperties $.AllUserProperties }} - data.{{camelize $prop.Name "upper"}} = res["{{ $prop.ApiName -}}"] - {{$prop.ApiName}}Prop, diags := data.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } -{{ end }} - - tflog.Trace(ctx, "refreshed {{$.Name}} resource data") - - -} \ No newline at end of file diff --git a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl index f4a61aa9c5a0..f5336c101c20 100644 --- a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl @@ -42,10 +42,6 @@ description: |- --- # IAM policy for {{$.ProductMetadata.DisplayName}} {{$.Name}} -{{- if $.IamPolicy.DeprecationMessage }} -~> **Warning:** {{$.IamPolicy.DeprecationMessage}} -{{- end }} - Three different resources help you manage your IAM policy for {{$.ProductMetadata.DisplayName}} {{$.Name}}. Each of these resources serves a different use case: * `{{ $.IamTerraformName }}_policy`: Authoritative. Sets the IAM policy for the {{ lower $.Name }} and replaces any existing policy already attached. @@ -193,16 +189,15 @@ resource "{{ $.IamTerraformName }}_member" "member" { ## Argument Reference The following arguments are supported: -{{ range $param := $.IamResourceProperties }} - {{- $n := underscore $param.Name }} -{{- if eq $n $.IamParentResourceName }} -* `{{ $n }}` - (Required) Used to find the parent resource to bind the IAM policy to -{{- else if or (or (eq $n "region") (eq $n "zone")) (eq $n "location") }} -* `{{ $n }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, - the value will be parsed from the identifier of the parent resource. If no {{ $n }} is provided in the parent identifier and no - {{ $n }} is specified, it is taken from the provider configuration. 
+{{ range $param := $.IamSelfLinkProperties }} +{{- if eq $param.Name "name" }} +* `{{ $.IamParentResourceName }}` - (Required) Used to find the parent resource to bind the IAM policy to +{{- else if or (or (eq (underscore $param.Name) "region") (eq (underscore $param.Name) "zone")) (eq (underscore $param.Name) "location") }} +* `{{ underscore $param.Name }}` - (Optional) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to. If not specified, + the value will be parsed from the identifier of the parent resource. If no {{ underscore $param.Name }} is provided in the parent identifier and no + {{ underscore $param.Name }} is specified, it is taken from the provider configuration. {{- else }} -* `{{ $n }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to +* `{{ underscore $param.Name }}` - (Required) {{ $param.Description }} Used to find the parent resource to bind the IAM policy to {{- end }} {{- end }} {{- if $.IamPolicy.BaseUrl }} diff --git a/mmv1/templates/terraform/schema_property.go.tmpl b/mmv1/templates/terraform/schema_property.go.tmpl index b046c585f13c..180e49cf6312 100644 --- a/mmv1/templates/terraform/schema_property.go.tmpl +++ b/mmv1/templates/terraform/schema_property.go.tmpl @@ -161,7 +161,7 @@ Default value: {{ .ItemType.DefaultValue -}} {{ if .Sensitive -}} Sensitive: true, {{ end -}} -{{ if .WriteOnlyLegacy -}} +{{ if .WriteOnly -}} WriteOnly: true, {{ end -}} {{ if not (eq .DefaultValue nil ) -}} diff --git a/mmv1/templates/terraform/schema_property_fw.go.tmpl b/mmv1/templates/terraform/schema_property_fw.go.tmpl deleted file mode 100644 index 03bcbb82e896..000000000000 --- a/mmv1/templates/terraform/schema_property_fw.go.tmpl +++ /dev/null @@ -1,52 +0,0 @@ -{{/*# The license inside this block applies to this file. - # Copyright 2024 Google Inc. 
- # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. -*/}} -{{- define "SchemaFieldsFW"}} -{{- if .FlattenObject -}} - {{- range $prop := .ResourceMetadata.OrderProperties .UserProperties -}} - {{ template "SchemaFieldsFW" $prop }} - {{ end -}} -{{- else -}} -"{{underscore .Name -}}": schema.{{.GetFWType}}Attribute{ -{{ if .DefaultFromApi -}} - Optional: true, - Computed: true, -{{ else if .Required -}} - Required: true, -{{ else if .Output -}} - Computed: true, -{{ else -}} - Optional: true, -{{ end -}} -{{ if .DeprecationMessage -}} - DeprecationMessage: "{{ .DeprecationMessage }}", -{{ end -}} -{{ if .Sensitive -}} - Sensitive: true, -{{ end -}} -{{ if or .IsForceNew .DefaultFromApi -}} - PlanModifiers: []planmodifier.{{.GetFWType}}{ - - {{ if .IsForceNew -}} - {{lower .GetFWType}}planmodifier.RequiresReplace(), - {{ end -}} - - {{ if .DefaultFromApi -}} - {{lower .GetFWType}}planmodifier.UseStateForUnknown(), - {{ end -}} - }, -{{ end -}} -}, -{{- end -}} -{{- end -}} \ No newline at end of file diff --git a/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl b/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl index 7b7362ade39a..084107f2772e 100644 --- a/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/compute_network.go.tmpl @@ -1,12 +1,2 @@ - // BGP always-compare-med - if d.HasChange("bgp_always_compare_med") { - if _, ok := obj["routingConfig"]; !ok { - 
obj["routingConfig"] = make(map[string]interface{}) - } - obj["routingConfig"].(map[string]interface{})["bgpAlwaysCompareMed"] = d.Get("bgp_always_compare_med").(bool) - } - - // now clean up the rest - delete(obj, "numeric_id") - return obj, nil - +delete(obj, "numeric_id") // Field doesn't exist in the API +return obj, nil diff --git a/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl b/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl index 867547c9e1d8..aca47912a356 100644 --- a/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/compute_service_attachment.go.tmpl @@ -23,11 +23,4 @@ if v, ok := d.GetOkExists("enable_proxy_protocol"); !tpgresource.IsEmptyValue(re obj["enableProxyProtocol"] = enableProxyProtocolProp } -propagatedConnectionLimitProp := d.Get("propagated_connection_limit") -if sv, ok := d.GetOk("send_propagated_connection_limit_if_zero"); ok && sv.(bool) { - if v, ok := d.GetOkExists("propagated_connection_limit"); ok || !reflect.DeepEqual(v, propagatedConnectionLimitProp) { - obj["propagatedConnectionLimit"] = propagatedConnectionLimitProp - } -} - return obj, nil diff --git a/mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl b/mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl deleted file mode 100644 index d9cbfab2b092..000000000000 --- a/mmv1/templates/terraform/update_encoder/discoveryengine_cmekconfig_kmskey.go.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -// Always force-send `kms_key` value. This field is immutable and required. -// In update operation, the immutable value of this field is ignored and not found, generating generating error: "Field \"kms_key_name\" is a required field, but no value is found." 
-obj["kmsKey"] = d.Get("kms_key") -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/update_encoder/reservation.go.tmpl b/mmv1/templates/terraform/update_encoder/reservation.go.tmpl index 2037e35568e8..0aa16a8869a7 100644 --- a/mmv1/templates/terraform/update_encoder/reservation.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/reservation.go.tmpl @@ -15,36 +15,8 @@ maskId := "" firstProject := true urlUpdateMask := "" -{{- if ne $.TargetVersionName "ga" }} - - if d.HasChange("share_settings.0.projects") { - // Get name. - nameProp, err := expandComputeReservationName(d.Get("name"), d, config) - if err != nil { - return nil, fmt.Errorf("Invalid value for name: %s", err) - } else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { - newObj["name"] = nameProp - } - // Get zone. - zoneProp, err := expandComputeReservationZone(d.Get("zone"), d, config) - if err != nil { - return nil, fmt.Errorf("Invalid value for zone: %s", err) - } else if v, ok := d.GetOkExists("zone"); !tpgresource.IsEmptyValue(reflect.ValueOf(zoneProp)) && (ok || !reflect.DeepEqual(v, zoneProp)) { - newObj["zone"] = zoneProp - } - transformed := make(map[string]interface{}) - // Set shareType and projects. - transformed["shareType"] = "SPECIFIC_PROJECTS" - transformed["projects"] = obj["shareSettings"].(map[string]interface{})["projects"] - urlUpdateMask = "?paths=shareSettings.projects" - newObj["shareSettings"] = transformed - newObj["urlUpdateMask"] = urlUpdateMask - - } else if d.HasChange("share_settings") { -{{- else }} if d.HasChange("share_settings") { -{{- end }} // Get name. 
nameProp, err := expandComputeReservationName(d.Get("name"), d, config) if err != nil { diff --git a/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl b/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl index 7ecda8395307..3dd77710418a 100644 --- a/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl +++ b/mmv1/templates/terraform/update_encoder/ssl_policy.tmpl @@ -10,10 +10,10 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -// TODO: https://github.com/GoogleCloudPlatform/magic-modules/issues/184 Handle fingerprint consistently +// TODO(https://github.com/GoogleCloudPlatform/magic-modules/issues/184): Handle fingerprint consistently obj["fingerprint"] = d.Get("fingerprint") -// TODO: https://github.com/GoogleCloudPlatform/magic-modules/issues/183 Can we generalize this +// TODO(https://github.com/GoogleCloudPlatform/magic-modules/issues/183): Can we generalize this // Send a null fields if customFeatures is empty. if v, ok := obj["customFeatures"]; ok && len(v.([]interface{})) == 0 { obj["customFeatures"] = nil diff --git a/mmv1/templates/terraform/update_mask_fw.go.tmpl b/mmv1/templates/terraform/update_mask_fw.go.tmpl deleted file mode 100644 index 8ad689215da2..000000000000 --- a/mmv1/templates/terraform/update_mask_fw.go.tmpl +++ /dev/null @@ -1,27 +0,0 @@ -{{- /* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -updateMask := []string{} -{{- $maskGroups := $.GetPropertyUpdateMasksGroups $.UpdateBodyProperties "" }} -{{- range $key := $.GetPropertyUpdateMasksGroupKeys $.UpdateBodyProperties }} - -if !plan.{{camelize $key "upper"}}.Equal(state.{{camelize $key "upper"}}) { - updateMask = append(updateMask, "{{ join (index $maskGroups $key) "\",\n\""}}") -} -{{- end }} -// updateMask is a URL parameter but not present in the schema, so ReplaceVars -// won't set it -url, err := transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) -if err != nil { - resp.Diagnostics.AddError("Error, failure building update mask query parameters in {{ $.Name -}}", err.Error()) - return -} \ No newline at end of file diff --git a/mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl b/mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl new file mode 100644 index 000000000000..0f5b6a589974 --- /dev/null +++ b/mmv1/templates/terraform/validation/bigquery_data_transfer_config.go.tmpl @@ -0,0 +1 @@ +validation.PreferWriteOnlyAttribute(cty.GetAttrPath("sensitive_params").IndexInt(0).GetAttr("secret_access_key"),cty.GetAttrPath("sensitive_params").IndexInt(0).GetAttr("secret_access_key_wo")) diff --git a/mmv1/templates/terraform/validation/secret_version.go.tmpl b/mmv1/templates/terraform/validation/secret_version.go.tmpl new file mode 100644 index 000000000000..95ec5f7ba858 --- /dev/null +++ b/mmv1/templates/terraform/validation/secret_version.go.tmpl @@ -0,0 +1 @@ +validation.PreferWriteOnlyAttribute(cty.GetAttrPath("secret_data"),cty.GetAttrPath("secret_data_wo")) diff --git a/mmv1/templates/tgc/resource_converter.go.tmpl b/mmv1/templates/tgc/resource_converter.go.tmpl index a5fd4ec23e0b..09087598c84d 100644 --- a/mmv1/templates/tgc/resource_converter.go.tmpl +++ b/mmv1/templates/tgc/resource_converter.go.tmpl @@ -81,18 +81,18 @@ func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, 
conf obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} {{- if $prop.FlattenObject }} - {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) + {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) {{- else }} - {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) + {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) {{- end}} if err != nil { return nil, err {{- if not $prop.SendEmptyValue }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { {{- else }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { {{- end }} - obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop } {{- end}} diff --git a/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl deleted file mode 100644 index 4cb7d48d8b92..000000000000 --- a/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl +++ /dev/null @@ -1,32 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2025 Google LLC. All Rights Reserved. 
- - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. */ -}} -{{- define "flattenTgcPropertyMethod" }} - {{- if $.CustomTgcFlatten }} -{{ $.CustomTemplate $.CustomTgcFlatten true -}} - {{- else if $.IsA "KeyValueLabels" }} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return tgcresource.RemoveTerraformAttributionLabel(v) -} - {{- else if or (and (eq $.Name "zone") $.ResourceMetadata.HasZone) (and (eq $.Name "region") $.ResourceMetadata.HasRegion) -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) -} - {{- else }} -{{ template "flattenPropertyMethod" $ -}} - {{- end }} -{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl deleted file mode 100644 index 5c84b3188429..000000000000 --- a/mmv1/templates/tgc_next/cai2hcl/full_to_relative_path.go.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -{{- define "fullToRelativePath" }} - if v == nil { - return v - } - relative, err := tpgresource.GetRelativePath(v.(string)) - if err != nil { - return v - } - return relative -{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl 
b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl deleted file mode 100644 index 15a71bd749e5..000000000000 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ /dev/null @@ -1,152 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2025 Google LLC. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. */ -}} -{{$.CodeHeader TemplatePath}} - -package {{ lower $.ProductMetadata.Name }} - -import ( -{{/* We list all the v2 imports here and unstable imports, because we run 'goimports' to guess the correct - set of imports, which will never guess the major version correctly. 
*/ -}} - "github.com/apparentlymart/go-cidr/cidr" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" - "google.golang.org/api/bigtableadmin/v2" - "google.golang.org/api/googleapi" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" -) - -{{- $caiProductBaseUrl := $.CaiProductBaseUrl }} -{{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} - -type {{ $.ResourceName -}}Cai2hclConverter struct { - name string - schema map[string]*schema.Schema -} - -func New{{ $.ResourceName -}}Cai2hclConverter(provider *schema.Provider) models.Cai2hclConverter { - schema := provider.ResourcesMap[{{ $.ResourceName -}}SchemaName].Schema - - return &{{ $.ResourceName -}}Cai2hclConverter{ - name: {{ $.ResourceName -}}SchemaName, - schema: schema, - } -} - -// Convert converts asset to HCL 
resource blocks. -func (c *{{ $.ResourceName -}}Cai2hclConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { - var blocks []*models.TerraformResourceBlock - block, err := c.convertResourceData(asset) - if err != nil { - return nil, err - } - blocks = append(blocks, block) - return blocks, nil -} - -func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { - if asset.Resource == nil || asset.Resource.Data == nil { - return nil, fmt.Errorf("asset resource data is nil") - } - - var err error - res := asset.Resource.Data - config := transport.NewConfig() - d := &schema.ResourceData{} - -{{ if $.TgcHclBlockName -}} - hclBlockName := res["{{ $.TgcHclBlockName -}}"].(string) - - {{- else -}} - assetNameParts := strings.Split(asset.Name, "/") - hclBlockName := assetNameParts[len(assetNameParts)-1] -{{ end}} - - hclData := make(map[string]interface{}) - -{{ if $.CustomCode.TgcDecoder -}} - res, hclData, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res, hclData) - if err != nil { - return nil, err - } -{{ end}} - -{{ if $.CustomCode.Decoder -}} - res, err = resource{{ $.ResourceName -}}Decoder(d, config, res) - if err != nil { - return nil, err - } - - if res == nil { - // Decoding the object has resulted in it being gone. It may be marked deleted. - return nil, nil - } -{{ end}} - -{{/* Attempt to parse all self-link parameters from asset name. 
*/}} - outputFields := {{ $.OutputFieldSetStr }} - utils.ParseUrlParamValuesFromAssetName(asset.Name, "{{ $.CaiAssetNameTemplate $productBackendName }}", outputFields, hclData) -{{ range $prop := $.ReadPropertiesForTgc }} - {{ if $prop.FlattenObject -}} - if flattenedProp := flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config); flattenedProp != nil { - if err := tgcresource.MergeFlattenedProperties(hclData, flattenedProp); err != nil { - return nil, fmt.Errorf("error merging flattened properties from {{ $prop.Name }}: %s", err) - } - } - {{- else -}} - hclData["{{ underscore $prop.Name -}}"] = flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config) - {{- end}} -{{- end}} - - ctyVal, err := utils.MapToCtyValWithSchema(hclData, c.schema) - if err != nil { - return nil, err - } - return &models.TerraformResourceBlock{ - Labels: []string{c.name, hclBlockName}, - Value: ctyVal, - }, nil -} - -{{ range $prop := $.ReadPropertiesForTgc }} -{{- template "flattenTgcPropertyMethod" $prop -}} -{{- end }} - -{{- if $.CustomCode.TgcDecoder }} -func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}, hclData map[string]interface{}) (map[string]interface{}, map[string]interface{}, error) { - {{ $.CustomTemplate $.CustomCode.TgcDecoder false -}} -} -{{- end }} - -{{- if $.CustomCode.Decoder }} -func resource{{ $.ResourceName -}}Decoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { - {{ $.CustomTemplate $.CustomCode.Decoder false -}} -} -{{- end }} diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl index 6338ae48945c..72acfe3809ae 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl +++ 
b/mmv1/templates/tgc_next/cai2hcl/resource_converters.go.tmpl @@ -29,33 +29,17 @@ package converters import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" - {{- range $service := $.Products }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/{{ lower $service.Name }}" - {{- end }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/compute" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/services/resourcemanager" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - tpg_provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" + tpg_provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" ) var provider *schema.Provider = tpg_provider.Provider() // ConverterMap is a collection of converters instances, indexed by cai asset type. 
-var ConverterMap = map[string]map[string]models.Cai2hclConverter{ - // ####### START handwritten resources ########### - resourcemanager.ProjectAssetType: { - "Default": resourcemanager.NewProjectCai2hclConverter(provider), - }, - compute.ComputeInstanceAssetType: { - "Default": compute.NewComputeInstanceCai2hclConverter(provider), - }, - // ####### END handwritten resources ########### - - {{- range $resourceType, $resources := $.ResourcesByCaiResourceType}} - {{ $resourceType }}AssetType: { - {{- range $object := $resources }} - "{{ $object.AliasName }}": {{ $object.ServiceName }}.New{{ $object.ResourceName -}}Cai2hclConverter(provider), - {{- end }} - }, - {{- end }} +var ConverterMap = map[string]models.Converter{ + resourcemanager.ProjectAssetType: resourcemanager.NewProjectConverter(provider), + compute.ComputeInstanceAssetType: compute.NewComputeInstanceConverter(provider), } diff --git a/mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl b/mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl deleted file mode 100644 index e1d5f7ad792f..000000000000 --- a/mmv1/templates/tgc_next/custom_expand/pubsub_subscription_attributes.go.tmpl +++ /dev/null @@ -1,22 +0,0 @@ -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - m := make(map[string]string) - if v == nil { - return m, nil - } - - vMap, ok := v.(map[string]interface{}) - if !ok { - return m, fmt.Errorf("non-map v: %v (%T)", v, v) - } - - // The default value is present in CAI asset - if len(vMap) == 0 { - m["x-goog-version"] = "v1" - return m, nil - } - - for k, val := range vMap { - m[k] = val.(string) - } - return m, nil -} diff --git a/mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl b/mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl deleted file mode 100644 index b081d2b53cb5..000000000000 --- 
a/mmv1/templates/tgc_next/custom_expand/set_to_slice_or_nil.go.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - vSet := v.(*schema.Set) - if vSet.Len() == 0 { - return nil, nil - } - return vSet.List(), nil -} diff --git a/mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl deleted file mode 100644 index 612e2e446a02..000000000000 --- a/mmv1/templates/tgc_next/custom_flatten/compute_backend_service_signed_url_cache_max_age_sec.go.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return 3600 - } - - // Handles the string fixed64 format - if strVal, ok := v.(string); ok { - if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { - return intVal - } - } - - // number values are represented as float64 - if floatVal, ok := v.(float64); ok { - intVal := int(floatVal) - return intVal - } - - return v // let terraform core handle it otherwise -} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl deleted file mode 100644 index 2ce5d4f233e0..000000000000 --- a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -// startHourOfDay is missing in CAI, but has default value 0 in API object -if rules, ok := res["backupRules"].([]interface{}); ok { - for _, raw := range rules { - if rule, ok := raw.(map[string]interface{}); ok { - if raw, ok := rule["standardSchedule"]; ok { - if ss, ok := raw.(map[string]interface{}); ok { - if raw, ok := ss["backupWindow"]; ok { - if bw, ok := 
raw.(map[string]interface{}); ok { - if _, ok := ss["startHourOfDay"]; !ok { - bw["startHourOfDay"] = 0 - } - } - } - } - } - } - } -} - -return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl deleted file mode 100644 index 2596316b67fc..000000000000 --- a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl +++ /dev/null @@ -1,9 +0,0 @@ -if sm, ok := res["selfManaged"].(map[string]interface{}); ok { - sm["pemCertificate"] = res["pemCertificate"] - sm["pemPrivateKey"] = "hidden" -} -if vStr, ok := res["scope"].(string); ok && vStr == "DEFAULT" { - // Omit the default value. - delete(res, "scope") -} -return res, hclData, nil diff --git a/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl b/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl deleted file mode 100644 index 69605bf73cd3..000000000000 --- a/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl +++ /dev/null @@ -1,4 +0,0 @@ -// billing_project is the required url_param_only property, but is not in CAI asset name or data -// TODO: handle it in a generic way -hclData["billing_project"] = "null" -return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl deleted file mode 100644 index 74d3e6ab7ea5..000000000000 --- a/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl +++ /dev/null @@ -1,31 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} - -if v, ok := res["backends"]; ok { - backends := v.([]interface{}) - for _, vBackend := range backends { - backend := vBackend.(map[string]interface{}) - if vCms, ok := backend["customMetrics"]; ok { - cms := vCms.([]interface{}) - for _, vCm := range cms { - cm := vCm.(map[string]interface{}) - if vMu, ok := cm["maxUtilization"]; ok { - mu := vMu.(float64) - cm["maxUtilization"] = fmt.Sprintf("%.1f", mu) - } - } - } - } -} - -return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl deleted file mode 100644 index b1d31f1ad3f9..000000000000 --- a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl +++ /dev/null @@ -1,23 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} -// In the GET API response, the field stackType is not present. -// In CAI asset, "stackType" has value "UNSPECIFIED_STACK_TYPE" -// So set the value to empty string in this case. 
-if raw, ok := res["stackType"]; ok { - v := raw.(string) - if v == "UNSPECIFIED_STACK_TYPE" { - res["stackType"] = "" - } -} - -return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl b/mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl deleted file mode 100644 index 940af8041c71..000000000000 --- a/mmv1/templates/tgc_next/encoders/certificatemanager_certificate.go.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -if _, ok := d.GetOk("self_managed"); ok { - // self_managed.pem_certificate goes in root level of cai asset data. - selfManagedPemCertificateProp := d.Get("self_managed.0.pem_certificate") - obj["pemCertificate"] = selfManagedPemCertificateProp.(string) -} - -return obj, nil diff --git a/mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl b/mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl deleted file mode 100644 index 3936ad1725d1..000000000000 --- a/mmv1/templates/tgc_next/encoders/compute_disk.go.tmpl +++ /dev/null @@ -1,5 +0,0 @@ -config := meta.(*transport_tpg.Config) - -obj["type"] = tgcresource.GetFullUrl(config, obj["type"], "https://www.googleapis.com/compute/v1/") - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl b/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl deleted file mode 100644 index d6b30a5209fd..000000000000 --- a/mmv1/templates/tgc_next/provider/provider_mmv1_resources.go.tmpl +++ /dev/null @@ -1,26 +0,0 @@ -package provider - -import ( - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - - {{- range $object := $.ResourcesForVersion }} - "github.com/hashicorp/terraform-provider-google/google/services/{{ $object.ServiceName }}" - {{- end }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" -) - -var handwrittenTfplan2caiResources = map[string]*schema.Resource{ - // ####### START handwritten 
resources ########### - "google_compute_instance": compute.ResourceComputeInstance(), - "google_project": resourcemanager.ResourceGoogleProject(), - // ####### END handwritten resources ########### -} - -// Generated resources: {{ $.ResourceCount }} -var generatedResources = map[string]*schema.Resource{ - {{- range $object := $.ResourcesForVersion }} - {{- if $object.ResourceName }} - "{{ $object.TerraformName }}": {{ $object.ServiceName }}.Resource{{ $object.ResourceName -}}(), - {{- end }} - {{- end }} -} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/services/resource.go.tmpl b/mmv1/templates/tgc_next/services/resource.go.tmpl deleted file mode 100644 index 46312301b458..000000000000 --- a/mmv1/templates/tgc_next/services/resource.go.tmpl +++ /dev/null @@ -1,73 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2025 Google LLC. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
*/ -}} -{{$.CodeHeader TemplatePath}} - -package {{ lower $.ProductMetadata.Name }} - -import ( - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - - "github.com/apparentlymart/go-cidr/cidr" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" -) - -{{ if $.DefineAssetTypeForResourceInProduct -}} -const {{ $.CaiResourceType -}}AssetType string = "{{ $.CaiAssetType }}" -{{- end }} - -const {{ $.ResourceName -}}SchemaName string = "{{ $.TerraformName }}" - -{{if $.CustomCode.Constants -}} - {{- $.CustomTemplate $.CustomCode.Constants true -}} -{{- end}} - -func Resource{{ $.ResourceName -}}() *schema.Resource { - return &schema.Resource{ - Schema: map[string]*schema.Schema{ - {{- range $prop := $.OrderProperties $.AllUserProperties }} -{{template "SchemaFields" $prop -}} - {{- end }} - {{- range $prop := $.VirtualFields }} -{{template "SchemaFields" $prop -}} - {{- end }} -{{- if $.CustomCode.ExtraSchemaEntry }} - {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} -{{- end}} -{{ if $.HasProject -}} - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - }, -{{- end}} -{{- if $.HasSelfLink }} - "self_link": { - Type: schema.TypeString, - Computed: true, - }, -{{- end}} - }, - UseJSONNumber: true, - } -} - -{{- range $prop := $.AllUserProperties }} -{{template "SchemaSubResource" $prop}} -{{- end}} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/test/test_file.go.tmpl b/mmv1/templates/tgc_next/test/test_file.go.tmpl deleted file mode 100644 index 43d14d982ea1..000000000000 --- a/mmv1/templates/tgc_next/test/test_file.go.tmpl +++ /dev/null @@ -1,44 
+0,0 @@ -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: MMv1 *** -// -// ---------------------------------------------------------------------------- -// -// This file is automatically generated by Magic Modules and manual -// changes will be clobbered when the file is regenerated. -// -// Please read more about how to change this file in -// .github/CONTRIBUTING.md. -// -// ---------------------------------------------------------------------------- - -package {{$.PackageName}}_test - -import ( - "testing" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/test" -) - -{{ range $e := $.TestExamples }} -func TestAcc{{ $e.TestSlug $.ProductMetadata.Name $.Name }}(t *testing.T) { - {{- if $e.TGCSkipTest }} - t.Skip("{{$e.TGCSkipTest}}") - {{- end }} - t.Parallel() - - test.BidirectionalConversion( - t, - []string{ -{{- range $field := $.TGCTestIgnorePropertiesToStrings $e }} - "{{ $field }}", -{{- end }} - }, - []string{ -{{- range $field := $e.TGCTestIgnoreInAsset }} - "{{ $field }}", -{{- end }} - }, - ) -} -{{- end }} diff --git a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl deleted file mode 100644 index 3d5b82b33b21..000000000000 --- a/mmv1/templates/tgc_next/tfplan2cai/expand_property_method_tgc.go.tmpl +++ /dev/null @@ -1,21 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2025 Google LLC. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. */ -}} -{{- define "expandTgcPropertyMethod" }} - {{ if $.CustomTgcExpand }} -{{- $.CustomTemplate $.CustomTgcExpand false -}} - {{ else }} -{{- template "expandPropertyMethod" $ -}} - {{ end }} -{{- end }} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl deleted file mode 100644 index ad2153a20dca..000000000000 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ /dev/null @@ -1,130 +0,0 @@ -{{/* The license inside this block applies to this file - Copyright 2025 Google LLC. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. */ -}} -{{$.CodeHeader TemplatePath}} - -package {{ lower $.ProductMetadata.Name }} - -import ( -{{/* We list all the v2 imports here and unstable imports, because we run 'goimports' to guess the correct - set of imports, which will never guess the major version correctly. 
*/ -}} - "github.com/apparentlymart/go-cidr/cidr" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" - "google.golang.org/api/bigtableadmin/v2" - "google.golang.org/api/googleapi" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" -) - -{{- $caiProductBaseUrl := $.CaiProductBaseUrl }} -{{- $productBackendName := $.CaiProductBackendName $caiProductBaseUrl }} -{{- $apiVersion := $.CaiApiVersion $productBackendName $caiProductBaseUrl}} - -func {{ $.ResourceName -}}Tfplan2caiConverter() cai.Tfplan2caiConverter { - return cai.Tfplan2caiConverter{ - Convert: Get{{ $.ResourceName -}}CaiAssets, - } -} - -func Get{{ $.ResourceName -}}CaiAssets(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { - name, err := cai.AssetName(d, config, "{{ $.CaiAssetNameTemplate $productBackendName }}") - if err != nil { - return []caiasset.Asset{}, err - } 
- if obj, err := Get{{ $.ResourceName -}}CaiObject(d, config); err == nil { - location, _ := tpgresource.GetLocation(d, config) - if location == "" { - location = "global" - } - return []caiasset.Asset{ - { - Name: name, - Type: {{ $.CaiResourceType -}}AssetType, - Resource: &caiasset.AssetResource{ - Version: "{{ $apiVersion }}", - DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/{{ $productBackendName }}/{{ $apiVersion }}/rest", - DiscoveryName: "{{ or $.ApiResourceTypeKind $.Name }}", - Data: obj, - Location: location, - }, - }, - }, nil - } else { - return []caiasset.Asset{}, err - } -} - -func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { - obj := make(map[string]interface{}) -{{- range $prop := $.SettableProperties }} -{{- if $prop.FlattenObject }} - {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) -{{- else }} - {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) -{{- end}} - if err != nil { - return nil, err -{{- if and (not $prop.SendEmptyValue) (not $prop.TGCSendEmptyValue) }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { -{{- else }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { -{{- end }} - obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop - } -{{- end}} - -{{ if and $.CustomCode.Encoder (not $.TGCIgnoreTerraformEncoder) -}} - obj, err = resource{{ $.ResourceName -}}Encoder(d, config, obj) - if err != nil { - return nil, err - } -{{- end }} - -{{- if $.CustomCode.TgcEncoder }} - return resource{{ $.ResourceName -}}TgcEncoder(d, 
config, obj) -{{- else }} - return obj, nil -{{- end}} -} - -{{if and $.CustomCode.Encoder (not $.TGCIgnoreTerraformEncoder) -}} -func resource{{ $.ResourceName -}}Encoder(d tpgresource.TerraformResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { -{{ $.CustomTemplate $.CustomCode.Encoder false -}} -} -{{- end}} - -{{if $.CustomCode.TgcEncoder -}} -func resource{{ $.ResourceName -}}TgcEncoder(d tpgresource.TerraformResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { -{{ $.CustomTemplate $.CustomCode.TgcEncoder false -}} -} -{{- end}} - -{{ range $prop := $.SettableProperties }} - {{- template "expandTgcPropertyMethod" $prop -}} -{{- end}} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl index 9778eeda65d9..5c4ef3a46ad5 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converters.go.tmpl @@ -29,21 +29,11 @@ package converters import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - {{- range $service := $.Products }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/{{ lower $service.Name }}" - {{- end }} - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/services/resourcemanager" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/compute" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/services/resourcemanager" ) -var ConverterMap = map[string]cai.Tfplan2caiConverter{ - // ####### START handwritten resources ########### - "google_project": resourcemanager.ProjectTfplan2caiConverter(), - "google_compute_instance": compute.ComputeInstanceTfplan2caiConverter(), - // ####### END handwritten resources ########### 
- - {{- range $object := $.ResourcesForVersion }} - {{- if $object.ResourceName }} - "{{ $object.TerraformName }}": {{ $object.ServiceName }}.{{ $object.ResourceName -}}Tfplan2caiConverter(), - {{- end }} - {{- end }} +var ConverterMap = map[string]cai.ResourceConverter{ + "google_project": resourcemanager.ResourceConverterProject(), + "google_compute_instance": compute.ResourceConverterComputeInstance(), } \ No newline at end of file diff --git a/mmv1/third_party/cai2hcl/convert_test.go b/mmv1/third_party/cai2hcl/convert_test.go index 0d8fc7bac6bb..fa2e160c6696 100644 --- a/mmv1/third_party/cai2hcl/convert_test.go +++ b/mmv1/third_party/cai2hcl/convert_test.go @@ -1,8 +1,9 @@ package cai2hcl_test import ( - cai2hclTesting "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/testing" "testing" + + cai2hclTesting "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/testing" ) func TestConvertCompute(t *testing.T) { @@ -22,12 +23,3 @@ func TestConvertResourcemanager(t *testing.T) { "project_create", }) } - -func TestConvertNetworksecurity(t *testing.T) { - cai2hclTesting.AssertTestFiles( - t, - "./services/networksecurity/testdata", - []string{ - "server_tls_policy", - }) -} diff --git a/mmv1/third_party/cai2hcl/converter_map.go b/mmv1/third_party/cai2hcl/converter_map.go index 81f909ff4e24..65ad92505446 100644 --- a/mmv1/third_party/cai2hcl/converter_map.go +++ b/mmv1/third_party/cai2hcl/converter_map.go @@ -3,7 +3,6 @@ package cai2hcl import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/common" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/services/compute" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/services/networksecurity" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/services/resourcemanager" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" tpg_provider 
"github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" @@ -23,8 +22,6 @@ var AssetTypeToConverter = map[string]string{ resourcemanager.ProjectAssetType: "google_project", resourcemanager.ProjectBillingAssetType: "google_project", - - networksecurity.ServerTLSPolicyAssetType: "google_network_security_server_tls_policy", } // ConverterMap is a collection of converters instances, indexed by name. @@ -38,6 +35,4 @@ var ConverterMap = map[string]common.Converter{ "google_compute_region_health_check": compute.NewComputeRegionHealthCheckConverter(provider), "google_project": resourcemanager.NewProjectConverter(provider), - - "google_network_security_server_tls_policy": networksecurity.NewServerTLSPolicyConverter(provider), } diff --git a/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go b/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go index 674f633f6203..ff66c9e2292a 100644 --- a/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go +++ b/mmv1/third_party/cai2hcl/services/compute/compute_forwarding_rule.go @@ -307,5 +307,5 @@ func flattenComputeForwardingRuleRegion(v interface{}, d *schema.ResourceData, c if v == nil { return v } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) + return tpgresource.NameFromSelfLinkStateFunc(v) } diff --git a/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go b/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go index 7e6a79dd03de..56bcf640ccad 100644 --- a/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go +++ b/mmv1/third_party/cai2hcl/services/compute/compute_region_backend_service.go @@ -1394,5 +1394,5 @@ func flattenComputeRegionBackendServiceRegion(v interface{}, d *schema.ResourceD if v == nil { return v } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) + return tpgresource.NameFromSelfLinkStateFunc(v) } diff --git 
a/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go b/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go index ea66d50a8018..f816ccf3df65 100644 --- a/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go +++ b/mmv1/third_party/cai2hcl/services/compute/compute_region_health_check.go @@ -564,5 +564,5 @@ func flattenComputeRegionHealthCheckRegion(v interface{}, d *schema.ResourceData if v == nil { return v } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) + return tpgresource.NameFromSelfLinkStateFunc(v) } diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go deleted file mode 100644 index 23b476b006f4..000000000000 --- a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go +++ /dev/null @@ -1,173 +0,0 @@ -package networksecurity - -import ( - "errors" - "fmt" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/common" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/caiasset" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - netsecapi "google.golang.org/api/networksecurity/v1" - "strings" -) - -// ServerTLSPolicyAssetType is the CAI asset type name. -const ServerTLSPolicyAssetType string = "networksecurity.googleapis.com/ServerTlsPolicy" - -// ServerTLSPolicySchemaName is the TF resource schema name. -const ServerTLSPolicySchemaName string = "google_network_security_server_tls_policy" - -// ServerTLSPolicyConverter for networksecurity server tls policy resource. -type ServerTLSPolicyConverter struct { - name string - schema map[string]*schema.Schema -} - -// NewServerTLSPolicyConverter returns an HCL converter. 
-func NewServerTLSPolicyConverter(provider *schema.Provider) common.Converter { - schema := provider.ResourcesMap[ServerTLSPolicySchemaName].Schema - - return &ServerTLSPolicyConverter{ - name: ServerTLSPolicySchemaName, - schema: schema, - } -} - -// Convert converts CAI assets to HCL resource blocks (Provider version: 6.45.0) -func (c *ServerTLSPolicyConverter) Convert(assets []*caiasset.Asset) ([]*common.HCLResourceBlock, error) { - var blocks []*common.HCLResourceBlock - var err error - - for _, asset := range assets { - if asset == nil { - continue - } else if asset.Resource == nil || asset.Resource.Data == nil { - return nil, fmt.Errorf("INVALID_ARGUMENT: Asset resource data is nil") - } else if asset.Type != ServerTLSPolicyAssetType { - return nil, fmt.Errorf("INVALID_ARGUMENT: Expected asset of type %s, but received %s", ServerTLSPolicyAssetType, asset.Type) - } - block, errConvert := c.convertResourceData(asset) - blocks = append(blocks, block) - if errConvert != nil { - err = errors.Join(err, errConvert) - } - } - return blocks, err -} - -func (c *ServerTLSPolicyConverter) convertResourceData(asset *caiasset.Asset) (*common.HCLResourceBlock, error) { - if asset == nil || asset.Resource == nil || asset.Resource.Data == nil { - return nil, fmt.Errorf("INVALID_ARGUMENT: Asset resource data is nil") - } - - hcl, _ := flattenServerTLSPolicy(asset.Resource) - - ctyVal, err := common.MapToCtyValWithSchema(hcl, c.schema) - if err != nil { - return nil, err - } - - resourceName := hcl["name"].(string) - return &common.HCLResourceBlock{ - Labels: []string{c.name, resourceName}, - Value: ctyVal, - }, nil -} - -func flattenServerTLSPolicy(resource *caiasset.AssetResource) (map[string]any, error) { - result := make(map[string]any) - - var serverTLSPolicy *netsecapi.ServerTlsPolicy - if err := common.DecodeJSON(resource.Data, &serverTLSPolicy); err != nil { - return nil, err - } - - result["name"] = flattenName(serverTLSPolicy.Name) - result["labels"] = 
serverTLSPolicy.Labels - result["description"] = serverTLSPolicy.Description - result["allow_open"] = serverTLSPolicy.AllowOpen - result["server_certificate"] = flattenServerCertificate(serverTLSPolicy.ServerCertificate) - result["mtls_policy"] = flattenMTLSPolicy(serverTLSPolicy.MtlsPolicy) - result["project"] = flattenProjectName(serverTLSPolicy.Name) - - result["location"] = resource.Location - - return result, nil -} - -func flattenName(name string) string { - tokens := strings.Split(name, "/") - return tokens[len(tokens)-1] -} - -func flattenServerCertificate(certificate *netsecapi.GoogleCloudNetworksecurityV1CertificateProvider) []map[string]any { - if certificate == nil { - return nil - } - - result := make(map[string]any) - result["certificate_provider_instance"] = flattenCertificateProviderInstance(certificate.CertificateProviderInstance) - result["grpc_endpoint"] = flattenGrpcEndpoint(certificate.GrpcEndpoint) - - return []map[string]any{result} -} - -func flattenMTLSPolicy(policy *netsecapi.MTLSPolicy) []map[string]any { - if policy == nil { - return nil - } - - result := make(map[string]any) - result["client_validation_mode"] = policy.ClientValidationMode - result["client_validation_trust_config"] = policy.ClientValidationTrustConfig - result["client_validation_ca"] = flattenClientValidationCA(policy.ClientValidationCa) - - return []map[string]any{result} -} - -func flattenCertificateProviderInstance(instance *netsecapi.CertificateProviderInstance) []map[string]any { - if instance == nil { - return nil - } - - result := make(map[string]any) - result["plugin_instance"] = instance.PluginInstance - - return []map[string]any{result} -} - -func flattenGrpcEndpoint(endpoint *netsecapi.GoogleCloudNetworksecurityV1GrpcEndpoint) []map[string]any { - if endpoint == nil { - return nil - } - - result := make(map[string]any) - result["target_uri"] = endpoint.TargetUri - - return []map[string]any{result} -} - -func flattenClientValidationCA(cas 
[]*netsecapi.ValidationCA) []map[string]any { - if cas == nil { - return nil - } - - result := make([]map[string]any, 0, len(cas)) - - for _, ca := range cas { - converted := map[string]any{ - "certificate_provider_instance": flattenCertificateProviderInstance(ca.CertificateProviderInstance), - "grpc_endpoint": flattenGrpcEndpoint(ca.GrpcEndpoint), - } - result = append(result, converted) - } - - return result -} - -func flattenProjectName(name string) string { - tokens := strings.Split(name, "/") - if len(tokens) < 2 || tokens[0] != "projects" { - return "" - } - return tokens[1] -} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go deleted file mode 100644 index 9fc4b867c58c..000000000000 --- a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package networksecurity_test - -import ( - cai2hcl_testing "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/testing" - "testing" -) - -func TestServerTlsPolicy(t *testing.T) { - cai2hcl_testing.AssertTestFiles( - t, - "./testdata", - []string{"server_tls_policy"}) -} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json deleted file mode 100644 index 085ae9d032a8..000000000000 --- a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.json +++ /dev/null @@ -1,361 +0,0 @@ -[ - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_policy", - "resource": { - 
"data": { - "createTime": "2025-07-29T16:00:11.184079186Z", - "description": "my description", - "labels": { - "foo": "bar" - }, - "mtlsPolicy": { - "clientValidationMode": "REJECT_INVALID", - "clientValidationTrustConfig": "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_policy", - "updateTime": "2025-07-29T16:00:15.415731403Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.082558809Z", - "description": "my description", - "labels": { - "foo": "bar" - }, - "mtlsPolicy": { - "clientValidationCa": [ - { - "certificateProviderInstance": { - "pluginInstance": "google_cloud_private_spiffe" - } - } - ] - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_policy", - "serverCertificate": { - "certificateProviderInstance": { - "pluginInstance": "google_cloud_private_spiffe" - } - }, - "updateTime": "2025-07-29T16:00:15.692522561Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - 
"ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_server_cert_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.040588118Z", - "description": "my description", - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_server_cert_policy", - "serverCertificate": { - "grpcEndpoint": { - "targetUri": "unix:mypath" - } - }, - "updateTime": "2025-07-29T16:00:15.680321984Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/empty_description_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:11.660089355Z", - "labels": { - "foo": "bar" - }, - "mtlsPolicy": { - "clientValidationMode": "REJECT_INVALID", - "clientValidationTrustConfig": "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/empty_description_policy", - "updateTime": "2025-07-29T16:00:16.847799545Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - 
"discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/empty_labels_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.040240475Z", - "description": "my description", - "mtlsPolicy": { - "clientValidationMode": "REJECT_INVALID", - "clientValidationTrustConfig": "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/empty_labels_policy", - "updateTime": "2025-07-29T16:00:16.309813819Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/us-central1/serverTlsPolicies/regional_location_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.162242768Z", - "description": "my description", - "labels": { - "foo": "bar" - }, - "mtlsPolicy": { - "clientValidationMode": "REJECT_INVALID", - "clientValidationTrustConfig": 
"projects/307841421122/locations/us-central1/trustConfigs/tsmx-20250609-tc1" - }, - "name": "projects/ccm-breakit/locations/us-central1/serverTlsPolicies/regional_location_policy", - "updateTime": "2025-07-29T16:00:15.08724113Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "us-central1", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_allow_invalid_cert_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.078450339Z", - "description": "my description", - "labels": { - "foo": "bar" - }, - "mtlsPolicy": { - "clientValidationMode": "ALLOW_INVALID_OR_MISSING_CLIENT_CERT" - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/lb_mtls_allow_invalid_cert_policy", - "updateTime": "2025-07-29T16:00:16.300643457Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": 
"//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_allow_open_policy", - "resource": { - "data": { - "allowOpen": true, - "createTime": "2025-07-29T16:00:11.930403186Z", - "description": "my description", - "mtlsPolicy": { - "clientValidationCa": [ - { - "certificateProviderInstance": { - "pluginInstance": "google_cloud_private_spiffe" - } - } - ] - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_allow_open_policy", - "serverCertificate": { - "grpcEndpoint": { - "targetUri": "unix:mypath" - } - }, - "updateTime": "2025-07-29T16:00:15.644106332Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - }, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_cert_provider_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.122393281Z", - "description": "my description", - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_with_cert_provider_policy", - "serverCertificate": { - "certificateProviderInstance": { - "pluginInstance": "google_cloud_private_spiffe" - } - }, - "updateTime": "2025-07-29T16:00:15.720820072Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - 
}, - { - "ancestors": [ - "projects/307841421122", - "folders/1004165107538", - "folders/422052295010", - "folders/23774682723", - "folders/134336129404", - "folders/376645683816", - "organizations/433637338589" - ], - "asset_type": "networksecurity.googleapis.com/ServerTlsPolicy", - "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_client_validation_grpc_policy", - "resource": { - "data": { - "createTime": "2025-07-29T16:00:12.000713965Z", - "description": "my description", - "labels": { - "foo": "bar" - }, - "mtlsPolicy": { - "clientValidationCa": [ - { - "grpcEndpoint": { - "targetUri": "unix:mypath" - } - } - ] - }, - "name": "projects/ccm-breakit/locations/global/serverTlsPolicies/td_mtls_client_validation_grpc_policy", - "serverCertificate": { - "certificateProviderInstance": { - "pluginInstance": "google_cloud_private_spiffe" - } - }, - "updateTime": "2025-07-29T16:00:15.701713898Z" - }, - "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", - "discovery_name": "ServerTlsPolicy", - "location": "global", - "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", - "version": "v1" - }, - "updateTime": "2025-07-29T18:00:00Z" - } -] \ No newline at end of file diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf deleted file mode 100644 index f9cadcf99e2e..000000000000 --- a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/server_tls_policy.tf +++ /dev/null @@ -1,194 +0,0 @@ -resource "google_network_security_server_tls_policy" "lb_mtls_policy" { - allow_open = false - description = "my description" - - labels = { - foo = "bar" - } - - location = "global" - - mtls_policy { - client_validation_mode = "REJECT_INVALID" - client_validation_trust_config = 
"projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" - } - - name = "lb_mtls_policy" - project = "ccm-breakit" -} - -resource "google_network_security_server_tls_policy" "td_mtls_policy" { - allow_open = false - description = "my description" - - labels = { - foo = "bar" - } - - location = "global" - - mtls_policy { - client_validation_ca { - certificate_provider_instance { - plugin_instance = "google_cloud_private_spiffe" - } - } - } - - name = "td_mtls_policy" - project = "ccm-breakit" - - server_certificate { - certificate_provider_instance { - plugin_instance = "google_cloud_private_spiffe" - } - } -} - -resource "google_network_security_server_tls_policy" "td_with_server_cert_policy" { - allow_open = false - description = "my description" - location = "global" - name = "td_with_server_cert_policy" - project = "ccm-breakit" - - server_certificate { - grpc_endpoint { - target_uri = "unix:mypath" - } - } -} - -resource "google_network_security_server_tls_policy" "empty_description_policy" { - allow_open = false - - labels = { - foo = "bar" - } - - location = "global" - - mtls_policy { - client_validation_mode = "REJECT_INVALID" - client_validation_trust_config = "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" - } - - name = "empty_description_policy" - project = "ccm-breakit" -} - -resource "google_network_security_server_tls_policy" "empty_labels_policy" { - allow_open = false - description = "my description" - location = "global" - - mtls_policy { - client_validation_mode = "REJECT_INVALID" - client_validation_trust_config = "projects/307841421122/locations/global/trustConfigs/id-4adf7779-1e9f-4124-9438-652c80886074" - } - - name = "empty_labels_policy" - project = "ccm-breakit" -} - -resource "google_network_security_server_tls_policy" "regional_location_policy" { - allow_open = false - description = "my description" - - labels = { - foo = "bar" - } - - location = 
"us-central1" - - mtls_policy { - client_validation_mode = "REJECT_INVALID" - client_validation_trust_config = "projects/307841421122/locations/us-central1/trustConfigs/tsmx-20250609-tc1" - } - - name = "regional_location_policy" - project = "ccm-breakit" -} - -resource "google_network_security_server_tls_policy" "lb_mtls_allow_invalid_cert_policy" { - allow_open = false - description = "my description" - - labels = { - foo = "bar" - } - - location = "global" - - mtls_policy { - client_validation_mode = "ALLOW_INVALID_OR_MISSING_CLIENT_CERT" - } - - name = "lb_mtls_allow_invalid_cert_policy" - project = "ccm-breakit" -} - -resource "google_network_security_server_tls_policy" "td_allow_open_policy" { - allow_open = true - description = "my description" - location = "global" - - mtls_policy { - client_validation_ca { - certificate_provider_instance { - plugin_instance = "google_cloud_private_spiffe" - } - } - } - - name = "td_allow_open_policy" - project = "ccm-breakit" - - server_certificate { - grpc_endpoint { - target_uri = "unix:mypath" - } - } -} - -resource "google_network_security_server_tls_policy" "td_with_cert_provider_policy" { - allow_open = false - description = "my description" - location = "global" - name = "td_with_cert_provider_policy" - project = "ccm-breakit" - - server_certificate { - certificate_provider_instance { - plugin_instance = "google_cloud_private_spiffe" - } - } -} - -resource "google_network_security_server_tls_policy" "td_mtls_client_validation_grpc_policy" { - allow_open = false - description = "my description" - - labels = { - foo = "bar" - } - - location = "global" - - mtls_policy { - client_validation_ca { - grpc_endpoint { - target_uri = "unix:mypath" - } - } - } - - name = "td_mtls_client_validation_grpc_policy" - project = "ccm-breakit" - - server_certificate { - certificate_provider_instance { - plugin_instance = "google_cloud_private_spiffe" - } - } -} diff --git 
a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt index 8346d819818a..703181eb9e90 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt @@ -20,7 +20,7 @@ import replaceCharsId // BuildConfigurationsForPackages accepts a map containing details of multiple packages in a provider and returns a list of build configurations for them all. // Intended to be used in projects where we're testing all packages, e.g. the nightly test projects -fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): List { +fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc"): List { val list = ArrayList() // Create build configurations for all packages, except sweeper @@ -28,8 +28,8 @@ fun BuildConfigurationsForPackages(packages: Map>, p val path: String = info.getValue("path").toString() val displayName: String = info.getValue("displayName").toString() - val pkg = PackageDetails(packageName, displayName, providerName, parentProjectName, releaseDiffTest) - val buildConfig = pkg.buildConfiguration(path, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix, releaseDiffTest = releaseDiffTest) + val pkg = PackageDetails(packageName, displayName, providerName, parentProjectName) + val buildConfig = pkg.buildConfiguration(path, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix) list.add(buildConfig) } @@ -38,16 +38,16 @@ fun 
BuildConfigurationsForPackages(packages: Map>, p // BuildConfigurationForSinglePackage accepts details of a single package in a provider and returns a build configuration for it // Intended to be used in short-lived projects where we're testing specific packages, e.g. feature branch testing -fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): BuildType{ - val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName, releaseDiffTest = releaseDiffTest) - return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix, releaseDiffTest = releaseDiffTest) +fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc"): BuildType{ + val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName) + return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix) } -class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String, private val releaseDiffTest: String) { +class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String) { // buildConfiguration returns a BuildType for a service package // For BuildType docs, see https://teamcity.jetbrains.com/app/dsl-documentation/root/build-type/index.html - fun buildConfiguration(path: String, vcsRoot: GitVcsRoot, 
sharedResources: List, environmentVariables: AccTestConfiguration, buildTimeout: Int = DefaultBuildTimeoutDuration, testPrefix: String, releaseDiffTest: String): BuildType { + fun buildConfiguration(path: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, buildTimeout: Int = DefaultBuildTimeoutDuration, testPrefix: String): BuildType { val testPrefix = "TestAcc" val testTimeout = "12" @@ -72,11 +72,7 @@ class PackageDetails(private val packageName: String, private val displayName: S tagBuildToIndicateTriggerMethod() configureGoEnv() downloadTerraformBinary() - if (releaseDiffTest.toBoolean()) { - runDiffTests() - } else { - runAcceptanceTests() - } + runAcceptanceTests() saveArtifactsToGCS() archiveArtifactsIfOverLimit() // Must be after push to GCS step, as this step impacts debug log files } @@ -95,7 +91,7 @@ class PackageDetails(private val packageName: String, private val displayName: S params { configureGoogleSpecificTestParameters(environmentVariables) - acceptanceTestBuildParams(parallelism, testPrefix, testTimeout, releaseDiffTest) + acceptanceTestBuildParams(parallelism, testPrefix, testTimeout) terraformLoggingParameters(environmentVariables, providerName) terraformCoreBinaryTesting() terraformShouldPanicForSchemaErrors() @@ -128,4 +124,4 @@ class PackageDetails(private val packageName: String, private val displayName: S var id = "%s_%s_PACKAGE_%s".format(this.parentProjectName, this.providerName, this.packageName) return replaceCharsId(id) } -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt index 0c93a9183c94..275cb9fb86b5 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt @@ -61,7 +61,6 @@ class 
SweeperDetails(private val sweeperName: String, private val parentProjectN // These hardcoded values affect the sweeper CLI command's behaviour val testPrefix = "TestAcc" val testTimeout = "12" - val releaseDiffTest = "false" return BuildType { @@ -98,7 +97,7 @@ class SweeperDetails(private val sweeperName: String, private val parentProjectN params { configureGoogleSpecificTestParameters(environmentVariables) - acceptanceTestBuildParams(parallelism, testPrefix, testTimeout, releaseDiffTest) + acceptanceTestBuildParams(parallelism, testPrefix, testTimeout) sweeperParameters(sweeperRegions, sweeperRun) terraformLoggingParameters(environmentVariables, providerName) terraformCoreBinaryTesting() @@ -132,4 +131,4 @@ class SweeperDetails(private val sweeperName: String, private val parentProjectN var id = "%s_%s".format(this.parentProjectName, this.sweeperName) return replaceCharsId(id) } -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt index b1531bfefd28..92eef200790c 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt @@ -27,7 +27,6 @@ class VcrDetails(private val providerName: String, private val buildId: String, val testTimeout = "12" val parallelism = DefaultParallelism val buildTimeout: Int = DefaultBuildTimeoutDuration - val releaseDiffTest = "false" // Path is just ./google(-beta) here, whereas nightly test builds use paths like ./google/something/specific // This helps VCR testing builds to run tests across multiple packages @@ -71,7 +70,7 @@ class VcrDetails(private val providerName: String, private val buildId: String, params { configureGoogleSpecificTestParameters(environmentVariables) vcrEnvironmentVariables(environmentVariables, 
providerName) - acceptanceTestBuildParams(parallelism, testPrefix, testTimeout, releaseDiffTest) + acceptanceTestBuildParams(parallelism, testPrefix, testTimeout) terraformLoggingParameters(environmentVariables, providerName) terraformCoreBinaryTesting() terraformShouldPanicForSchemaErrors() diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt index 393493f525df..4acd6411e05d 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt @@ -205,12 +205,11 @@ fun ParametrizedWithType.configureGoogleSpecificTestParameters(config: AccTestCo // ParametrizedWithType.acceptanceTestBuildParams sets build params that affect how commands to run // acceptance tests are templated -fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: String, timeout: String, releaseDiffTest: String) { +fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: String, timeout: String) { hiddenVariable("env.TF_ACC", "1", "Set to a value to run the Acceptance Tests") text("PARALLELISM", "%d".format(parallelism)) text("TEST_PREFIX", prefix) text("TIMEOUT", timeout) - text("env.RELEASE_DIFF", releaseDiffTest) } // ParametrizedWithType.sweeperParameters sets build parameters that affect how sweepers are run @@ -302,4 +301,4 @@ fun ParametrizedWithType.hiddenVariable(name: String, value: String, description fun ParametrizedWithType.hiddenPasswordVariable(name: String, value: String, description: String) { password(name, value, "", description, ParameterDisplay.HIDDEN) -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt index 9216b59ab694..c615d74128fa 100644 --- 
a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt @@ -226,55 +226,3 @@ fun BuildSteps.archiveArtifactsIfOverLimit() { // https://youtrack.jetbrains.com/issue/KT-2425/Provide-a-way-for-escaping-the-dollar-sign-symbol-in-multiline-strings-and-string-templates }) } - -fun BuildSteps.runDiffTests() { - if (UseTeamCityGoTest) { - step(ScriptBuildStep { - name = "Run Diff Tests" - scriptContent = "go test -v \"%PACKAGE_PATH%\" -timeout=\"%TIMEOUT%h\" -test.parallel=\"%PARALLELISM%\" -run=\"%TEST_PREFIX%\" -json" - }) - } else { - step(ScriptBuildStep { - name = "Compile Test Binary" - workingDir = "%PACKAGE_PATH%" - scriptContent = """ - #!/bin/bash - export TEST_FILE_COUNT=$(ls ./*_test.go | wc -l) - if test ${'$'}TEST_FILE_COUNT -gt "0"; then - echo "Compiling test binary" - go test -c -o test-binary - else - echo "Skipping compilation of test binary; no Go test files found" - fi - """.trimIndent() - }) - - step(ScriptBuildStep { - name = "Run via scripts/teamcitytestscripts/teamcity-diff-test" - workingDir = "%PACKAGE_PATH%" - scriptContent = """ - #!/bin/bash - if ! test -f "./test-binary"; then - echo "Skipping test execution; file ./test-binary does not exist." - exit 0 - fi - - echo "Compiling teamcity-diff-test..." - pushd ../../../scripts/teamcitytestscripts > /dev/null - go build -o ../../teamcity-diff-test . 
- popd > /dev/null - - - export TEST_COUNT=${'$'}(./test-binary -test.list="%TEST_PREFIX%" | wc -l) - echo "Found ${'$'}{TEST_COUNT} tests that match the given test prefix %TEST_PREFIX%" - if test ${'$'}TEST_COUNT -le "0"; then - echo "Skipping test execution; no tests to run" - exit 0 - fi - - echo "Starting tests" - ./test-binary -test.list="%TEST_PREFIX%" | ../../../teamcity-diff-test -test ./test-binary -parallelism "%PARALLELISM%" -timeout "%TIMEOUT%h" - """.trimIndent() - }) - } -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/constants.kt b/mmv1/third_party/terraform/.teamcity/components/constants.kt index 1323bb41f9ab..f8ba7685ee2a 100644 --- a/mmv1/third_party/terraform/.teamcity/components/constants.kt +++ b/mmv1/third_party/terraform/.teamcity/components/constants.kt @@ -8,7 +8,6 @@ // Provider name that matches the name in the Registry const val ProviderNameGa = "google" const val ProviderNameBeta = "google-beta" -const val ProviderNameBetaDiffTest = "google-beta-diff-test" // specifies the default hour (UTC) at which tests should be triggered, if enabled const val DefaultStartHour = 4 @@ -30,7 +29,7 @@ const val DefaultDaysOfMonth = "*" const val DefaultBranchName = "refs/heads/nightly-test" // Value used to make long-running builds fail due to a timeout -const val DefaultBuildTimeoutDuration = 60 * 15 // 15 hours in minutes +const val DefaultBuildTimeoutDuration = 60 * 12 // 12 hours in minutes // Values used to define and reference Shared Resource locks, used to avoid conflicting builds const val SharedResourceNameGa = "ci-test-project-nightly-ga Service Lock" @@ -43,11 +42,10 @@ const val ServiceSweeperCronName = "$ServiceSweeperName - Cron" const val ServiceSweeperManualName = "$ServiceSweeperName - Manual" const val ProjectSweeperName = "Project Sweeper" const val NightlyTestsProjectId = "NightlyTests" -const val WeeklyDiffTestsProjectId = "WeeklyDiffTests" const val MMUpstreamProjectId = 
"MMUpstreamTests" const val VcrRecordingProjectId = "VCRRecording" // Artifact rules controls which artifacts are uploaded to S3 // https://www.jetbrains.com/help/teamcity/2024.07/configuring-general-settings.html#Artifact+Paths // The value below lacks a file extension, to allow upload of individual .txt files or a single .tar.gz file -const val ArtifactRules = "%teamcity.build.checkoutDir%/debug*" \ No newline at end of file +const val ArtifactRules = "%teamcity.build.checkoutDir%/debug*" diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 5c2b40ad838b..8ffcca156620 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -236,11 +236,6 @@ var ServicesListBeta = mapOf( "displayName" to "Compute", "path" to "./google-beta/services/compute" ), - "contactcenterinsights" to mapOf( - "name" to "contactcenterinsights", - "displayName" to "Contact Center AI Insights", - "path" to "./google-beta/services/contactcenterinsights" - ), "container" to mapOf( "name" to "container", "displayName" to "Container", @@ -561,16 +556,6 @@ var ServicesListBeta = mapOf( "displayName" to "Mlengine", "path" to "./google-beta/services/mlengine" ), - "modelarmor" to mapOf( - "name" to "modelarmor", - "displayName" to "ModelArmor", - "path" to "./google-beta/services/modelarmor" - ), - "modelarmorglobal" to mapOf( - "name" to "modelarmorglobal", - "displayName" to "ModelArmorGlobal", - "path" to "./google-beta/services/modelarmorglobal" - ), "monitoring" to mapOf( "name" to "monitoring", "displayName" to "Monitoring", @@ -686,21 +671,11 @@ var ServicesListBeta = mapOf( "displayName" to "Resourcemanager", "path" to "./google-beta/services/resourcemanager" ), - "resourcemanager3" to mapOf( - "name" to "resourcemanager3", - "displayName" to "Resourcemanager3", - "path" to 
"./google-beta/services/resourcemanager3" - ), "runtimeconfig" to mapOf( "name" to "runtimeconfig", "displayName" to "Runtimeconfig", "path" to "./google-beta/services/runtimeconfig" ), - "saasruntime" to mapOf( - "name" to "saasruntime", - "displayName" to "SaaS Runtime", - "path" to "./google-beta/services/saasruntime" - ), "secretmanager" to mapOf( "name" to "secretmanager", "displayName" to "Secretmanager", @@ -816,6 +791,11 @@ var ServicesListBeta = mapOf( "displayName" to "Tags", "path" to "./google-beta/services/tags" ), + "tpu" to mapOf( + "name" to "tpu", + "displayName" to "Tpu", + "path" to "./google-beta/services/tpu" + ), "tpuv2" to mapOf( "name" to "tpuv2", "displayName" to "Tpuv2", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 9b3f94e065da..19cff2fb52f5 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -236,11 +236,6 @@ var ServicesListGa = mapOf( "displayName" to "Compute", "path" to "./google/services/compute" ), - "contactcenterinsights" to mapOf( - "name" to "contactcenterinsights", - "displayName" to "Contact Center AI Insights", - "path" to "./google/services/contactcenterinsights" - ), "container" to mapOf( "name" to "container", "displayName" to "Container", @@ -556,16 +551,6 @@ var ServicesListGa = mapOf( "displayName" to "Mlengine", "path" to "./google/services/mlengine" ), - "modelarmor" to mapOf( - "name" to "modelarmor", - "displayName" to "ModelArmor", - "path" to "./google/services/modelarmor" - ), - "modelarmorglobal" to mapOf( - "name" to "modelarmorglobal", - "displayName" to "ModelArmorGlobal", - "path" to "./google/services/modelarmorglobal" - ), "monitoring" to mapOf( "name" to "monitoring", "displayName" to "Monitoring", @@ -681,21 +666,11 @@ var ServicesListGa = mapOf( "displayName" to "Resourcemanager", 
"path" to "./google/services/resourcemanager" ), - "resourcemanager3" to mapOf( - "name" to "resourcemanager3", - "displayName" to "Resourcemanager3", - "path" to "./google/services/resourcemanager3" - ), "runtimeconfig" to mapOf( "name" to "runtimeconfig", "displayName" to "Runtimeconfig", "path" to "./google/services/runtimeconfig" ), - "saasruntime" to mapOf( - "name" to "saasruntime", - "displayName" to "SaaS Runtime", - "path" to "./google/services/saasruntime" - ), "secretmanager" to mapOf( "name" to "secretmanager", "displayName" to "Secretmanager", @@ -811,6 +786,11 @@ var ServicesListGa = mapOf( "displayName" to "Tags", "path" to "./google/services/tags" ), + "tpu" to mapOf( + "name" to "tpu", + "displayName" to "Tpu", + "path" to "./google/services/tpu" + ), "tpuv2" to mapOf( "name" to "tpuv2", "displayName" to "Tpuv2", diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt index e2ff35042b96..ba81b22e0779 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE_BRANCH_resource_identity.kt @@ -32,7 +32,6 @@ fun featureBranchResourceIdentitySubProject(allConfig: AllContextParameters): Pr val trigger = NightlyTriggerConfiguration( branch = "refs/heads/$featureBranchResourceIdentity", // triggered builds must test the feature branch startHour = DefaultStartHour + 6, - nightlyTestsEnabled = false ) val vcrConfig = getVcrAcceptanceTestConfig(allConfig) // Reused below for both MM testing build configs val servicesToTest = arrayOf("secretmanager", "resourcemanager") diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt 
b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index c67a50303ee5..a80aadd6d889 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -12,7 +12,6 @@ import builds.* import jetbrains.buildServer.configs.kotlin.Project import projects.reused.mmUpstream import projects.reused.nightlyTests -import projects.reused.weeklyDiffTests import projects.reused.vcrRecording import replaceCharsId import vcs_roots.HashiCorpVCSRootBeta @@ -42,9 +41,6 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { // This is only present for the Beta provider, as only TPGB VCR recordings are used. subProject(vcrRecording(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, ModularMagicianVCSRootBeta, vcrConfig)) - // Beta Diff Test project that uses hashicorp/terraform-provider-google-beta-diff-test - subProject(weeklyDiffTests(betaId + "_DIFF_TEST", ProviderNameBeta, ModularMagicianVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek = "SAT", nightlyTestsEnabled = false))) - params { readOnlySettings() } diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt index b742a8c1899e..90fa2d49947d 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/reused/nightly_tests.kt @@ -10,7 +10,6 @@ package projects.reused import NightlyTestsProjectId import ProviderNameBeta import ProviderNameGa -import ProviderNameBetaDiffTest import ServiceSweeperName import SharedResourceNameBeta import SharedResourceNameGa @@ -33,7 +32,6 @@ fun nightlyTests(parentProject:String, providerName: String, vcsRoot: GitVcsRoot when(providerName) { ProviderNameGa -> sharedResources = 
arrayListOf(SharedResourceNameGa) ProviderNameBeta -> sharedResources = arrayListOf(SharedResourceNameBeta) - ProviderNameBetaDiffTest -> sharedResources = arrayListOf(SharedResourceNameBeta) else -> throw Exception("Provider name not supplied when generating a nightly test subproject") } @@ -50,7 +48,6 @@ fun nightlyTests(parentProject:String, providerName: String, vcsRoot: GitVcsRoot when(providerName) { ProviderNameGa -> sweepersList = SweepersListGa ProviderNameBeta -> sweepersList = SweepersListBeta - ProviderNameBetaDiffTest -> sweepersList = SweepersListBeta else -> throw Exception("Provider name not supplied when generating a nightly test subproject") } val serviceSweeperConfig = BuildConfigurationForServiceSweeper(providerName, ServiceSweeperName, sweepersList, projectId, vcsRoot, sharedResources, config) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt deleted file mode 100644 index ab3119eeb340..000000000000 --- a/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) HashiCorp, Inc. - * SPDX-License-Identifier: MPL-2.0 - */ - -// This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. Any changes to this file in the downstream will be overwritten. 
- -package projects.reused - -import NightlyTestsProjectId -import ProviderNameGa -import ProviderNameBeta -import ServiceSweeperName -import SharedResourceNameBeta -import SharedResourceNameGa -import builds.* -import generated.SweepersListBeta -import generated.SweepersListGa -import jetbrains.buildServer.configs.kotlin.Project -import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot -import replaceCharsId - -fun weeklyDiffTests(parentProject:String, providerName: String, vcsRoot: GitVcsRoot, config: AccTestConfiguration, cron: NightlyTriggerConfiguration): Project { - - var projectId = "${parentProject}_${NightlyTestsProjectId}" - projectId = replaceCharsId(projectId) - - // Nightly test projects run all acceptance tests overnight - // Here we ensure the project uses the appropriate Shared Resource to ensure no clashes between builds and/or sweepers - var sharedResources: ArrayList - when(providerName) { - ProviderNameGa -> sharedResources = arrayListOf(SharedResourceNameGa) - ProviderNameBeta -> sharedResources = arrayListOf(SharedResourceNameBeta) - else -> throw Exception("Provider name not supplied when generating a weekly diff test subproject") - } - - // Create build configs to run acceptance tests for each package defined in packages.kt and services.kt files - // and add cron trigger to them all - val allPackages = getAllPackageInProviderVersion(providerName) - val packageBuildConfigs = BuildConfigurationsForPackages(allPackages, providerName, projectId, vcsRoot, sharedResources, config, releaseDiffTest = "true") - packageBuildConfigs.forEach { buildConfiguration -> - buildConfiguration.addTrigger(cron) - } - - // Create build config for sweeping the nightly test project - var sweepersList: Map> - when(providerName) { - ProviderNameGa -> sweepersList = SweepersListGa - ProviderNameBeta -> sweepersList = SweepersListBeta - else -> throw Exception("Provider name not supplied when generating a weekly diff test subproject") - } - val serviceSweeperConfig = 
BuildConfigurationForServiceSweeper(providerName, ServiceSweeperName, sweepersList, projectId, vcsRoot, sharedResources, config) - val sweeperCron = cron.clone() - sweeperCron.startHour += 5 // Ensure triggered after the package test builds are triggered - serviceSweeperConfig.addTrigger(sweeperCron) - - return Project { - id(projectId) - name = "Weekly Diff Tests" - description = "A project connected to the hashicorp/terraform-provider-${providerName} repository, where scheduled weekly diff tests run and users can trigger ad-hoc builds" - - // Register build configs in the project - packageBuildConfigs.forEach { buildConfiguration -> - buildType(buildConfiguration) - } - buildType(serviceSweeperConfig) - - params{ - configureGoogleSpecificTestParameters(config) - } - } -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt b/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt index 02d35308545d..e2e481679b89 100644 --- a/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt +++ b/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt @@ -15,7 +15,6 @@ import org.junit.Assert.fail const val gaProjectName = "Google" const val betaProjectName = "Google Beta" const val nightlyTestsProjectName = "Nightly Tests" -const val weeklyDiffTestsProjectName = "Weekly Diff Tests" const val mmUpstreamProjectName = "Upstream MM Testing" const val projectSweeperProjectName = "Project Sweeper" diff --git a/mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt b/mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt deleted file mode 100644 index 0d411ea5352a..000000000000 --- a/mmv1/third_party/terraform/.teamcity/tests/weekly_diff_project.kt +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) HashiCorp, Inc. - * SPDX-License-Identifier: MPL-2.0 - */ - -// This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. 
Any changes to this file in the downstream will be overwritten. - -package tests - -import jetbrains.buildServer.configs.kotlin.triggers.ScheduleTrigger -import org.junit.Assert.assertTrue -import org.junit.Test -import projects.googleCloudRootProject - -class WeeklyDiffTestProjectsTests { - @Test - fun allBuildsShouldHaveTrigger() { - val root = googleCloudRootProject(testContextParameters()) - - // Find GA nightly test project - // var gaNightlyTestProject = getNestedProjectFromRoot(root, gaProjectName, weeklyDiffTestsProjectName) - - // Find Beta nightly test project - var betaWeeklyDiffTestProject = getNestedProjectFromRoot(root, betaProjectName, weeklyDiffTestsProjectName) - - // Make assertions about builds in both weekly diff test projects - (betaWeeklyDiffTestProject.buildTypes).forEach{bt -> - assertTrue("Build configuration `${bt.name}` should contain at least one trigger", bt.triggers.items.isNotEmpty()) - // Look for at least one CRON trigger - var found: Boolean = false - lateinit var schedulingTrigger: ScheduleTrigger - for (item in bt.triggers.items){ - if (item.type == "schedulingTrigger") { - schedulingTrigger = item as ScheduleTrigger - found = true - break - } - } - - assertTrue("Build configuration `${bt.name}` should contain a CRON/'schedulingTrigger' trigger", found) - - // Check that weekly diff test is being ran on the nightly-test branch - var isNightlyTestBranch: Boolean = false - if (schedulingTrigger.branchFilter == "+:refs/heads/nightly-test"){ - isNightlyTestBranch = true - } - assertTrue("Build configuration `${bt.name}` is using the nightly-test branch filter;", isNightlyTestBranch) - } - } -} diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index 2c158386e146..b8f96e37a355 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -11,9 +11,12 @@ 
import ( "time" "net/http" +{{ if ne $.TargetVersionName `ga` -}} + // For beta tests only resourceManagerV3 "google.golang.org/api/cloudresourcemanager/v3" tpgservicusage "github.com/hashicorp/terraform-provider-google/google/services/serviceusage" "github.com/hashicorp/terraform-provider-google/google/services/kms" +{{- end }} "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-provider-google/google/services/tags" @@ -34,15 +37,19 @@ import ( "google.golang.org/api/servicenetworking/v1" "google.golang.org/api/serviceusage/v1" sqladmin "google.golang.org/api/sqladmin/v1beta4" - backupdr "google.golang.org/api/backupdr/v1" ) var SharedKeyRing = "tftest-shared-keyring-1" +{{ if ne $.TargetVersionName `ga` -}} + var DefaultKeyHandleName = "eed58b7b-20ad-4da8-ad85-ba78a0d5ab87" var DefaultKeyHandleResourceType = "compute.googleapis.com/Disk" var CloudKmsSrviceName = "cloudkms.googleapis.com" +{{- end }} + + var SharedCryptoKey = map[string]string{ "ENCRYPT_DECRYPT": "tftest-shared-key-1", "ASYMMETRIC_SIGN": "tftest-shared-sign-key-1", @@ -84,6 +91,8 @@ func BootstrapKMSKeyWithPurposeInLocation(t *testing.T, purpose, locationID stri return BootstrapKMSKeyWithPurposeInLocationAndName(t, purpose, locationID, SharedCryptoKey[purpose]) } +{{ if ne $.TargetVersionName `ga` -}} + type BootstrappedKMSAutokey struct { *cloudkms.AutokeyConfig *cloudkms.KeyHandle @@ -318,6 +327,8 @@ func setPolicy(crmService *resourceManagerV3.Service, resourceType string, resou return nil } +{{- end }} + func BootstrapKMSKeyWithPurposeInLocationAndName(t *testing.T, purpose, locationID, keyShortName string) BootstrappedKMS { config := BootstrapConfig(t) if config == nil { @@ -386,7 +397,7 @@ func BootstrapKMSKeyWithPurposeInLocationAndName(t *testing.T, purpose, location t.Fatalf("Unable to bootstrap KMS key. CryptoKey is nil!") } - // TODO: b/372305432 Use the pagination properly. + // TODO(b/372305432): Use the pagination properly. 
ckvResp, err := kmsClient.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.List(keyName).Do() if err != nil { t.Fatalf("Unable to list cryptoKeyVersions: %v", err) @@ -872,6 +883,8 @@ func BootstrapServicePerimeterProjects(t *testing.T, desiredProjects int) []*clo return projects } +{{ if ne $.TargetVersionName `ga` -}} + // BootstrapFolder creates or get a folder having a input folderDisplayName within a TestOrgEnv func BootstrapFolder(t *testing.T, folderDisplayName string) *resourceManagerV3.Folder { config := BootstrapConfig(t) @@ -921,6 +934,8 @@ func BootstrapFolder(t *testing.T, folderDisplayName string) *resourceManagerV3. return folder } +{{- end }} + // BootstrapProject will create or get a project named // "" that will persist across test runs, // where projectIDSuffix is based off of getTestProjectFromEnv(). The reason @@ -1099,7 +1114,7 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { PointInTimeRecoveryEnabled: true, } settings := &sqladmin.Settings{ - Tier: "db-custom-2-3840", + Tier: "db-f1-micro", BackupConfiguration: backupConfig, } bootstrapInstance = &sqladmin.DatabaseInstance{ @@ -1160,93 +1175,6 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { return bootstrapInstance.Name } -// waitForBackupdrOperation polls the operation until it is done or times out. 
-func waitForBackupdrOperation(ctx context.Context, t *testing.T, backupdrService *backupdr.Service, op *backupdr.Operation) (*backupdr.Operation, error) { - t.Helper() - opService := backupdr.NewProjectsLocationsOperationsService(backupdrService) - ticker := time.NewTicker(5 * time.Second) // Poll every 5 seconds - defer ticker.Stop() - - const timeout = 5 * time.Minute // Maximum time to wait - ctx, cancel := context.WithTimeout(ctx, timeout) - defer cancel() - - for { - select { - case <-ctx.Done(): - return nil, fmt.Errorf("timed out waiting for operation %s to complete", op.Name) - case <-ticker.C: - latestOp, err := opService.Get(op.Name).Context(ctx).Do() - if err != nil { - // Retry on transient errors if necessary, fail on others. - return nil, fmt.Errorf("error getting operation %s: %w", op.Name, err) - } - op = latestOp - t.Logf("Operation %s status: Done=%v", op.Name, op.Done) - - if op.Done { - if op.Error != nil { - return op, fmt.Errorf("operation %s failed: %v (code %d)", op.Name, op.Error.Message, op.Error.Code) - } - t.Logf("Operation %s completed successfully.", op.Name) - return op, nil - } - } - } -} - -// BootstrapBackupDRVault creates or gets a BackupDR backup vault for testing. 
-func BootstrapBackupDRVault(t *testing.T, vaultID, location string) string { - ctx := context.Background() - project := envvar.GetTestProjectFromEnv() - config := BootstrapConfig(t) - if config == nil { - t.Fatal("Could not bootstrap config.") - } - - // Create a backupdr client and check if the vault exists, if not create a vault - // backupdrClient := config.NewBackupDRClient(config.UserAgent) - vaultName := fmt.Sprintf("projects/%s/locations/%s/backupVaults/%s", project, location, vaultID) - projectAndLocation := fmt.Sprintf("projects/%s/locations/%s", project, location) - - log.Printf("[DEBUG] Getting BackupDR vault %q", vaultName) - backupdrService := config.NewBackupDRClient(config.UserAgent) - _, err := backupdrService.Projects.Locations.BackupVaults.Get(vaultName).Do() - if err != nil && transport_tpg.IsGoogleApiErrorWithCode(err, 404) { - log.Printf("[DEBUG] BackupDR vault %q not found, bootstrapping", vaultName) - // Prepare the request body for BackupVault creation - enforcedRetentionDays := 1 - effectiveDays := 1 - - retentionDuration := time.Duration(enforcedRetentionDays) * 24 * time.Hour - effectiveTime := time.Now().Add(time.Duration(effectiveDays) * 24 * time.Hour) - - backupVault := &backupdr.BackupVault{ - BackupMinimumEnforcedRetentionDuration: fmt.Sprintf("%ds", int(retentionDuration.Seconds())), - EffectiveTime: effectiveTime.Format(time.RFC3339), - Description: "Created by BootstrapBackupDRVault function", - } - {{/* _, err = config.NewBackupDRClient(config.UserAgent).Projects.Locations.BackupVaults.Create(projectAndLocation, backupVault).Do() */}} - createCall := backupdrService.Projects.Locations.BackupVaults.Create(projectAndLocation, backupVault) - createCall.BackupVaultId(vaultID) // *** This is REQUIRED for the query parameter *** - // createCall.ValidateOnly(false) // Optional: explicit validate only flag - op, err := createCall.Do() - if err != nil { - t.Fatalf("Error calling Create BackupDR vault %q: %s", vaultName, err) - } - 
fmt.Printf("Successfully initiated creation of BackupDR vault %q (Operation: %s)\n", vaultName, op.Name) - - // *** WAIT FOR COMPLETION *** - if _, err := waitForBackupdrOperation(ctx, t, backupdrService, op); err != nil { - t.Fatalf("Create operation for %s failed: %v", vaultName, err) - } - fmt.Printf("Successfully created BackupDR vault %q\n", vaultName) - - } - - return vaultName -} - func BootstrapSharedCaPoolInLocation(t *testing.T, location string) string { project := envvar.GetTestProjectFromEnv() poolName := "static-ca-pool" @@ -2131,34 +2059,15 @@ func BootstrapSharedCodeRepositoryIndex(t *testing.T, codeRepositoryIndexId, loc } const sharedTagKeyPrefix = "tf-bootstrap-tagkey" -const sharedTagKeyParentErr = "Parent %q is not valid. Should be in format: 'organizations/123' OR 'projects/123'." - -func BootstrapSharedTestProjectTagKey(t *testing.T, testId string, obj map[string]interface{}) string { - pid := envvar.GetTestProjectFromEnv() - return BootstrapSharedTestTagKeyDetails(t, testId, "projects/"+pid, obj)["shared_tag_key"] -} -func BootstrapSharedTestOrganizationTagKey(t *testing.T, testId string, obj map[string]interface{}) string { +func BootstrapSharedTestTagKey(t *testing.T, testId string) string { org := envvar.GetTestOrgFromEnv(t) - return BootstrapSharedTestTagKeyDetails(t, testId, "organizations/"+org, obj)["shared_tag_key"] -} - -// parent should be in format: {"organization" OR "projects"}/{id} -func BootstrapSharedTestTagKeyDetails(t *testing.T, testId string, parent string, obj map[string]interface{}) map[string]string { sharedTagKey := fmt.Sprintf("%s-%s", sharedTagKeyPrefix, testId) - - parentSplit := strings.Split(parent, "/") - if len(parentSplit) < 2 || (parentSplit[0] != "organizations" && parentSplit[0] != "projects") { - parentErr := fmt.Sprintf(sharedTagKeyParentErr, parent) - t.Fatalf("Error bootstrapping shared tag key %q: %s", sharedTagKey, parentErr) - } - - parentId := parentSplit[1] - tagKeyName := fmt.Sprintf("%s/%s", 
parentId, sharedTagKey) + tagKeyName := fmt.Sprintf("%s/%s", org, sharedTagKey) config := BootstrapConfig(t) if config == nil { - return make(map[string]string) + return "" } log.Printf("[DEBUG] Getting shared test tag key %q", sharedTagKey) @@ -2174,13 +2083,10 @@ func BootstrapSharedTestTagKeyDetails(t *testing.T, testId string, parent string if err != nil && transport_tpg.IsGoogleApiErrorWithCode(err, 403) { log.Printf("[DEBUG] TagKey %q not found, bootstrapping", sharedTagKey) tagKeyObj := map[string]interface{}{ - "parent": parent, + "parent": "organizations/" + org, "shortName": sharedTagKey, "description": "Bootstrapped tag key for Terraform Acceptance testing", } - if obj != nil { - maps.Insert(tagKeyObj, maps.All(obj)) - } res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, @@ -2207,7 +2113,7 @@ func BootstrapSharedTestTagKeyDetails(t *testing.T, testId string, parent string } } - getTagKeyResponse, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "GET", Project: config.Project, @@ -2220,32 +2126,20 @@ func BootstrapSharedTestTagKeyDetails(t *testing.T, testId string, parent string t.Fatalf("Error getting shared tag key %q: %s", sharedTagKey, err) } - return map[string]string{ - "name": getTagKeyResponse["name"].(string), - "shared_tag_key": sharedTagKey, - } + return sharedTagKey } const sharedTagValuePrefix = "tf-bootstrap-tagvalue" -func BootstrapSharedTestProjectTagValue(t *testing.T, testId string, tagKey string) string { - pid := envvar.GetTestProjectFromEnv() - return BootstrapSharedTestTagValueDetails(t, testId, tagKey, pid)["shared_tag_value"] -} - -func BootstrapSharedTestOrganizationTagValue(t *testing.T, testId string, tagKey string) string { +func BootstrapSharedTestTagValue(t *testing.T, testId string, tagKey string) string { org := envvar.GetTestOrgFromEnv(t) - return 
BootstrapSharedTestTagValueDetails(t, testId, tagKey, org)["shared_tag_value"] -} - -func BootstrapSharedTestTagValueDetails(t *testing.T, testId string, tagKey, parentId string) map[string]string { sharedTagValue := fmt.Sprintf("%s-%s", sharedTagValuePrefix, testId) - tagKeyName := fmt.Sprintf("%s/%s", parentId, tagKey) + tagKeyName := fmt.Sprintf("%s/%s", org, tagKey) tagValueName := fmt.Sprintf("%s/%s", tagKeyName, sharedTagValue) config := BootstrapConfig(t) if config == nil { - return make(map[string]string) + return "" } log.Printf("[DEBUG] Getting shared test tag value %q", sharedTagValue) @@ -2304,7 +2198,8 @@ func BootstrapSharedTestTagValueDetails(t *testing.T, testId string, tagKey, par t.Fatalf("Error waiting to create TagValue: %s", err) } } - getTagValueResponse, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "GET", Project: config.Project, @@ -2317,10 +2212,7 @@ func BootstrapSharedTestTagValueDetails(t *testing.T, testId string, tagKey, par t.Fatalf("Error getting shared tag value %q: %s", sharedTagValue, err) } - return map[string]string{ - "name": getTagValueResponse["name"].(string), - "shared_tag_value": sharedTagValue, - } + return sharedTagValue } type BootstrapClient struct { diff --git a/mmv1/third_party/terraform/acctest/diff_utils.go b/mmv1/third_party/terraform/acctest/diff_utils.go deleted file mode 100644 index 0a91f3ebed29..000000000000 --- a/mmv1/third_party/terraform/acctest/diff_utils.go +++ /dev/null @@ -1,204 +0,0 @@ -package acctest - -import ( - "fmt" - "io" - "os" - "path/filepath" - "reflect" - "regexp" - "strings" - - "github.com/hashicorp/terraform-plugin-go/tfprotov5" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" -) - -const diffTag = "[Diff]" - 
-func isReleaseDiffEnabled() bool { - releaseDiff := os.Getenv("RELEASE_DIFF") - return releaseDiff == "true" -} - -func initializeReleaseDiffTest(c resource.TestCase, testName string, tempOutputFile *os.File) resource.TestCase { - var releaseProvider string - packagePath := fmt.Sprint(reflect.TypeOf(transport_tpg.Config{}).PkgPath()) - if strings.Contains(packagePath, "google-beta") { - releaseProvider = "google-beta" - } else { - releaseProvider = "google" - } - - if c.ExternalProviders != nil { - c.ExternalProviders[releaseProvider] = resource.ExternalProvider{} - } else { - c.ExternalProviders = map[string]resource.ExternalProvider{ - releaseProvider: { - // if left empty fetches most recent release provider - }, - } - } - - localProviderName := "google-local" - if c.Providers != nil { - c.Providers = map[string]*schema.Provider{ - localProviderName: GetSDKProvider(testName), - } - c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ - localProviderName: func() (tfprotov5.ProviderServer, error) { - return nil, nil - }, - } - } else { - c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ - localProviderName: func() (tfprotov5.ProviderServer, error) { - provider, err := MuxedProviders(testName) - return provider(), err - }, - } - } - // InsertDiffSteps adds modified steps to the test that run with an external provider - // these steps do the actual infrastructure provisioning, and c.Steps is updated in the method to have the modified steps - c = InsertDiffSteps(c, tempOutputFile, releaseProvider, localProviderName) - return c -} - -// InsertDiffSteps inserts a new step into the test case that reformats the config to use the release provider - this allows us to see the diff -// between the local provider and the release provider -func InsertDiffSteps(c resource.TestCase, tempOutputFile *os.File, releaseProvider string, localProviderName string) resource.TestCase { - var countSteps = 0 - - var 
replacementSteps []resource.TestStep - for _, testStep := range c.Steps { - countSteps++ - if testStep.Config != "" { - ogConfig := testStep.Config - fmt.Fprintf(tempOutputFile, "[DEBUG] Original config: %s\n", ogConfig) - testStep.Config = ReformConfigWithProvider(ogConfig, localProviderName) - fmt.Fprintf(tempOutputFile, "[DEBUG] Reformatted config: %s\n", testStep.Config) - testStep.PreConfig = func() { - fmt.Fprintf(tempOutputFile, "%s Step %d\n", diffTag, countSteps) - } - if testStep.ExpectError == nil && !testStep.PlanOnly { - newStep := resource.TestStep{ - PreConfig: func() { - fmt.Fprintf(tempOutputFile, "Regular Step %d\n", countSteps) - }, - Config: ReformConfigWithProvider(ogConfig, releaseProvider), - } - testStep.PlanOnly = true - testStep.ExpectNonEmptyPlan = false - replacementSteps = append(replacementSteps, newStep) - } - replacementSteps = append(replacementSteps, testStep) - } else { - replacementSteps = append(replacementSteps, testStep) - } - } - c.Steps = replacementSteps - return c -} - -// reformConfigWithProvider reformats the config to use the given provider -// The method matches a regex for the provider block and replaces it with the given provider. 
-// For example: ' data "google_compute_network" "default" { provider = "google-local" } ' -// will be reformatted to ' data "google_compute_network" "default" { provider = "google-beta" } ' -func ReformConfigWithProvider(config, provider string) string { - configBytes := []byte(config) - providerReplacement := fmt.Sprintf("provider = %s", provider) - providerReplacementBytes := []byte(providerReplacement) - providerBlock := regexp.MustCompile(`provider *=.*google-beta.*`) - - if providerBlock.Match(configBytes) { - out := string(providerBlock.ReplaceAll(configBytes, providerReplacementBytes)) - return out - } - - providerReplacement = fmt.Sprintf("${1}\n %s\n", providerReplacement) - providerReplacementBytes = []byte(providerReplacement) - // Match resource and data blocks that use google_ provider - // regex matches for labels resource and data blocks that use google_ provider - - resourceHeader := regexp.MustCompile(`((resource|data) .*google_.* .*\w+.*\{ *)`) - return string(resourceHeader.ReplaceAll(configBytes, providerReplacementBytes)) -} - -// ReadDiffOutput reads the outputted temporary file and returns its contents -func ReadDiffOutput(f *os.File) (string, error) { - if f == nil { - return "", fmt.Errorf("file handle is nil") - } - - // Seek to the beginning of the file in case it was just written to. - if _, err := f.Seek(0, io.SeekStart); err != nil { - return "", fmt.Errorf("failed to seek to beginning of file: %w", err) - } - - // Read the entire file content. 
- content, err := os.ReadFile(f.Name()) - if err != nil { - return "", fmt.Errorf("failed to read file: %w", err) - } - - return string(content), nil -} - -// parseReleaseDiffOutput reads the temporary file created during the release diff test and returns whether the last line has a [Diff] flag, the test output, and any errors -func ParseReleaseDiffOutput(output string) (isDiff bool) { - trimmedOutput := strings.TrimSpace(output) - if trimmedOutput == "" { - return false - } - - lines := strings.Split(trimmedOutput, "\n") - lastLine := lines[len(lines)-1] - - isDiff = strings.HasPrefix(lastLine, diffTag) - - return isDiff -} - -func writeOutputFileDeferFunction(tempOutputFile *os.File, failed bool) { - if tempOutputFile == nil { - return - } - // parses the temporary file created during the release diff test and returns the last line of output - // This is useful for extracting the diff output from the file after the test has run - - testOutput, err := ReadDiffOutput(tempOutputFile) - if err != nil { - fmt.Printf("Error reading temporary file: %v\n", err) - return - } - isDiff := ParseReleaseDiffOutput(testOutput) - tempOutputFile.Close() - err = os.Remove(tempOutputFile.Name()) - if err != nil { - fmt.Printf("Temporary File Deletion Error: %v\n", err) - } - regularFailureFile, err := os.Create(filepath.Join("", "regular_failure_file.log")) - if err != nil { - fmt.Printf("Error creating file: %v\n", err) - return - } - defer regularFailureFile.Close() - diffFailureFile, err := os.Create(filepath.Join("", "diff_failure_file.log")) - if err != nil { - fmt.Printf("Error creating file: %v\n", err) - return - } - defer diffFailureFile.Close() - if failed { - // Check if the output line starts with "[Diff]" - if isDiff { - fmt.Fprintf(os.Stdout, "%s Breaking Change Detected] \n", diffTag) - fmt.Fprintf(diffFailureFile, "%s %s\n", diffTag, testOutput) - } else { - fmt.Fprintf(regularFailureFile, testOutput) - fmt.Fprintf(regularFailureFile, "FAILED --- %s\n", testOutput) 
- } - } -} diff --git a/mmv1/third_party/terraform/acctest/diff_utils_test.go b/mmv1/third_party/terraform/acctest/diff_utils_test.go deleted file mode 100644 index c5ed0a0dc4ca..000000000000 --- a/mmv1/third_party/terraform/acctest/diff_utils_test.go +++ /dev/null @@ -1,206 +0,0 @@ -package acctest_test - -import ( - "os" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestReformConfigWithProvider(t *testing.T) { - - type testCase struct { - name string - initialConfig string - providerToInsert string - expectedConfig string - } - - cases := map[string]testCase{ - "replaces_google_beta_with_local": { - name: "Replaces 'google-beta' provider with 'google-local'", - initialConfig: `resource "google_new_resource" { - provider = google-beta -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_new_resource" { - provider = google-local -}`, - }, - "inserts_local_provider_into_empty_config": { - name: "Inserts 'google-local' provider when no provider block exists", - initialConfig: `resource "google_alloydb_cluster" "default" { - location = "us-central1" - network_config { - network = google_compute_network.default.id - } -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_alloydb_cluster" "default" { - provider = google-local - - location = "us-central1" - network_config { - network = google_compute_network.default.id - } -}`, - }, - "no_change_if_target_provider_already_present": { - name: "Does not change config if target provider is already present", - initialConfig: `resource "google_new_resource" { - provider = google-local -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_new_resource" { - provider = google-local -}`, - }, - "inserts_provider_with_other_attributes": { - name: "Inserts provider into a resource 
block with other attributes but no existing provider", - initialConfig: `resource "google_compute_instance" "test" { - name = "test-instance" - machine_type = "e2-medium" -}`, - providerToInsert: "google-local", - expectedConfig: `resource "google_compute_instance" "test" { - provider = google-local - - name = "test-instance" - machine_type = "e2-medium" -}`, - }, - } - - for tn, tc := range cases { - t.Run(tn, func(t *testing.T) { - newConfig := acctest.ReformConfigWithProvider(tc.initialConfig, tc.providerToInsert) - - if newConfig != tc.expectedConfig { - t.Fatalf("Test Case: %s\nExpected config to be reformatted to:\n%q\nbut got:\n%q", tc.name, tc.expectedConfig, newConfig) - } - t.Logf("Test Case: %s\nReformed config:\n%s", tc.name, newConfig) - }) - } -} - -func TestInsertDiffSteps(t *testing.T) { - - var dummyCase = resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - ResourceName: "google_pubsub_subscription.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"topic"}, - }, - { - Config: `resource "google_example_widget" "foo" { - name = "dummy" - provider = google-beta - }`, - Check: resource.ComposeTestCheckFunc( - func(*terraform.State) error { return nil }, - ), - }, - { - Config: `provider = "google-local" - // ... 
configuration that is expected to cause an error - `, - ExpectError: regexp.MustCompile(`"restore_continuous_backup_source": conflicts with restore_backup_source`), - }, - }, - } - temp_file, err := os.CreateTemp("", "release_diff_test_output_*.log") - if err != nil { - t.Fatalf("Failed to create temp file: %v", err) - } - dummyCase = acctest.InsertDiffSteps(dummyCase, temp_file, "google-beta", "google-local") - - // Expected steps after InsertDiffSteps runs. - // A "diff" step (using 'google-local') is added for each original step containing a Config field, - // unless the step has ExpectError set. - var expectedSteps = []resource.TestStep{ - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-local - }`, - ExpectNonEmptyPlan: false, - PlanOnly: true, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-beta - }`, - }, - { - Config: `resource "google_new_resource" "original" { - provider = google-local - }`, - ExpectNonEmptyPlan: false, - PlanOnly: true, - }, - { - ResourceName: "google_pubsub_subscription.example", // No config, so no diff step added - }, - { - Config: `resource "google_example_widget" "foo" { - name = "dummy" - provider = google-beta - }`, - Check: resource.ComposeTestCheckFunc( - func(*terraform.State) error { return nil }, - ), - }, - { - Config: `resource "google_example_widget" "foo" { - name = "dummy" - provider = google-local - }`, - Check: resource.ComposeTestCheckFunc( - func(*terraform.State) error { return nil }, - ), - ExpectNonEmptyPlan: false, - PlanOnly: true, - }, - { - Config: `provider = "google-local" - // ... 
configuration that is expected to cause an error - `, // expect error means we don't do a second step - }, - } - - if len(dummyCase.Steps) != len(expectedSteps) { - t.Fatalf("Expected %d steps, but got %d", len(expectedSteps), len(dummyCase.Steps)) - } - - for i, step := range dummyCase.Steps { - if step.Config != expectedSteps[i].Config { - t.Fatalf("Expected step %d config to be:\n%q\nbut got:\n%q", i, expectedSteps[i].Config, step.Config) - } - if step.PlanOnly != expectedSteps[i].PlanOnly { - t.Fatalf("Expected step %d to have PlanOnly set to %v, but got %v", i, expectedSteps[i].PlanOnly, step.PlanOnly) - } - } - - defer os.Remove(temp_file.Name()) -} diff --git a/mmv1/third_party/terraform/acctest/provider_test_utils.go b/mmv1/third_party/terraform/acctest/provider_test_utils.go index 0a13f0206aa1..81bc0fd80a1d 100644 --- a/mmv1/third_party/terraform/acctest/provider_test_utils.go +++ b/mmv1/third_party/terraform/acctest/provider_test_utils.go @@ -40,14 +40,6 @@ func GoogleProviderConfig(t *testing.T) *transport_tpg.Config { sdkProvider := provider.Provider() rc := terraform.ResourceConfig{} - - // `universe_domain` must be specified through config (i.e. 
unlike most provider settings there's no environment variable), and we check the value matches the credentials during provider initilization - // In the test environment we seed the value through a test-only environment variable, and we need to pre-seed a value in ResourceConfig as if it was in config to pass the check - universeDomain := envvar.GetTestUniverseDomainFromEnv(t) - if universeDomain != "" && universeDomain != "googleapis.com" { - rc.Config = make(map[string]interface{}) - rc.Config["universe_domain"] = universeDomain - } sdkProvider.Configure(context.Background(), &rc) return sdkProvider.Meta().(*transport_tpg.Config) } diff --git a/mmv1/third_party/terraform/acctest/resource_inventory_reader.go b/mmv1/third_party/terraform/acctest/resource_inventory_reader.go deleted file mode 100644 index afec311b6ab7..000000000000 --- a/mmv1/third_party/terraform/acctest/resource_inventory_reader.go +++ /dev/null @@ -1,212 +0,0 @@ -package acctest - -import ( - "fmt" - "os" - "path/filepath" - "strings" - "sync" - - "gopkg.in/yaml.v2" -) - -// ResourceYAMLMetadata represents the structure of the metadata files -type ResourceYAMLMetadata struct { - Resource string `yaml:"resource"` - ApiServiceName string `yaml:"api_service_name"` - SourceFile string `yaml:"source_file"` -} - -// Cache structures to avoid repeated file system operations -var ( - // Cache for API service names (resourceName -> apiServiceName) - apiServiceNameCache = make(map[string]string) - // Cache for service packages (resourceType -> servicePackage) - servicePackageCache = make(map[string]string) - // Flag to track if cache has been populated - cachePopulated = false - // Mutex to protect cache access - cacheMutex sync.RWMutex -) - -// PopulateMetadataCache walks through all metadata files once and populates -// both the API service name and service package caches for improved performance -func PopulateMetadataCache() error { - cacheMutex.Lock() - defer cacheMutex.Unlock() - - // If cache is 
already populated, we can skip - if cachePopulated { - return nil - } - - baseDir, err := getServicesDir() - if err != nil { - return fmt.Errorf("failed to find services directory: %v", err) - } - - // Count for statistics - apiNameCount := 0 - servicePkgCount := 0 - - // Walk through all service directories once - err = filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil // Skip files with errors but continue walking - } - - // Look for metadata files - if !info.IsDir() && strings.HasPrefix(info.Name(), "resource_") && strings.HasSuffix(info.Name(), "_meta.yaml") { - // Read the file - content, err := os.ReadFile(path) - if err != nil { - return nil // Continue to next file - } - - // Parse YAML - var metadata ResourceYAMLMetadata - if err := yaml.Unmarshal(content, &metadata); err != nil { - return nil // Continue to next file - } - - // Skip if resource is empty - if metadata.Resource == "" { - return nil - } - - // Store API service name in cache - if metadata.ApiServiceName != "" { - apiServiceNameCache[metadata.Resource] = metadata.ApiServiceName - apiNameCount++ - } - - // Extract and store service package in cache - pathParts := strings.Split(path, string(os.PathSeparator)) - servicesIndex := -1 - for i, part := range pathParts { - if part == "services" { - servicesIndex = i - break - } - } - - if servicesIndex >= 0 && len(pathParts) > servicesIndex+1 { - servicePackage := pathParts[servicesIndex+1] // The part after "services" - servicePackageCache[metadata.Resource] = servicePackage - servicePkgCount++ - } - } - return nil - }) - - if err != nil { - return fmt.Errorf("error walking directory: %v", err) - } - - // Mark cache as populated - cachePopulated = true - - return nil -} - -// GetAPIServiceNameForResource finds the api_service_name for a given resource name -// If projectRoot is empty, it will attempt to find the project root automatically -func GetAPIServiceNameForResource(resourceName 
string) string { - // Make sure cache is populated - if !cachePopulated { - if err := PopulateMetadataCache(); err != nil { - return "failed_to_populate_metadata_cache" - } - } - - // Check cache - cacheMutex.RLock() - apiServiceName, found := apiServiceNameCache[resourceName] - cacheMutex.RUnlock() - - if !found { - return "unknown" - } - - return apiServiceName -} - -// GetServicePackageForResourceType finds the service package for a given resource type -// If projectRoot is empty, it will attempt to find the project root automatically -func GetServicePackageForResourceType(resourceType string) string { - // Make sure cache is populated - if !cachePopulated { - if err := PopulateMetadataCache(); err != nil { - return "failed_to_populate_metadata_cache" - } - } - - // Check cache - cacheMutex.RLock() - servicePackage, found := servicePackageCache[resourceType] - cacheMutex.RUnlock() - - if !found { - return "unknown" - } - - return servicePackage -} - -// getServicesDir returns the path to the services directory -// It will attempt to find the project root relative to cwd -func getServicesDir() (string, error) { - // Try to find project root - root, err := findProjectRoot() - if err == nil { - servicesDir := filepath.Join(root, "google-beta", "services") - if _, err := os.Stat(servicesDir); err == nil { - return servicesDir, nil - } - } - - // Last resort: try relative to current directory - currentDir, err := os.Getwd() - if err != nil { - return "", fmt.Errorf("failed to determine current directory: %v", err) - } - - // Try a few common relative paths - potentialPaths := []string{ - filepath.Join(currentDir, "google-beta", "services"), - filepath.Join(currentDir, "..", "google-beta", "services"), - filepath.Join(currentDir, "..", "..", "google-beta", "services"), - } - - for _, path := range potentialPaths { - if _, err := os.Stat(path); err == nil { - return path, nil - } - } - - return "", fmt.Errorf("unable to locate services directory, please provide 
explicit project root path") -} - -// findProjectRoot walks up from the current directory to find the project root -// by looking for the go.mod file -func findProjectRoot() (string, error) { - dir, err := os.Getwd() - if err != nil { - return "", err - } - - for { - // Check if go.mod exists in the current directory - if _, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil { - return dir, nil - } - - // Move up to the parent directory - parentDir := filepath.Dir(dir) - if parentDir == dir { - // Reached the filesystem root without finding go.mod - return "", fmt.Errorf("could not find go.mod file in any parent directory") - } - dir = parentDir - } -} diff --git a/mmv1/third_party/terraform/acctest/resource_inventory_test.go b/mmv1/third_party/terraform/acctest/resource_inventory_test.go deleted file mode 100644 index d218b8de6808..000000000000 --- a/mmv1/third_party/terraform/acctest/resource_inventory_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package acctest_test - -import ( - "strings" - "testing" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/provider" -) - -func TestResourceInventoryMetadataFound(t *testing.T) { - resources := provider.ResourceMap() - - // Track statistics - var ( - totalResources = 0 - missingServicePkg = 0 - missingServiceName = 0 - ) - - // Create a map to store missing resources for summary report - missingServicePkgResources := make(map[string]bool) - missingServiceNameResources := make(map[string]bool) - - for resourceType := range resources { - if strings.HasSuffix(resourceType, "_iam_member") || - strings.HasSuffix(resourceType, "_iam_policy") || - strings.HasSuffix(resourceType, "_iam_binding") { - continue - } - totalResources++ - - // Log each resource being checked - // t.Logf("Checking metadata for resource: %s", resourceType) - - // Check for service package - servicePackage := acctest.GetServicePackageForResourceType(resourceType) - if 
servicePackage == "unknown" { - // t.Logf("WARNING: Could not find service package for resource %s: %v", resourceType) - missingServicePkg++ - missingServicePkgResources[resourceType] = true - } - - apiServiceName := acctest.GetAPIServiceNameForResource(resourceType) - // Check for API service name - if apiServiceName == "unknown" { - // t.Logf("WARNING: Could not find API service name for resource %s: %v", resourceType) - missingServiceName++ - missingServiceNameResources[resourceType] = true - } - t.Logf(" %s servicePackage: %s apiServiceName: %s", resourceType, servicePackage, apiServiceName) - - } - - // Generate a summary report - t.Logf("\n--- RESOURCE METADATA TEST SUMMARY ---") - t.Logf("Total resources checked: %d", totalResources) - t.Logf("Resources missing service package: %d (%.1f%%)", - missingServicePkg, - float64(missingServicePkg)/float64(totalResources)*100) - t.Logf("Resources missing API service name: %d (%.1f%%)", - missingServiceName, - float64(missingServiceName)/float64(totalResources)*100) - - // List resources missing metadata (limited to first 10 for readability) - if len(missingServicePkgResources) > 0 { - t.Log("\nResources missing service package (first 10):") - count := 0 - for res := range missingServicePkgResources { - t.Logf(" - %s", res) - count++ - if count >= 10 { - remaining := len(missingServicePkgResources) - 10 - if remaining > 0 { - t.Logf(" ... and %d more", remaining) - } - break - } - } - } - - if len(missingServiceNameResources) > 0 { - t.Log("\nResources missing API service name (first 10):") - count := 0 - for res := range missingServiceNameResources { - t.Logf(" - %s", res) - count++ - if count >= 10 { - remaining := len(missingServiceNameResources) - 10 - if remaining > 0 { - t.Logf(" ... 
and %d more", remaining) - } - break - } - } - } - - // Decide whether to fail the test based on coverage percentage - const requiredCoveragePercent = 90.0 - - servicePkgCoverage := (float64(totalResources-missingServicePkg) / float64(totalResources)) * 100 - serviceNameCoverage := (float64(totalResources-missingServiceName) / float64(totalResources)) * 100 - - if servicePkgCoverage < requiredCoveragePercent { - t.Errorf("Service package metadata coverage (%.1f%%) is below required threshold (%.1f%%)", - servicePkgCoverage, requiredCoveragePercent) - } - - if serviceNameCoverage < requiredCoveragePercent { - t.Errorf("API service name metadata coverage (%.1f%%) is below required threshold (%.1f%%)", - serviceNameCoverage, requiredCoveragePercent) - } -} diff --git a/mmv1/third_party/terraform/acctest/tgc_utils.go b/mmv1/third_party/terraform/acctest/tgc_utils.go index 7d8a30b018f1..a3f0bcbda038 100644 --- a/mmv1/third_party/terraform/acctest/tgc_utils.go +++ b/mmv1/third_party/terraform/acctest/tgc_utils.go @@ -1,276 +1,74 @@ package acctest import ( - "encoding/base64" - "encoding/json" "fmt" "log" "regexp" "strings" - "testing" - "time" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) -type ResourceMetadata struct { - CaiAssetNames []string `json:"cai_asset_names"` - ResourceType string `json:"resource_type"` - ResourceAddress string `json:"resource_address"` - ImportMetadata ImportMetadata `json:"import_metadata,omitempty"` - Service string `json:"service"` +// Hardcode the Terraform resource name -> API service name mapping temporarily. +// TODO: [tgc] read the mapping from the resource metadata files. 
+var ApiServiceNames = map[string]string{ + "google_compute_instance": "compute.googleapis.com", + "google_project": "cloudresourcemanager.googleapis.com", } -type ImportMetadata struct { - Id string `json:"id,omitempty"` - IgnoredFields []string `json:"ignored_fields,omitempty"` -} - -// The metadata for each step in one test -type TgcMetadataPayload struct { - TestName string `json:"test_name"` - StepNumber int `json:"step_number"` - RawConfig string `json:"raw_config"` - ResourceMetadata map[string]ResourceMetadata `json:"resource_metadata"` - PrimaryResource string `json:"primary_resource"` - CaiReadTime time.Time `json:"cai_read_time"` -} - -// PROJECT_NUMBER instead of PROJECT_ID is in the CAI asset names for the resources in those services -// https://cloud.google.com/asset-inventory/docs/asset-names -var serviceWithProjectNumber = map[string]struct{}{ - "apikeys": {}, // DCL - "binaryauthorization": {}, - "cloudtasks": {}, - "cloudbuild": {}, - "colab": {}, - "containerattached": {}, - "containeraws": {}, - "containerazure": {}, - "dialogflowcx": {}, - "discoveryengine": {}, - "documentai": {}, - "healthcare": {}, - "iap": {}, - "identityplatform": {}, - "logging": {}, - "monitoring": {}, - "osconfig": {}, - "secretmanager": {}, - "secretmanagerregional": {}, - "vpcaccess": {}, -} - -// encodeToBase64JSON converts a struct to base64-encoded JSON -func encodeToBase64JSON(data interface{}) (string, error) { - jsonData, err := json.Marshal(data) - if err != nil { - return "", fmt.Errorf("error marshalling data to JSON: %v", err) - } - - return base64.StdEncoding.EncodeToString(jsonData), nil -} - -// CollectAllTgcMetadata collects metadata for all resources in a test step -func CollectAllTgcMetadata(tgcPayload TgcMetadataPayload) resource.TestCheckFunc { +// Gets the test metadata for tgc: +// - test config +// - cai asset name +// For example: //compute.googleapis.com/projects/ci-test-188019/zones/us-central1-a/instances/tf-test-mi3fqaucf8 +func 
GetTestMetadataForTgc(service, address, rawConfig string) resource.TestCheckFunc { return func(s *terraform.State) error { - tgcPayload.CaiReadTime = time.Now() - - projectId := envvar.GetTestProjectFromEnv() - projectNumber := envvar.GetTestProjectNumberFromEnv() - - // Process each resource to get CAI asset names and resolve auto IDs - for address, metadata := range tgcPayload.ResourceMetadata { - // If there is import metadata update our primary resource - if metadata.ImportMetadata.Id != "" { - tgcPayload.PrimaryResource = address - } - - rState := s.RootModule().Resources[address] - if rState == nil || rState.Primary == nil { - log.Printf("[DEBUG]TGC Terraform error: resource state unavailable for %s, skipping", address) - continue - } - - // Resolve the CAI asset name - apiServiceName := GetAPIServiceNameForResource(metadata.ResourceType) - if apiServiceName == "unknown" || apiServiceName == "failed_to_populate_metadata_cache" { - log.Printf("[DEBUG]TGC Terraform error: unknown resource type %s", metadata.ResourceType) - metadata.CaiAssetNames = []string{apiServiceName} - } else { - var rName string - switch metadata.ResourceType { - case "google_project": - rName = fmt.Sprintf("projects/%s", rState.Primary.Attributes["number"]) - default: - rName = rState.Primary.ID - } - - if _, ok := serviceWithProjectNumber[metadata.Service]; ok { - rName = strings.Replace(rName, projectId, projectNumber, 1) - } - - metadata.CaiAssetNames = []string{fmt.Sprintf("//%s/%s", apiServiceName, rName)} - } - - // Resolve auto IDs in import metadata - if metadata.ImportMetadata.Id != "" { - metadata.ImportMetadata.Id = strings.Replace(metadata.ImportMetadata.Id, "", rState.Primary.ID, 1) - } - - // Update the metadata in the map - tgcPayload.ResourceMetadata[address] = metadata - } - - log.Printf("[DEBUG] tgcPayload caireadtime %s", tgcPayload.CaiReadTime) - - // Encode the entire payload to base64 JSON - encodedData, err := encodeToBase64JSON(tgcPayload) - if err != nil { - 
log.Printf("[DEBUG]test_step_number=%d TGC Terraform error: %v", tgcPayload.StepNumber, err) - } else { - log.Printf("[DEBUG]test_step_number=%d TGC Terraform metadata: %s", tgcPayload.StepNumber, encodedData) + splits := strings.Split(address, ".") + if splits == nil || len(splits) < 2 { + return fmt.Errorf("The resource address %s is invalid.", address) } + resourceType := splits[0] + resourceName := splits[1] - return nil - } -} - -// parseResources extracts all resources from a Terraform configuration string -func parseResources(config string) []string { - // This regex matches resource blocks in Terraform configurations - resourceRegex := regexp.MustCompile(`resource\s+"([^"]+)"\s+"([^"]+)"`) - matches := resourceRegex.FindAllStringSubmatch(config, -1) - - var resources []string - for _, match := range matches { - if len(match) >= 3 { - // Combine resource type and name to form the address - resources = append(resources, fmt.Sprintf("%s.%s", match[1], match[2])) + rState := s.RootModule().Resources[address] + if rState == nil || rState.Primary == nil { + return fmt.Errorf("The resource state is unavailable. 
Please check if the address %s.%s is correct.", resourceType, resourceName) } - } - return resources -} - -// determineImportMetadata checks if the next step is an import step and extracts all import metadata -func determineImportMetadata(steps []resource.TestStep, currentStepIndex int, resourceName string) ImportMetadata { - var metadata ImportMetadata - - // Check if there's a next step and if it's an import step - if currentStepIndex+1 < len(steps) { - nextStep := steps[currentStepIndex+1] - - // Check if it's an import step for our resource - if nextStep.ImportState && nextStep.ResourceName == resourceName { - // Capture ignored fields if present - if nextStep.ImportStateVerify && len(nextStep.ImportStateVerifyIgnore) > 0 { - metadata.IgnoredFields = nextStep.ImportStateVerifyIgnore - } - - // If ImportStateId is explicitly set, use that - if nextStep.ImportStateId != "" { - metadata.Id = nextStep.ImportStateId - return metadata - } - - // If ImportStateIdPrefix is set, note it - if nextStep.ImportStateIdPrefix != "" { - metadata.Id = fmt.Sprintf("%s", nextStep.ImportStateIdPrefix) - return metadata - } - - // If ImportStateIdFunc is set, get function info - if nextStep.ImportStateIdFunc != nil { - metadata.Id = "" - return metadata + // Convert the resource ID into CAI asset name + // and then print out the CAI asset name in the logs + if apiServiceName, ok := ApiServiceNames[resourceType]; !ok { + return fmt.Errorf("The Cai product backend name for resource %s doesn't exist.", resourceType) + } else { + var rName string + switch resourceType { + case "google_project": + rName = fmt.Sprintf("projects/%s", rState.Primary.Attributes["number"]) + default: + rName = rState.Primary.ID } - - // Default case - the ID will be automatically determined - metadata.Id = "" - return metadata + caiAssetName := fmt.Sprintf("//%s/%s", apiServiceName, rName) + log.Printf("[DEBUG]TGC CAI asset names start\n%s\nEnd of TGC CAI asset names", caiAssetName) } - } - - return metadata 
-} -// extendWithTGCData adds TGC metadata check function to the last non-plan config entry -func extendWithTGCData(t *testing.T, c resource.TestCase) resource.TestCase { - var updatedSteps []resource.TestStep + // The acceptance tests names will be also used for the tgc tests. + // "service" is logged and will be used to put the tgc tests into specific service packages. + log.Printf("[DEBUG]TGC Terraform service: %s", service) + log.Printf("[DEBUG]TGC Terraform resource: %s", resourceType) - // Process all steps - for i, step := range c.Steps { - // If this is a non-plan config step, add our TGC check - if step.Config != "" && !step.PlanOnly { - // Parse resources from the config - resources := parseResources(step.Config) + re := regexp.MustCompile(`\"(tf[-_]?test[-_]?.*?)([a-z0-9]+)\"`) + rawConfig = re.ReplaceAllString(rawConfig, `"${1}tgc"`) - // Skip if no resources found - if len(resources) == 0 { - updatedSteps = append(updatedSteps, step) - continue - } + // Replace resource name with the resource's real name, + // which is used to get the main resource object by checking the address after parsing raw config. 
+ // For example, replace `"google_compute_instance" "foobar"` with `"google_compute_instance" "tf-test-mi3fqaucf8"` + n := tpgresource.GetResourceNameFromSelfLink(rState.Primary.ID) + old := fmt.Sprintf(`"%s" "%s"`, resourceType, resourceName) + new := fmt.Sprintf(`"%s" "%s"`, resourceType, n) + rawConfig = strings.Replace(rawConfig, old, new, 1) - // Determine the service package from the first resource - firstResource := resources[0] - parts := strings.Split(firstResource, ".") - if len(parts) < 2 { - updatedSteps = append(updatedSteps, step) - continue - } - - // Collect metadata for all resources - resourceMetadata := make(map[string]ResourceMetadata) - - // Create the consolidated TGC payload - tgcPayload := TgcMetadataPayload{ - TestName: t.Name(), - StepNumber: i + 1, // Step number starts from 1 - RawConfig: step.Config, - ResourceMetadata: resourceMetadata, - } - - for _, res := range resources { - parts := strings.Split(res, ".") - if len(parts) >= 2 { - resourceType := parts[0] - - // Determine import metadata if the next step is an import step - importMeta := determineImportMetadata(c.Steps, i, res) - - // Create metadata for this resource - resourceMetadata[res] = ResourceMetadata{ - ResourceType: resourceType, - ResourceAddress: res, - ImportMetadata: importMeta, - Service: GetServicePackageForResourceType(resourceType), - // CaiAssetNames will be populated at runtime in the check function - } - } - } - - // Add a single consolidated TGC check for all resources - tgcCheck := CollectAllTgcMetadata(tgcPayload) - - // If there's an existing check function, wrap it with our consolidated check - if step.Check != nil { - existingCheck := step.Check - step.Check = resource.ComposeTestCheckFunc( - existingCheck, - tgcCheck, - ) - } else { - // Otherwise, just use our consolidated check - step.Check = tgcCheck - } - } - - updatedSteps = append(updatedSteps, step) + log.Printf("[DEBUG]TGC raw_config starts %sEnd of TGC raw_config", rawConfig) + return nil } - 
- c.Steps = updatedSteps - return c } diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index 117aca3b4169..5e55ca88cb2d 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -13,6 +13,7 @@ import ( "os" "path/filepath" "reflect" + "regexp" "slices" "strconv" "strings" @@ -22,9 +23,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/fwprovider" tpgprovider "github.com/hashicorp/terraform-provider-google/google/provider" - "github.com/hashicorp/terraform-provider-google/google/services/compute" - "github.com/hashicorp/terraform-provider-google/google/services/pubsublite" - "github.com/hashicorp/terraform-provider-google/google/services/sql" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -32,10 +30,9 @@ import ( "github.com/dnaeon/go-vcr/recorder" "github.com/hashicorp/terraform-plugin-framework/datasource" - fwResource "github.com/hashicorp/terraform-plugin-framework/resource" - fwDiags "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/provider" + "github.com/hashicorp/terraform-plugin-go/tfprotov5" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -148,19 +145,9 @@ func VcrTest(t *testing.T, c resource.TestCase) { if IsVcrEnabled() { defer closeRecorder(t) } else if isReleaseDiffEnabled() { - // creates temporary file for the individual test, will be a temporary to store the output - tempOutputFile, err := createTemporaryFile() - if err != nil { - t.Errorf("creating temporary file %v", err) - } - defer func() { - writeOutputFileDeferFunction(tempOutputFile, t.Failed()) - }() - c = initializeReleaseDiffTest(c, t.Name(), tempOutputFile) + c = 
initializeReleaseDiffTest(c, t.Name()) } - c = extendWithTGCData(t, c) - // terraform_labels is a computed field to which "goog-terraform-provisioned": "true" is always // added by the provider. ImportStateVerify "checks for strict equality and does not respect // DiffSuppressFunc or CustomizeDiff" so any test using ImportStateVerify must ignore @@ -177,16 +164,6 @@ func VcrTest(t *testing.T, c resource.TestCase) { resource.Test(t, c) } -func createTemporaryFile() (*os.File, error) { - // creates temporary file for the individual test, will be a temporary to store the output - tempOutputFile, err := os.CreateTemp("", "release_diff_test_output_*.log") - if err != nil { - return nil, err - } - - return tempOutputFile, nil -} - // We need to explicitly close the VCR recorder to save the cassette func closeRecorder(t *testing.T) { configsLock.RLock() @@ -223,6 +200,87 @@ func closeRecorder(t *testing.T) { } } +func isReleaseDiffEnabled() bool { + releaseDiff := os.Getenv("RELEASE_DIFF") + return releaseDiff != "" +} + +func initializeReleaseDiffTest(c resource.TestCase, testName string) resource.TestCase { + var releaseProvider string + packagePath := fmt.Sprint(reflect.TypeOf(transport_tpg.Config{}).PkgPath()) + if strings.Contains(packagePath, "google-beta") { + releaseProvider = "google-beta" + } else { + releaseProvider = "google" + } + + if c.ExternalProviders != nil { + c.ExternalProviders[releaseProvider] = resource.ExternalProvider{} + } else { + c.ExternalProviders = map[string]resource.ExternalProvider{ + releaseProvider: {}, + } + } + + localProviderName := "google-local" + if c.Providers != nil { + c.Providers = map[string]*schema.Provider{ + localProviderName: GetSDKProvider(testName), + } + c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ + localProviderName: func() (tfprotov5.ProviderServer, error) { + return nil, nil + }, + } + } else { + c.ProtoV5ProviderFactories = map[string]func() (tfprotov5.ProviderServer, error){ + 
localProviderName: func() (tfprotov5.ProviderServer, error) { + provider, err := MuxedProviders(testName) + return provider(), err + }, + } + } + + var replacementSteps []resource.TestStep + for _, testStep := range c.Steps { + if testStep.Config != "" { + ogConfig := testStep.Config + testStep.Config = reformConfigWithProvider(ogConfig, localProviderName) + if testStep.ExpectError == nil && testStep.PlanOnly == false { + newStep := resource.TestStep{ + Config: reformConfigWithProvider(ogConfig, releaseProvider), + } + testStep.PlanOnly = true + testStep.ExpectNonEmptyPlan = false + replacementSteps = append(replacementSteps, newStep) + } + replacementSteps = append(replacementSteps, testStep) + } else { + replacementSteps = append(replacementSteps, testStep) + } + } + + c.Steps = replacementSteps + + return c +} + +func reformConfigWithProvider(config, provider string) string { + configBytes := []byte(config) + providerReplacement := fmt.Sprintf("provider = %s", provider) + providerReplacementBytes := []byte(providerReplacement) + providerBlock := regexp.MustCompile(`provider *=.*google-beta.*`) + + if providerBlock.Match(configBytes) { + return string(providerBlock.ReplaceAll(configBytes, providerReplacementBytes)) + } + + providerReplacement = fmt.Sprintf("${1}\n\t%s", providerReplacement) + providerReplacementBytes = []byte(providerReplacement) + resourceHeader := regexp.MustCompile(`(resource .*google_.* .*\w+.*\{.*)`) + return string(resourceHeader.ReplaceAll(configBytes, providerReplacementBytes)) +} + // HandleVCRConfiguration configures the recorder (github.com/dnaeon/go-vcr/recorder) used in the VCR test // This includes: // - Setting the recording/replaying mode @@ -344,16 +402,9 @@ func (p *frameworkTestProvider) Configure(ctx context.Context, req provider.Conf func (p *frameworkTestProvider) DataSources(ctx context.Context) []func() datasource.DataSource { ds := p.FrameworkProvider.DataSources(ctx) ds = append(ds, 
fwprovider.NewGoogleProviderConfigPluginFrameworkDataSource) // google_provider_config_plugin_framework - ds = append(ds, compute.NewComputeNetworkFWDataSource) // google_fw_compute_network return ds } -func (p *frameworkTestProvider) Resources(ctx context.Context) []func() fwResource.Resource { - r := p.FrameworkProvider.Resources(ctx) - r = append(r, pubsublite.NewGooglePubsubLiteReservationFWResource, sql.NewSQLUserFWResource) // google_fwprovider_pubsub_lite_reservation - return r -} - // GetSDKProvider gets the SDK provider for use in acceptance tests // If VCR is in use, the configure function is overwritten. // See usage in MuxedProviders diff --git a/mmv1/third_party/terraform/envvar/envvar_utils.go b/mmv1/third_party/terraform/envvar/envvar_utils.go index fb34c9ec481f..537242cef5e9 100644 --- a/mmv1/third_party/terraform/envvar/envvar_utils.go +++ b/mmv1/third_party/terraform/envvar/envvar_utils.go @@ -86,10 +86,6 @@ var UniverseDomainEnvVars = []string{ "GOOGLE_UNIVERSE_DOMAIN", } -var ProjectPrefixEnvVars = []string{ - "GOOGLE_UNIVERSE_PROJECT_PREFIX", -} - // This is the billing account that will be charged for the infrastructure used during testing. For // that reason, it is also the billing account used for creating new projects. var BillingAccountEnvVars = []string{ @@ -146,14 +142,10 @@ func GetTestCredsFromEnv() string { // Returns googleapis.com if there's no universe set. func GetTestUniverseDomainFromEnv(t *testing.T) string { + SkipIfEnvNotSet(t, IdentityUserEnvVars...) return transport_tpg.MultiEnvSearch(UniverseDomainEnvVars) } -// Project Prefix of different universes -func GetUniverseProjectPrefixFromEnv() string { - return transport_tpg.MultiEnvSearch(ProjectPrefixEnvVars) -} - // AccTestPreCheck ensures at least one of the region env variables is set. 
func GetTestRegionFromEnv() string { return transport_tpg.MultiEnvSearch(RegionEnvVars) diff --git a/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl b/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl index 306a95578244..85f96fe7481f 100644 --- a/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl +++ b/mmv1/third_party/terraform/fwmodels/provider_model.go.tmpl @@ -59,9 +59,12 @@ type ProviderModel struct { // dcl generated ApikeysCustomEndpoint types.String `tfsdk:"apikeys_custom_endpoint"` AssuredWorkloadsCustomEndpoint types.String `tfsdk:"assured_workloads_custom_endpoint"` + CloudBuildWorkerPoolCustomEndpoint types.String `tfsdk:"cloud_build_worker_pool_custom_endpoint"` CloudResourceManagerCustomEndpoint types.String `tfsdk:"cloud_resource_manager_custom_endpoint"` FirebaserulesCustomEndpoint types.String `tfsdk:"firebaserules_custom_endpoint"` RecaptchaEnterpriseCustomEndpoint types.String `tfsdk:"recaptcha_enterprise_custom_endpoint"` + + GkehubFeatureCustomEndpoint types.String `tfsdk:"gkehub_feature_custom_endpoint"` } type ProviderBatching struct { diff --git a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl index 3ceb0f566e7d..80c944261338 100644 --- a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl +++ b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl @@ -21,13 +21,10 @@ import ( "github.com/hashicorp/terraform-provider-google/google/functions" "github.com/hashicorp/terraform-provider-google/google/fwmodels" "github.com/hashicorp/terraform-provider-google/google/services/resourcemanager" - "github.com/hashicorp/terraform-provider-google/google/services/apigee" - "github.com/hashicorp/terraform-provider-google/version" {{- if ne $.TargetVersionName "ga" }} "github.com/hashicorp/terraform-provider-google/google/services/firebase" {{- end }} - 
"github.com/hashicorp/terraform-provider-google/google/services/storage" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -225,7 +222,7 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, }, }, - // DCL + // dcl "container_aws_custom_endpoint": &schema.StringAttribute{ Optional: true, Validators: []validator.String{ @@ -238,36 +235,6 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, transport_tpg.CustomEndpointValidator(), }, }, - "apikeys_custom_endpoint": &schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - transport_tpg.CustomEndpointValidator(), - }, - }, - "assured_workloads_custom_endpoint": &schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - transport_tpg.CustomEndpointValidator(), - }, - }, - "cloud_resource_manager_custom_endpoint": &schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - transport_tpg.CustomEndpointValidator(), - }, - }, - "firebaserules_custom_endpoint": &schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - transport_tpg.CustomEndpointValidator(), - }, - }, - "recaptcha_enterprise_custom_endpoint": &schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - transport_tpg.CustomEndpointValidator(), - }, - }, }, Blocks: map[string]schema.Block{ "batching": schema.ListNestedBlock{ @@ -311,6 +278,8 @@ func (p *FrameworkProvider) Schema(_ context.Context, _ provider.SchemaRequest, }, }, } + + transport_tpg.ConfigureDCLCustomEndpointAttributesFramework(&resp.Schema) } // Configure prepares the metadata/'meta' required for data sources and resources to function. @@ -359,10 +328,7 @@ func (p *FrameworkProvider) DataSources(_ context.Context) []func() datasource.D // Resources defines the resources implemented in the provider. 
func (p *FrameworkProvider) Resources(_ context.Context) []func() resource.Resource { - return []func() resource.Resource{ - apigee.NewApigeeKeystoresAliasesKeyCertFileResource, - storage.NewStorageNotificationResource, - } + return nil } // Functions defines the provider functions implemented in the provider. diff --git a/mmv1/third_party/terraform/fwresource/field_helpers.go b/mmv1/third_party/terraform/fwresource/field_helpers.go index 40c170a85935..54788d8346e7 100644 --- a/mmv1/third_party/terraform/fwresource/field_helpers.go +++ b/mmv1/third_party/terraform/fwresource/field_helpers.go @@ -17,18 +17,10 @@ import ( // back to the provider's value if not given. If the provider's value is not // given, an error is returned. func GetProjectFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { - return getProviderDefaultFromFrameworkSchema("project", rVal, pVal, diags) + return getProjectFromFrameworkSchema("project", rVal, pVal, diags) } -func GetRegionFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { - return getProviderDefaultFromFrameworkSchema("region", rVal, pVal, diags) -} - -func GetZoneFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { - return getProviderDefaultFromFrameworkSchema("zone", rVal, pVal, diags) -} - -func getProviderDefaultFromFrameworkSchema(schemaField string, rVal, pVal types.String, diags *diag.Diagnostics) types.String { +func getProjectFromFrameworkSchema(projectSchemaField string, rVal, pVal types.String, diags *diag.Diagnostics) types.String { if !rVal.IsNull() && rVal.ValueString() != "" { return rVal } @@ -37,7 +29,7 @@ func getProviderDefaultFromFrameworkSchema(schemaField string, rVal, pVal types. 
return pVal } - diags.AddError("required field is not set", fmt.Sprintf("%s is not set", schemaField)) + diags.AddError("required field is not set", fmt.Sprintf("%s is not set", projectSchemaField)) return types.String{} } @@ -62,7 +54,7 @@ func ParseProjectFieldValueFramework(resourceType, fieldValue, projectSchemaFiel } } - project := getProviderDefaultFromFrameworkSchema(projectSchemaField, rVal, pVal, diags) + project := getProjectFromFrameworkSchema(projectSchemaField, rVal, pVal, diags) if diags.HasError() { return nil } @@ -119,10 +111,3 @@ func ReplaceVarsForFrameworkTest(prov *transport_tpg.Config, rs *terraform.Resou return re.ReplaceAllStringFunc(linkTmpl, replaceFunc), nil } - -func FlattenStringEmptyToNull(configuredValue types.String, apiValue string) types.String { - if configuredValue.IsNull() && apiValue == "" { - return types.StringNull() - } - return types.StringValue(apiValue) -} diff --git a/mmv1/third_party/terraform/fwresource/framework_import.go b/mmv1/third_party/terraform/fwresource/framework_import.go deleted file mode 100644 index 3e344332c0ff..000000000000 --- a/mmv1/third_party/terraform/fwresource/framework_import.go +++ /dev/null @@ -1,192 +0,0 @@ -package fwresource - -import ( - "context" - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -// ParseImportId uses a list of regular expressions to parse a resource's import ID. -// It extracts named capture groups from the regex and converts them to their -// corresponding type-safe attribute values based on the provided resource schema. 
-// It also handles setting default values (project, region, etc) if they are not -// present in the import ID. -func ParseImportId( - ctx context.Context, - req resource.ImportStateRequest, - resourceSchema schema.Schema, - providerConfig *transport_tpg.Config, - idRegexes []string, -) (map[string]attr.Value, diag.Diagnostics) { - var diags diag.Diagnostics - parsedAttributes := make(map[string]attr.Value) - - var matchFound bool - for _, idFormat := range idRegexes { - re, err := regexp.Compile(idFormat) - if err != nil { - diags.AddError( - "Invalid Import Regex", - fmt.Sprintf("Provider developer error: could not compile regex %q. Please report this issue. Error: %s", idFormat, err), - ) - // This is a developer error, so we stop immediately. - return nil, diags - } - - if match := re.FindStringSubmatch(req.ID); match != nil { - matchFound = true - subexpNames := re.SubexpNames() - for i, valueStr := range match { - // Index 0 is the full match, so we skip it. - if i == 0 { - continue - } - - fieldName := subexpNames[i] - if fieldName == "" { - continue - } - - // Look up the attribute in the resource's schema. - attribute, ok := resourceSchema.Attributes[fieldName] - if !ok { - diags.AddWarning( - "Unknown Import Field", - fmt.Sprintf("Parsed field %q from import ID but it is not defined in the resource schema.", fieldName), - ) - continue - } - - // Convert the parsed string value to the correct attr.Value type. - attrVal, conversionDiags := convertToAttrValue(valueStr, attribute) - diags.Append(conversionDiags...) - if conversionDiags.HasError() { - continue - } - parsedAttributes[fieldName] = attrVal - } - // Once a match is found, we stop. The most specific regex should be first. - break - } - } - - if !matchFound { - diags.AddError( - "Invalid Import ID", - fmt.Sprintf("Import ID %q doesn't match any of the accepted formats: %v", req.ID, idRegexes), - ) - return nil, diags - } - - // Handle default values like project, region, and zone. 
- defaultDiags := addDefaultValues(ctx, parsedAttributes, providerConfig, resourceSchema, idRegexes[0]) - diags.Append(defaultDiags...) - - return parsedAttributes, diags -} - -// convertToAttrValue converts a string to the appropriate attr.Value based on the schema attribute type. -func convertToAttrValue(valueStr string, attr schema.Attribute) (attr.Value, diag.Diagnostics) { - var diags diag.Diagnostics - - switch attr.(type) { - case schema.StringAttribute: - return types.StringValue(valueStr), nil - case schema.Int64Attribute: - intVal, err := strconv.ParseInt(valueStr, 10, 64) - if err != nil { - diags.AddError( - "Import Value Conversion Error", - fmt.Sprintf("Failed to parse %q as an integer: %s", valueStr, err), - ) - return nil, diags - } - return types.Int64Value(intVal), nil - case schema.BoolAttribute: - boolVal, err := strconv.ParseBool(valueStr) - if err != nil { - diags.AddError( - "Import Value Conversion Error", - fmt.Sprintf("Failed to parse %q as a boolean: %s", valueStr, err), - ) - return nil, diags - } - return types.BoolValue(boolVal), nil - case schema.Float64Attribute: - floatVal, err := strconv.ParseFloat(valueStr, 64) - if err != nil { - diags.AddError( - "Import Value Conversion Error", - fmt.Sprintf("Failed to parse %q as a float: %s", valueStr, err), - ) - return nil, diags - } - return types.Float64Value(floatVal), nil - default: - // For complex types like List, Object, etc., a simple string conversion is not feasible. - // The assumption is that import IDs will only contain primitive types. - diags.AddError( - "Unsupported Import Attribute Type", - fmt.Sprintf("Importing attributes of type %T is not supported. This is a provider developer issue.", attr), - ) - return nil, diags - } -} - -// addDefaultValues checks for common provider-level defaults (project, region, zone) -// and adds them to the parsed attributes map if they were not already set from the import ID. 
-func addDefaultValues( - ctx context.Context, - parsedAttributes map[string]attr.Value, - config *transport_tpg.Config, - resourceSchema schema.Schema, - primaryRegex string, -) diag.Diagnostics { - var diags diag.Diagnostics - - defaults := map[string]func(*transport_tpg.Config) (string, error){ - "project": func(c *transport_tpg.Config) (string, error) { return c.Project, nil }, - "region": func(c *transport_tpg.Config) (string, error) { return c.Region, nil }, - "zone": func(c *transport_tpg.Config) (string, error) { return c.Zone, nil }, - } - - for field, getDefault := range defaults { - // Check if the primary regex expects this field. - if !strings.Contains(primaryRegex, fmt.Sprintf("(?P<%s>", field)) { - continue - } - // Check if the resource schema actually has this attribute. - if _, ok := resourceSchema.Attributes[field]; !ok { - continue - } - // Check if the value was already parsed from the import ID. - if _, ok := parsedAttributes[field]; ok { - continue - } - - // Get the default value from the provider configuration. 
- value, err := getDefault(config) - if err != nil { - diags.AddError( - fmt.Sprintf("Failed to get default value for %s", field), - err.Error(), - ) - continue - } - - if value != "" { - parsedAttributes[field] = types.StringValue(value) - } - } - - return diags -} diff --git a/mmv1/third_party/terraform/fwresource/framework_import_test.go b/mmv1/third_party/terraform/fwresource/framework_import_test.go deleted file mode 100644 index 278b55dd9a19..000000000000 --- a/mmv1/third_party/terraform/fwresource/framework_import_test.go +++ /dev/null @@ -1,183 +0,0 @@ -package fwresource - -import ( - "context" - "reflect" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/types" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func TestParseImportId(t *testing.T) { - testSchema := schema.Schema{ - Attributes: map[string]schema.Attribute{ - "project": schema.StringAttribute{ - Optional: true, - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, - "name": schema.StringAttribute{ - Required: true, - }, - "zone": schema.StringAttribute{ - Required: true, - }, - "instance_id": schema.Int64Attribute{ - Required: true, - PlanModifiers: []planmodifier.Int64{ - int64planmodifier.RequiresReplace(), - }, - }, - }, - } - - cases := map[string]struct { - importId string - idRegexes []string - resourceSchema schema.Schema - providerConfig *transport_tpg.Config - expectedAttributes map[string]attr.Value - expectError bool - 
errorContains string - }{ - "successfully parses full resource ID format": { - importId: "projects/my-project/zones/us-central1-a/instances/12345", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{}, - expectedAttributes: map[string]attr.Value{ - "project": types.StringValue("my-project"), - "zone": types.StringValue("us-central1-a"), - "instance_id": types.Int64Value(12345), - }, - }, - "successfully parses shorter ID format": { - importId: "my-project/us-central1-a/12345", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{}, - expectedAttributes: map[string]attr.Value{ - "project": types.StringValue("my-project"), - "zone": types.StringValue("us-central1-a"), - "instance_id": types.Int64Value(12345), - }, - }, - "successfully uses provider default for project": { - importId: "us-central1-a/my-instance/12345", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)/(?P[^/]+)", // Most specific - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{ - Project: "default-provider-project", - }, - expectedAttributes: map[string]attr.Value{ - "project": types.StringValue("default-provider-project"), - "zone": types.StringValue("us-central1-a"), - "name": types.StringValue("my-instance"), - "instance_id": types.Int64Value(12345), - }, - }, - "returns error for non-matching ID": { - importId: "invalid-id-format", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{}, - expectError: true, - errorContains: "doesn't match any of the accepted formats", - }, - "returns error for value that cannot be converted to 
type": { - importId: "projects/my-project/zones/us-central1-a/instances/not-an-integer", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{}, - expectError: true, - errorContains: "Failed to parse \"not-an-integer\" as an integer", - }, - "returns error for invalid regex pattern": { - importId: "any/id", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/+", // Invalid regex with unclosed bracket - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{}, - expectError: true, - errorContains: "could not compile regex", - }, - "warns about field in regex not present in schema": { - importId: "projects/my-project/zones/us-central1-a/instances/12345/extra/field", - idRegexes: []string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)/extra/(?P[^/]+)", - }, - resourceSchema: testSchema, - providerConfig: &transport_tpg.Config{}, - // We expect success, but with a warning diagnostic. The valid fields should still be parsed. - expectedAttributes: map[string]attr.Value{ - "project": types.StringValue("my-project"), - "zone": types.StringValue("us-central1-a"), - "instance_id": types.Int64Value(12345), - }, - }, - } - - for name, tc := range cases { - t.Run(name, func(t *testing.T) { - ctx := context.Background() - req := resource.ImportStateRequest{ - ID: tc.importId, - } - - parsedAttributes, diags := ParseImportId(ctx, req, tc.resourceSchema, tc.providerConfig, tc.idRegexes) - - if diags.HasError() { - if tc.expectError { - // Check if the error message contains the expected substring. - if tc.errorContains != "" { - found := false - for _, d := range diags.Errors() { - if strings.Contains(d.Detail(), tc.errorContains) { - found = true - break - } - } - if !found { - t.Fatalf("expected error to contain %q, but it did not. Got: %v", tc.errorContains, diags.Errors()) - } - } - // Correctly handled an expected error. 
- return - } - t.Fatalf("unexpected error: %v", diags) - } - - if tc.expectError { - t.Fatal("expected an error, but got none") - } - - if !reflect.DeepEqual(tc.expectedAttributes, parsedAttributes) { - t.Fatalf("incorrect attributes parsed.\n- got: %v\n- want: %v", parsedAttributes, tc.expectedAttributes) - } - }) - } -} diff --git a/mmv1/third_party/terraform/fwresource/framework_location.go b/mmv1/third_party/terraform/fwresource/framework_location.go index 8e227a83524c..565aa9089420 100644 --- a/mmv1/third_party/terraform/fwresource/framework_location.go +++ b/mmv1/third_party/terraform/fwresource/framework_location.go @@ -70,7 +70,7 @@ func (ld *LocationDescription) GetLocation() (types.String, error) { } func (ld *LocationDescription) GetRegion() (types.String, error) { - // TODO: Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 + // TODO(SarahFrench): Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 // For all checks in this function body // Region from resource config @@ -104,7 +104,7 @@ func (ld *LocationDescription) GetRegion() (types.String, error) { } func (ld *LocationDescription) GetZone() (types.String, error) { - // TODO: Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 + // TODO(SarahFrench): Make empty strings not ignored, see https://github.com/hashicorp/terraform-provider-google/issues/14447 // For all checks in this function body if !ld.ResourceZone.IsNull() && !ld.ResourceZone.IsUnknown() && !ld.ResourceZone.Equal(types.StringValue("")) { diff --git a/mmv1/third_party/terraform/fwtransport/framework_utils.go b/mmv1/third_party/terraform/fwtransport/framework_utils.go index 238670b1b581..b297b475cb25 100644 --- a/mmv1/third_party/terraform/fwtransport/framework_utils.go +++ b/mmv1/third_party/terraform/fwtransport/framework_utils.go @@ -1,29 +1,17 @@ package fwtransport import ( - 
"bytes" "context" - "encoding/json" "fmt" - "net/http" "os" - "reflect" - "regexp" "strings" - "time" "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/tfsdk" - "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-provider-google/google/fwmodels" - "github.com/hashicorp/terraform-provider-google/google/fwresource" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "google.golang.org/api/googleapi" ) const uaEnvVar = "TF_APPEND_USER_AGENT" @@ -50,7 +38,7 @@ func GenerateFrameworkUserAgentString(metaData *fwmodels.ProviderMetaModel, curr return currUserAgent } -func HandleNotFoundError(ctx context.Context, err error, state *tfsdk.State, resource string, diags *diag.Diagnostics) { +func HandleDatasourceNotFoundError(ctx context.Context, err error, state *tfsdk.State, resource string, diags *diag.Diagnostics) { if transport_tpg.IsGoogleApiErrorWithCode(err, 404) { tflog.Warn(ctx, fmt.Sprintf("Removing %s because it's gone", resource)) // The resource doesn't exist anymore @@ -59,310 +47,3 @@ func HandleNotFoundError(ctx context.Context, err error, state *tfsdk.State, res diags.AddError(fmt.Sprintf("Error when reading or editing %s", resource), err.Error()) } - -var DefaultRequestTimeout = 5 * time.Minute - -type SendRequestOptions struct { - Config *transport_tpg.Config - Method string - Project string - RawURL string - UserAgent string - Body map[string]any - Timeout time.Duration - Headers http.Header - ErrorRetryPredicates []transport_tpg.RetryErrorPredicateFunc - ErrorAbortPredicates []transport_tpg.RetryErrorPredicateFunc -} - -func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) 
map[string]interface{} { - reqHeaders := opt.Headers - if reqHeaders == nil { - reqHeaders = make(http.Header) - } - reqHeaders.Set("User-Agent", opt.UserAgent) - reqHeaders.Set("Content-Type", "application/json") - - if opt.Config.UserProjectOverride && opt.Project != "" { - // When opt.Project is "NO_BILLING_PROJECT_OVERRIDE" in the function GetCurrentUserEmail, - // set the header X-Goog-User-Project to be empty string. - if opt.Project == "NO_BILLING_PROJECT_OVERRIDE" { - reqHeaders.Set("X-Goog-User-Project", "") - } else { - // Pass the project into this fn instead of parsing it from the URL because - // both project names and URLs can have colons in them. - reqHeaders.Set("X-Goog-User-Project", opt.Project) - } - } - - if opt.Timeout == 0 { - opt.Timeout = DefaultRequestTimeout - } - - var res *http.Response - err := transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: func() error { - var buf bytes.Buffer - if opt.Body != nil { - err := json.NewEncoder(&buf).Encode(opt.Body) - if err != nil { - return err - } - } - - u, err := transport_tpg.AddQueryParams(opt.RawURL, map[string]string{"alt": "json"}) - if err != nil { - return err - } - req, err := http.NewRequest(opt.Method, u, &buf) - if err != nil { - return err - } - - req.Header = reqHeaders - res, err = opt.Config.Client.Do(req) - if err != nil { - return err - } - - if err := googleapi.CheckResponse(res); err != nil { - googleapi.CloseBody(res) - return err - } - - return nil - }, - Timeout: opt.Timeout, - ErrorRetryPredicates: opt.ErrorRetryPredicates, - ErrorAbortPredicates: opt.ErrorAbortPredicates, - }) - if err != nil { - diags.AddError("Error when sending HTTP request: ", err.Error()) - return nil - } - - if res == nil { - diags.AddError("Unable to parse server response. 
This is most likely a terraform problem, please file a bug at https://github.com/hashicorp/terraform-provider-google/issues.", "") - return nil - } - - // The defer call must be made outside of the retryFunc otherwise it's closed too soon. - defer googleapi.CloseBody(res) - - // 204 responses will have no body, so we're going to error with "EOF" if we - // try to parse it. Instead, we can just return nil. - if res.StatusCode == 204 { - return nil - } - result := make(map[string]interface{}) - if err := json.NewDecoder(res.Body).Decode(&result); err != nil { - diags.AddError("Error when sending HTTP request: ", err.Error()) - return nil - } - - return result -} - -type DefaultVars struct { - BillingProject types.String - Project types.String - Region types.String - Zone types.String -} - -func ReplaceVars(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string) string { - return ReplaceVarsRecursive(ctx, req, diags, data, config, linkTmpl, false, 0) -} - -// relaceVarsForId shortens variables by running them through GetResourceNameFromSelfLink -// this allows us to use long forms of variables from configs without needing -// custom id formats. For instance: -// accessPolicies/{{access_policy}}/accessLevels/{{access_level}} -// with values: -// access_policy: accessPolicies/foo -// access_level: accessPolicies/foo/accessLevels/bar -// becomes accessPolicies/foo/accessLevels/bar -func ReplaceVarsForId(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string) string { - return ReplaceVarsRecursive(ctx, req, diags, data, config, linkTmpl, true, 0) -} - -// ReplaceVars must be done recursively because there are baseUrls that can contain references to regions -// (eg cloudrun service) there aren't any cases known for 2+ recursion but we will track a run away -// substitution as 10+ calls to allow for future use cases. 
-func ReplaceVarsRecursive(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string, shorten bool, depth int) string { - if depth > 10 { - diags.AddError("url building error", "Recursive substitution detected.") - } - - // https://github.com/google/re2/wiki/Syntax - re := regexp.MustCompile("{{([%[:word:]]+)}}") - f := BuildReplacementFunc(ctx, re, req, diags, data, config, linkTmpl, shorten) - if diags.HasError() { - return "" - } - final := re.ReplaceAllStringFunc(linkTmpl, f) - - if re.Match([]byte(final)) { - return ReplaceVarsRecursive(ctx, req, diags, data, config, final, shorten, depth+1) - } - - return final -} - -// This function replaces references to Terraform properties (in the form of {{var}}) with their value in Terraform -// It also replaces {{project}}, {{project_id_or_project}}, {{region}}, and {{zone}} with their appropriate values -// This function supports URL-encoding the result by prepending '%' to the field name e.g. 
{{%var}} -func BuildReplacementFunc(ctx context.Context, re *regexp.Regexp, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string, shorten bool) func(string) string { - var project, region, zone string - var projectID types.String - - if strings.Contains(linkTmpl, "{{project}}") { - project = fwresource.GetProjectFramework(data.Project, types.StringValue(config.Project), diags).ValueString() - if diags.HasError() { - return nil - } - if shorten { - project = strings.TrimPrefix(project, "projects/") - } - } - - if strings.Contains(linkTmpl, "{{project_id_or_project}}") { - var diagInfo diag.Diagnostics - switch req.(type) { - case resource.CreateRequest: - pReq := req.(resource.CreateRequest) - diagInfo = pReq.Plan.GetAttribute(ctx, path.Root("project_id"), &projectID) - case resource.UpdateRequest: - pReq := req.(resource.UpdateRequest) - diagInfo = pReq.Plan.GetAttribute(ctx, path.Root("project_id"), &projectID) - case resource.ReadRequest: - sReq := req.(resource.ReadRequest) - diagInfo = sReq.State.GetAttribute(ctx, path.Root("project_id"), &projectID) - case resource.DeleteRequest: - sReq := req.(resource.DeleteRequest) - diagInfo = sReq.State.GetAttribute(ctx, path.Root("project_id"), &projectID) - } - diags.Append(diagInfo...) 
- if diags.HasError() { - return nil - } - if projectID.ValueString() != "" { - project = fwresource.GetProjectFramework(data.Project, types.StringValue(config.Project), diags).ValueString() - if diags.HasError() { - return nil - } - } - if shorten { - project = strings.TrimPrefix(project, "projects/") - projectID = types.StringValue(strings.TrimPrefix(projectID.ValueString(), "projects/")) - } - } - - if strings.Contains(linkTmpl, "{{region}}") { - region = fwresource.GetRegionFramework(data.Region, types.StringValue(config.Region), diags).ValueString() - if diags.HasError() { - return nil - } - if shorten { - region = strings.TrimPrefix(region, "regions/") - } - } - - if strings.Contains(linkTmpl, "{{zone}}") { - zone = fwresource.GetRegionFramework(data.Zone, types.StringValue(config.Zone), diags).ValueString() - if diags.HasError() { - return nil - } - if shorten { - zone = strings.TrimPrefix(region, "zones/") - } - } - - f := func(s string) string { - - m := re.FindStringSubmatch(s)[1] - if m == "project" { - return project - } - if m == "project_id_or_project" { - if projectID.ValueString() != "" { - return projectID.ValueString() - } - return project - } - if m == "region" { - return region - } - if m == "zone" { - return zone - } - if string(m[0]) == "%" { - var v types.String - var diagInfo diag.Diagnostics - switch req.(type) { - case resource.CreateRequest: - pReq := req.(resource.CreateRequest) - diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m[1:]), &v) - case resource.UpdateRequest: - pReq := req.(resource.UpdateRequest) - diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m[1:]), &v) - case resource.ReadRequest: - sReq := req.(resource.ReadRequest) - diagInfo = sReq.State.GetAttribute(ctx, path.Root(m[1:]), &v) - case resource.DeleteRequest: - sReq := req.(resource.DeleteRequest) - diagInfo = sReq.State.GetAttribute(ctx, path.Root(m[1:]), &v) - } - //an error here means the attribute was not found, we want to do nothing in that case - if 
!diagInfo.HasError() { - diags.Append(diagInfo...) - if v.ValueString() != "" { - if shorten { - return tpgresource.GetResourceNameFromSelfLink(fmt.Sprintf("%v", v.ValueString())) - } else { - return fmt.Sprintf("%v", v.ValueString()) - } - } - } - } else { - var v types.String - var diagInfo diag.Diagnostics - switch req.(type) { - case resource.CreateRequest: - pReq := req.(resource.CreateRequest) - diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m), &v) - case resource.UpdateRequest: - pReq := req.(resource.UpdateRequest) - diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m), &v) - case resource.ReadRequest: - sReq := req.(resource.ReadRequest) - diagInfo = sReq.State.GetAttribute(ctx, path.Root(m), &v) - case resource.DeleteRequest: - sReq := req.(resource.DeleteRequest) - diagInfo = sReq.State.GetAttribute(ctx, path.Root(m), &v) - } - //an error here means the attribute was not found, we want to do nothing in that case - if !diagInfo.HasError() { - diags.Append(diagInfo...) - if v.ValueString() != "" { - if shorten { - return tpgresource.GetResourceNameFromSelfLink(fmt.Sprintf("%v", v.ValueString())) - } else { - return fmt.Sprintf("%v", v.ValueString()) - } - } - } - } - - // terraform-google-conversion doesn't provide a provider config in tests. - if config != nil { - // Attempt to draw values from the provider config if it's present. 
- if f := reflect.Indirect(reflect.ValueOf(config)).FieldByName(m); f.IsValid() { - return f.String() - } - } - return "" - } - - return f -} diff --git a/mmv1/third_party/terraform/fwvalidators/framework_validators.go b/mmv1/third_party/terraform/fwvalidators/framework_validators.go index eed3a32e98f9..b0da8417591e 100644 --- a/mmv1/third_party/terraform/fwvalidators/framework_validators.go +++ b/mmv1/third_party/terraform/fwvalidators/framework_validators.go @@ -9,8 +9,6 @@ import ( "strings" "time" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" googleoauth "golang.org/x/oauth2/google" @@ -264,81 +262,3 @@ func (v jwtValidator) ValidateString(ctx context.Context, request validator.Stri func JWTValidator() validator.String { return jwtValidator{} } - -// stringValuesInSetValidator validates that all string elements in a set -// are present in the configured list of valid strings. -type stringValuesInSetValidator struct { - ValidStrings []string -} - -func (v stringValuesInSetValidator) Description(_ context.Context) string { - return fmt.Sprintf("all elements must be one of: %q", v.ValidStrings) -} - -func (v stringValuesInSetValidator) MarkdownDescription(ctx context.Context) string { - return v.Description(ctx) -} - -func (v stringValuesInSetValidator) ValidateSet(ctx context.Context, req validator.SetRequest, resp *validator.SetResponse) { - if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { - return - } - - valid := make(map[string]struct{}, len(v.ValidStrings)) - for _, s := range v.ValidStrings { - valid[s] = struct{}{} - } - - var elements []types.String - resp.Diagnostics.Append(req.ConfigValue.ElementsAs(ctx, &elements, false)...) 
- if resp.Diagnostics.HasError() { - return - } - - for _, el := range elements { - if _, ok := valid[el.ValueString()]; !ok { - resp.Diagnostics.AddAttributeError( - req.Path, - "Invalid Set Element", - fmt.Sprintf("Element %q is not a valid value. %s.", el.ValueString(), v.Description(ctx)), - ) - } - } -} - -func StringValuesInSet(validStrings ...string) validator.Set { - return stringValuesInSetValidator{ - ValidStrings: validStrings, - } -} - -type TopicPrefixValidator struct{} - -func (v TopicPrefixValidator) Description(ctx context.Context) string { - return "ensures the topic does not start with '//pubsub.googleapis.com/'" -} - -func (v TopicPrefixValidator) MarkdownDescription(ctx context.Context) string { - return "Ensures the topic does not start with `//pubsub.googleapis.com/`." -} - -func (v TopicPrefixValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { - if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { - return - } - - value := req.ConfigValue.ValueString() - forbiddenPrefix := "//pubsub.googleapis.com/" - - if strings.HasPrefix(value, forbiddenPrefix) { - resp.Diagnostics.AddAttributeError( - req.Path, - "Invalid Topic Format", - fmt.Sprintf("The topic must not start with '%s', please use the format projects/{project}/topics/{topic} instead.", forbiddenPrefix), - ) - } -} - -func NewTopicPrefixValidator() validator.String { - return TopicPrefixValidator{} -} diff --git a/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go b/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go index 8d8a285584fd..07e2378e8bf3 100644 --- a/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go +++ b/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go @@ -307,141 +307,3 @@ func TestBoundedDuration(t *testing.T) { }) } } - -func TestStringValuesInSetValidator(t *testing.T) { - t.Parallel() - - // Define the set of valid strings for the 
validator - validStrings := []string{"APPLE", "BANANA", "CHERRY"} - - stringSet := func(elems []string) types.Set { - if elems == nil { - return types.SetNull(types.StringType) - } - val, diags := types.SetValueFrom(context.Background(), types.StringType, elems) - if diags.HasError() { - t.Fatalf("Failed to create test set: %v", diags) - } - return val - } - - cases := map[string]struct { - ConfigValue types.Set - ExpectedErrorCount int - }{ - "valid set with one element": { - ConfigValue: stringSet([]string{"APPLE"}), - ExpectedErrorCount: 0, - }, - "valid set with multiple elements": { - ConfigValue: stringSet([]string{"BANANA", "CHERRY"}), - ExpectedErrorCount: 0, - }, - "valid empty set": { - ConfigValue: stringSet([]string{}), - ExpectedErrorCount: 0, - }, - "null set is valid": { - ConfigValue: stringSet(nil), - ExpectedErrorCount: 0, - }, - "unknown set is valid": { - ConfigValue: types.SetUnknown(types.StringType), - ExpectedErrorCount: 0, - }, - "invalid set with one element": { - ConfigValue: stringSet([]string{"DURIAN"}), - ExpectedErrorCount: 1, - }, - "invalid set with multiple elements": { - ConfigValue: stringSet([]string{"DURIAN", "ELDERBERRY"}), - ExpectedErrorCount: 2, - }, - "set with mixed valid and invalid elements": { - ConfigValue: stringSet([]string{"APPLE", "DURIAN", "CHERRY"}), - ExpectedErrorCount: 1, - }, - } - - for tn, tc := range cases { - tn, tc := tn, tc - t.Run(tn, func(t *testing.T) { - t.Parallel() - - req := validator.SetRequest{ - Path: path.Root("test_attribute"), - ConfigValue: tc.ConfigValue, - } - resp := &validator.SetResponse{ - Diagnostics: diag.Diagnostics{}, - } - v := fwvalidators.StringValuesInSet(validStrings...) - - v.ValidateSet(context.Background(), req, resp) - - if resp.Diagnostics.ErrorsCount() != tc.ExpectedErrorCount { - t.Errorf("Expected %d errors, but got %d. 
Errors: %v", tc.ExpectedErrorCount, resp.Diagnostics.ErrorsCount(), resp.Diagnostics.Errors()) - } - }) - } -} - -func TestTopicPrefixValidator(t *testing.T) { - t.Parallel() - - type testCase struct { - value types.String - expectError bool - errorContains string - } - - tests := map[string]testCase{ - "valid topic format": { - value: types.StringValue("projects/my-project/topics/my-topic"), - expectError: false, - }, - "invalid topic format - starts with pubsub prefix": { - value: types.StringValue("//pubsub.googleapis.com/projects/my-project/topics/my-topic"), - expectError: true, - errorContains: "The topic must not start with '//pubsub.googleapis.com/', please use the format projects/{project}/topics/{topic} instead.", - }, - } - - for name, test := range tests { - name, test := name, test - t.Run(name, func(t *testing.T) { - t.Parallel() - - request := validator.StringRequest{ - Path: path.Root("test_topic"), - PathExpression: path.MatchRoot("test_topic"), - ConfigValue: test.value, - } - response := validator.StringResponse{} - v := fwvalidators.NewTopicPrefixValidator() - - v.ValidateString(context.Background(), request, &response) - - if test.expectError && !response.Diagnostics.HasError() { - t.Errorf("expected error, got none for value: %q", test.value.ValueString()) - } - - if !test.expectError && response.Diagnostics.HasError() { - t.Errorf("got unexpected error for value: %q: %s", test.value.ValueString(), response.Diagnostics.Errors()) - } - - if test.errorContains != "" { - foundError := false - for _, err := range response.Diagnostics.Errors() { - if err.Detail() == test.errorContains { - foundError = true - break - } - } - if !foundError { - t.Errorf("expected error with detail %q, got none", test.errorContains) - } - } - }) - } -} diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index ab0546334c5d..9001ab5e21bb 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,10 +3,8 @@ 
module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.5 - cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 @@ -15,73 +13,73 @@ require ( github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/hashicorp/errwrap v1.0.0 github.com/hashicorp/go-cleanhttp v0.5.2 - github.com/hashicorp/go-cty v1.5.0 + github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-version v1.7.0 - github.com/hashicorp/terraform-json v0.25.0 - github.com/hashicorp/terraform-plugin-framework v1.15.0 - github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 + github.com/hashicorp/terraform-json v0.24.0 + github.com/hashicorp/terraform-plugin-framework v1.13.0 github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 - github.com/hashicorp/terraform-plugin-go v0.28.0 + github.com/hashicorp/terraform-plugin-go v0.26.0 github.com/hashicorp/terraform-plugin-log v0.9.0 - github.com/hashicorp/terraform-plugin-mux v0.20.0 - github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 + github.com/hashicorp/terraform-plugin-mux v0.17.0 + github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 github.com/hashicorp/terraform-plugin-testing v1.5.1 github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/hashstructure v1.1.0 github.com/sirupsen/logrus v1.8.1 github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba - golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 - golang.org/x/net v0.43.0 - golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.248.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c - 
google.golang.org/grpc v1.74.2 - google.golang.org/protobuf v1.36.7 - gopkg.in/yaml.v2 v2.4.0 + golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 + golang.org/x/net v0.39.0 + golang.org/x/oauth2 v0.29.0 + google.golang.org/api v0.229.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e + google.golang.org/grpc v1.71.1 + google.golang.org/protobuf v1.36.6 ) require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect - cel.dev/expr v0.24.0 // indirect - cloud.google.com/go v0.121.0 // indirect - cloud.google.com/go/compute/metadata v0.8.0 // indirect - cloud.google.com/go/iam v1.5.2 // indirect - cloud.google.com/go/longrunning v0.6.7 // indirect - cloud.google.com/go/monitoring v1.24.2 // indirect - github.com/ProtonMail/go-crypto v1.1.6 // indirect + cel.dev/expr v0.19.2 // indirect + cloud.google.com/go v0.120.0 // indirect + cloud.google.com/go/auth v0.16.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.6.0 // indirect + cloud.google.com/go/iam v1.5.0 // indirect + cloud.google.com/go/longrunning v0.6.6 // indirect + cloud.google.com/go/monitoring v1.24.1 // indirect + github.com/ProtonMail/go-crypto v1.1.3 // indirect github.com/agext/levenshtein v1.2.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/cloudflare/circl v1.6.0 // indirect - github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect + github.com/cloudflare/circl v1.3.7 // indirect + github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/fatih/color v1.16.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4 // indirect - 
github.com/go-jose/go-jose/v4 v4.0.5 // indirect - github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/golang/glog v1.2.5 // indirect + github.com/golang/glog v1.2.4 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.15.0 // indirect + github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect - github.com/hashicorp/go-plugin v1.6.3 // indirect + github.com/hashicorp/go-plugin v1.6.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect - github.com/hashicorp/hc-install v0.9.2 // indirect + github.com/hashicorp/hc-install v0.9.1 // indirect github.com/hashicorp/hcl/v2 v2.23.0 // indirect github.com/hashicorp/logutils v1.0.0 // indirect - github.com/hashicorp/terraform-exec v0.23.0 // indirect - github.com/hashicorp/terraform-registry-address v0.2.5 // indirect + github.com/hashicorp/terraform-exec v0.22.0 // indirect + github.com/hashicorp/terraform-registry-address v0.2.4 // indirect github.com/hashicorp/terraform-svchost v0.1.1 // indirect github.com/hashicorp/yamux v0.1.1 // indirect github.com/kylelemons/godebug v1.1.0 // indirect @@ -95,29 +93,29 @@ require ( github.com/oklog/run v1.0.0 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect 
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/zclconf/go-cty v1.16.2 // indirect - github.com/zeebo/errs v1.4.0 // indirect + go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect - go.opentelemetry.io/otel v1.36.0 // indirect - go.opentelemetry.io/otel/metric v1.36.0 // indirect - go.opentelemetry.io/otel/sdk v1.36.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect - go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/crypto v0.41.0 // indirect - golang.org/x/mod v0.26.0 // indirect - golang.org/x/sync v0.16.0 // indirect - golang.org/x/sys v0.35.0 // indirect - golang.org/x/text v0.28.0 // indirect - golang.org/x/time v0.12.0 // indirect - golang.org/x/tools v0.35.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect + go.opentelemetry.io/otel v1.35.0 // indirect + go.opentelemetry.io/otel/metric v1.35.0 // indirect + go.opentelemetry.io/otel/sdk v1.35.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect + go.opentelemetry.io/otel/trace v1.35.0 // indirect + golang.org/x/crypto v0.37.0 // indirect + golang.org/x/mod v0.22.0 // indirect + golang.org/x/sync v0.13.0 // indirect + golang.org/x/sys v0.32.0 // indirect + golang.org/x/text v0.24.0 // indirect + golang.org/x/time v0.11.0 // indirect + golang.org/x/tools v0.22.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto 
v0.0.0-20250303144028-a0af3efb3deb // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index b4176222aeaf..7a39295adb53 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -1,42 +1,111 @@ bitbucket.org/creachadair/stringset v0.0.8 h1:gQqe4vs8XWgMyijfyKE6K8o4TcyGGrRXe0JvHgx5H+M= bitbucket.org/creachadair/stringset v0.0.8/go.mod h1:AgthVMyMxC/6FK1KBJ2ALdqkZObGN8hOetgpwXyMn34= -cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= -cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +bitbucket.org/creachadair/stringset v0.0.14 h1:t1ejQyf8utS4GZV/4fM+1gvYucggZkfhb+tMobDxYOE= +bitbucket.org/creachadair/stringset v0.0.14/go.mod h1:Ej8fsr6rQvmeMDf6CCWMWGb14H9mz8kmDgPPTdiVT0w= +cel.dev/expr v0.19.1 h1:NciYrtDRIR0lNCnH1LFJegdjspNx9fI59O7TWcua/W4= +cel.dev/expr v0.19.1/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cel.dev/expr v0.19.2 h1:V354PbqIXr9IQdwy4SYA4xa0HXaWq1BUPAGzugBY5V4= +cel.dev/expr v0.19.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cel.dev/expr v0.23.1 h1:K4KOtPCJQjVggkARsjG9RWXP6O4R73aHeJMa/dmCQQg= +cel.dev/expr v0.23.1/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= -cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= -cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI= -cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= 
+cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE= +cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U= +cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= +cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= +cloud.google.com/go v0.120.1 h1:Z+5V7yd383+9617XDCyszmK5E4wJRJL+tquMfDj9hLM= +cloud.google.com/go v0.120.1/go.mod h1:56Vs7sf/i2jYM6ZL9NYlC82r04PThNcPS5YgFmb0rp8= +cloud.google.com/go/auth v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= +cloud.google.com/go/auth v0.15.0/go.mod 
h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= +cloud.google.com/go/auth v0.16.0 h1:Pd8P1s9WkcrBE2n/PhAwKsdrR35V3Sg2II9B+ndM3CU= +cloud.google.com/go/auth v0.16.0/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= +cloud.google.com/go/auth/oauth2adapt v0.2.7 h1:/Lc7xODdqcEw8IrZ9SvwnlLX6j9FHQM74z6cBk9Rw6M= +cloud.google.com/go/auth/oauth2adapt v0.2.7/go.mod h1:NTbTTzfvPl1Y3V1nPpOgl2w6d/FjO7NNUQaWSox6ZMc= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/bigtable v1.33.0 h1:2BDaWLRAwXO14DJL/u8crbV2oUbMZkIa2eGq8Yao1bk= +cloud.google.com/go/bigtable v1.33.0/go.mod h1:HtpnH4g25VT1pejHRtInlFPnN5sjTxbQlsYBjh9t5l0= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= -cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA= -cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw= +cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= +cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= 
+cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/iam v1.2.2 h1:ozUSofHUGf/F4tCNy/mu9tHLTaxZFLOUiKzjcgWHGIA= +cloud.google.com/go/iam v1.2.2/go.mod h1:0Ys8ccaZHdI1dEUilwzqng/6ps2YB6vRsjIe00/+6JY= +cloud.google.com/go/iam v1.5.0 h1:QlLcVMhbLGOjRcGe6VTGGTyQib8dRLK2B/kYNV0+2xs= +cloud.google.com/go/iam v1.5.0/go.mod h1:U+DOtKQltF/LxPEtcDLoobcsZMilSRwR7mgNL7knOpo= cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= +cloud.google.com/go/longrunning v0.6.2 h1:xjDfh1pQcWPEvnfjZmwjKQEcHnpz6lHjfy7Fo0MK+hc= +cloud.google.com/go/longrunning v0.6.2/go.mod h1:k/vIs83RN4bE3YCswdXC5PFfWVILjm3hpEUlSko4PiI= +cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= +cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= +cloud.google.com/go/monitoring v1.21.2 h1:FChwVtClH19E7pJ+e0xUhJPGksctZNVOk2UhMmblmdU= +cloud.google.com/go/monitoring v1.21.2/go.mod h1:hS3pXvaG8KgWTSz+dAdyzPrGUYmi2Q+WFX8g2hqVEZU= +cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= +cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod 
h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= -github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= -github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= 
+github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= +github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.2.0 h1:+PhXXn4SPGd+qk76TlEePBfOfivE0zkWFenhGhFLzWs= +github.com/ProtonMail/go-crypto v1.2.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= github.com/cenkalti/backoff v2.2.1+incompatible 
h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= @@ -44,25 +113,40 @@ github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk= -github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= +github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= -github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3 
h1:boJj011Hh+874zpIySeApCX4GeOjPl9qhRF3QuIZq+Q= +github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/creachadair/staticfile v0.1.2/go.mod h1:a3qySzCIXEprDGxk6tSxSI+dBBdLzqeBOMhZ+o2d3pM= -github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= -github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= +github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dnaeon/go-vcr v1.0.1 h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= +github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= +github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane 
v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= @@ -75,25 +159,36 @@ github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2T github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4 h1:R+19WKQClnfMXS60cP5BmMe1wjZ4u0evY2p2Ar0ZTXo= github.com/gammazero/deque v0.0.0-20180920172122-f6adf94963e4/go.mod h1:GeIq9qoE43YdGnDXURnmKTnGg15pQz4mYkXSTChbneI= +github.com/gammazero/deque v1.0.0 h1:LTmimT8H7bXkkCy6gZX7zNLtkbz4NdS2z8LZuor3j34= +github.com/gammazero/deque v1.0.0/go.mod h1:iflpYvtGfM3U8S8j+sZEKIak3SAKYpA5/SQewgfXDKo= github.com/gammazero/workerpool v0.0.0-20181230203049-86a96b5d5d92 h1:EipXK6U05IQ2wtuFRn4k3h0+2lXypzItoXGVyf4r9Io= github.com/gammazero/workerpool 
v0.0.0-20181230203049-86a96b5d5d92/go.mod h1:w9RqFVO2BM3xwWEcAB8Fwp0OviTBBEiRmSBDfbXnd3w= +github.com/gammazero/workerpool v1.1.3 h1:WixN4xzukFoN0XSeXF6puqEqFTl2mECI9S6W44HWy9Q= +github.com/gammazero/workerpool v1.1.3/go.mod h1:wPjyBLDbyKnUn2XwwyD3EEwo9dHutia9/fwNmSHWACc= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= -github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= -github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= -github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60= -github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k= -github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= -github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= +github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cjio8= +github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM= +github.com/go-git/go-git/v5 v5.13.0 h1:vLn5wlGIh/X78El6r3Jr+30W16Blk0CTcxTYcYPWi5E= +github.com/go-git/go-git/v5 v5.13.0/go.mod h1:Wjo7/JyVKtQgUNdXYXIepzWfJQkUEIGvkvVkiXRR/zw= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-jose/go-jose/v4 v4.0.4 h1:VsjPI33J0SB9vQM6PLmNjoHqMQNGPiZ0rHL7Ni7Q6/E= +github.com/go-jose/go-jose/v4 v4.0.4/go.mod h1:NKb5HO1EZccyMpiZNbdUw/14tiXNyUJh188dfnMCAfc= github.com/go-kit/kit v0.9.0/go.mod 
h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= -github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= @@ -102,51 +197,111 @@ github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3a github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.2.5 h1:DrW6hGnjIhtvhOIiAKT6Psh/Kd/ldepEa81DKeiRJ5I= -github.com/golang/glog v1.2.5/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= +github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache 
v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod 
h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.5 h1:VgzTY2jogw3xt39CusEnFJWm7rlsq5yL5q9XdLOuP5g= +github.com/googleapis/enterprise-certificate-proxy v0.3.5/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= -github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= +github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod 
h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= +github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g0= github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog= +github.com/hashicorp/go-plugin v1.6.2/go.mod 
h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= github.com/hashicorp/go-plugin v1.6.3 h1:xgHB+ZUSYeuJi96WtxEjzi23uh7YQpznjGh0U0UUrwg= github.com/hashicorp/go-plugin v1.6.3/go.mod h1:MRobyh+Wc/nYy1V4KAXUiYfzxoYhs7V1mlH1Z7iY2h0= github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= @@ -156,6 +311,10 @@ github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/C github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hc-install v0.9.1 h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ= +github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0= github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24= github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I= github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= @@ -168,41 +327,56 @@ github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEs github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= -github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ= -github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= github.com/hashicorp/terraform-plugin-framework v1.13.0 
h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= -github.com/hashicorp/terraform-plugin-framework v1.15.0 h1:LQ2rsOfmDLxcn5EeIwdXFtr03FVsNktbbBci8cOKdb4= -github.com/hashicorp/terraform-plugin-framework v1.15.0/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= -github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 h1:I/N0g/eLZ1ZkLZXUQ0oRSXa8YG/EF0CEuQP1wXdrzKw= -github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0/go.mod h1:t339KhmxnaF4SzdpxmqW8HnQBHVGYazwtfxU0qCs4eE= +github.com/hashicorp/terraform-plugin-framework v1.14.1 h1:jaT1yvU/kEKEsxnbrn4ZHlgcxyIfjvZ41BLdlLk52fY= +github.com/hashicorp/terraform-plugin-framework v1.14.1/go.mod h1:xNUKmvTs6ldbwTuId5euAtg37dTxuyj3LHS3uj7BHQ4= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 h1:LYz4bXh3t7bTEydXOmPDPupRRnA480B/9+jV8yZvxBA= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0/go.mod h1:+BVERsnfdlhYR2YkXMBtPnmn9UsL19U3qUtSZ+Y/5MY= -github.com/hashicorp/terraform-plugin-go v0.28.0 h1:zJmu2UDwhVN0J+J20RE5huiF3XXlTYVIleaevHZgKPA= -github.com/hashicorp/terraform-plugin-go v0.28.0/go.mod h1:FDa2Bb3uumkTGSkTFpWSOwWJDwA7bf3vdP3ltLDTH6o= +github.com/hashicorp/terraform-plugin-framework-validators v0.17.0 h1:0uYQcqqgW3BMyyve07WJgpKorXST3zkpzvrOnf3mpbg= +github.com/hashicorp/terraform-plugin-framework-validators v0.17.0/go.mod h1:VwdfgE/5Zxm43flraNa0VjcvKQOGVrcO4X8peIri0T0= +github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M= +github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= -github.com/hashicorp/terraform-plugin-mux v0.20.0 
h1:3QpBnI9uCuL0Yy2Rq/kR9cOdmOFNhw88A2GoZtk5aXM= -github.com/hashicorp/terraform-plugin-mux v0.20.0/go.mod h1:wSIZwJjSYk86NOTX3fKUlThMT4EAV1XpBHz9SAvjQr4= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 h1:NFPMacTrY/IdcIcnUB+7hsore1ZaRWU9cnB6jFoBnIM= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0/go.mod h1:QYmYnLfsosrxjCnGY1p9c7Zj6n9thnEE+7RObeYs3fA= +github.com/hashicorp/terraform-plugin-mux v0.17.0 h1:/J3vv3Ps2ISkbLPiZOLspFcIZ0v5ycUXCEQScudGCCw= +github.com/hashicorp/terraform-plugin-mux v0.17.0/go.mod h1:yWuM9U1Jg8DryNfvCp+lH70WcYv6D8aooQxxxIzFDsE= +github.com/hashicorp/terraform-plugin-mux v0.18.0 h1:7491JFSpWyAe0v9YqBT+kel7mzHAbO5EpxxT0cUL/Ms= +github.com/hashicorp/terraform-plugin-mux v0.18.0/go.mod h1:Ho1g4Rr8qv0qTJlcRKfjjXTIO67LNbDtM6r+zHUNHJQ= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 h1:7/iejAPyCRBhqAg3jOx+4UcAhY0A+Sg8B+0+d/GxSfM= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0/go.mod h1:TiQwXAjFrgBf5tg5rvBRz8/ubPULpU0HjSaVi5UoJf8= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1 h1:WNMsTLkZf/3ydlgsuXePa3jvZFwAJhruxTxP/c1Viuw= +github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1/go.mod h1:P6o64QS97plG44iFzSM6rAn6VJIC/Sy9a9IkEtl79K4= github.com/hashicorp/terraform-plugin-testing v1.5.1 h1:T4aQh9JAhmWo4+t1A7x+rnxAJHCDIYW9kXyo4sVO92c= github.com/hashicorp/terraform-plugin-testing v1.5.1/go.mod h1:dg8clO6K59rZ8w9EshBmDp1CxTIPu3yA4iaDpX1h5u0= +github.com/hashicorp/terraform-plugin-testing v1.12.0 h1:tpIe+T5KBkA1EO6aT704SPLedHUo55RenguLHcaSBdI= +github.com/hashicorp/terraform-plugin-testing v1.12.0/go.mod h1:jbDQUkT9XRjAh1Bvyufq+PEH1Xs4RqIdpOQumSgSXBM= +github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA= +github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU= github.com/hashicorp/terraform-registry-address v0.2.5 h1:2GTftHqmUhVOeuu9CW3kwDkRe4pcBDq0uuK5VJngU1M= github.com/hashicorp/terraform-registry-address v0.2.5/go.mod 
h1:PpzXWINwB5kuVS5CA7m1+eO2f1jKb5ZDIxrOPfpnGkg= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= +github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8= +github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns= +github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod 
h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -213,10 +387,14 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 h1:sIXJOMrYnQZJu7OB7ANSF4MYri2fTEGIsRLz6LwI4xE= +github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -230,23 +408,32 @@ github.com/mitchellh/go-testing-interface 
v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJ github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/hashstructure v1.1.0 h1:P6P1hdjqAAknpY/M1CGipelZgp+4y9ja9kmUZPXP+H0= github.com/mitchellh/hashstructure v1.1.0/go.mod h1:xUDAozZz0Wmdiufv0uyhnHkUTN6/6d8ulp4AwfLKrmA= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5 h1:8Q0qkMVC/MmWkpIdlvZgcv2o2jrlF6zqVOh7W5YHdMA= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= +github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= -github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd 
v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= @@ -254,15 +441,27 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8= -github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skeema/knownhosts v1.3.0 
h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= +github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= +github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= +github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= @@ -274,7 +473,9 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 
h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70= @@ -283,153 +484,450 @@ github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6 github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod 
h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= -go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= -go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= -go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= -go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= -go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= -go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= -go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= -go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= -go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= -go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 h1:rgMkmiGfix9vFJDcDi1PK8WEQP4FLQwLDfhp5ZLpFeE= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0/go.mod h1:ijPqXp5P6IRRByFVVg9DY8P5HkxkHE5ARIa+86aXPf4= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod 
h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= +go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= +go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= +go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= +go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ= +go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= +go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= +go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A= +go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= +go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= +go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= +go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk= +go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= +go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= +go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= 
+go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= +go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= +go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= +go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go4.org/netipx v0.0.0-20231129151722-fdeea329fbba h1:0b9z3AuHCjxk0x/opv64kcgZLBseWJUpBw5I82+2U4M= go4.org/netipx v0.0.0-20231129151722-fdeea329fbba/go.mod h1:PLyyIXexvUFg3Owu6p/WfdlivPbZJsZdgWZlrGope/Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= 
-golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= -golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 h1:ESSUROHIBHg7USnszlcdmjBEwdMj9VUvU+OPk4yl2mc= +golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8/go.mod 
h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= +golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= +golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod 
h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= -golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net 
v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= -golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= -golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod 
h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.28.0 h1:CrgCKl8PPAVtLnU3c+EDw6x11699EWlsDeWNWKdIOkc= +golang.org/x/oauth2 v0.28.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= +golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= +golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= -golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= +golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= -golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod 
h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4= -golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= +golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= +golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= -golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= -golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text 
v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod 
h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= -golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= +golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= +golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= +golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y= -google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod 
h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.226.0 h1:9A29y1XUD+YRXfnHkO66KggxHBZWg9LsTGqm7TkUvtQ= +google.golang.org/api v0.226.0/go.mod h1:WP/0Xm4LVvMOCldfvOISnWquSRWbG2kArDZcg+W2DbY= +google.golang.org/api v0.229.0 h1:p98ymMtqeJ5i3lIBMj5MpR9kzIIgzpHHh8vQ+vgAzx8= +google.golang.org/api v0.229.0/go.mod h1:wyDfmq5g1wYJWn29O22FDWN48P7Xcz0xz+LBpptYvB0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine 
v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod 
h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4= -google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= -google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= -google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= 
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28 h1:KJjNNclfpIkVqrZlTWcgOOaVQ00LdBnoEaRfkUx760s= +google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:mt9/MofW7AWQ+Gy179ChOnvmJatV8YHUmrcedo9CIFI= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= +google.golang.org/genproto v0.0.0-20250414145226-207652e42e2e h1:mYHFv3iX85YMwhGSaZS4xpkM8WQDmJUovz7yqsFrwDk= 
+google.golang.org/genproto v0.0.0-20250414145226-207652e42e2e/go.mod h1:TQT1YpH/rlDCS5+EuFaqPIMqDfuNMFR1OI8EcZJGgAk= +google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24= +google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw= +google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e h1:UdXH7Kzbj+Vzastr5nVfccbmFsmYNygVLSPk1pEfDoY= +google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e/go.mod h1:085qFyf2+XaZlRdCgKNCIZ3afY2p4HHZdoIRpId8F4A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb h1:TLPQVbx1GJ8VKZxz52VAxl1EBgKXXbTiU9Fc5fZeLn4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod 
h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= -google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg= +google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= +google.golang.org/grpc v1.71.1 h1:ffsFWr7ygTUscGPI0KKK6TLrGz0476KUvvsbqWK0rPI= +google.golang.org/grpc v1.71.1/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= +google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= +google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= 
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= -google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= +google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= 
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 h1:pvSYcI7HKOtqHTr4E9cRqVbgnh0+qnJZCrnmozltFVg= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/mmv1/third_party/terraform/provider/provider.go.tmpl b/mmv1/third_party/terraform/provider/provider.go.tmpl index 4ea3c6a9a0f9..67f405ee835d 100644 --- a/mmv1/third_party/terraform/provider/provider.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider.go.tmpl @@ -197,11 +197,6 @@ func Provider() *schema.Provider { // dcl 
transport_tpg.ContainerAwsCustomEndpointEntryKey: transport_tpg.ContainerAwsCustomEndpointEntry, transport_tpg.ContainerAzureCustomEndpointEntryKey: transport_tpg.ContainerAzureCustomEndpointEntry, - transport_tpg.ApikeysEndpointEntryKey: transport_tpg.ApikeysEndpointEntry, - transport_tpg.AssuredWorkloadsEndpointEntryKey: transport_tpg.AssuredWorkloadsEndpointEntry, - transport_tpg.CloudResourceManagerEndpointEntryKey: transport_tpg.CloudResourceManagerEndpointEntry, - transport_tpg.FirebaserulesEndpointEntryKey: transport_tpg.FirebaserulesEndpointEntry, - transport_tpg.RecaptchaEnterpriseEndpointEntryKey: transport_tpg.RecaptchaEnterpriseEndpointEntry, }, ProviderMetaSchema: map[string]*schema.Schema{ @@ -210,18 +205,20 @@ func Provider() *schema.Provider { Optional: true, }, }, -{{if ne $.Compiler "terraformgoogleconversion-codegen"}} + DataSourcesMap: DatasourceMap(), -{{- end }} ResourcesMap: ResourceMap(), } provider.ConfigureContextFunc = func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { return ProviderConfigure(ctx, d, provider) } + + transport_tpg.ConfigureDCLProvider(provider) + return provider } -{{ if ne $.Compiler "terraformgoogleconversion-codegen"}} + func DatasourceMap() map[string]*schema.Resource { datasourceMap, _ := DatasourceMapWithErrors() return datasourceMap @@ -234,7 +231,6 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { handwrittenIAMDatasources, ) } -{{- end }} func ResourceMap() map[string]*schema.Resource { resourceMap, _ := ResourceMapWithErrors() @@ -242,19 +238,12 @@ func ResourceMap() map[string]*schema.Resource { } func ResourceMapWithErrors() (map[string]*schema.Resource, error) { -{{- if ne $.Compiler "terraformgoogleconversion-codegen"}} return mergeResourceMaps( generatedResources, handwrittenResources, handwrittenIAMResources, dclResources, ) -{{- else }} - return mergeResourceMaps( - handwrittenTfplan2caiResources, - generatedResources, - ) -{{- end }} } func 
ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Provider) (interface{}, diag.Diagnostics) { @@ -331,6 +320,9 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr config.UniverseDomain = v.(string) } + // Configure DCL basePath + transport_tpg.ProviderDCLConfigure(d, &config) + // Replace hostname by the universe_domain field. if config.UniverseDomain != "" && config.UniverseDomain != "googleapis.com" { for key, basePath := range transport_tpg.DefaultBasePaths { @@ -342,6 +334,7 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if err != nil { return nil, diag.FromErr(err) } + transport_tpg.HandleDCLCustomEndpointDefaults(d) // Given that impersonate_service_account is a secondary auth method, it has // no conflicts to worry about. We pull the env var in a DefaultFunc. @@ -414,11 +407,6 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr // dcl config.ContainerAwsBasePath = d.Get(transport_tpg.ContainerAwsCustomEndpointEntryKey).(string) config.ContainerAzureBasePath = d.Get(transport_tpg.ContainerAzureCustomEndpointEntryKey).(string) - config.ApikeysBasePath = d.Get(transport_tpg.ApikeysEndpointEntryKey).(string) - config.AssuredWorkloadsBasePath = d.Get(transport_tpg.AssuredWorkloadsEndpointEntryKey).(string) - config.CloudResourceManagerBasePath = d.Get(transport_tpg.CloudResourceManagerEndpointEntryKey).(string) - config.FirebaserulesBasePath = d.Get(transport_tpg.FirebaserulesEndpointEntryKey).(string) - config.RecaptchaEnterpriseBasePath = d.Get(transport_tpg.RecaptchaEnterpriseEndpointEntryKey).(string) stopCtx, ok := schema.StopContext(ctx) if !ok { @@ -427,6 +415,7 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr if err := config.LoadAndValidate(stopCtx); err != nil { return nil, diag.FromErr(err) } + // Verify that universe domains match between credentials and configuration if v, ok := 
d.GetOk("universe_domain"); ok { if config.UniverseDomain == "" && v.(string) != "googleapis.com" { // v can't be "", as it wouldn't pass `ok` above diff --git a/mmv1/third_party/terraform/provider/provider_billing_project_test.go b/mmv1/third_party/terraform/provider/provider_billing_project_test.go index e9f36c187931..452ddbfa2b0b 100644 --- a/mmv1/third_party/terraform/provider/provider_billing_project_test.go +++ b/mmv1/third_party/terraform/provider/provider_billing_project_test.go @@ -265,6 +265,8 @@ resource "google_project" "project" { resource "google_project_service" "serviceusage" { project = google_project.project.project_id service = "serviceusage.googleapis.com" + + disable_on_destroy = false # Need it enabled in the project when the test disables services in post-test cleanup } `, context) } @@ -308,6 +310,7 @@ resource "google_project_service" "pubsub" { resource "google_project_service" "cloudresourcemanager" { project = google_project.project.project_id service = "cloudresourcemanager.googleapis.com" + disable_on_destroy = false # Need it enabled in the project when the test deletes the project resource in post-test cleanup } `, context) } diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index a9a0a2f2d81f..c6407beb19cb 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -27,15 +27,8 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_alloydb_supported_database_flags": alloydb.DataSourceAlloydbSupportedDatabaseFlags(), "google_alloydb_instance": alloydb.DataSourceAlloydbDatabaseInstance(), "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), - "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), - "google_artifact_registry_locations": 
artifactregistry.DataSourceGoogleArtifactRegistryLocations(), - "google_artifact_registry_npm_package": artifactregistry.DataSourceArtifactRegistryNpmPackage(), - "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), - "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), + "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), - "google_artifact_registry_tag": artifactregistry.DataSourceArtifactRegistryTag(), - "google_artifact_registry_tags": artifactregistry.DataSourceArtifactRegistryTags(), - "google_artifact_registry_version": artifactregistry.DataSourceArtifactRegistryVersion(), "google_apphub_discovered_workload": apphub.DataSourceApphubDiscoveredWorkload(), "google_app_engine_default_service_account": appengine.DataSourceGoogleAppEngineDefaultServiceAccount(), "google_apphub_application": apphub.DataSourceGoogleApphubApplication(), @@ -49,16 +42,12 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_beyondcorp_app_connection": beyondcorp.DataSourceGoogleBeyondcorpAppConnection(), "google_beyondcorp_app_connector": beyondcorp.DataSourceGoogleBeyondcorpAppConnector(), "google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(), - "google_beyondcorp_security_gateway": beyondcorp.DataSourceGoogleBeyondcorpSecurityGateway(), "google_billing_account": billing.DataSourceGoogleBillingAccount(), - "google_bigquery_table": bigquery.DataSourceGoogleBigQueryTable(), "google_bigquery_tables": bigquery.DataSourceGoogleBigQueryTables(), "google_bigquery_dataset": bigquery.DataSourceGoogleBigqueryDataset(), - "google_bigquery_datasets": bigquery.DataSourceGoogleBigqueryDatasets(), "google_bigquery_default_service_account": bigquery.DataSourceGoogleBigqueryDefaultServiceAccount(), 
"google_certificate_manager_certificates": certificatemanager.DataSourceGoogleCertificateManagerCertificates(), "google_certificate_manager_certificate_map": certificatemanager.DataSourceGoogleCertificateManagerCertificateMap(), - "google_certificate_manager_dns_authorization": certificatemanager.DataSourceGoogleCertificateManagerDnsAuthorization(), "google_cloudbuild_trigger": cloudbuild.DataSourceGoogleCloudBuildTrigger(), "google_cloudfunctions_function": cloudfunctions.DataSourceGoogleCloudFunctionsFunction(), "google_cloudfunctions2_function": cloudfunctions2.DataSourceGoogleCloudFunctions2Function(), @@ -73,10 +62,9 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_cloud_quotas_quota_info": cloudquotas.DataSourceGoogleCloudQuotasQuotaInfo(), "google_cloud_quotas_quota_infos": cloudquotas.DataSourceGoogleCloudQuotasQuotaInfos(), "google_cloud_run_locations": cloudrun.DataSourceGoogleCloudRunLocations(), - "google_cloud_run_service": cloudrun.DataSourceCloudRunService(), + "google_cloud_run_service": cloudrun.DataSourceGoogleCloudRunService(), "google_cloud_run_v2_job": cloudrunv2.DataSourceGoogleCloudRunV2Job(), "google_cloud_run_v2_service": cloudrunv2.DataSourceGoogleCloudRunV2Service(), - "google_cloud_run_v2_worker_pool": cloudrunv2.DataSourceGoogleCloudRunV2WorkerPool(), "google_composer_environment": composer.DataSourceGoogleComposerEnvironment(), "google_composer_user_workloads_config_map": composer.DataSourceGoogleComposerUserWorkloadsConfigMap(), "google_composer_user_workloads_secret": composer.DataSourceGoogleComposerUserWorkloadsSecret(), @@ -101,13 +89,12 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_compute_instance_serial_port": compute.DataSourceGoogleComputeInstanceSerialPort(), "google_compute_instance_template": compute.DataSourceGoogleComputeInstanceTemplate(), "google_compute_instance_guest_attributes": compute.DataSourceGoogleComputeInstanceGuestAttributes(), - 
"google_compute_interconnect_location": compute.DataSourceGoogleComputeInterconnectLocation(), - "google_compute_interconnect_locations": compute.DataSourceGoogleComputeInterconnectLocations(), + "google_compute_interconnect_location": compute.DataSourceGoogleComputeInterconnectLocation(), + "google_compute_interconnect_locations": compute.DataSourceGoogleComputeInterconnectLocations(), "google_compute_lb_ip_ranges": compute.DataSourceGoogleComputeLbIpRanges(), "google_compute_machine_types": compute.DataSourceGoogleComputeMachineTypes(), "google_compute_network": compute.DataSourceGoogleComputeNetwork(), "google_compute_networks": compute.DataSourceGoogleComputeNetworks(), - "google_compute_network_attachment": compute.DataSourceGoogleComputeNetworkAttachment(), "google_compute_network_endpoint_group": compute.DataSourceGoogleComputeNetworkEndpointGroup(), "google_compute_network_peering": compute.DataSourceComputeNetworkPeering(), "google_compute_node_types": compute.DataSourceGoogleComputeNodeTypes(), @@ -141,14 +128,12 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_container_engine_versions": container.DataSourceGoogleContainerEngineVersions(), "google_container_registry_image": containeranalysis.DataSourceGoogleContainerImage(), "google_container_registry_repository": containeranalysis.DataSourceGoogleContainerRepo(), - "google_dataplex_data_quality_rules": dataplex.DataSourceDataplexDataQualityRules(), "google_dataproc_metastore_service": dataprocmetastore.DataSourceDataprocMetastoreService(), "google_datastream_static_ips": datastream.DataSourceGoogleDatastreamStaticIps(), "google_dns_keys": dns.DataSourceDNSKeys(), "google_dns_managed_zone": dns.DataSourceDnsManagedZone(), "google_dns_managed_zones": dns.DataSourceDnsManagedZones(), "google_dns_record_set": dns.DataSourceDnsRecordSet(), - "google_gke_hub_membership": gkehub.DataSourceGoogleGkeHubMembership(), "google_gke_hub_membership_binding": 
gkehub2.DataSourceGoogleGkeHubMembershipBinding(), "google_gke_hub_feature": gkehub2.DataSourceGoogleGkeHubFeature(), "google_filestore_instance": filestore.DataSourceGoogleFilestoreInstance(), @@ -157,7 +142,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_iam_testable_permissions": resourcemanager.DataSourceGoogleIamTestablePermissions(), "google_iam_workload_identity_pool": iambeta.DataSourceIAMBetaWorkloadIdentityPool(), "google_iam_workload_identity_pool_provider": iambeta.DataSourceIAMBetaWorkloadIdentityPoolProvider(), - "google_iap_client": iap.DataSourceIapClient(), + "google_iap_client": iap.DataSourceGoogleIapClient(), "google_kms_crypto_key": kms.DataSourceGoogleKmsCryptoKey(), "google_kms_crypto_keys": kms.DataSourceGoogleKmsCryptoKeys(), "google_kms_crypto_key_version": kms.DataSourceGoogleKmsCryptoKeyVersion(), @@ -165,9 +150,11 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_kms_crypto_key_versions": kms.DataSourceGoogleKmsCryptoKeyVersions(), "google_kms_key_ring": kms.DataSourceGoogleKmsKeyRing(), "google_kms_key_rings": kms.DataSourceGoogleKmsKeyRings(), + {{- if ne $.TargetVersionName "ga" }} "google_kms_key_handle": kms.DataSourceGoogleKmsKeyHandle(), "google_kms_autokey_config": kms.DataSourceGoogleKmsAutokeyConfig(), "google_kms_key_handles": kms.DataSourceGoogleKmsKeyHandles(), + {{- end }} "google_kms_secret": kms.DataSourceGoogleKmsSecret(), "google_kms_secret_ciphertext": kms.DataSourceGoogleKmsSecretCiphertext(), {{- if ne $.TargetVersionName "ga" }} @@ -177,7 +164,6 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_firebase_hosting_channel": firebasehosting.DataSourceGoogleFirebaseHostingChannel(), "google_firebase_web_app": firebase.DataSourceGoogleFirebaseWebApp(), {{- end }} - "google_firestore_document": firestore.DataSourceGoogleFirestoreDocument(), "google_folder": resourcemanager.DataSourceGoogleFolder(), "google_folders": resourcemanager.DataSourceGoogleFolders(), 
"google_folder_organization_policy": resourcemanager.DataSourceGoogleFolderOrganizationPolicy(), @@ -186,7 +172,6 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_logging_project_cmek_settings": logging.DataSourceGoogleLoggingProjectCmekSettings(), "google_logging_project_settings": logging.DataSourceGoogleLoggingProjectSettings(), "google_logging_sink": logging.DataSourceGoogleLoggingSink(), - "google_lustre_instance": lustre.DataSourceLustreInstance(), "google_monitoring_notification_channel": monitoring.DataSourceMonitoringNotificationChannel(), "google_monitoring_cluster_istio_service": monitoring.DataSourceMonitoringServiceClusterIstio(), "google_monitoring_istio_canonical_service": monitoring.DataSourceMonitoringIstioCanonicalService(), @@ -261,10 +246,9 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_storage_bucket_object": storage.DataSourceGoogleStorageBucketObject(), "google_storage_bucket_objects": storage.DataSourceGoogleStorageBucketObjects(), "google_storage_bucket_object_content": storage.DataSourceGoogleStorageBucketObjectContent(), - "google_storage_control_folder_intelligence_config": storagecontrol.DataSourceStorageControlFolderIntelligenceConfig(), - "google_storage_control_organization_intelligence_config": storagecontrol.DataSourceStorageControlOrganizationIntelligenceConfig(), - "google_storage_control_project_intelligence_config": storagecontrol.DataSourceStorageControlProjectIntelligenceConfig(), - "google_storage_insights_dataset_config": storageinsights.DataSourceGoogleStorageInsightsDatasetConfig(), + "google_storage_control_folder_intelligence_config": storagecontrol.DataSourceGoogleStorageControlFolderIntelligenceConfig(), + "google_storage_control_organization_intelligence_config": storagecontrol.DataSourceGoogleStorageControlOrganizationIntelligenceConfig(), + "google_storage_control_project_intelligence_config": storagecontrol.DataSourceGoogleStorageControlProjectIntelligenceConfig(), 
"google_storage_object_signed_url": storage.DataSourceGoogleSignedUrl(), "google_storage_project_service_account": storage.DataSourceGoogleStorageProjectServiceAccount(), "google_storage_transfer_project_service_account": storagetransfer.DataSourceGoogleStorageTransferProjectServiceAccount(), @@ -272,6 +256,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_tags_tag_keys": tags.DataSourceGoogleTagsTagKeys(), "google_tags_tag_value": tags.DataSourceGoogleTagsTagValue(), "google_tags_tag_values": tags.DataSourceGoogleTagsTagValues(), + "google_tpu_tensorflow_versions": tpu.DataSourceTpuTensorflowVersions(), {{- if ne $.TargetVersionName "ga" }} "google_tpu_v2_runtime_versions": tpuv2.DataSourceTpuV2RuntimeVersions(), "google_tpu_v2_accelerator_types": tpuv2.DataSourceTpuV2AcceleratorTypes(), @@ -280,7 +265,6 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_memorystore_instance": memorystore.DataSourceMemorystoreInstance(), "google_memcache_instance": memcache.DataSourceMemcacheInstance(), "google_redis_instance": redis.DataSourceGoogleRedisInstance(), - "google_redis_cluster": redis.DataSourceRedisCluster(), "google_vertex_ai_index": vertexai.DataSourceVertexAIIndex(), "google_vmwareengine_cluster": vmwareengine.DataSourceVmwareengineCluster(), "google_vmwareengine_external_access_rule": vmwareengine.DataSourceVmwareengineExternalAccessRule(), @@ -293,7 +277,6 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_vmwareengine_subnet": vmwareengine.DataSourceVmwareengineSubnet(), "google_vmwareengine_vcenter_credentials": vmwareengine.DataSourceVmwareengineVcenterCredentials(), "google_compute_region_backend_service": compute.DataSourceGoogleComputeRegionBackendService(), - "google_network_management_connectivity_test_run": networkmanagement.DataSourceGoogleNetworkManagementTestRun(), // ####### END handwritten datasources ########### } @@ -358,6 +341,7 @@ var handwrittenResources = map[string]*schema.Resource{ 
"google_apigee_sharedflow_deployment": apigee.ResourceApigeeSharedFlowDeployment(), "google_apigee_flowhook": apigee.ResourceApigeeFlowhook(), "google_apigee_keystores_aliases_pkcs12": apigee.ResourceApigeeKeystoresAliasesPkcs12(), + "google_apigee_keystores_aliases_key_cert_file": apigee.ResourceApigeeKeystoresAliasesKeyCertFile(), "google_bigquery_table": bigquery.ResourceBigQueryTable(), "google_bigtable_gc_policy": bigtable.ResourceBigtableGCPolicy(), "google_bigtable_instance": bigtable.ResourceBigtableInstance(), @@ -448,6 +432,7 @@ var handwrittenResources = map[string]*schema.Resource{ "google_storage_bucket_object": storage.ResourceStorageBucketObject(), "google_storage_object_acl": storage.ResourceStorageObjectAcl(), "google_storage_default_object_acl": storage.ResourceStorageDefaultObjectAcl(), + "google_storage_notification": storage.ResourceStorageNotification(), "google_storage_transfer_job": storagetransfer.ResourceStorageTransferJob(), "google_tags_location_tag_binding": tags.ResourceTagsLocationTagBinding(), // ####### END handwritten resources ########### @@ -523,14 +508,3 @@ var handwrittenIAMResources = map[string]*schema.Resource{ "google_service_account_iam_policy": tpgiamresource.ResourceIamPolicy(resourcemanager.IamServiceAccountSchema, resourcemanager.NewServiceAccountIamUpdater, resourcemanager.ServiceAccountIdParseFunc), // ####### END non-generated IAM resources ########### } - -// UseGeneratedProducts uses every generated product to avoid "imported and not used" errors. -// This allows developers to define a product without any resources, datasources, or other files. 
-// -// This avoids goimports which takes a very long time, or needing to know if there are any other -// files in a given package when generating this file (which is done in parallel) -func UseGeneratedProducts() { - {{- range $service := $.GetMmv1ServicesInVersion $.Products }} - var _ = {{$service}}.ProductName - {{- end }} -} diff --git a/mmv1/third_party/terraform/provider/provider_validators.go b/mmv1/third_party/terraform/provider/provider_validators.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/provider/provider_validators.go rename to mmv1/third_party/terraform/provider/provider_validators.go.tmpl index 5d1081235716..67012b616821 100644 --- a/mmv1/third_party/terraform/provider/provider_validators.go +++ b/mmv1/third_party/terraform/provider/provider_validators.go.tmpl @@ -85,4 +85,4 @@ func ValidateServiceAccountEmail(v interface{}, k string) (warnings []string, er } return -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go index ad223abc2916..ea2703508e3b 100644 --- a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go +++ b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go @@ -33,31 +33,6 @@ func TestAccUniverseDomainDisk(t *testing.T) { }) } -func TestAccUniverseDomainDiskImage(t *testing.T) { - - universeDomain := envvar.GetTestUniverseDomainFromEnv(t) - zone := envvar.GetTestZoneFromEnv() - prefix := envvar.GetUniverseProjectPrefixFromEnv() - image_project := "" - - if prefix != "" { - image_project = prefix + ":debian-cloud" - } else { - image_project = "debian-cloud" - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeDiskDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccUniverseDomain_basic_disk_image(universeDomain, zone, image_project), - }, - }, - }) -} - func TestAccDefaultUniverseDomainDisk(t *testing.T) { universeDomain := "googleapis.com" @@ -110,28 +85,6 @@ resource "google_compute_instance_template" "instance_template" { `, universeDomain) } -func testAccUniverseDomain_basic_disk_image(universeDomain, zone, image_project string) string { - return fmt.Sprintf(` -provider "google" { - universe_domain = "%s" -} - -data "google_compute_images" "debian" { - project = "%s" - filter = "name=debian-12*" -} - -resource "google_compute_disk" "primary" { - name = "async-test-disk" - type = "pd-ssd" - zone = "%s" - - physical_block_size_bytes = 4096 - image = "projects/%s/global/images/${data.google_compute_images.debian.images[0].name}" -} -`, universeDomain, image_project, zone, image_project) -} - func testAccCheckComputeDiskDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for name, rs := range s.RootModule().Resources { diff --git a/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go b/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go index c359f0a7b1af..259fe944968c 100644 --- a/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go +++ b/mmv1/third_party/terraform/provider/universe/universe_domain_storage_test.go @@ -18,7 +18,6 @@ func TestAccUniverseDomainStorage(t *testing.T) { universeDomain := envvar.GetTestUniverseDomainFromEnv(t) bucketName := acctest.TestBucketName(t) - region := envvar.GetTestRegionFromEnv() acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -26,13 +25,13 @@ func TestAccUniverseDomainStorage(t *testing.T) { CheckDestroy: testAccStorageBucketDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccUniverseDomain_bucket(universeDomain, bucketName, region), + Config: testAccUniverseDomain_bucket(universeDomain, bucketName), 
}, }, }) } -func testAccUniverseDomain_bucket(universeDomain string, bucketName string, region string) string { +func testAccUniverseDomain_bucket(universeDomain string, bucketName string) string { return fmt.Sprintf(` provider "google" { universe_domain = "%s" @@ -40,7 +39,7 @@ provider "google" { resource "google_storage_bucket" "foo" { name = "%s" - location = "%s" + location = "US" } data "google_storage_bucket" "bar" { @@ -49,7 +48,7 @@ data "google_storage_bucket" "bar" { google_storage_bucket.foo, ] } -`, universeDomain, bucketName, region) +`, universeDomain, bucketName) } func testAccStorageBucketDestroyProducer(t *testing.T) func(s *terraform.State) error { diff --git a/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go deleted file mode 100644 index 327fca741b2a..000000000000 --- a/mmv1/third_party/terraform/scripts/teamcitytestscripts/main.go +++ /dev/null @@ -1,125 +0,0 @@ -package main - -import ( - "bufio" - "bytes" - "flag" - "fmt" - "io" - "os" - "os/exec" - "strings" - "time" -) - -func usage() string { - return `Usage: - teamcity-diff-test -test [-parallelism n] [-timeout t] - - Test names must be listed one per line on stdin. 
-` -} - -func main() { - testBinary := flag.String("test", "", "executable containing the tests to run") - parallelism := flag.Int("parallelism", 1, "number of tests to execute in parallel") - timeout := flag.String("timeout", "", "an optional per-test timeout") - flag.Parse() - - if testBinary == nil || *testBinary == "" { - fmt.Fprint(os.Stderr, usage()) - os.Exit(1) - } - - if _, err := os.Stat(*testBinary); err != nil { - fmt.Fprintf(os.Stderr, "Cannot find binary: %s\n", *testBinary) - os.Exit(1) - } - - testNames := make([]string, 0, 0) - stdInReader := bufio.NewReader(os.Stdin) - - for { - line, err := stdInReader.ReadString('\n') - if err != nil { - if err == io.EOF { - if strings.TrimSpace(line) != "" { - testNames = append(testNames, line) - } - break - } - fmt.Fprintf(os.Stderr, "error reading stdin: %s", err) - os.Exit(1) - } - - if strings.TrimSpace(line) != "" { - testNames = append(testNames, line) - } - } - - testQueue := make(chan string) - messages := make(chan string) - completed := make(chan struct{}) - - for i := 0; i < *parallelism; i++ { - go runWorker(testQueue, messages, completed, *testBinary, *timeout) - } - - go func() { - for _, testName := range testNames { - testQueue <- strings.TrimSpace(testName) - } - }() - - resultsCount := 0 - for { - select { - case message := <-messages: - fmt.Printf("%s", message) - case <-completed: - resultsCount++ - } - - if resultsCount == len(testNames) { - break - } - } -} - -func runWorker(inputQueue <-chan string, messages chan<- string, done chan<- struct{}, binaryName, timeout string) { - for { - select { - case testName := <-inputQueue: - test := NewTeamCityTest(testName) - //messages <- fmt.Sprintf("%s", test.FormatStartNotice()) - runTest(test, binaryName, timeout) - messages <- test.FormatTestOutput() - done <- struct{}{} - } - } -} - -func runTest(test *TeamCityTest, binaryName, timeout string) { - var out bytes.Buffer - var errOut bytes.Buffer - - test.Started = time.Now() - - args := []string{ 
- "-test.v", - "-test.run", - fmt.Sprintf("^%s$", test.Name), - } - if timeout != "" { - args = append(args, "-test.timeout") - args = append(args, timeout) - } - - cmd := exec.Command(binaryName, args...) - cmd.Stdout = &out - cmd.Stderr = &errOut - // Not sure what to do with errors here other than report them out to the runner. - cmd.Run() - - test.ParseTestRunnerOutput(out.String(), errOut.String()) -} diff --git a/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go deleted file mode 100644 index 93675c2e5384..000000000000 --- a/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go +++ /dev/null @@ -1,125 +0,0 @@ -package main - -import ( - "bytes" - "fmt" - "regexp" - "strings" - "time" -) - -const ( - TeamCityTimestampFormat = "2006-01-02T15:04:05.000" - TeamCityTestStarted = "##teamcity[testStarted timestamp='%s' name='%s']\n" - TeamCityTestFailed = "##teamcity[testFailed timestamp='%s' name='%s']\n" - TeamCityTestFinished = "##teamcity[testFinished timestamp='%s' name='%s']\n" - TeamCityTestFailedRace = "##teamcity[testFailed timestamp='%s' name='%s' message='Race detected!']\n" - TeamCityTestIgnored = "##teamcity[testIgnored timestamp='%s' name='%s']\n" - TeamCityTestFailedPanic = "##teamcity[testFailed timestamp='%s' name='%s' message='Test ended in panic.']\n" - TeamCityTestDiffFailed = "##teamcity[testDiffFailed timestamp='%s' name='%s']\n" - TeamCityTestStdOut = "##teamcity[testStdOut name='%s' out='%s']\n" - TeamCityTestStdErr = "##teamcity[testStdErr name='%s' out='%s']\n" -) - -var ( - // Looks for the final status line, accommodating both simple and full summaries. 
- end = regexp.MustCompile(`\n(PASS|SKIP|FAIL)(?:[\t\s]+(.*)\s+([0-9\.]+[a-z]+))?\s*$`) - diff = regexp.MustCompile(`\[Diff\] (.*)`) - paniced = regexp.MustCompile(`panic:\s+(.*)\s+\[recovered\]\n`) - //suite = regexp.MustCompile("^(ok|FAIL)\\s+([^\\s]+)\\s+([\\.\\d]+)s") - race = regexp.MustCompile("^WARNING: DATA RACE") -) - -type TeamCityTest struct { - Name, Output, ErrOutput, Duration string - Race, Fail, Skip, Pass, Diff bool - Started time.Time -} - -func NewTeamCityTest(testName string) *TeamCityTest { - return &TeamCityTest{ - Name: testName, - } -} - -func (test *TeamCityTest) ParseTestRunnerOutput(testOutput string, errOutput string) { - hasDataRace := race.MatchString(testOutput) - test.Race = hasDataRace - - resultDiff := diff.FindStringSubmatch(testOutput) - if resultDiff != nil { - test.Diff = true - } else { - resultLines := end.FindStringSubmatch(testOutput) - if resultLines != nil { - switch resultLines[1] { - case "PASS": - test.Pass = true - case "SKIP": - test.Skip = true - case "FAIL": - test.Fail = true - } - test.Duration = resultLines[3] - } - } - test.Output = testOutput - test.ErrOutput = errOutput -} - -func (test *TeamCityTest) FormatTestOutput() string { - now := time.Now().Format(TeamCityTimestampFormat) - - var output bytes.Buffer - - output.WriteString(fmt.Sprintf(TeamCityTestStarted, test.Started.Format(TeamCityTimestampFormat), test.Name)) - - output.WriteString(fmt.Sprintf(TeamCityTestStdOut, test.Name, escapeOutput(test.Output))) - output.WriteString(fmt.Sprintf(TeamCityTestStdErr, test.Name, escapeOutput(test.ErrOutput))) - - if test.Diff { - output.WriteString(fmt.Sprintf(TeamCityTestDiffFailed, now, test.Name)) - // have to fail so that teamcity catches failure correctly - output.WriteString(fmt.Sprintf(TeamCityTestFailedPanic, now, test.Name)) - output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) - return output.String() - - } - - if test.Fail { - // skip failures for diff tests - 
output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) - output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) - return output.String() - } - - if test.Race { - output.WriteString(fmt.Sprintf(TeamCityTestFailedRace, now, test.Name)) - output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) - return output.String() - } - - if test.Skip { - output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) - return output.String() - } - - if test.Pass { - output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) - return output.String() - } - - // instead of failing when something unexpected happens, we skip the test now - output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) - - return output.String() -} - -func escapeOutput(outputLines string) string { - newOutput := strings.Replace(outputLines, "|", "||", -1) - newOutput = strings.Replace(newOutput, "\n", "|n", -1) - newOutput = strings.Replace(newOutput, "'", "|'", -1) - newOutput = strings.Replace(newOutput, "]", "|]", -1) - newOutput = strings.Replace(newOutput, "[", "|[", -1) - return newOutput -} diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go index e53b1ae3328a..9c9ec415ebb2 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_condition_test.go @@ -25,7 +25,7 @@ func testAccAccessContextManagerAccessLevelCondition_basicTest(t *testing.T) { vpcName := fmt.Sprintf("test-vpc-%s", acctest.RandString(t, 10)) expected := map[string]interface{}{ - "members": []interface{}{fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", serviceAccountName, 
project)}, + "members": []interface{}{"user:test@google.com", "user:test2@google.com", fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", serviceAccountName, project)}, "devicePolicy": map[string]interface{}{ "requireCorpOwned": true, "osConstraints": []interface{}{ @@ -164,7 +164,7 @@ resource "google_compute_network" "vpc_network" { resource "google_access_context_manager_access_level_condition" "access-level-condition" { access_level = google_access_context_manager_access_level.test-access.name - members = ["serviceAccount:${google_service_account.created-later.email}"] + members = ["user:test@google.com", "user:test2@google.com", "serviceAccount:${google_service_account.created-later.email}"] negate = false device_policy { require_screen_lock = false diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl index 5a086b792c93..f2d5dd34bec7 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_access_level_test.go.tmpl @@ -46,13 +46,7 @@ func testAccAccessContextManagerAccessLevel_basicTest(t *testing.T) { } func testAccAccessContextManagerAccessLevel_fullTest(t *testing.T) { - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - "policy_title": "my policy", - "level_title_name": "level", - } + org := envvar.GetTestOrgFromEnv(t) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -60,7 +54,7 @@ func testAccAccessContextManagerAccessLevel_fullTest(t *testing.T) { CheckDestroy: testAccCheckAccessContextManagerAccessLevelDestroyProducer(t), 
Steps: []resource.TestStep{ { - Config: testAccAccessContextManagerAccessLevel_full(context), + Config: testAccAccessContextManagerAccessLevel_full(org, "my policy", "level"), }, { ResourceName: "google_access_context_manager_access_level.test-access", @@ -208,38 +202,23 @@ resource "google_access_context_manager_access_level" "test-access" { `, org, policyTitle, levelTitleName, levelTitleName) } -func testAccAccessContextManagerAccessLevel_full(context map[string]interface{}) string { - - return acctest.Nprintf(` +func testAccAccessContextManagerAccessLevel_full(org, policyTitle, levelTitleName string) string { + return fmt.Sprintf(` resource "google_access_context_manager_access_policy" "test-access" { - parent = "organizations/%{org_id}" - title = "%{policy_title}" -} - -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_service_account" "test-account" { - account_id = "tf-test-account%{random_suffix}" - display_name = "Test Service Account" - project = google_project.project.project_id + parent = "organizations/%s" + title = "%s" } resource "google_access_context_manager_access_level" "test-access" { parent = "accessPolicies/${google_access_context_manager_access_policy.test-access.name}" - name = "accessPolicies/${google_access_context_manager_access_policy.test-access.name}/accessLevels/%{level_title_name}" - title = "%{level_title_name}" + name = "accessPolicies/${google_access_context_manager_access_policy.test-access.name}/accessLevels/%s" + title = "%s" description = "hello" basic { combining_function = "AND" conditions { ip_subnetworks = ["192.0.4.0/24"] - members = ["serviceAccount:${google_service_account.test-account.email}"] + members = ["user:test@google.com", "user:test2@google.com"] negate = false device_policy { require_screen_lock = false @@ -257,5 +236,5 @@ resource 
"google_access_context_manager_access_level" "test-access" { } } } -`, context) +`, org, policyTitle, levelTitleName, levelTitleName) } diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go index 1c7aee210b8f..ac5216766ffa 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_gcp_user_access_binding_test.go @@ -41,15 +41,6 @@ func testAccAccessContextManagerGcpUserAccessBinding_basicTest(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"organization_id"}, }, - { - Config: testAccAccessContextManagerGcpUserAccessBinding_accessContextManagerGcpUserAccessBindingNamedExample(context), - }, - { - ResourceName: "google_access_context_manager_gcp_user_access_binding.gcp_user_access_binding", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"organization_id"}, - }, }, }) } @@ -101,6 +92,7 @@ resource "google_access_context_manager_gcp_user_access_binding" "gcp_user_acces google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, ] session_settings { + max_inactivity = "300s" session_length = "1800s" session_length_enabled = true session_reauth_method = "LOGIN" @@ -119,88 +111,7 @@ resource "google_access_context_manager_gcp_user_access_binding" "gcp_user_acces google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, ] session_settings { - session_length = "1800s" - session_length_enabled = true - session_reauth_method = "LOGIN" - use_oidc_max_age = false - } - } - dry_run_settings { - access_levels = [ - 
google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, - ] - } - } -} -`, context) -} - -func testAccAccessContextManagerGcpUserAccessBinding_accessContextManagerGcpUserAccessBindingNamedExample(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_identity_group" "group" { - display_name = "tf-test-my-identity-group%{random_suffix}" - - parent = "customers/%{cust_id}" - - group_key { - id = "tf-test-my-identity-group%{random_suffix}@%{org_domain}" - } - - labels = { - "cloudidentity.googleapis.com/groups.discussion_forum" = "" - } -} - -resource "google_access_context_manager_access_level" "tf_test_access_level_id_for_user_access_binding%{random_suffix}" { - parent = "accessPolicies/${google_access_context_manager_access_policy.access-policy.name}" - name = "accessPolicies/${google_access_context_manager_access_policy.access-policy.name}/accessLevels/tf_test_chromeos_no_lock%{random_suffix}" - title = "tf_test_chromeos_no_lock%{random_suffix}" - basic { - conditions { - device_policy { - require_screen_lock = true - os_constraints { - os_type = "DESKTOP_CHROME_OS" - } - } - regions = [ - "US", - ] - } - } -} - -resource "google_access_context_manager_access_policy" "access-policy" { - parent = "organizations/%{org_id}" - title = "my policy" -} - -resource "google_access_context_manager_gcp_user_access_binding" "gcp_user_access_binding" { - organization_id = "%{org_id}" - group_key = trimprefix(google_cloud_identity_group.group.id, "groups/") - access_levels = [ - google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, - ] - session_settings { - session_length = "1800s" - session_length_enabled = true - session_reauth_method = "LOGIN" - use_oidc_max_age = false - } - scoped_access_settings { - scope { - client_scope { - restricted_client_application { - name = "Cloud Console" - } - } - } - active_settings { - 
access_levels = [ - google_access_context_manager_access_level.tf_test_access_level_id_for_user_access_binding%{random_suffix}.name, - ] - session_settings { - max_inactivity = "400s" + max_inactivity = "300s" session_length = "1800s" session_length_enabled = true session_reauth_method = "LOGIN" diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl index e7b2f7cfccdc..08f854d44b90 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_service_perimeter_test.go.tmpl @@ -258,7 +258,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { ingress_policies { title = "ingress policy 2" ingress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } ingress_to { resources = ["*"] @@ -267,7 +267,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { ingress_policies { title = "ingress policy 3" ingress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } ingress_to { resources = ["*"] @@ -302,7 +302,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { egress_policies { title = "egress policy 2" egress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } egress_to { resources = ["*"] @@ -311,7 +311,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { egress_policies { title = "egress policy 3" egress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } egress_to { resources = ["*"] @@ -368,7 +368,7 @@ resource "google_access_context_manager_service_perimeter" 
"test-access" { ingress_policies { title = "ingress policy 2" ingress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } ingress_to { resources = ["*"] @@ -403,7 +403,7 @@ resource "google_access_context_manager_service_perimeter" "test-access" { egress_policies { title = "egress policy 2" egress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } egress_to { resources = ["*"] diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go index afa80fb2a486..3a1bb9693ff5 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go @@ -260,7 +260,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { ingress_policies { title = "ingress policy title 2" ingress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } ingress_to { resources = ["*"] @@ -286,7 +286,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { egress_policies { title = "egress policy title 2" egress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } egress_to { resources = ["*"] @@ -350,7 +350,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { ingress_policies { title = "ingress policy title 2" ingress_from { - identities = ["group:test@google.com"] + identities = ["user:test@google.com"] } ingress_to { resources = ["*"] @@ -376,7 +376,7 @@ resource "google_access_context_manager_service_perimeters" "test-access" { egress_policies { title = "egress policy title 2" egress_from { - identities = 
["group:test@google.com"] + identities = ["user:test@google.com"] } egress_to { resources = ["*"] diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go index 3e289182ae8d..4ccc3db083c3 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go @@ -55,10 +55,5 @@ func dataSourceAlloydbDatabaseClusterRead(d *schema.ResourceData, meta interface if d.Id() == "" { return fmt.Errorf("%s not found", id) } - - if err := d.Set("deletion_protection", nil); err != nil { - return fmt.Errorf("Error setting deletion_protection: %s", err) - } - return nil } diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go index c70906077b38..69541de1a6b7 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbDatabaseClusterDatasourceConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-cluster-ds"), } acctest.VcrTest(t, resource.TestCase{ @@ -38,8 +38,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go index 805a1cb8c1d3..cb1a5c799452 100644 --- 
a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbDatabaseInstanceDatasourceConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-mandatory-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -48,8 +48,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go index 4bff41abc781..6b92415691b9 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbBackup_update(t *testing.T) { random_suffix := acctest.RandString(t, 10) context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-backup-update-1"), "random_suffix": random_suffix, } @@ -63,8 +63,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -101,8 +99,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -123,7 +119,7 @@ func 
TestAccAlloydbBackup_createBackupWithMandatoryFields(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-backup-mandatory-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -153,8 +149,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" { } @@ -175,7 +169,7 @@ func TestAccAlloydbBackup_usingCMEK(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-backup-cmek-1"), "random_suffix": acctest.RandString(t, 10), "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-alloydb-backup-key1").CryptoKey.Name, } @@ -224,8 +218,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go index b3ce46da17df..4685bd07752a 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go @@ -19,7 +19,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-restore-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -34,7 +34,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.source", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { // Invalid input check - cannot pass in both sources @@ -54,7 +54,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.restored_from_backup", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "restore_backup_source"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "restore_backup_source"}, }, { // Validate PITR succeeds @@ -64,7 +64,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.restored_from_point_in_time", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "restore_continuous_backup_source"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "restore_continuous_backup_source"}, }, { // Make sure updates work without recreating the clusters @@ -89,8 +89,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -124,8 +122,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -159,8 +155,6 @@ resource "google_alloydb_cluster" "restored" { lifecycle { prevent_destroy = true } - - 
deletion_protection = false } data "google_project" "project" {} @@ -180,8 +174,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -209,8 +201,6 @@ resource "google_alloydb_cluster" "restored" { cluster = google_alloydb_cluster.source.name } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -232,8 +222,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -260,8 +248,6 @@ resource "google_alloydb_cluster" "restored_from_backup" { backup_name = google_alloydb_backup.default.name } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -285,8 +271,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -313,8 +297,6 @@ resource "google_alloydb_cluster" "restored_from_backup" { backup_name = google_alloydb_backup.default.name } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -331,8 +313,6 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { point_in_time = google_alloydb_backup.default.update_time } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -356,8 +336,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -389,8 +367,6 @@ resource "google_alloydb_cluster" "restored_from_backup" { recovery_window_days = 20 } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -412,8 +388,6 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { recovery_window_days = 
20 } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -437,8 +411,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -478,8 +450,6 @@ resource "google_alloydb_cluster" "restored_from_backup" { recovery_window_days = 20 } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -503,8 +473,6 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { recovery_window_days = 20 } - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -528,8 +496,6 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -555,8 +521,6 @@ resource "google_alloydb_cluster" "restored_from_backup" { restore_backup_source { backup_name = google_alloydb_backup.default.name } - - deletion_protection = false } resource "google_alloydb_cluster" "restored_from_point_in_time" { @@ -569,8 +533,6 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { cluster = google_alloydb_cluster.source.name point_in_time = google_alloydb_backup.default.update_time } - - deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go index d3f141b83b84..1babba6c1cd5 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go @@ -30,7 +30,7 @@ func TestAccAlloydbCluster_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", 
"location", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels"}, }, { Config: testAccAlloydbCluster_update(context), @@ -39,7 +39,7 @@ func TestAccAlloydbCluster_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -56,8 +56,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } - - deletion_protection = false } data "google_project" "project" { @@ -73,7 +71,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-upgrade-1"), "random_suffix": acctest.RandString(t, 10), } @@ -89,7 +87,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, }, { Config: testAccAlloydbCluster_afterUpgrade(context), @@ -98,7 +96,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, }, }, }) @@ -114,8 +112,6 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id } database_version = "POSTGRES_14" - - deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -144,8 +140,6 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id } database_version = "POSTGRES_15" - - deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -198,8 +192,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } - - deletion_protection = false } data "google_project" "project" { @@ -243,8 +235,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } - - deletion_protection = false } data "google_project" "project" { @@ -279,7 +269,7 @@ func TestAccAlloydbCluster_addAutomatedBackupPolicyAndInitialUser(t *testing.T) ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withInitialUserAndAutomatedBackupPolicy(context), @@ -288,7 +278,7 @@ func TestAccAlloydbCluster_addAutomatedBackupPolicyAndInitialUser(t *testing.T) ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -320,7 +310,7 @@ func TestAccAlloydbCluster_deleteAutomatedBackupPolicyAndInitialUser(t *testing. ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withoutInitialUserAndAutomatedBackupPolicy(context), @@ -329,7 +319,7 @@ func TestAccAlloydbCluster_deleteAutomatedBackupPolicyAndInitialUser(t *testing. ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -360,7 +350,7 @@ func TestAccAlloydbCluster_AutomatedBackupPolicyHandlesMidnight(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -407,9 +397,6 @@ resource "google_alloydb_cluster" "default" { test = "tf-test-alloydb-cluster%{random_suffix}" } } - - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -432,9 +419,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } - - deletion_protection = false 
- lifecycle { prevent_destroy = true } @@ -466,10 +450,9 @@ func TestAccAlloydbCluster_missingWeeklySchedule(t *testing.T) { Config: testAccAlloydbCluster_missingWeeklySchedule(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -491,11 +474,9 @@ resource "google_alloydb_cluster" "default" { count = 1 } labels = { - test = "tf-test-alloydb-cluster%{random_suffix}" - } + test = "tf-test-alloydb-cluster%{random_suffix}" + } } - - deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -561,19 +542,17 @@ func TestAccAlloydbCluster_deleteTimeBasedRetentionPolicy(t *testing.T) { Config: testAccAlloydbCluster_withTimeBasedRetentionPolicy(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccAlloydbCluster_withoutTimeBasedRetentionPolicy(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -609,9 +588,6 @@ resource "google_alloydb_cluster" "default" { retention_period = "4.5s" } } - - deletion_protection = false - lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -652,9 +628,6 @@ resource "google_alloydb_cluster" "default" { } } } - - deletion_protection = false - lifecycle { ignore_changes = [ 
automated_backup_policy[0].time_based_retention @@ -690,7 +663,7 @@ func TestAccAlloydbCluster_usingCMEK(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, }, }) @@ -707,8 +680,6 @@ resource "google_alloydb_cluster" "default" { encryption_config { kms_key_name = "%{kms_key_name}" } - - deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } resource "google_compute_network" "default" { @@ -744,7 +715,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, { Config: testAccAlloydbCluster_updateCMEKInAutomatedBackup(context), @@ -753,7 +724,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, { Config: testAccAlloydbCluster_usingCMEKallowDeletion(context), @@ -762,7 +733,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, }, }) @@ -790,11 +761,8 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } - - deletion_protection = false - lifecycle { - prevent_destroy = true + prevent_destroy = true } depends_on = 
[google_kms_crypto_key_iam_member.crypto_key] } @@ -835,9 +803,6 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } - - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -886,8 +851,6 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } - - deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -935,7 +898,7 @@ func TestAccAlloydbCluster_continuousBackup_enabledByDefault(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -971,7 +934,7 @@ func TestAccAlloydbCluster_continuousBackup_update_noChangeIfDefaultsSet(t *test ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context), @@ -984,7 +947,7 @@ func TestAccAlloydbCluster_continuousBackup_update_noChangeIfDefaultsSet(t *test ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1020,7 +983,7 @@ func TestAccAlloydbCluster_continuousBackup_noChangeIfRemoved(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", 
"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1065,7 +1028,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context), @@ -1078,7 +1041,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context2), @@ -1091,7 +1054,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1108,9 +1071,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } - - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -1138,9 +1098,6 @@ resource "google_alloydb_cluster" "default" { enabled = %{enabled} recovery_window_days = %{recovery_window_days} } - - deletion_protection = false - lifecycle { prevent_destroy = true } @@ -1185,7 +1142,7 @@ func 
TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, { Config: testAccAlloydbCluster_usingCMEKInClusterAndContinuousBackup(context2), @@ -1194,7 +1151,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupUsingCMEKAllowDeletion(context2), @@ -1203,7 +1160,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"cluster_id", "location"}, }, }, }) @@ -1224,11 +1181,8 @@ resource "google_alloydb_cluster" "default" { kms_key_name = "%{key_name}" } } - - deletion_protection = false - lifecycle { - prevent_destroy = true + prevent_destroy = true } depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -1262,8 +1216,6 @@ resource "google_alloydb_cluster" "default" { kms_key_name = "%{key_name}" } } - - deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -1298,10 +1250,9 @@ func TestAccAlloydbCluster_withNetworkConfig(t *testing.T) { Config: testAccAlloydbCluster_withNetworkConfig(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, 
+ ImportStateVerify: true, }, }, }) @@ -1315,8 +1266,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } - - deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1342,10 +1291,9 @@ func TestAccAlloydbCluster_withNetworkConfigAndAllocatedIPRange(t *testing.T) { Config: testAccAlloydbCluster_withNetworkConfigAndAllocatedIPRange(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -1360,8 +1308,6 @@ resource "google_alloydb_cluster" "default" { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" allocated_ip_range = google_compute_global_address.private_ip_alloc.name } - - deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1395,10 +1341,9 @@ func TestAccAlloydbCluster_withMaintenanceWindows(t *testing.T) { Config: testAccAlloydbCluster_withMaintenanceWindows(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -1423,8 +1368,6 @@ resource "google_alloydb_cluster" "default" { } } } - - deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1473,8 +1416,6 @@ resource "google_alloydb_cluster" "default" { day = "WEDNESDAY" } } - - deletion_protection = false } resource "google_compute_network" "default" { @@ -1504,8 +1445,6 @@ resource 
"google_alloydb_cluster" "default" { } } } - - deletion_protection = false } resource "google_compute_network" "default" { @@ -1548,8 +1487,6 @@ resource "google_alloydb_cluster" "default" { psc_config { psc_enabled = true } - - deletion_protection = false } data "google_project" "project" {} `, context) @@ -1578,7 +1515,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1587,7 +1524,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1596,7 +1533,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, }, }) @@ -1622,7 +1559,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeTrial(context), @@ -1631,7 +1568,7 @@ func 
TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1640,7 +1577,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, }, }, }) diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index 9280badf632b..a6a7d1050fff 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -12,7 +12,7 @@ func TestAccAlloydbInstance_update(t *testing.T) { random_suffix := acctest.RandString(t, 10) context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-update-1"), "random_suffix": random_suffix, } @@ -64,8 +64,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_compute_network" "default" { @@ -101,8 +99,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_compute_network" "default" { @@ -117,7 +113,7 @@ func 
TestAccAlloydbInstance_createInstanceWithMandatoryFields(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-mandatory-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -132,101 +128,6 @@ func TestAccAlloydbInstance_createInstanceWithMandatoryFields(t *testing.T) { }) } -// This test passes if we are able to create a primary instance STOP it and then START it back again -func TestAccAlloydbInstance_stopstart(t *testing.T) { - t.Parallel() - - suffix := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1") - - context := map[string]interface{}{ - "random_suffix": suffix, - "network_name": networkName, - } - - contextStop := map[string]interface{}{ - "random_suffix": suffix, - "network_name": networkName, - "activation_policy": "NEVER", - } - - contextStart := map[string]interface{}{ - "random_suffix": suffix, - "network_name": networkName, - "activation_policy": "ALWAYS", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckAlloydbInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccAlloydbInstance_createInstanceWithMandatoryFields(context), - }, - { - ResourceName: "google_alloydb_instance.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster", "instance_id", "reconciling", "update_time"}, - }, - { - Config: testAccAlloydbInstance_updateActivationPolicy(contextStop), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_alloydb_instance.default", "activation_policy", "NEVER"), - resource.TestCheckResourceAttr("google_alloydb_instance.default", "state", 
"STOPPED"), - ), - }, - { - ResourceName: "google_alloydb_instance.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster", "instance_id", "reconciling", "update_time", "labels", "terraform_labels"}, - }, - { - Config: testAccAlloydbInstance_updateActivationPolicy(contextStart), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_alloydb_instance.default", "activation_policy", "ALWAYS"), - resource.TestCheckResourceAttr("google_alloydb_instance.default", "state", "READY"), - ), - }, - { - ResourceName: "google_alloydb_instance.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster", "instance_id", "reconciling", "update_time", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccAlloydbInstance_updateActivationPolicy(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_alloydb_instance" "default" { - cluster = google_alloydb_cluster.default.name - instance_id = "tf-test-alloydb-instance%{random_suffix}" - instance_type = "PRIMARY" - activation_policy = "%{activation_policy}" -} - -resource "google_alloydb_cluster" "default" { - cluster_id = "tf-test-alloydb-cluster%{random_suffix}" - location = "us-central1" - network_config { - network = data.google_compute_network.default.id - } - - deletion_protection = false -} - -data "google_project" "project" {} - -data "google_compute_network" "default" { - name = "%{network_name}" -} -`, context) -} - func testAccAlloydbInstance_createInstanceWithMandatoryFields(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_alloydb_instance" "default" { @@ -241,8 +142,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" {} @@ -259,7 +158,7 @@ data "google_compute_network" "default" { context := 
map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-maximum-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -331,7 +230,7 @@ func TestAccAlloydbInstance_createPrimaryAndReadPoolInstance(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-readpool-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -370,8 +269,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" {} @@ -388,7 +285,7 @@ data "google_compute_network" "default" { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-instance-updatedb-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -455,8 +352,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" {} @@ -471,7 +366,7 @@ data "google_compute_network" "default" { func TestAccAlloydbInstance_createInstanceWithNetworkConfigAndAllocatedIPRange(t *testing.T) { t.Parallel() - testId := "alloydb-1" + testId := "alloydbinstance-network-config-1" addressName := acctest.BootstrapSharedTestGlobalAddress(t, testId) networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, testId) @@ -508,8 +403,6 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id allocated_ip_range 
= data.google_compute_global_address.private_ip_alloc.name } - - deletion_protection = false } data "google_compute_network" "default" { @@ -531,7 +424,7 @@ func TestAccAlloydbInstance_clientConnectionConfig(t *testing.T) { t.Parallel() suffix := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1") + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-clientconnectionconfig") context := map[string]interface{}{ "random_suffix": suffix, @@ -649,8 +542,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" {} @@ -682,8 +573,6 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } data "google_project" "project" {} @@ -700,7 +589,7 @@ func TestAccAlloydbInstance_networkConfig(t *testing.T) { t.Parallel() suffix := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1") + networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-networkconfig") context1 := map[string]interface{}{ "random_suffix": suffix, @@ -815,8 +704,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} @@ -855,8 +742,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} @@ -912,8 +797,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} `, context) @@ -941,8 +824,6 @@ resource "google_alloydb_cluster" 
"default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} `, context) @@ -998,8 +879,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} `, context) @@ -1031,151 +910,3 @@ func TestAccAlloydbInstance_updateInstanceWithPscInterfaceConfigs(t *testing.T) }, }) } - -func TestAccAlloydbInstance_updatePscAutoConnections(t *testing.T) { - t.Parallel() - - networkName := acctest.BootstrapSharedTestNetwork(t, "tf-test-alloydb-network-psc") - random_suffix := acctest.RandString(t, 10) - context := map[string]interface{}{ - "network_name": networkName, - "random_suffix": random_suffix, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckAlloydbInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccAlloydbInstance_pscAutoConnections(context), - }, - { - Config: testAccAlloydbInstance_updatePscAutoConnections(context), - }, - }, - }) -} - -func testAccAlloydbInstance_pscAutoConnections(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_alloydb_instance" "default" { - cluster = google_alloydb_cluster.default.name - instance_id = "tf-test-alloydb-instance%{random_suffix}" - instance_type = "PRIMARY" - machine_config { - cpu_count = 2 - } - psc_instance_config { - psc_auto_connections { - consumer_project = "${data.google_project.project.project_id}" - consumer_network = "projects/${data.google_project.project.project_id}/global/networks/%{network_name}" - } - } -} -resource "google_alloydb_cluster" "default" { - cluster_id = "tf-test-alloydb-cluster%{random_suffix}" - location = "us-central1" - psc_config { - psc_enabled = true - } - initial_user { - password 
= "tf-test-alloydb-cluster%{random_suffix}" - } - - deletion_protection = false -} -data "google_project" "project" {} -`, context) -} - -func testAccAlloydbInstance_updatePscAutoConnections(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_alloydb_instance" "default" { - cluster = google_alloydb_cluster.default.name - instance_id = "tf-test-alloydb-instance%{random_suffix}" - instance_type = "PRIMARY" - machine_config { - cpu_count = 2 - } -} -resource "google_alloydb_cluster" "default" { - cluster_id = "tf-test-alloydb-cluster%{random_suffix}" - location = "us-central1" - psc_config { - psc_enabled = true - } - initial_user { - password = "tf-test-alloydb-cluster%{random_suffix}" - } - - deletion_protection = false -} -data "google_project" "project" {} -`, context) -} - -func TestAccAlloydbInstance_createPrimaryAndReadPoolInstanceWithAllocatedIpRangeOverride(t *testing.T) { - t.Parallel() - - testId := "alloydb-1" - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, testId), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, testId), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckAlloydbInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccAlloydbInstance_createPrimaryAndReadPoolInstanceWithAllocatedIpRangeOverride(context), - }, - }, - }) -} - -func testAccAlloydbInstance_createPrimaryAndReadPoolInstanceWithAllocatedIpRangeOverride(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_alloydb_instance" "primary" { - cluster = google_alloydb_cluster.default.name - instance_id = "tf-test-alloydb-instance%{random_suffix}" - instance_type = "PRIMARY" -} - -resource "google_alloydb_instance" "read_pool" { - cluster = 
google_alloydb_cluster.default.name - instance_id = "tf-test-alloydb-instance%{random_suffix}-read" - instance_type = "READ_POOL" - read_pool_config { - node_count = 4 - } - network_config { - allocated_ip_range_override = data.google_compute_global_address.private_ip_alloc.name - } - depends_on = [google_alloydb_instance.primary] -} - -resource "google_alloydb_cluster" "default" { - cluster_id = "tf-test-alloydb-cluster%{random_suffix}" - location = "us-central1" - network_config { - network = data.google_compute_network.default.id - } - - deletion_protection = false -} - -data "google_project" "project" {} - -data "google_compute_network" "default" { - name = "%{network_name}" -} - -data "google_compute_global_address" "private_ip_alloc" { - name = "%{address_name}" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go index e3010a2c8454..9855f928c54d 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go @@ -12,7 +12,7 @@ import ( func TestAccAlloydbCluster_secondaryClusterMandatoryFields(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -28,7 +28,7 @@ func TestAccAlloydbCluster_secondaryClusterMandatoryFields(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", 
"reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -42,8 +42,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -72,8 +70,6 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -90,7 +86,7 @@ func TestAccAlloydbCluster_secondaryClusterMissingSecondaryConfig(t *testing.T) t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -115,8 +111,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -141,8 +135,6 @@ resource "google_alloydb_cluster" "secondary" { enabled = false } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -159,7 +151,7 @@ func TestAccAlloydbCluster_secondaryClusterDefinedSecondaryConfigButMissingClust t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -184,8 +176,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource 
"google_alloydb_instance" "primary" { @@ -213,8 +203,6 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -231,7 +219,7 @@ func TestAccAlloydbCluster_secondaryClusterDefinedSecondaryConfigButClusterTypeI t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -256,8 +244,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -286,8 +272,6 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -304,7 +288,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -320,7 +304,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", 
"annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterUpdate(context), @@ -329,7 +313,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -343,8 +327,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -377,8 +359,6 @@ resource "google_alloydb_cluster" "secondary" { foo = "bar" } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -394,7 +374,7 @@ func TestAccAlloydbCluster_secondaryClusterUsingCMEK(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-bootstrap-alloydb-secondary-key1").CryptoKey.Name, } @@ -411,7 +391,7 @@ func TestAccAlloydbCluster_secondaryClusterUsingCMEK(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", 
"annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -425,8 +405,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -459,8 +437,6 @@ resource "google_alloydb_cluster" "secondary" { kms_key_name = "%{kms_key_name}" } - deletion_protection = false - depends_on = [ google_alloydb_instance.primary, google_kms_crypto_key_iam_member.crypto_key @@ -486,7 +462,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfig(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -502,7 +478,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfig(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -516,8 +492,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ 
-546,8 +520,6 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -564,8 +536,8 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfigAndAllocatedIPRange( t.Parallel() context := map[string]interface{}{ - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydb-1"), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydbinstance-network-config-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -581,7 +553,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfigAndAllocatedIPRange( ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -596,8 +568,6 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -627,8 +597,6 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -651,7 +619,7 @@ func 
TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -666,7 +634,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -675,7 +643,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -689,8 +657,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ 
-721,8 +687,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -756,8 +720,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -781,8 +743,6 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } - - deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -814,7 +774,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -829,7 +789,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(context), @@ -838,7 +798,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -852,8 +812,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -881,8 +839,6 @@ resource "google_alloydb_cluster" "secondary" { labels = { foo = "bar" } - - deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -914,7 +870,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -929,7 +885,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: 
testAccAlloydbCluster_secondaryClusterPromote(context), @@ -938,7 +894,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(context), @@ -947,7 +903,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -966,8 +922,6 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } - - deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -999,7 +953,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-east1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -1014,7 +968,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1023,7 +977,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndUpdate(context), @@ -1032,7 +986,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", 
"reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1046,8 +1000,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1076,7 +1028,6 @@ resource "google_alloydb_cluster" "secondary" { foo = "bar" } - deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1107,8 +1058,8 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -1123,7 +1074,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedIPRange(context), @@ -1132,7 +1083,7 
@@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1147,8 +1098,6 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1180,8 +1129,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -1220,8 +1167,6 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1246,8 +1191,6 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } - - deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1283,7 +1226,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-south1", - "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "hour": 23, } @@ -1299,7 +1242,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1308,7 +1251,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndAddAutomatedBackupPolicyAndInitialUser(context), @@ -1317,7 +1260,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", 
"restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1326,7 +1269,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1340,8 +1283,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1395,8 +1336,6 @@ resource "google_alloydb_cluster" "secondary" { test = "tf-test-alloydb-secondary-cluster%{random_suffix}" } } - - deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1428,7 +1367,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-south1", - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ 
-1443,7 +1382,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1452,7 +1391,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithTimeBasedRetentionPolicy(context), @@ -1461,7 +1400,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", 
"restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithoutTimeBasedRetentionPolicy(context), @@ -1470,7 +1409,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1484,8 +1423,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1534,9 +1471,6 @@ resource "google_alloydb_cluster" "secondary" { retention_period = "4.5s" } } - - deletion_protection = false - lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -1574,8 +1508,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1621,9 +1553,6 @@ resource "google_alloydb_cluster" "secondary" { } } } - - deletion_protection = false - lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -1660,7 +1589,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "secondary_cluster_location": "us-south1", - "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -1675,7 +1604,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1684,7 +1613,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(context), @@ -1693,7 +1622,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", 
"restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1707,8 +1636,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1734,7 +1661,6 @@ resource "google_alloydb_cluster" "secondary" { recovery_window_days = 14 } - deletion_protection = false } resource "google_alloydb_instance" "secondary" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go index 4554df032a82..eddf0b2c77fe 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go @@ -13,7 +13,7 @@ func TestAccAlloydbInstance_secondaryInstanceUpdateMachineConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -51,8 +51,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -83,8 +81,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -114,8 
+110,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -146,8 +140,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -175,7 +167,7 @@ func TestAccAlloydbInstance_secondaryInstanceWithReadPoolInstance(t *testing.T) context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -204,8 +196,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -236,8 +226,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -275,8 +263,8 @@ func TestAccAlloydbCluster_secondaryInstanceWithNetworkConfigAndAllocatedIPRange context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), - "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), + "address_name": acctest.BootstrapSharedTestGlobalAddress(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -306,8 +294,6 @@ resource "google_alloydb_cluster" "primary" { network = data.google_compute_network.default.id allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } - - deletion_protection = 
false } resource "google_alloydb_instance" "primary" { @@ -339,8 +325,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -372,7 +356,7 @@ func TestAccAlloydbInstance_secondaryInstanceUpdateDatabaseFlag(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -410,8 +394,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -442,8 +424,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -475,7 +455,7 @@ func TestAccAlloydbInstance_secondaryInstanceUpdateQueryInsightConfig(t *testing context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -513,8 +493,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -545,8 +523,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } @@ -581,7 +557,7 @@ func TestAccAlloydbInstance_secondaryInstanceMaximumFields(t *testing.T) { context := map[string]interface{}{ "random_suffix": 
acctest.RandString(t, 10), - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), } acctest.VcrTest(t, resource.TestCase{ @@ -610,8 +586,6 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } - - deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -642,8 +616,6 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" - deletion_protection = false - depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go index f7a41a81df3f..ef4d7c99d674 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go @@ -11,7 +11,7 @@ func TestAccAlloydbUser_updateRoles_BuiltIn(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -60,8 +60,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} @@ -85,7 +83,7 @@ func TestAccAlloydbUser_updatePassword_BuiltIn(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -134,8 +132,6 @@ resource 
"google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} @@ -159,7 +155,7 @@ func TestAccAlloydbUser_updateRoles_IAM(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1"), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydbinstance-network-config-1"), "random_suffix": acctest.RandString(t, 10), } @@ -206,8 +202,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } - - deletion_protection = false } data "google_project" "project" {} data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl b/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl index cf52682b47e3..f10a3d3c88fa 100644 --- a/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl +++ b/mmv1/third_party/terraform/services/apigateway/resource_api_gateway_gateway_test.go.tmpl @@ -6,7 +6,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" ) func TestAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(t *testing.T) { @@ -26,11 +25,6 @@ func TestAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(t *testing.T) }, { Config: testAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_api_gateway_gateway.api_gw", plancheck.ResourceActionUpdate), - }, - }, }, }, }) @@ -40,13 +34,16 @@ func testAccApiGatewayGateway_apigatewayGatewayBasicExampleUpdated(context map[s 
return acctest.Nprintf(` resource "google_api_gateway_api" "api_gw" { provider = google-beta - api_id = "tf-test-my-api%{random_suffix}" + api_id = "tf-test-api-gw%{random_suffix}" } resource "google_api_gateway_api_config" "api_gw" { provider = google-beta api = google_api_gateway_api.api_gw.api_id - api_config_id = "tf-test-my-config%{random_suffix}" + api_config_id = "tf-test-api-gw%{random_suffix}" + lifecycle { + create_before_destroy = true + } openapi_documents { document { @@ -54,15 +51,12 @@ resource "google_api_gateway_api_config" "api_gw" { contents = filebase64("test-fixtures/openapi.yaml") } } - lifecycle { - create_before_destroy = true - } } resource "google_api_gateway_gateway" "api_gw" { provider = google-beta api_config = google_api_gateway_api_config.api_gw.id - gateway_id = "tf-test-my-gateway%{random_suffix}" + gateway_id = "tf-test-api-gw%{random_suffix}" display_name = "MM Dev API Gateway" labels = { environment = "dev" diff --git a/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go deleted file mode 100644 index 787f84425f65..000000000000 --- a/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go +++ /dev/null @@ -1,533 +0,0 @@ -package apigee - -import ( - "bytes" - "context" - "fmt" - "mime/multipart" - "time" - - "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - 
"github.com/hashicorp/terraform-plugin-framework/tfsdk" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/hashicorp/terraform-provider-google/google/fwmodels" - "github.com/hashicorp/terraform-provider-google/google/fwresource" - "github.com/hashicorp/terraform-provider-google/google/fwtransport" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -var ( - _ resource.Resource = &ApigeeKeystoresAliasesKeyCertFileResource{} - _ resource.ResourceWithConfigure = &ApigeeKeystoresAliasesKeyCertFileResource{} - _ resource.ResourceWithImportState = &ApigeeKeystoresAliasesKeyCertFileResource{} -) - -func NewApigeeKeystoresAliasesKeyCertFileResource() resource.Resource { - return &ApigeeKeystoresAliasesKeyCertFileResource{} -} - -type ApigeeKeystoresAliasesKeyCertFileResource struct { - providerConfig *transport_tpg.Config -} - -type ApigeeKeystoresAliasesKeyCertFileResourceModel struct { - Id types.String `tfsdk:"id"` - OrgId types.String `tfsdk:"org_id"` - Environment types.String `tfsdk:"environment"` - Keystore types.String `tfsdk:"keystore"` - Alias types.String `tfsdk:"alias"` - Cert types.String `tfsdk:"cert"` - Key types.String `tfsdk:"key"` - Password types.String `tfsdk:"password"` - Type types.String `tfsdk:"type"` - CertsInfo types.List `tfsdk:"certs_info"` - Timeouts timeouts.Value `tfsdk:"timeouts"` -} - -type CertInfoDetailModel struct { - BasicConstraints types.String `tfsdk:"basic_constraints"` - ExpiryDate types.String `tfsdk:"expiry_date"` - IsValid types.String `tfsdk:"is_valid"` - Issuer types.String `tfsdk:"issuer"` - PublicKey types.String `tfsdk:"public_key"` - SerialNumber types.String `tfsdk:"serial_number"` - SigAlgName types.String `tfsdk:"sig_alg_name"` - Subject types.String `tfsdk:"subject"` - SubjectAlternativeNames types.List `tfsdk:"subject_alternative_names"` - ValidFrom types.String `tfsdk:"valid_from"` - Version types.Int64 
`tfsdk:"version"` -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_apigee_keystores_aliases_key_cert_file" -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - if req.ProviderData == nil { - return - } - p, ok := req.ProviderData.(*transport_tpg.Config) - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - return - } - r.providerConfig = p -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - Description: "An alias from a key/cert file.", - Attributes: map[string]schema.Attribute{ - "org_id": schema.StringAttribute{ - Description: "Organization ID associated with the alias.", - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "environment": schema.StringAttribute{ - Description: "Environment associated with the alias.", - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "keystore": schema.StringAttribute{ - Description: "Keystore Name.", - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "alias": schema.StringAttribute{ - Description: "Alias Name.", - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "cert": schema.StringAttribute{ - Description: "Cert content.", - Required: true, - }, - "key": schema.StringAttribute{ - Description: "Private Key content, omit if uploading to truststore.", - Optional: true, - 
Sensitive: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "password": schema.StringAttribute{ - Description: "Password for the Private Key if it's encrypted.", - Optional: true, - Sensitive: true, - }, - "type": schema.StringAttribute{ - Description: "Optional. Type of Alias.", - Computed: true, - }, - "id": schema.StringAttribute{ - Description: "Project identifier", - Computed: true, - }, - "certs_info": schema.ListAttribute{ - Description: "Chain of certificates under this alias.", - Computed: true, - ElementType: types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "basic_constraints": types.StringType, - "expiry_date": types.StringType, - "is_valid": types.StringType, - "issuer": types.StringType, - "public_key": types.StringType, - "serial_number": types.StringType, - "sig_alg_name": types.StringType, - "subject": types.StringType, - "subject_alternative_names": types.ListType{ElemType: types.StringType}, - "valid_from": types.StringType, - "version": types.Int64Type, - }, - }, - }, - }, - Blocks: map[string]schema.Block{ - "timeouts": timeouts.Block(ctx, timeouts.Opts{ - Create: true, - Read: true, - Update: true, - Delete: true, - }), - }, - } -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var plan ApigeeKeystoresAliasesKeyCertFileResourceModel - var metaData *fwmodels.ProviderMetaModel - - resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - createTimeout, diags := plan.Timeouts.Create(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - buf := new(bytes.Buffer) - bw := multipart.NewWriter(buf) - if !plan.Key.IsNull() && !plan.Key.IsUnknown() { - keyFilePartWriter, _ := bw.CreateFormField("keyFile") - keyFilePartWriter.Write([]byte(plan.Key.ValueString())) - } - if !plan.Password.IsNull() && !plan.Password.IsUnknown() { - keyFilePartWriter, _ := bw.CreateFormField("password") - keyFilePartWriter.Write([]byte(plan.Password.ValueString())) - } - certFilePartWriter, _ := bw.CreateFormField("certFile") - certFilePartWriter.Write([]byte(plan.Cert.ValueString())) - bw.Close() - - billingProject := types.StringValue(r.providerConfig.BillingProject) - - var schemaDefaultVals fwtransport.DefaultVars - - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases?format=keycertfile&alias={{alias}}&ignoreExpiryValidation=true") - if resp.Diagnostics.HasError() { - return - } - - res, err := sendRequestRawBodyWithTimeout(r.providerConfig, "POST", billingProject.ValueString(), url, userAgent, buf, bw.FormDataContentType(), createTimeout) - if err != nil { - resp.Diagnostics.AddError("Error, failure to create key cert file", err.Error()) - return - } - - tflog.Trace(ctx, "Successfully created Apigee Keystore Alias", map[string]interface{}{"response": res}) - - id := fmt.Sprintf("organizations/%s/environments/%s/keystores/%s/aliases/%s", - plan.OrgId.ValueString(), - plan.Environment.ValueString(), - plan.Keystore.ValueString(), - plan.Alias.ValueString(), - ) - plan.Id = types.StringValue(id) - - r.refresh(ctx, req, &plan, &resp.State, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) 
-} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var state ApigeeKeystoresAliasesKeyCertFileResourceModel - - resp.Diagnostics.Append(req.State.Get(ctx, &state)...) - if resp.Diagnostics.HasError() { - return - } - - r.refresh(ctx, req, &state, &resp.State, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var plan ApigeeKeystoresAliasesKeyCertFileResourceModel - var state ApigeeKeystoresAliasesKeyCertFileResourceModel - var metaData *fwmodels.ProviderMetaModel - - resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) - resp.Diagnostics.Append(req.State.Get(ctx, &state)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - updateTimeout, diags := plan.Timeouts.Update(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - buf := new(bytes.Buffer) - bw := multipart.NewWriter(buf) - certFilePartWriter, err := bw.CreateFormField("certFile") - if err != nil { - resp.Diagnostics.AddError("Unable to create form field for certificate", err.Error()) - return - } - certFilePartWriter.Write([]byte(plan.Cert.ValueString())) - bw.Close() - - billingProject := types.StringValue(r.providerConfig.BillingProject) - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - var schemaDefaultVals fwtransport.DefaultVars - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}?ignoreExpiryValidation=true") - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, "Updating Apigee Keystore Alias", map[string]interface{}{"url": url}) - res, err := sendRequestRawBodyWithTimeout(r.providerConfig, "PUT", billingProject.ValueString(), url, userAgent, buf, bw.FormDataContentType(), updateTimeout) - - if err != nil { - resp.Diagnostics.AddError("Error, failure to update key cert file", err.Error()) - return - } - - tflog.Trace(ctx, "Successfully sent update request for Apigee Keystore Alias", map[string]interface{}{"response": res}) - - r.refresh(ctx, req, &plan, &resp.State, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data ApigeeKeystoresAliasesKeyCertFileResourceModel - var metaData *fwmodels.ProviderMetaModel - - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
- if resp.Diagnostics.HasError() { - return - } - - deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - var schemaDefaultVals fwtransport.DefaultVars - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, "Deleting Apigee Keystore Alias", map[string]interface{}{"url": url}) - - _ = fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: r.providerConfig, - Method: "DELETE", - Project: data.OrgId.ValueString(), - RawURL: url, - UserAgent: userAgent, - Timeout: deleteTimeout, - }, &resp.Diagnostics) - - tflog.Trace(ctx, "Successfully deleted Apigee Keystore Alias.") -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) refresh(ctx context.Context, req interface{}, data *ApigeeKeystoresAliasesKeyCertFileResourceModel, state *tfsdk.State, diags *diag.Diagnostics) { - var metaData *fwmodels.ProviderMetaModel - - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) - - var schemaDefaultVals fwtransport.DefaultVars - url := fwtransport.ReplaceVars(ctx, req, diags, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if diags.HasError() { - return - } - - readTimeout, timeoutDiags := data.Timeouts.Read(ctx, 20*time.Minute) - diags.Append(timeoutDiags...) 
- if diags.HasError() { - return - } - - tflog.Trace(ctx, "Refreshing Apigee Keystore Alias", map[string]interface{}{"url": url}) - - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: r.providerConfig, - Method: "GET", - Project: data.OrgId.ValueString(), - RawURL: url, - UserAgent: userAgent, - Timeout: readTimeout, - }, diags) - - if diags.HasError() { - return - } - - tflog.Trace(ctx, "Successfully refreshed Apigee Keystore Alias", map[string]interface{}{"response": res}) - - id := fmt.Sprintf("organizations/%s/environments/%s/keystores/%s/aliases/%s", - data.OrgId.ValueString(), - data.Environment.ValueString(), - data.Keystore.ValueString(), - data.Alias.ValueString(), - ) - data.Id = types.StringValue(id) - - data.Type = types.StringValue(res["type"].(string)) - - flattenedCertsInfo, certDiags := flattenCertsInfo(res["certsInfo"]) - diags.Append(certDiags...) - if diags.HasError() { - return - } - data.CertsInfo = flattenedCertsInfo -} - -var certInfoObjectType = types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "basic_constraints": types.StringType, - "expiry_date": types.StringType, - "is_valid": types.StringType, - "issuer": types.StringType, - "public_key": types.StringType, - "serial_number": types.StringType, - "sig_alg_name": types.StringType, - "subject": types.StringType, - "subject_alternative_names": types.ListType{ElemType: types.StringType}, - "valid_from": types.StringType, - "version": types.Int64Type, - }, -} - -func flattenCertsInfo(v interface{}) (types.List, diag.Diagnostics) { - if v == nil { - return types.ListNull(certInfoObjectType), nil - } - - var diags diag.Diagnostics - - certsInfoMap, ok := v.(map[string]interface{}) - if !ok { - diags.AddError("Invalid Type", "Cannot flatten certs_info: input is not a map.") - return types.ListNull(certInfoObjectType), diags - } - if len(certsInfoMap) == 0 { - return types.ListNull(certInfoObjectType), nil - } - - certInfoListRaw, ok := 
certsInfoMap["certInfo"].([]interface{}) - if !ok || len(certInfoListRaw) == 0 { - return types.ListNull(certInfoObjectType), nil - } - - var certInfoDetails []CertInfoDetailModel - for _, rawCertInfo := range certInfoListRaw { - certInfo, ok := rawCertInfo.(map[string]interface{}) - if !ok || len(certInfo) == 0 { - continue - } - getStringValue := func(key string) types.String { - if val, ok := certInfo[key].(string); ok { - return types.StringValue(val) - } - return types.StringNull() - } - var sansValue types.List - if sansRaw, ok := certInfo["subjectAlternativeNames"].([]interface{}); ok { - sans := make([]string, 0, len(sansRaw)) - for _, san := range sansRaw { - if s, ok := san.(string); ok { - sans = append(sans, s) - } - } - var listDiags diag.Diagnostics - sansValue, listDiags = types.ListValueFrom(context.Background(), types.StringType, sans) - diags.Append(listDiags...) - } else { - sansValue = types.ListNull(types.StringType) - } - var versionValue types.Int64 - if versionRaw, ok := certInfo["version"]; ok { - switch v := versionRaw.(type) { - case float64: - versionValue = types.Int64Value(int64(v)) - case string: - versionValue = types.Int64Null() - default: - versionValue = types.Int64Null() - } - } else { - versionValue = types.Int64Null() - } - detail := CertInfoDetailModel{ - BasicConstraints: getStringValue("basicConstraints"), - ExpiryDate: getStringValue("expiryDate"), - IsValid: getStringValue("isValid"), - Issuer: getStringValue("issuer"), - PublicKey: getStringValue("publicKey"), - SerialNumber: getStringValue("serialNumber"), - SigAlgName: getStringValue("sigAlgName"), - Subject: getStringValue("subject"), - ValidFrom: getStringValue("validFrom"), - SubjectAlternativeNames: sansValue, - Version: versionValue, - } - certInfoDetails = append(certInfoDetails, detail) - } - - if diags.HasError() { - return types.ListNull(certInfoObjectType), diags - } - - flattenedList, listDiags := types.ListValueFrom(context.Background(), certInfoObjectType, 
certInfoDetails) - diags.Append(listDiags...) - - return flattenedList, diags -} - -func (r *ApigeeKeystoresAliasesKeyCertFileResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - idRegexes := []string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - } - - var resourceSchemaResp resource.SchemaResponse - r.Schema(ctx, resource.SchemaRequest{}, &resourceSchemaResp) - if resourceSchemaResp.Diagnostics.HasError() { - resp.Diagnostics.Append(resourceSchemaResp.Diagnostics...) - return - } - - parsedAttributes, diags := fwresource.ParseImportId(ctx, req, resourceSchemaResp.Schema, r.providerConfig, idRegexes) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - for name, value := range parsedAttributes { - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root(name), value)...) - } -} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go index 2921e19e0caa..57375e59f2ae 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go @@ -324,8 +324,8 @@ func resourceApigeeApiDelete(d *schema.ResourceData, meta interface{}) error { func resourceApigeeApiImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/apis/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/apis/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go deleted file mode 
100644 index fb1cc182568d..000000000000 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_api_product_update_test.go +++ /dev/null @@ -1,518 +0,0 @@ -package apigee_test - -import ( - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" -) - -func TestAccApigeeApiProduct_apigeeApiProduct_full(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - CheckDestroy: testAccCheckApigeeApiProductDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccApigeeApiProduct_apigeeApiProduct_full(context), - }, - { - ResourceName: "google_apigee_api_product.apigee_api_product", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"org_id"}, - }, - { - Config: testAccApigeeApiProduct_apigeeApiProduct_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_apigee_api_product.apigee_api_product", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_apigee_api_product.apigee_api_product", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"org_id"}, - }, - }, - }) -} - -func testAccApigeeApiProduct_apigeeApiProduct_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = 
"tf-test%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.apigee] -} -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.compute] -} -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.servicenetworking] -} -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = google_project.project.project_id - depends_on = [time_sleep.wait_120_seconds] -} -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} 
-resource "google_apigee_instance" "apigee_instance" { - name = "tf-test%{random_suffix}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} -resource "google_apigee_api_product" "apigee_api_product" { - org_id = google_apigee_organization.apigee_org.id - name = "tf-test%{random_suffix}" - display_name = "My full API Product" - - approval_type = "auto" - - description = "This is a sample API Product created with Terraform." - - quota = "10000" - quota_interval = "1" - quota_time_unit = "day" - quota_counter_scope = "PROXY" - - environments = ["dev", "hom"] - scopes = [ - "read:weather", - "write:reports" - ] - - attributes { - name = "access" - value = "private" - } - - attributes { - name = "custom" - value = "value" - } - - operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "anoter-proxy" - - operations { - resource = "/" - methods = ["POST", "GET"] - } - - quota { - limit = "1000" - interval = "5" - time_unit = "minute" - } - - attributes { - name = "custom" - value = "value" - } - } - - operation_configs { - api_source = "hello-world" - - operations { - resource = "/test" - methods = ["POST", "GET"] - } - - quota { - limit = "10" - interval = "30" - time_unit = "second" - } - - attributes { - name = "custom" - value = "value" - } - } - } - - graphql_operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "hello-world" - - quota { - limit = "30" - interval = "50" - time_unit = "second" - } - - operations { - operation_types = ["QUERY"] - operation = "test" - } - - attributes { - name = "custom" - value = "value" - } - } - - operation_configs { - api_source = "another-proxy" - - quota { - limit = "50000" - interval = "12" - time_unit = "hour" - } - - operations { - operation_types = ["MUTATION"] - operation = "test" - } - - attributes { - name = "custom" - value = "value" - } - } - } - - grpc_operation_group { - - 
operation_configs { - api_source = "another-proxy" - service = "grpc another test" - methods = ["method3", "method4"] - - quota { - limit = "1000000" - interval = "1" - time_unit = "month" - } - - attributes { - name = "graph" - value = "value" - } - } - - operation_configs { - api_source = "hello-world" - service = "grpc test" - methods = ["method1", "method2"] - - quota { - limit = "5" - interval = "1" - time_unit = "second" - } - - attributes { - name = "graph" - value = "value" - } - } - } - - depends_on = [ - google_apigee_instance.apigee_instance - ] -} -`, context) -} - -func testAccApigeeApiProduct_apigeeApiProduct_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = [time_sleep.wait_60_seconds] -} -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.apigee] -} -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.compute] -} -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.servicenetworking] -} -resource "google_compute_network" "apigee_network" { - name = "apigee-network" - project = google_project.project.project_id - depends_on = [time_sleep.wait_120_seconds] -} -resource "google_compute_global_address" "apigee_range" { - name = "apigee-range" - purpose = "VPC_PEERING" 
- address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id - project = google_project.project.project_id -} -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] - depends_on = [google_project_service.servicenetworking] -} -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [ - google_service_networking_connection.apigee_vpc_connection, - google_project_service.apigee, - ] -} -resource "google_apigee_instance" "apigee_instance" { - name = "tf-test%{random_suffix}" - location = "us-central1" - org_id = google_apigee_organization.apigee_org.id - peering_cidr_range = "SLASH_22" -} -resource "google_apigee_developer" "apigee_developer" { - email = "tf-test%{random_suffix}@acme.com" - first_name = "John" - last_name = "Doe" - user_name = "john.doe" - org_id = google_apigee_organization.apigee_org.id - depends_on = [ - google_apigee_instance.apigee_instance - ] -} -resource "google_apigee_api_product" "apigee_api_product" { - org_id = google_apigee_organization.apigee_org.id - name = "tf-test%{random_suffix}" - display_name = "My full API Product" - - approval_type = "auto" - - description = "This is a sample API Product created with Terraform." 
- - quota = "5000" - quota_interval = "2" - quota_time_unit = "day" - quota_counter_scope = "PROXY" - - environments = ["dev"] - scopes = [ - "read:weather" - ] - - attributes { - name = "access" - value = "private" - } - - attributes { - name = "custom" - value = "value_changed" - } - - operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "anoter-proxy" - - operations { - resource = "/changed" - methods = ["POST", "GET", "PUT"] - } - - quota { - limit = "500" - interval = "6" - time_unit = "minute" - } - - attributes { - name = "custom" - value = "value_changed" - } - } - - operation_configs { - api_source = "hello-world" - - operations { - resource = "/test_changed" - methods = ["POST"] - } - - quota { - limit = "7" - interval = "20" - time_unit = "second" - } - - attributes { - name = "custom" - value = "value_changed" - } - } - } - - graphql_operation_group { - operation_config_type = "proxy" - - operation_configs { - api_source = "hello-world" - - quota { - limit = "20" - interval = "40" - time_unit = "second" - } - - operations { - operation_types = ["MUTATION"] - operation = "test_changed" - } - - attributes { - name = "custom" - value = "value_changed" - } - } - - operation_configs { - api_source = "another-proxy" - - quota { - limit = "5000" - interval = "10" - time_unit = "hour" - } - - operations { - operation_types = ["QUERY"] - operation = "test_changed" - } - - attributes { - name = "custom" - value = "value_changed" - } - } - } - - grpc_operation_group { - - operation_configs { - api_source = "another-proxy" - service = "grpc another test" - methods = ["method3_changed", "method4_changed"] - - quota { - limit = "10000" - interval = "10" - time_unit = "month" - } - - attributes { - name = "graph" - value = "value_changed" - } - } - - operation_configs { - api_source = "hello-world" - service = "grpc test" - methods = ["method1_changed", "method2_changed"] - - quota { - limit = "50" - interval = "5" - time_unit = 
"hour" - } - - attributes { - name = "graph" - value = "value_changed" - } - } - } - - depends_on = [ - google_apigee_instance.apigee_instance - ] -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go index 4a1069bc1450..b61b4e453f80 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_update_test.go @@ -129,12 +129,6 @@ resource "google_apigee_environment" "apigee_environment" { value = "property-1-value" } } - client_ip_resolution_config { - header_index_algorithm { - ip_header_name = "X-Forwarded-For" - ip_header_index = 1 - } - } } `, context) } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go index 635838873cd0..8a5d6ac30957 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go @@ -223,8 +223,8 @@ func resourceApigeeFlowhookDelete(d *schema.ResourceData, meta interface{}) erro func resourceApigeeFlowhookImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go index 5027d34de265..71f234952410 100644 --- 
a/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_instance_update_test.go @@ -120,11 +120,6 @@ resource "google_apigee_instance" "apigee_instance" { consumer_accept_list = [ google_project.project1.project_id, ] - - access_logging_config { - enabled = false - filter = "status_code >= 0 && status_code < 600" - } } `, context) } @@ -209,11 +204,6 @@ resource "google_apigee_instance" "apigee_instance" { google_project.project1.project_id, google_project.project2.project_id, ] - - access_logging_config { - enabled = false - filter = "status_code >= 0 && status_code < 600" - } } `, context) } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go new file mode 100644 index 000000000000..f66975481024 --- /dev/null +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go @@ -0,0 +1,692 @@ +package apigee + +import ( + "bytes" + "context" + "fmt" + "log" + "mime/multipart" + "reflect" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func ResourceApigeeKeystoresAliasesKeyCertFile() *schema.Resource { + return &schema.Resource{ + Create: resourceApigeeKeystoresAliasesKeyCertFileCreate, + Read: resourceApigeeKeystoresAliasesKeyCertFileRead, + Update: resourceApigeeKeystoresAliasesKeyCertFileUpdate, + Delete: resourceApigeeKeystoresAliasesKeyCertFileDelete, + + Importer: &schema.ResourceImporter{ + State: resourceApigeeKeystoresAliasesKeyCertFileImport, + }, + + CustomizeDiff: customdiff.All( + /* + If cert is changed then an update is expected, 
so we tell Terraform core to expect update on certs_info + */ + + customdiff.ComputedIf("certs_info", func(_ context.Context, diff *schema.ResourceDiff, v interface{}) bool { + return diff.HasChange("cert") + }), + ), + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(20 * time.Minute), + Read: schema.DefaultTimeout(20 * time.Minute), + Update: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "alias": { + Type: schema.TypeString, + ForceNew: true, + Required: true, + Description: `Alias Name`, + }, + "cert": { + Type: schema.TypeString, + Required: true, + Description: `Cert content`, + }, + "environment": { + Type: schema.TypeString, + ForceNew: true, + Required: true, + Description: `Environment associated with the alias`, + }, + "keystore": { + Type: schema.TypeString, + ForceNew: true, + Required: true, + Description: `Keystore Name`, + }, + "org_id": { + Type: schema.TypeString, + ForceNew: true, + Required: true, + Description: `Organization ID associated with the alias`, + }, + "certs_info": { + Type: schema.TypeList, + Optional: true, + Computed: true, + Description: `Chain of certificates under this alias.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "cert_info": { + Type: schema.TypeList, + Optional: true, + Computed: true, + Description: `List of all properties in the object.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "basic_constraints": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 basic constraints extension.`, + }, + "expiry_date": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 notAfter validity period in milliseconds since epoch.`, + }, + "is_valid": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Flag that specifies whether the certificate is valid. 
+Flag is set to Yes if the certificate is valid, No if expired, or Not yet if not yet valid.`, + }, + "issuer": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 issuer.`, + }, + "public_key": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Public key component of the X.509 subject public key info.`, + }, + "serial_number": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 serial number.`, + }, + "sig_alg_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 signatureAlgorithm.`, + }, + "subject": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 subject.`, + }, + "subject_alternative_names": { + Type: schema.TypeList, + Optional: true, + Computed: true, + Description: `X.509 subject alternative names (SANs) extension.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "valid_from": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `X.509 notBefore validity period in milliseconds since epoch.`, + }, + "version": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: `X.509 version.`, + }, + }, + }, + }, + }, + }, + }, + "key": { + Type: schema.TypeString, + ForceNew: true, + Optional: true, + Sensitive: true, + Description: `Private Key content, omit if uploading to truststore`, + }, + "password": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: `Password for the Private Key if it's encrypted`, + }, + "type": { + Type: schema.TypeString, + Computed: true, + Description: `Optional.Type of Alias`, + }, + }, + UseJSONNumber: true, + } +} + +func resourceApigeeKeystoresAliasesKeyCertFileCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } 
+ + buf := new(bytes.Buffer) + bw := multipart.NewWriter(buf) + if key, ok := d.GetOkExists("key"); ok { + keyFilePartWriter, _ := bw.CreateFormField("keyFile") + keyFilePartWriter.Write([]byte(key.(string))) + } + if password, ok := d.GetOkExists("password"); ok { + keyFilePartWriter, _ := bw.CreateFormField("password") + keyFilePartWriter.Write([]byte(password.(string))) + } + certFilePartWriter, _ := bw.CreateFormField("certFile") + certFilePartWriter.Write([]byte(d.Get("cert").(string))) + bw.Close() + + url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases?format=keycertfile&alias={{alias}}&ignoreExpiryValidation=true") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new KeystoresAliasesKeyCertFile") + billingProject := "" + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestRawBodyWithTimeout(config, "POST", billingProject, url, userAgent, buf, "multipart/form-data; boundary="+bw.Boundary(), d.Timeout(schema.TimeoutCreate)) + if err != nil { + return fmt.Errorf("Error creating KeystoresAliasesKeyCertFile: %s", err) + } + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished creating KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) + + return resourceApigeeKeystoresAliasesKeyCertFileRead(d, meta) +} + +func resourceApigeeKeystoresAliasesKeyCertFileRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + url, err := 
tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if err != nil { + return err + } + + billingProject := "" + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("ApigeeKeystoresAliasesKeyCertFile %q", d.Id())) + } + + if err := d.Set("alias", flattenApigeeKeystoresAliasesKeyCertFileAlias(res["alias"], d, config)); err != nil { + return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) + } + + if err := d.Set("certs_info", flattenApigeeKeystoresAliasesKeyCertFileCertsInfo(res["certsInfo"], d, config)); err != nil { + return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) + } + if err := d.Set("type", flattenApigeeKeystoresAliasesKeyCertFileType(res["type"], d, config)); err != nil { + return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) + } + + return nil +} + +func resourceApigeeKeystoresAliasesKeyCertFileUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + billingProject := "" + + url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}?ignoreExpiryValidation=true") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating KeystoresAliasesKeyCertFile %q", d.Id()) + + // err == nil indicates that the billing_project value was found + if bp, err := 
tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + buf := new(bytes.Buffer) + bw := multipart.NewWriter(buf) + certFilePartWriter, _ := bw.CreateFormField("certFile") + certFilePartWriter.Write([]byte(d.Get("cert").(string))) + bw.Close() + + res, err := sendRequestRawBodyWithTimeout(config, "PUT", billingProject, url, userAgent, buf, "multipart/form-data; boundary="+bw.Boundary(), d.Timeout(schema.TimeoutCreate)) + + if err != nil { + return fmt.Errorf("Error updating KeystoresAliasesKeyCertFile %q: %s", d.Id(), err) + } else { + log.Printf("[DEBUG] Finished updating KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) + } + + return resourceApigeeKeystoresAliasesKeyCertFileRead(d, meta) +} + +func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + billingProject := "" + + url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if err != nil { + return err + } + + var obj map[string]interface{} + log.Printf("[DEBUG] Deleting KeystoresAliasesKeyCertFile %q", d.Id()) + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutDelete), + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "KeystoresAliasesKeyCertFile") + } + + log.Printf("[DEBUG] Finished deleting KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) + return nil +} + +func 
resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{ + "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + }, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +func flattenApigeeKeystoresAliasesKeyCertFileOrgId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileEnvironment(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileKeystore(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileAlias(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileKey(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFilePassword(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCert(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := 
v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["cert_info"] = + flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(original["certInfo"], d, config) + return []interface{}{transformed} +} +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "version": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(original["version"], d, config), + "subject": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(original["subject"], d, config), + "issuer": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(original["issuer"], d, config), + "expiry_date": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(original["expiryDate"], d, config), + "valid_from": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(original["validFrom"], d, config), + "is_valid": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(original["isValid"], d, config), + "subject_alternative_names": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(original["subjectAlternativeNames"], d, config), + "sig_alg_name": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(original["sigAlgName"], d, config), + "public_key": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(original["publicKey"], d, config), + "basic_constraints": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(original["basicConstraints"], d, config), + 
"serial_number": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(original["serialNumber"], d, config), + }) + } + return transformed +} +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(v interface{}, d *schema.ResourceData, 
config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenApigeeKeystoresAliasesKeyCertFileType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func expandApigeeKeystoresAliasesKeyCertFileOrgId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileEnvironment(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileKeystore(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileAlias(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileKey(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFilePassword(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCert(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfo(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + 
if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedCertInfo, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(original["cert_info"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedCertInfo); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["certInfo"] = transformedCertInfo + } + + return transformed, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + req := make([]interface{}, 0, len(l)) + for _, raw := range l { + if raw == nil { + continue + } + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedVersion, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(original["version"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedVersion); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["version"] = transformedVersion + } + + transformedSubject, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(original["subject"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSubject); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["subject"] = transformedSubject + } + + transformedIssuer, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(original["issuer"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedIssuer); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["issuer"] = transformedIssuer + } + + transformedExpiryDate, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(original["expiry_date"], d, config) + if err != nil { + 
return nil, err + } else if val := reflect.ValueOf(transformedExpiryDate); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["expiryDate"] = transformedExpiryDate + } + + transformedValidFrom, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(original["valid_from"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedValidFrom); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["validFrom"] = transformedValidFrom + } + + transformedIsValid, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(original["is_valid"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedIsValid); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["isValid"] = transformedIsValid + } + + transformedSubjectAlternativeNames, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(original["subject_alternative_names"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSubjectAlternativeNames); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["subjectAlternativeNames"] = transformedSubjectAlternativeNames + } + + transformedSigAlgName, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(original["sig_alg_name"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSigAlgName); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["sigAlgName"] = transformedSigAlgName + } + + transformedPublicKey, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(original["public_key"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedPublicKey); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["publicKey"] = transformedPublicKey + } + + transformedBasicConstraints, err := 
expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(original["basic_constraints"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedBasicConstraints); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["basicConstraints"] = transformedBasicConstraints + } + + transformedSerialNumber, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(original["serial_number"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSerialNumber); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["serialNumber"] = transformedSerialNumber + } + + req = append(req, transformed) + } + return req, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func 
expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go index ef6a2655cf39..12b1f85fc0ef 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go @@ -299,8 +299,8 @@ func ResourceApigeeKeystoresAliasesPkcs12Delete(d *schema.ResourceData, meta int func ResourceApigeeKeystoresAliasesPkcs12Import(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", + 
"(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go deleted file mode 100644 index e18f05fa5a66..000000000000 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go +++ /dev/null @@ -1,649 +0,0 @@ -package apigee_test - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func testAccCheckApigeeSecurityActionDestroyProducer(t *testing.T) func(s *terraform.State) error { - return func(s *terraform.State) error { - for name, rs := range s.RootModule().Resources { - if rs.Type != "google_apigee_security_action" { - continue - } - if strings.HasPrefix(name, "data.") { - continue - } - - config := acctest.GoogleProviderConfig(t) - - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{env_id}}/securityActions/{{security_action_id}}") - if err != nil { - return err - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, - }) - if err == nil { - return fmt.Errorf("ApigeeSecurityAction still exists at %s", url) - } - } - - return nil - } -} - -func TestAccApigeeSecurityAction_apigeeSecurityActionFull(t *testing.T) { - 
acctest.SkipIfVcr(t) - t.Parallel() - - context := map[string]interface{}{ - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - CheckDestroy: testAccCheckApigeeSecurityActionDestroyProducer(t), - /* allow, deny and flag are mutually exclusive, so we test them in sequence */ - /* also all conditions except ip_address_ranges and bot_reasons seem to be mutually exclusive, so we test them in sequence */ - Steps: []resource.TestStep{ - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullAllow(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullDeny(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullHttpMethods(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullFlag(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullApiKeys(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullAccessTokens(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - 
Config: testAccApigeeSecurityAction_apigeeSecurityActionFullApiProducts(context), - }, - - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullDeveloperApps(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullDevelopers(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullUserAgents(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullRegionCodes(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullAsns(context), - }, - { - ResourceName: "google_apigee_security_action.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccApigeeSecurityAction_apigeeSecurityActionFullTTL(context), - ExpectNonEmptyPlan: true, // ttl change enforces recreation of the resource - }, - }, - }) -} - -func testAccApigeeSecurityAction_apigeeBase(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test-%{random_suffix}" - name = "tf-test-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - create_duration = "60s" - depends_on = [google_project.project] -} - -resource "google_project_service" "apigee" { - project = google_project.project.project_id - service = "apigee.googleapis.com" - depends_on = 
[time_sleep.wait_60_seconds] -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" - depends_on = [google_project_service.apigee] -} - -resource "google_project_service" "servicenetworking" { - project = google_project.project.project_id - service = "servicenetworking.googleapis.com" - depends_on = [google_project_service.compute] -} - -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.servicenetworking] -} - -resource "google_compute_network" "apigee_network" { - name = "tf-test-network-%{random_suffix}" - depends_on = [time_sleep.wait_120_seconds] -} - -resource "google_compute_global_address" "apigee_range" { - name = "tf-test-address-%{random_suffix}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.apigee_network.id -} - -resource "google_service_networking_connection" "apigee_vpc_connection" { - network = google_compute_network.apigee_network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.apigee_range.name] -} - -resource "google_apigee_organization" "apigee_org" { - analytics_region = "us-central1" - project_id = google_project.project.project_id - authorized_network = google_compute_network.apigee_network.id - depends_on = [google_service_networking_connection.apigee_vpc_connection] -} - -resource "google_apigee_environment" "env" { - name = "tf-test-env-%{random_suffix}" - description = "Apigee Environment" - display_name = "environment-1" - org_id = google_apigee_organization.apigee_org.id -} - -resource "google_apigee_addons_config" "apigee_org_security_addons_config" { - org = google_apigee_organization.apigee_org.name - addons_config { - api_security_config { - enabled = true - } - } -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullAllow(context 
map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - ip_address_ranges = [ - "100.0.220.1", - "200.0.0.1", - ] - - bot_reasons = [ - "Flooder", - "Public Cloud Azure", - "Public Cloud AWS", - ] - } - - allow {} - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullFlag(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - ip_address_ranges = [ - "100.0.220.1", - "200.0.0.1", - ] - - bot_reasons = [ - "Flooder", - "Public Cloud Azure", - "Public Cloud AWS", - ] - } - - flag { - headers { - name = "X-Flag-Header" - value = "flag-value" - } - headers { - name = "X-Flag-Header-2" - value = "flag-value-2" - } - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullDeny(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name 
- description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - ip_address_ranges = [ - "100.0.220.1", - "200.0.0.1", - ] - - bot_reasons = [ - "Flooder", - "Public Cloud Azure", - "Public Cloud AWS", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullHttpMethods(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - http_methods = [ - "GET", - "POST", - "PUT", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullApiKeys(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - api_keys = [ - "foo-key", - "bar-key", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullAccessTokens(context map[string]interface{}) string { - return 
testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - access_tokens = [ - "foo-token", - "bar-token", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullApiProducts(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - api_products = [ - "foo-product", - "bar-product", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullDeveloperApps(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - developer_apps = [ - "foo-app", - "bar-app", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - 
google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullDevelopers(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - developers = [ - "foo-developer", - "bar-developer", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullUserAgents(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - user_agents = [ - "Mozilla/5.0", - "curl/7.64.1", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullRegionCodes(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - 
condition_config { - region_codes = [ - "US", - "CA", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullAsns(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - asns = [ - "23", - "42", - ] - } - - deny { - response_code = 403 - } - - expire_time = "2032-12-31T23:59:59Z" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} - -func testAccApigeeSecurityAction_apigeeSecurityActionFullTTL(context map[string]interface{}) string { - return testAccApigeeSecurityAction_apigeeBase(context) + acctest.Nprintf(` -resource "google_apigee_security_action" "default" { - security_action_id = "tf-test-%{random_suffix}" - org_id = google_apigee_organization.apigee_org.name - env_id = google_apigee_environment.env.name - description = "Apigee Security Action" - state = "ENABLED" - - condition_config { - asns = [ - "23", - "42", - ] - } - - deny { - response_code = 403 - } - - ttl = "3600s" - depends_on = [ - google_apigee_addons_config.apigee_org_security_addons_config - ] -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go index 3b0eba665a4c..4820b95768d7 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go @@ -324,8 +324,8 @@ func 
resourceApigeeSharedFlowDelete(d *schema.ResourceData, meta interface{}) er func resourceApigeeSharedFlowImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/sharedflows/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/sharedflows/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go index 68f1c91d9ac2..7cd90e8676b0 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go @@ -228,9 +228,8 @@ func resourceApigeeSharedflowDeploymentDelete(d *schema.ResourceData, meta inter func resourceApigeeSharedflowDeploymentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)$", - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)/deployments$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go index 87552eb2d2c0..6740c72c3e5b 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go +++ 
b/mmv1/third_party/terraform/services/apigee/resource_apigee_target_server_test.go @@ -353,7 +353,6 @@ resource "google_apigee_target_server" "apigee_target_server"{ key_store = google_apigee_env_keystore.apigee_environment_keystore.name protocols = ["TLSv1.1"] trust_store = google_apigee_env_keystore.apigee_environment_keystore.name - enforce = false common_name{ value = "testCn" wildcard_match = true @@ -469,7 +468,6 @@ resource "google_apigee_target_server" "apigee_target_server"{ key_store = google_apigee_env_keystore.apigee_environment_keystore2.name protocols = ["TLSv1.2", "TLSv1.1"] trust_store = google_apigee_env_keystore.apigee_environment_keystore2.name - enforce = true } depends_on = [ google_apigee_env_keystore.apigee_environment_keystore2, diff --git a/mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go b/mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go deleted file mode 100644 index 3c8590b3bcae..000000000000 --- a/mmv1/third_party/terraform/services/apihub/resource_apihub_curation_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package apihub_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccApihubCuration_apihubCurationBasic_Update(t *testing.T) { - // This is added for reference, but the test needs to be skipped as it needs API hub instance as a prerequisite - // But the support for that resources is not yet complete. 
- t.Skip() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccApihubCuration_apihubCuration_basic(context), - }, - { - ResourceName: "google_apihub_curation.apihub_curation_basic", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"curation_id", "location"}, - }, - { - Config: testAccApihubCuration_apihubCuration_update(context), - }, - { - ResourceName: "google_apihub_curation.apihub_curation_basic", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"curation_id", "location"}, - }, - }, - }) -} - -func testAccApihubCuration_apihubCuration_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_apihub_curation" "apihub_curation_basic" { - location = "us-central1" - curation_id = "test%{random_suffix}" - display_name = "Test Curation" - description = "This is a sample curation resource managed by Terraform." - endpoint { - application_integration_endpoint_details { - trigger_id = "api_trigger/curation_API_1" - uri = "https://integrations.googleapis.com/v1/projects/1082615593856/locations/us-central1/integrations/curation:execute" - } - } - -} - - -`, context) -} - -func testAccApihubCuration_apihubCuration_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_apihub_curation" "apihub_curation_basic" { - location = "us-central1" - curation_id = "test%{random_suffix}" - display_name = "Test Curation Updated" - description = "This is a sample updated curation resource managed by Terraform." 
- endpoint { - application_integration_endpoint_details { - trigger_id = "api_trigger/curation_API_1" - uri = "https://integrations.googleapis.com/v1/projects/1082615593856/locations/us-central1/integrations/curation:execute" - } - } - -} - - -`, context) -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go deleted file mode 100644 index b6c8f3dbba7c..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images.go +++ /dev/null @@ -1,191 +0,0 @@ -package artifactregistry - -import ( - "fmt" - "net/http" - "net/url" - "strings" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceArtifactRegistryDockerImages() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArtifactRegistryDockerImagesRead, - Schema: map[string]*schema.Schema{ - "location": { - Type: schema.TypeString, - Required: true, - }, - "repository_id": { - Type: schema.TypeString, - Required: true, - }, - "project": { - Type: schema.TypeString, - Optional: true, - }, - "docker_images": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "image_name": { - Type: schema.TypeString, - Computed: true, - }, - "name": { - Type: schema.TypeString, - Computed: true, - }, - "self_link": { - Type: schema.TypeString, - Computed: true, - }, - "tags": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - "image_size_bytes": { - Type: schema.TypeString, - Computed: true, - }, - "media_type": { - Type: schema.TypeString, - Computed: true, - }, - "upload_time": { - Type: schema.TypeString, - Computed: true, - 
}, - "build_time": { - Type: schema.TypeString, - Computed: true, - }, - "update_time": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - }, - } -} - -func dataSourceArtifactRegistryDockerImagesRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") - if err != nil { - return fmt.Errorf("Error setting Artifact Registry base path: %s", err) - } - - resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/dockerImages")) - if err != nil { - return fmt.Errorf("Error setting resource path: %s", err) - } - - urlRequest := basePath + resourcePath - - headers := make(http.Header) - dockerImages := make([]map[string]interface{}, 0) - pageToken := "" - - for { - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - q := u.Query() - if pageToken != "" { - q.Set("pageToken", pageToken) - } - u.RawQuery = q.Encode() - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: u.String(), - UserAgent: userAgent, - Headers: headers, - }) - - if err != nil { - return fmt.Errorf("Error listing Artifact Registry Docker images: %s", err) - } - - if items, ok := res["dockerImages"].([]interface{}); ok { - for _, item := range items { - image := item.(map[string]interface{}) - - name, ok := image["name"].(string) - if !ok { - return fmt.Errorf("Error getting Artifact Registry Docker image name: %s", err) - } - - lastComponent := name[strings.LastIndex(name, "/")+1:] - imageName := strings.SplitN(strings.Split(lastComponent, "@")[0], ":", 2)[0] - 
- var tags []string - if rawTags, ok := image["tags"].([]interface{}); ok { - for _, tag := range rawTags { - if tagStr, ok := tag.(string); ok { - tags = append(tags, tagStr) - } - } - } - - getString := func(m map[string]interface{}, key string) string { - if v, ok := m[key].(string); ok { - return v - } - return "" - } - - dockerImages = append(dockerImages, map[string]interface{}{ - "image_name": imageName, - "name": name, - "self_link": getString(image, "uri"), - "tags": tags, - "image_size_bytes": getString(image, "imageSizeBytes"), - "media_type": getString(image, "mediaType"), - "upload_time": getString(image, "uploadTime"), - "build_time": getString(image, "buildTime"), - "update_time": getString(image, "updateTime"), - }) - } - } - - if nextToken, ok := res["nextPageToken"].(string); ok && nextToken != "" { - pageToken = nextToken - } else { - break - } - } - - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - - if err := d.Set("docker_images", dockerImages); err != nil { - return fmt.Errorf("Error setting Artifact Registry Docker images: %s", err) - } - - d.SetId(resourcePath) - - return nil -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go deleted file mode 100644 index 1a110653faa5..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_docker_images_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package artifactregistry_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceArtifactRegistryDockerImages_basic(t *testing.T) { - t.Parallel() - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryDockerImagesConfig, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "project"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "location"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "repository_id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "docker_images.0.image_name"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "docker_images.0.name"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_docker_images.this", "docker_images.0.self_link"), - ), - }, - }, - }) -} - -// Test the data source against the public AR repos -// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container -// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io -// Currently, gcr.io does not provide a imageSizeBytes or buildTime field in the JSON response -const testAccDataSourceArtifactRegistryDockerImagesConfig = ` -data "google_artifact_registry_docker_images" "this" { - project = "go-containerregistry" - location = "us" - repository_id = "gcr.io" -} -` diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go deleted file mode 100644 index ca0355700f8c..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go +++ /dev/null @@ -1,295 +0,0 @@ -package artifactregistry - -import ( - "fmt" - "net/url" - "sort" - "strings" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - 
"github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -type NpmPackage struct { - name string - packageName string - version string - tags []string - createTime time.Time - updateTime time.Time -} - -func DataSourceArtifactRegistryNpmPackage() *schema.Resource { - return &schema.Resource{ - Read: DataSourceArtifactRegistryNpmPackageRead, - - Schema: map[string]*schema.Schema{ - "project": { - Type: schema.TypeString, - Optional: true, - Description: "Project ID of the project.", - }, - "location": { - Type: schema.TypeString, - Required: true, - Description: "The region of the Artifact Registry repository.", - }, - "repository_id": { - Type: schema.TypeString, - Required: true, - Description: "The repository ID containing the Npm package.", - }, - "package_name": { - Type: schema.TypeString, - Required: true, - Description: "The name of the Npm package.", - }, - "version": { - Type: schema.TypeString, - Computed: true, - Description: "The version of the Npm package.", - }, - "tags": { - Type: schema.TypeList, - Computed: true, - Description: "The tags associated with the Npm package.", - Elem: &schema.Schema{Type: schema.TypeString}, - }, - "name": { - Type: schema.TypeString, - Computed: true, - Description: "The fully qualified name of the Npm package.", - }, - "create_time": { - Type: schema.TypeString, - Computed: true, - Description: "The time the package was created.", - }, - "update_time": { - Type: schema.TypeString, - Computed: true, - Description: "The time the package was last updated.", - }, - }, - } -} - -func DataSourceArtifactRegistryNpmPackageRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - var res 
NpmPackage - - packageName, version := parseNpmPackage(d.Get("package_name").(string)) - - if version != "" { - // fetch package by version - // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.npmPackages/get - packageUrlSafe := url.QueryEscape(packageName) - urlRequest, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/%s:%s", packageUrlSafe, version)) - if err != nil { - return fmt.Errorf("Error setting api endpoint") - } - - resGet, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: urlRequest, - UserAgent: userAgent, - }) - if err != nil { - return err - } - - res = convertNpmPackageResponseToStruct(resGet) - } else { - // fetch the list of packages, ordered by update time - // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.npmPackages/list - urlRequest, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages") - if err != nil { - return fmt.Errorf("Error setting api endpoint") - } - - // to reduce the number of pages we need to fetch, we set the pageSize to 1000(max) - urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageSize": "1000"}) - if err != nil { - return err - } - - res, err = retrieveAndFilterNpmPackages(d, config, urlRequest, userAgent, packageName, version) - if err != nil { - return err - } - } - - // Set Terraform schema fields - if err := d.Set("project", project); err != nil { - return err - } - if err := d.Set("package_name", packageName); err != nil { - return err - } - if err := d.Set("name", res.name); err != nil { - return err - } - if err := d.Set("version", res.version); err != nil { - return err - } - if err := d.Set("tags", 
res.tags); err != nil { - return err - } - if err := d.Set("create_time", res.createTime.Format(time.RFC3339Nano)); err != nil { - return err - } - if err := d.Set("update_time", res.updateTime.Format(time.RFC3339Nano)); err != nil { - return err - } - - d.SetId(res.name) - - return nil -} - -func parseNpmPackage(pkg string) (packageName string, version string) { - splitByColon := strings.Split(pkg, ":") - - if len(splitByColon) == 2 { - packageName = splitByColon[0] - version = splitByColon[1] - } else { - packageName = pkg - } - - return packageName, version -} - -func retrieveAndFilterNpmPackages(d *schema.ResourceData, config *transport_tpg.Config, urlRequest string, userAgent string, packageName string, version string) (NpmPackage, error) { - // Paging through the list method until either: - // if a version was provided, the matching package name and version pair - // otherwise, return the first matching package name - - var allPackages []NpmPackage - - for { - resListNpmPackages, token, err := retrieveListOfNpmPackages(config, urlRequest, userAgent) - if err != nil { - return NpmPackage{}, err - } - - for _, pkg := range resListNpmPackages { - if strings.Contains(pkg.name, "/"+url.QueryEscape(packageName)+":") { - allPackages = append(allPackages, pkg) - } - } - - if token == "" { - break - } - - urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageToken": token}) - if err != nil { - return NpmPackage{}, err - } - } - - if len(allPackages) == 0 { - return NpmPackage{}, fmt.Errorf("Requested Npm package was not found.") - } - - // Client-side sort by updateTime descending and createTime descending - sort.Slice(allPackages, func(i, j int) bool { - if !allPackages[i].updateTime.Equal(allPackages[j].updateTime) { - return allPackages[i].updateTime.After(allPackages[j].updateTime) - } - return allPackages[i].createTime.After(allPackages[j].createTime) - }) - - if version != "" { - for _, pkg := range allPackages { - if pkg.version == 
version { - return pkg, nil - } - } - return NpmPackage{}, fmt.Errorf("Requested version was not found.") - } - - // Return the latest package if no version specified - return allPackages[0], nil -} - -func retrieveListOfNpmPackages(config *transport_tpg.Config, urlRequest string, userAgent string) ([]NpmPackage, string, error) { - resList, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: urlRequest, - UserAgent: userAgent, - }) - if err != nil { - return make([]NpmPackage, 0), "", err - } - - if nextPageToken, ok := resList["nextPageToken"].(string); ok { - return flattenNpmPackageDataSourceListResponse(resList), nextPageToken, nil - } else { - return flattenNpmPackageDataSourceListResponse(resList), "", nil - } -} - -func flattenNpmPackageDataSourceListResponse(res map[string]interface{}) []NpmPackage { - var npmPackages []NpmPackage - - resNpmPackages, _ := res["npmPackages"].([]interface{}) - - for _, resPackage := range resNpmPackages { - pkg, _ := resPackage.(map[string]interface{}) - npmPackages = append(npmPackages, convertNpmPackageResponseToStruct(pkg)) - } - - return npmPackages -} - -func convertNpmPackageResponseToStruct(res map[string]interface{}) NpmPackage { - var npmPackage NpmPackage - - if name, ok := res["name"].(string); ok { - npmPackage.name = name - } - - if packageName, ok := res["packageName"].(string); ok { - npmPackage.packageName = packageName - } - - if version, ok := res["version"].(string); ok { - npmPackage.version = version - } - - var tags []string - if rawTags, ok := res["tags"].([]interface{}); ok { - for _, tag := range rawTags { - if tagStr, ok := tag.(string); ok { - tags = append(tags, tagStr) - } - } - } - npmPackage.tags = tags - - if createTimeStr, ok := res["createTime"].(string); ok { - npmPackage.createTime, _ = time.Parse(time.RFC3339, createTimeStr) - } - - if updateTimeStr, ok := res["updateTime"].(string); ok { - npmPackage.updateTime, _ = 
time.Parse(time.RFC3339, updateTimeStr) - } - - return npmPackage -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go deleted file mode 100644 index ae9b112b192e..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package artifactregistry_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceArtifactRegistryNpmPackage_basic(t *testing.T) { - acctest.SkipIfVcr(t) - t.Parallel() - - // At the moment there are no public Npm packages available in Artifact Registry. - // This test is skipped to avoid unnecessary failures. - // As soon as there are public packages available, this test can be enabled by removing the skip and adjusting the configuration accordingly. 
- t.Skip("No public Npm packages available in Artifact Registry") - - resourceName := "data.google_artifact_registry_npm_package.test" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryNpmPackageConfig, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "project"), - resource.TestCheckResourceAttrSet(resourceName, "location"), - resource.TestCheckResourceAttrSet(resourceName, "repository_id"), - resource.TestCheckResourceAttrSet(resourceName, "package_name"), - resource.TestCheckResourceAttrSet(resourceName, "name"), - resource.TestCheckResourceAttrSet(resourceName, "version"), - validateNpmPackageTimestamps(resourceName), - ), - }, - }, - }) -} - -const testAccDataSourceArtifactRegistryNpmPackageConfig = ` -data "google_artifact_registry_npm_package" "test" { - project = "example-project" - location = "us" - repository_id = "example-repo" - package_name = "example-package" -} -` - -func validateNpmPackageTimestamps(dataSourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - res, ok := s.RootModule().Resources[dataSourceName] - if !ok { - return fmt.Errorf("can't find %s in state", dataSourceName) - } - - for _, attr := range []string{"create_time", "update_time"} { - if ts, ok := res.Primary.Attributes[attr]; !ok || !isRFC3339(ts) { - return fmt.Errorf("%s is not RFC3339: %s", attr, ts) - } - } - - return nil - } -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go deleted file mode 100644 index 71edbf2cf241..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package.go +++ /dev/null @@ -1,136 +0,0 @@ -package 
artifactregistry - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceArtifactRegistryPackage() *schema.Resource { - return &schema.Resource{ - Read: DataSourceArtifactRegistryPackageRead, - - Schema: map[string]*schema.Schema{ - "location": { - Type: schema.TypeString, - Required: true, - }, - "repository_id": { - Type: schema.TypeString, - Required: true, - }, - "name": { - Type: schema.TypeString, - Required: true, - }, - "project": { - Type: schema.TypeString, - Optional: true, - }, - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - "create_time": { - Type: schema.TypeString, - Computed: true, - }, - "update_time": { - Type: schema.TypeString, - Computed: true, - }, - "annotations": { - Type: schema.TypeMap, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - } -} - -func DataSourceArtifactRegistryPackageRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return fmt.Errorf("Error setting Artifact Registry user agent: %s", err) - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error setting Artifact Registry project: %s", err) - } - - basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") - if err != nil { - return fmt.Errorf("Error setting Artifact Registry base path: %s", err) - } - - resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{name}}")) - if err != nil { - return fmt.Errorf("Error setting resource path: %s", err) - } - - urlRequest := basePath + resourcePath - 
headers := make(http.Header) - - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: u.String(), - UserAgent: userAgent, - Headers: headers, - }) - if err != nil { - return fmt.Errorf("Error getting Artifact Registry package: %s", err) - } - - annotations := make(map[string]string) - if anno, ok := res["annotations"].(map[string]interface{}); ok { - for k, v := range anno { - if val, ok := v.(string); ok { - annotations[k] = val - } - } - } - - getString := func(m map[string]interface{}, key string) string { - if v, ok := m[key].(string); ok { - return v - } - return "" - } - - name := getString(res, "name") - - if err := d.Set("project", project); err != nil { - return err - } - if err := d.Set("name", name); err != nil { - return err - } - if err := d.Set("display_name", getString(res, "displayName")); err != nil { - return err - } - if err := d.Set("create_time", getString(res, "createTime")); err != nil { - return err - } - if err := d.Set("update_time", getString(res, "updateTime")); err != nil { - return err - } - if err := d.Set("annotations", annotations); err != nil { - return err - } - - d.SetId(name) - - return nil -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go deleted file mode 100644 index 78d713183500..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_package_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package artifactregistry_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceArtifactRegistryPackage_basic(t *testing.T) { - 
t.Parallel() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryPackageConfig, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_artifact_registry_package.this", "name", "projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane"), - ), - }, - }, - }) -} - -// Test the data source against the public AR repos -// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container -// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io -const testAccDataSourceArtifactRegistryPackageConfig = ` -data "google_artifact_registry_package" "this" { - project = "go-containerregistry" - location = "us" - repository_id = "gcr.io" - name = "gcrane" -} -` diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go deleted file mode 100644 index 3c94509a9482..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories.go +++ /dev/null @@ -1,173 +0,0 @@ -package artifactregistry - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceArtifactRegistryRepositories() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArtifactRegistryRepositoriesRead, - Schema: map[string]*schema.Schema{ - "location": { - Type: schema.TypeString, - Required: true, - }, - "name_filter": { - Type: schema.TypeString, - Optional: true, - }, - "project": { - Type: schema.TypeString, - Optional: 
true, - }, - "repositories": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "id": { - Type: schema.TypeString, - Computed: true, - }, - "repository_id": { - Type: schema.TypeString, - Computed: true, - }, - "format": { - Type: schema.TypeString, - Computed: true, - }, - "description": { - Type: schema.TypeString, - Computed: true, - }, - "create_time": { - Type: schema.TypeString, - Computed: true, - }, - "update_time": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - }, - } -} - -func dataSourceArtifactRegistryRepositoriesRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") - if err != nil { - return fmt.Errorf("Error setting Artifact Registry base path: %s", err) - } - - resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories")) - if err != nil { - return fmt.Errorf("Error setting resource path: %s", err) - } - - urlRequest := basePath + resourcePath - - nameFilter := "" - if v, ok := d.GetOk("name_filter"); ok { - nameFilter = fmt.Sprintf("name=\"%s/%s\"", resourcePath, v.(string)) - - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - q := u.Query() - q.Set("filter", nameFilter) - u.RawQuery = q.Encode() - urlRequest = u.String() - } - - headers := make(http.Header) - repos := make([]map[string]interface{}, 0) - pageToken := "" - - for { - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - q := u.Query() - if nameFilter != "" { - q.Set("filter", nameFilter) - } - if 
pageToken != "" { - q.Set("pageToken", pageToken) - } - u.RawQuery = q.Encode() - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: u.String(), - UserAgent: userAgent, - Headers: headers, - }) - - if err != nil { - return fmt.Errorf("Error listing Artifact Registry repositories: %s", err) - } - - if items, ok := res["repositories"].([]interface{}); ok { - for _, item := range items { - repo := item.(map[string]interface{}) - repos = append(repos, map[string]interface{}{ - "id": repo["name"], - "repository_id": tpgresource.GetResourceNameFromSelfLink(repo["name"].(string)), - "format": repo["format"], - "description": repo["description"], - "create_time": repo["createTime"], - "update_time": repo["updateTime"], - }) - } - } - - if nextToken, ok := res["nextPageToken"].(string); ok && nextToken != "" { - pageToken = nextToken - } else { - break - } - } - - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - - if err := d.Set("repositories", repos); err != nil { - return fmt.Errorf("Error setting Artifact Registry repositories: %s", err) - } - - setId := resourcePath - - if nameFilter != "" { - setId += "/" + nameFilter - } - - d.SetId(setId) - - return nil -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go deleted file mode 100644 index cb569bbbe419..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repositories_test.go +++ /dev/null @@ -1,127 +0,0 @@ -package artifactregistry_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func 
TestAccDataSourceArtifactRegistryRepositories_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "location": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 8), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryRepositoriesConfig(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "project"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.1.id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.2.id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.repository_id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.format"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.create_time"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.all", "repositories.0.update_time"), - ), - }, - { - Config: testAccDataSourceArtifactRegistryRepositoriesConfigWithFilter(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.repository_id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.format"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", 
"repositories.0.create_time"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_repositories.filtered", "repositories.0.update_time"), - ), - }, - }, - }) -} - -func testAccDataSourceArtifactRegistryRepositoriesConfig(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_artifact_registry_repository" "repo1" { - location = "%{location}" - repository_id = "tf-test-repo1%{random_suffix}" - format = "DOCKER" - description = "repo1 desc" -} - -resource "google_artifact_registry_repository" "repo2" { - location = "%{location}" - repository_id = "tf-test-repo2%{random_suffix}" - format = "DOCKER" - description = "repo2 desc" -} - -resource "google_artifact_registry_repository" "repo3" { - location = "%{location}" - repository_id = "tf-test-repo3%{random_suffix}" - format = "DOCKER" - description = "repo3 desc" -} - -data "google_artifact_registry_repositories" "all" { - location = "%{location}" - - depends_on = [ - google_artifact_registry_repository.repo1, - google_artifact_registry_repository.repo2, - google_artifact_registry_repository.repo3, - ] -} -`, context) -} - -func testAccDataSourceArtifactRegistryRepositoriesConfigWithFilter(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_artifact_registry_repository" "repo1" { - location = "%{location}" - repository_id = "tf-test-repo1%{random_suffix}" - format = "DOCKER" - description = "repo1 desc" -} - -resource "google_artifact_registry_repository" "repo2" { - location = "%{location}" - repository_id = "tf-test-repo2%{random_suffix}" - format = "DOCKER" - description = "repo2 desc" -} - -resource "google_artifact_registry_repository" "repo3" { - location = "%{location}" - repository_id = "tf-test-repo3%{random_suffix}" - format = "DOCKER" - description = "repo3 desc" -} - -resource "google_artifact_registry_repository" "repo4" { - location = "%{location}" - repository_id = "tf-acc-repo4%{random_suffix}" - format = "DOCKER" - 
description = "acc desc" -} - -data "google_artifact_registry_repositories" "filtered" { - location = "%{location}" - name_filter = "*acc*" - - depends_on = [ - google_artifact_registry_repository.repo1, - google_artifact_registry_repository.repo2, - google_artifact_registry_repository.repo3, - google_artifact_registry_repository.repo4, - ] -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go deleted file mode 100644 index 7654a57d7973..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag.go +++ /dev/null @@ -1,122 +0,0 @@ -package artifactregistry - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceArtifactRegistryTag() *schema.Resource { - return &schema.Resource{ - Read: DataSourceArtifactRegistryTagRead, - - Schema: map[string]*schema.Schema{ - "location": { - Type: schema.TypeString, - Required: true, - }, - "repository_id": { - Type: schema.TypeString, - Required: true, - }, - "package_name": { - Type: schema.TypeString, - Required: true, - }, - "tag_name": { - Type: schema.TypeString, - Required: true, - }, - "project": { - Type: schema.TypeString, - Optional: true, - }, - "name": { - Type: schema.TypeString, - Computed: true, - }, - "version": { - Type: schema.TypeString, - Computed: true, - }, - }, - } -} - -func DataSourceArtifactRegistryTagRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return fmt.Errorf("Error setting Artifact Registry user agent: %s", err) - } - - project, 
err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error setting Artifact Registry project: %s", err) - } - - basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") - if err != nil { - return fmt.Errorf("Error setting Artifact Registry base path: %s", err) - } - - resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{package_name}}/tags/{{tag_name}}")) - if err != nil { - return fmt.Errorf("Error setting resource path: %s", err) - } - - urlRequest := basePath + resourcePath - headers := make(http.Header) - - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: u.String(), - UserAgent: userAgent, - Headers: headers, - }) - if err != nil { - return fmt.Errorf("Error getting Artifact Registry tag: %s", err) - } - - annotations := make(map[string]string) - if anno, ok := res["annotations"].(map[string]interface{}); ok { - for k, v := range anno { - if val, ok := v.(string); ok { - annotations[k] = val - } - } - } - - getString := func(m map[string]interface{}, key string) string { - if v, ok := m[key].(string); ok { - return v - } - return "" - } - - name := getString(res, "name") - - if err := d.Set("project", project); err != nil { - return err - } - if err := d.Set("name", name); err != nil { - return err - } - if err := d.Set("version", res["version"].(string)); err != nil { - return err - } - - d.SetId(name) - - return nil -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go deleted file mode 100644 index b29fb4183b45..000000000000 --- 
a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tag_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package artifactregistry_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceArtifactRegistryTag_basic(t *testing.T) { - t.Parallel() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryTagConfig, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_artifact_registry_tag.this", "name", "projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane/tags/latest"), - ), - }, - }, - }) -} - -// Test the data source against the public AR repos -// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container -// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io -const testAccDataSourceArtifactRegistryTagConfig = ` -data "google_artifact_registry_tag" "this" { - project = "go-containerregistry" - location = "us" - repository_id = "gcr.io" - package_name = "gcrane" - tag_name = "latest" -} -` diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go deleted file mode 100644 index 0160bc2f3f4e..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags.go +++ /dev/null @@ -1,169 +0,0 @@ -package artifactregistry - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceArtifactRegistryTags() *schema.Resource { - return &schema.Resource{ - Read: dataSourceArtifactRegistryTagsRead, - Schema: map[string]*schema.Schema{ - "location": { - Type: schema.TypeString, - Required: true, - }, - "repository_id": { - Type: schema.TypeString, - Required: true, - }, - "package_name": { - Type: schema.TypeString, - Required: true, - }, - "filter": { - Type: schema.TypeString, - Optional: true, - }, - "project": { - Type: schema.TypeString, - Optional: true, - }, - "tags": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - "version": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - }, - } -} - -func dataSourceArtifactRegistryTagsRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") - if err != nil { - return fmt.Errorf("Error setting Artifact Registry base path: %s", err) - } - - resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{package_name}}/tags")) - if err != nil { - return fmt.Errorf("Error setting resource path: %s", err) - } - - urlRequest := basePath + resourcePath - - filter := "" - if v, ok := d.GetOk("filter"); ok { - filter = v.(string) - - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - q := u.Query() - q.Set("filter", filter) - u.RawQuery = q.Encode() - urlRequest = u.String() - } - - headers := make(http.Header) - tags := 
make([]map[string]interface{}, 0) - pageToken := "" - - for { - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - q := u.Query() - if pageToken != "" { - q.Set("pageToken", pageToken) - } - u.RawQuery = q.Encode() - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: u.String(), - UserAgent: userAgent, - Headers: headers, - }) - - if err != nil { - return fmt.Errorf("Error listing Artifact Registry tags: %s", err) - } - - if items, ok := res["tags"].([]interface{}); ok { - for _, item := range items { - tag := item.(map[string]interface{}) - - annotations := make(map[string]string) - if anno, ok := tag["annotations"].(map[string]interface{}); ok { - for k, v := range anno { - if val, ok := v.(string); ok { - annotations[k] = val - } - } - } - - getString := func(m map[string]interface{}, key string) string { - if v, ok := m[key].(string); ok { - return v - } - return "" - } - - tags = append(tags, map[string]interface{}{ - "name": getString(tag, "name"), - "version": getString(tag, "version"), - }) - } - } - - if nextToken, ok := res["nextPageToken"].(string); ok && nextToken != "" { - pageToken = nextToken - } else { - break - } - } - - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - - if err := d.Set("tags", tags); err != nil { - return fmt.Errorf("Error setting Artifact Registry tags: %s", err) - } - - d.SetId(resourcePath) - - return nil -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go deleted file mode 100644 index ecd0fe7ed322..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_tags_test.go +++ /dev/null @@ -1,44 +0,0 @@ -package artifactregistry_test - -import ( - 
"testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceArtifactRegistryTags_basic(t *testing.T) { - t.Parallel() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryTagsConfig, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "project"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "location"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "repository_id"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "package_name"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "tags.0.name"), - resource.TestCheckResourceAttrSet("data.google_artifact_registry_tags.this", "tags.0.version"), - ), - }, - }, - }) -} - -// Test the data source against the public AR repos -// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container -// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io -const testAccDataSourceArtifactRegistryTagsConfig = ` -data "google_artifact_registry_tags" "this" { - project = "go-containerregistry" - location = "us" - repository_id = "gcr.io" - package_name = "gcrane" - # Filter doesn't work with gcr.io - # filter = "name=\"projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane/tags/latest\"" -} -` diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go deleted file mode 100644 index 9965bfb7b758..000000000000 --- 
a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version.go +++ /dev/null @@ -1,206 +0,0 @@ -package artifactregistry - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceArtifactRegistryVersion() *schema.Resource { - return &schema.Resource{ - Read: DataSourceArtifactRegistryVersionRead, - - Schema: map[string]*schema.Schema{ - "location": { - Type: schema.TypeString, - Required: true, - }, - "repository_id": { - Type: schema.TypeString, - Required: true, - }, - "package_name": { - Type: schema.TypeString, - Required: true, - }, - "version_name": { - Type: schema.TypeString, - Required: true, - }, - "view": { - Type: schema.TypeString, - Optional: true, - Default: "BASIC", - ValidateFunc: validateViewArtifactRegistryVersion, - }, - "project": { - Type: schema.TypeString, - Optional: true, - }, - "name": { - Type: schema.TypeString, - Computed: true, - }, - "description": { - Type: schema.TypeString, - Computed: true, - }, - "related_tags": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - "version": { - Type: schema.TypeString, - Computed: true, - }, - }, - }, - }, - "create_time": { - Type: schema.TypeString, - Computed: true, - }, - "update_time": { - Type: schema.TypeString, - Computed: true, - }, - "annotations": { - Type: schema.TypeMap, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - } -} - -func DataSourceArtifactRegistryVersionRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return fmt.Errorf("Error 
setting Artifact Registry user agent: %s", err) - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error setting Artifact Registry project: %s", err) - } - - basePath, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}") - if err != nil { - return fmt.Errorf("Error setting Artifact Registry base path: %s", err) - } - - resourcePath, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/packages/{{package_name}}/versions/{{version_name}}")) - if err != nil { - return fmt.Errorf("Error setting resource path: %s", err) - } - - view := d.Get("view").(string) - - urlRequest := basePath + resourcePath - - u, err := url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - q := u.Query() - q.Set("view", view) - u.RawQuery = q.Encode() - urlRequest = u.String() - - headers := make(http.Header) - - u, err = url.Parse(urlRequest) - if err != nil { - return fmt.Errorf("Error parsing URL: %s", err) - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: u.String(), - UserAgent: userAgent, - Headers: headers, - }) - if err != nil { - return fmt.Errorf("Error getting Artifact Registry version: %s", err) - } - - var relatedTags []map[string]interface{} - if rawTags, ok := res["relatedTags"].([]interface{}); ok { - for _, rawTag := range rawTags { - if tagMap, ok := rawTag.(map[string]interface{}); ok { - entry := map[string]interface{}{ - "name": tagMap["name"], - "version": tagMap["version"], - } - relatedTags = append(relatedTags, entry) - } - } - } - - annotations := make(map[string]string) - if anno, ok := res["annotations"].(map[string]interface{}); ok { - for k, v := range anno { - if val, ok := v.(string); ok { - annotations[k] = val - } - } - } - - getString := func(m map[string]interface{}, key string) string { - if v, ok 
:= m[key].(string); ok { - return v - } - return "" - } - - name := getString(res, "name") - - if err := d.Set("project", project); err != nil { - return err - } - if err := d.Set("name", name); err != nil { - return err - } - if err := d.Set("description", getString(res, "description")); err != nil { - return err - } - if err := d.Set("related_tags", relatedTags); err != nil { - return err - } - if err := d.Set("create_time", getString(res, "createTime")); err != nil { - return err - } - if err := d.Set("update_time", getString(res, "updateTime")); err != nil { - return err - } - if err := d.Set("annotations", annotations); err != nil { - return err - } - - d.SetId(name) - - return nil -} - -func validateViewArtifactRegistryVersion(val interface{}, key string) ([]string, []error) { - v := val.(string) - var errs []error - - if v != "BASIC" && v != "FULL" { - errs = append(errs, fmt.Errorf("%q must be either 'BASIC' or 'FULL', got %q", key, v)) - } - - return nil, errs -} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go deleted file mode 100644 index fab8ed95a440..000000000000 --- a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_version_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package artifactregistry_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceArtifactRegistryVersion_basic(t *testing.T) { - t.Parallel() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceArtifactRegistryVersionConfig, - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttr("data.google_artifact_registry_version.this", "name", "projects/go-containerregistry/locations/us/repositories/gcr.io/packages/gcrane/versions/sha256:c0cf52c2bd8c636bbf701c6c74c5ff819447d384dc957d52a52a668de63e8f5d"), - ), - }, - }, - }) -} - -// Test the data source against the public AR repos -// https://console.cloud.google.com/artifacts/docker/cloudrun/us/container -// https://console.cloud.google.com/artifacts/docker/go-containerregistry/us/gcr.io -const testAccDataSourceArtifactRegistryVersionConfig = ` -data "google_artifact_registry_version" "this" { - project = "go-containerregistry" - location = "us" - repository_id = "gcr.io" - package_name = "gcrane" - version_name = "sha256:c0cf52c2bd8c636bbf701c6c74c5ff819447d384dc957d52a52a668de63e8f5d" -} -` diff --git a/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl b/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl index b6a73470e45a..8da32e1976f7 100644 --- a/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl +++ b/mmv1/third_party/terraform/services/artifactregistry/resource_artifact_registry_repository_test.go.tmpl @@ -138,74 +138,6 @@ func TestAccArtifactRegistryRepository_kfp(t *testing.T) { }) } -func TestAccArtifactRegistryRepository_cleanup(t *testing.T) { - t.Parallel() - - repositoryID := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckArtifactRegistryRepositoryDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccArtifactRegistryRepository_cleanup(repositoryID), - }, - { - ResourceName: "google_artifact_registry_repository.test", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: 
testAccArtifactRegistryRepository_cleanup2(repositoryID), - PlanOnly: true, - ExpectNonEmptyPlan: true, - }, - { - Config: testAccArtifactRegistryRepository_cleanup2(repositoryID), - }, - }, - }) -} - -func testAccArtifactRegistryRepository_cleanup(repositoryID string)string { - return fmt.Sprintf(` -resource "google_artifact_registry_repository" "test" { - repository_id = "%s" - location = "us-central1" - description = "cleanup with non-second time" - format = "DOCKER" - - cleanup_policies { - id = "delete" - action = "DELETE" - condition { - older_than = "7d" - } - } -} -`, repositoryID) -} - -func testAccArtifactRegistryRepository_cleanup2(repositoryID string)string { - return fmt.Sprintf(` -resource "google_artifact_registry_repository" "test" { - repository_id = "%s" - location = "us-central1" - description = "cleanup with non-second time" - format = "DOCKER" - - cleanup_policies { - id = "delete" - action = "DELETE" - condition { - older_than = "10d" - } - } -} -`, repositoryID) -} - func testAccArtifactRegistryRepository_update(repositoryID string) string { return fmt.Sprintf(` resource "google_artifact_registry_repository" "test" { diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl index 2933f3c1ea3f..944e547352d3 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl @@ -45,11 +45,6 @@ func DataSourceGoogleCloudBackupDRBackup() *schema.Resource { Computed: true, Description: `Name of the Data Source associated with Backup.`, }, - "create_time": { - Type: schema.TypeString, - Computed: true, - Description: `The time when the backup was created.`, - }, }, }, }, @@ -69,11 +64,6 @@ func DataSourceGoogleCloudBackupDRBackup() *schema.Resource { Type: schema.TypeString, Required: true, }, - "create_time": { - 
Type: schema.TypeString, - Computed: true, - Description: `The time when the backup was created.`, - }, } return &schema.Resource{ @@ -156,7 +146,6 @@ func flattenDataSourceBackupDRBackups(v interface{}, d *schema.ResourceData, con "backup_id": flattenDataSourceBackupDRBackupsBackupId(original["backupId"], d, config), "backup_vault_id": flattenDataSourceBackupDRBackupsBackupVaultId(original["backupVaultId"], d, config), "data_source_id": flattenDataSourceBackupDRBackupsDataSourceId(original["dataSourceId"], d, config), - "create_time": flattenDataSourceBackupDRBackupsCreateTime(original["createTime"], d, config), }) } return transformed @@ -181,7 +170,3 @@ func flattenDataSourceBackupDRBackupsBackupVaultId(v interface{}, d *schema.Reso func flattenDataSourceBackupDRBackupsDataSourceId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return v } - -func flattenDataSourceBackupDRBackupsCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go index 71489048da69..8677d3815b54 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_test.go @@ -1,10 +1,9 @@ package backupdr_test import ( - "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" ) func TestAccDataSourceGoogleBackupDRBackupPlan_basic(t *testing.T) { @@ -27,26 +26,6 @@ func TestAccDataSourceGoogleBackupDRBackupPlan_basic(t *testing.T) { }) } -func TestAccDataSourceGoogleBackupDRBackupPlan_csql(t *testing.T) { - t.Parallel() - context := map[string]interface{}{ - "random_suffix": 
acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBackupDRBackupPlanDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleBackupDRBackupPlan_csql(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceState("data.google_backup_dr_backup_plan.fetch-bp", "google_backup_dr_backup_plan.csql-test"), - ), - }, - }, - }) -} - func testAccDataSourceGoogleBackupDRBackupPlan_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_backup_dr_backup_vault" "my-backup-vault-1" { @@ -99,57 +78,3 @@ data "google_backup_dr_backup_plan" "fetch-bp" { } `, context) } - -func testAccDataSourceGoogleBackupDRBackupPlan_csql(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_backup_dr_backup_vault" "my-backup-vault-csql" { - location ="us-central1" - backup_vault_id = "tf-test-bv-%{random_suffix}" - description = "This is a backup vault built by Terraform for cloudsql." 
- backup_minimum_enforced_retention_duration = "100000s" - labels = { - foo = "bar1" - bar = "baz1" - } - annotations = { - annotations1 = "bar1" - annotations2 = "baz1" - } - force_update = "true" - force_delete = "true" - allow_missing = "true" -} - - -resource "google_backup_dr_backup_plan" "csql-test" { - location = "us-central1" - backup_plan_id = "tf-test-bp-%{random_suffix}" - resource_type= "sqladmin.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my-backup-vault-csql.name - depends_on=[ google_backup_dr_backup_vault.my-backup-vault-csql ] - lifecycle { - ignore_changes = [backup_vault] - } - log_retention_days = 4 - backup_rules { - rule_id = "rule-1" - backup_retention_days = 5 - standard_schedule { - recurrence_type = "HOURLY" - hourly_frequency = 6 - time_zone = "UTC" - backup_window{ - start_hour_of_day = 0 - end_hour_of_day = 24 - } - } - } -} - -data "google_backup_dr_backup_plan" "fetch-bp" { - location = "us-central1" - backup_plan_id=google_backup_dr_backup_plan.csql-test.backup_plan_id - depends_on= [ google_backup_dr_backup_plan.csql-test ] - } -`, context) -} diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl index fb5628955556..884120f87c30 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_test.go.tmpl @@ -15,15 +15,8 @@ import ( func TestAccDataSourceGoogleCloudBackupDRDataSource_basic(t *testing.T) { t.Parallel() - - {{ if ne $.TargetVersionName "ga" -}} - data_source_id := "ds-test" - {{ else -}} - data_source_id := "56b93b14529b77d764b21b2251e1ea8f0006e8dd" - {{- end }} context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "data_source_id": data_source_id, } stepChecks := func(wantName string, wantState 
string) []resource.TestCheckFunc { @@ -34,7 +27,7 @@ func TestAccDataSourceGoogleCloudBackupDRDataSource_basic(t *testing.T) { return stepCheck } project := envvar.GetTestProjectFromEnv() - expectedName := fmt.Sprintf("projects/%s/locations/us-central1/backupVaults/bv-test/dataSources/%s", project, data_source_id) + expectedName := fmt.Sprintf("projects/%s/locations/us-central1/backupVaults/bv-test/dataSources/ds-test", project) expectedState := "ACTIVE" acctest.VcrTest(t, resource.TestCase{ @@ -58,7 +51,12 @@ data "google_backup_dr_data_source" "foo" { project = data.google_project.project.project_id location = "us-central1" backup_vault_id = "bv-test" - data_source_id = "%{data_source_id}" + {{ if ne $.TargetVersionName "ga" -}} + data_source_id = "ds-test" + {{ else -}} + data_source_id = "56b93b14529b77d764b21b2251e1ea8f0006e8dd" + {{- end }} } + `, context) } diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go deleted file mode 100644 index 492dcce0fc91..000000000000 --- a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_association_test.go +++ /dev/null @@ -1,251 +0,0 @@ -package backupdr_test - -import ( - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" - "time" -) - -func TestAccBackupDRBackupPlanAssociation_fullUpdate(t *testing.T) { - // Uses time.Now - acctest.SkipIfVcr(t) - - t.Parallel() - - timeNow := time.Now().UTC() - referenceTime := time.Date(timeNow.Year(), timeNow.Month(), timeNow.Day(), 0, 0, 0, 0, time.UTC) - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "effective_time": referenceTime.Add(24 * time.Hour).Format(time.RFC3339), - "random_suffix": acctest.RandString(t, 10), 
- } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccBackupDRBackupPlanAssociation_fullCreate(context), - }, - { - ResourceName: "google_backup_dr_backup_plan_association.bpa", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"resource"}, - }, - { - Config: testAccBackupDRBackupPlanAssociation_fullUpdate(context), - }, - { - ResourceName: "google_backup_dr_backup_plan_association.bpa", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"resource"}, - }, - }, - }) -} - -func testAccBackupDRBackupPlanAssociation_fullCreate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_service_account" "default" { - account_id = "tf-test-my-custom-%{random_suffix}" - display_name = "Custom SA for VM Instance" -} - -resource "google_compute_instance" "default" { - name = "tf-test-compute-instance-%{random_suffix}" - machine_type = "n2-standard-2" - zone = "us-central1-a" - tags = ["foo", "bar"] - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - labels = { - my_label = "value" - } - } - } - // Local SSD disk - scratch_disk { - interface = "NVME" - } - network_interface { - network = "default" - access_config { - // Ephemeral public IP - } - } - service_account { - # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. - email = google_service_account.default.email - scopes = ["cloud-platform"] - } -} -resource "google_backup_dr_backup_vault" "my-backup-vault" { - location ="us-central1" - backup_vault_id = "tf-test-bv-%{random_suffix}" - description = "This is a second backup vault built by Terraform." 
- backup_minimum_enforced_retention_duration = "100000s" - labels = { - foo = "bar1" - bar = "baz1" - } - annotations = { - annotations1 = "bar1" - annotations2 = "baz1" - } - force_update = "true" - force_delete = "true" - allow_missing = "true" -} - -resource "google_backup_dr_backup_plan" "foo" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-%{random_suffix}" - resource_type = "compute.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my-backup-vault.name - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 366 - - standard_schedule { - recurrence_type = "YEARLY" - months = ["JANUARY"] - days_of_month = [15] - time_zone = "UTC" - - backup_window { - start_hour_of_day = 2 # Backup starts at 2:00 AM UTC - end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM - } - } - } -} - -resource "google_backup_dr_backup_plan_association" "bpa" { - location = "us-central1" - backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" - resource = google_compute_instance.default.id - resource_type= "compute.googleapis.com/Instance" - backup_plan = google_backup_dr_backup_plan.foo.name -} -`, context) -} - -func testAccBackupDRBackupPlanAssociation_fullUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_service_account" "default" { - account_id = "tf-test-my-custom-%{random_suffix}" - display_name = "Custom SA for VM Instance" -} - -resource "google_compute_instance" "default" { - name = "tf-test-compute-instance-%{random_suffix}" - machine_type = "n2-standard-2" - zone = "us-central1-a" - tags = ["foo", "bar"] - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - labels = { - my_label = "value" - } - } - } - // Local SSD disk - scratch_disk { - interface = "NVME" - } - network_interface { - network = "default" - access_config { - // Ephemeral public IP - } - } - service_account { - # Google recommends custom service accounts that have cloud-platform 
scope and permissions granted via IAM Roles. - email = google_service_account.default.email - scopes = ["cloud-platform"] - } -} - -resource "google_backup_dr_backup_vault" "my-backup-vault" { - location ="us-central1" - backup_vault_id = "tf-test-bv-%{random_suffix}" - description = "This is a second backup vault built by Terraform." - backup_minimum_enforced_retention_duration = "100000s" - labels = { - foo = "bar1" - bar = "baz1" - } - annotations = { - annotations1 = "bar1" - annotations2 = "baz1" - } - force_update = "true" - force_delete = "true" - allow_missing = "true" -} - -resource "google_backup_dr_backup_plan" "updated-bp" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-1-%{random_suffix}" - resource_type = "compute.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my-backup-vault.name - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 366 - - standard_schedule { - recurrence_type = "YEARLY" - months = ["JANUARY"] - days_of_month = [15] - time_zone = "UTC" - - backup_window { - start_hour_of_day = 2 # Backup starts at 2:00 AM UTC - end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM - } - } - } -} - -resource "google_backup_dr_backup_plan" "foo" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-%{random_suffix}" - resource_type = "compute.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my-backup-vault.name - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 366 - - standard_schedule { - recurrence_type = "YEARLY" - months = ["JANUARY"] - days_of_month = [15] - time_zone = "UTC" - - backup_window { - start_hour_of_day = 2 # Backup starts at 2:00 AM UTC - end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM - } - } - } -} - -resource "google_backup_dr_backup_plan_association" "bpa" { - location = "us-central1" - backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" - resource = google_compute_instance.default.id - 
resource_type= "compute.googleapis.com/Instance" - backup_plan = google_backup_dr_backup_plan.updated-bp.name -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go deleted file mode 100644 index 2b01509366a9..000000000000 --- a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_plan_test.go +++ /dev/null @@ -1,219 +0,0 @@ -package backupdr_test - -import ( - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" -) - -func TestAccBackupDRBackupPlan_fullUpdate(t *testing.T) { - - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccBackupDRBackupPlan_fullCreate(context), - }, - { - ResourceName: "google_backup_dr_backup_plan.bp", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"resource"}, - }, - { - Config: testAccBackupDRBackupPlan_fullUpdate(context), - }, - { - ResourceName: "google_backup_dr_backup_plan.bp", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"resource"}, - }, - }, - }) -} - -func testAccBackupDRBackupPlan_fullCreate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_service_account" "default" { - account_id = "tf-test-my-custom-%{random_suffix}" - display_name = "Custom SA for VM Instance" -} - -resource "google_compute_instance" "default" { - name = "tf-test-compute-instance-%{random_suffix}" - machine_type = "n2-standard-2" - 
zone = "us-central1-a" - tags = ["foo", "bar"] - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - labels = { - my_label = "value" - } - } - } - // Local SSD disk - scratch_disk { - interface = "NVME" - } - network_interface { - network = "default" - access_config { - // Ephemeral public IP - } - } - service_account { - # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. - email = google_service_account.default.email - scopes = ["cloud-platform"] - } -} -resource "google_backup_dr_backup_vault" "my-backup-vault" { - location ="us-central1" - backup_vault_id = "tf-test-bv-%{random_suffix}" - description = "This is a second backup vault built by Terraform." - backup_minimum_enforced_retention_duration = "100000s" - labels = { - foo = "bar1" - bar = "baz1" - } - annotations = { - annotations1 = "bar1" - annotations2 = "baz1" - } - force_update = "true" - force_delete = "true" - allow_missing = "true" -} - -resource "google_backup_dr_backup_plan" "bp" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-%{random_suffix}" - resource_type = "compute.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my-backup-vault.name - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 366 - - standard_schedule { - recurrence_type = "YEARLY" - months = ["JANUARY"] - days_of_month = [15] - time_zone = "UTC" - - backup_window { - start_hour_of_day = 2 # Backup starts at 2:00 AM UTC - end_hour_of_day = 8 # Optional, backup window ends at 3:00 AM - } - } - } -} - -`, context) -} - -func testAccBackupDRBackupPlan_fullUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_service_account" "default" { - account_id = "tf-test-my-custom-%{random_suffix}" - display_name = "Custom SA for VM Instance" -} - -resource "google_compute_instance" "default" { - name = "tf-test-compute-instance-%{random_suffix}" - machine_type = 
"n2-standard-2" - zone = "us-central1-a" - tags = ["foo", "bar"] - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - labels = { - my_label = "value" - } - } - } - // Local SSD disk - scratch_disk { - interface = "NVME" - } - network_interface { - network = "default" - access_config { - // Ephemeral public IP - } - } - service_account { - # Google recommends custom service accounts that have cloud-platform scope and permissions granted via IAM Roles. - email = google_service_account.default.email - scopes = ["cloud-platform"] - } -} - -resource "google_backup_dr_backup_vault" "my-backup-vault" { - location ="us-central1" - backup_vault_id = "tf-test-bv-%{random_suffix}" - description = "This is a second backup vault built by Terraform." - backup_minimum_enforced_retention_duration = "100000s" - labels = { - foo = "bar1" - bar = "baz1" - } - annotations = { - annotations1 = "bar1" - annotations2 = "baz1" - } - force_update = "true" - force_delete = "true" - allow_missing = "true" -} - -resource "google_backup_dr_backup_plan" "bp" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-%{random_suffix}" - resource_type = "compute.googleapis.com/Instance" - backup_vault = google_backup_dr_backup_vault.my-backup-vault.name - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 366 - standard_schedule { - recurrence_type = "MONTHLY" # Updated recurrence_type from YEARLY - days_of_month = [1, 15] # Updated days_of_month - time_zone = "America/New_York" # Updated time_zone - - backup_window { - start_hour_of_day = 1 # Updated start hour - end_hour_of_day = 7 # Updated end hour - } - } - } - backup_rules { - # Adding a second rule to test weekly schedule - rule_id = "rule-2" - backup_retention_days = 60 # Different retention for rule-2 - - standard_schedule { - recurrence_type = "WEEKLY" - days_of_week = ["MONDAY", "FRIDAY"] # Added days_of_week - time_zone = "UTC" - - backup_window { - start_hour_of_day = 1 # Different backup window 
for rule-2 - end_hour_of_day = 7 - } - } - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go.tmpl similarity index 85% rename from mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go rename to mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go.tmpl index 33d24d8ba32a..c851a5387309 100644 --- a/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go +++ b/mmv1/third_party/terraform/services/backupdr/resource_backup_dr_backup_vault_test.go.tmpl @@ -18,9 +18,9 @@ func TestAccBackupDRBackupVault_fullUpdate(t *testing.T) { referenceTime := time.Date(timeNow.Year(), timeNow.Month(), timeNow.Day(), 0, 0, 0, 0, time.UTC) context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), + "project": envvar.GetTestProjectFromEnv(), "effective_time": referenceTime.Add(24 * time.Hour).Format(time.RFC3339), - "random_suffix": acctest.RandString(t, 10), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -34,7 +34,7 @@ func TestAccBackupDRBackupVault_fullUpdate(t *testing.T) { ResourceName: "google_backup_dr_backup_vault.backup-vault-test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "backup_retention_inheritance", "access_restriction", "labels", "location", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "access_restriction", "labels", "location", "terraform_labels"}, }, { Config: testAccBackupDRBackupVault_fullUpdate(context), @@ -43,7 +43,7 @@ 
func TestAccBackupDRBackupVault_fullUpdate(t *testing.T) { ResourceName: "google_backup_dr_backup_vault.backup-vault-test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "backup_retention_inheritance", "access_restriction", "labels", "location", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"allow_missing", "annotations", "backup_vault_id", "force_delete", "force_update", "ignore_backup_plan_references", "ignore_inactive_datasources", "access_restriction", "labels", "location", "terraform_labels"}, }, }, }) @@ -68,7 +68,6 @@ resource "google_backup_dr_backup_vault" "backup-vault-test" { force_update = "true" ignore_inactive_datasources = "true" access_restriction = "WITHIN_ORGANIZATION" - backup_retention_inheritance = "INHERIT_VAULT_RETENTION" ignore_backup_plan_references = "true" allow_missing = "true" } @@ -93,10 +92,9 @@ resource "google_backup_dr_backup_vault" "backup-vault-test" { } force_update = "true" access_restriction = "WITHIN_ORGANIZATION" - backup_retention_inheritance = "INHERIT_VAULT_RETENTION" ignore_inactive_datasources = "true" ignore_backup_plan_references = "true" allow_missing = "true" } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go deleted file mode 100644 index f24e25373afa..000000000000 --- a/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway.go +++ /dev/null @@ -1,53 +0,0 @@ -package beyondcorp - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleBeyondcorpSecurityGateway() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceBeyondcorpSecurityGateway().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "security_gateway_id") - - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: dataSourceGoogleBeyondcorpSecurityGatewayRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleBeyondcorpSecurityGatewayRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - name := d.Get("security_gateway_id").(string) - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - id := fmt.Sprintf("projects/%s/locations/global/securityGateways/%s", project, name) - d.SetId(id) - // Remove after deprecated location variable is removed. - d.Set("location", "global") - - err = resourceBeyondcorpSecurityGatewayRead(d, meta) - if err != nil { - return err - } - - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go deleted file mode 100644 index 8ce0c344addb..000000000000 --- a/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_security_gateway_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package beyondcorp_test - -import ( - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "testing" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDataSourceGoogleBeyondcorpSecurityGateway_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": 
acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBeyondcorpSecurityGatewayDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleBeyondcorpSecurityGateway_basic(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceState("data.google_beyondcorp_security_gateway.foo", "google_beyondcorp_security_gateway.foo"), - ), - }, - }, - }) -} - -func TestAccDataSourceGoogleBeyondcorpSecurityGateway_full(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBeyondcorpSecurityGatewayDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleBeyondcorpSecurityGateway_full(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceState("data.google_beyondcorp_security_gateway.foo", "google_beyondcorp_security_gateway.foo"), - ), - }, - }, - }) -} - -func testAccDataSourceGoogleBeyondcorpSecurityGateway_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_beyondcorp_security_gateway" "foo" { - security_gateway_id = "default-foo-sg-basic-%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -data "google_beyondcorp_security_gateway" "foo" { - security_gateway_id = google_beyondcorp_security_gateway.foo.security_gateway_id -} -`, context) -} - -func testAccDataSourceGoogleBeyondcorpSecurityGateway_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_beyondcorp_security_gateway" "foo" { - security_gateway_id = 
"default-foo-sg-full-%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -data "google_beyondcorp_security_gateway" "foo" { - security_gateway_id = google_beyondcorp_security_gateway.foo.security_gateway_id - project = google_beyondcorp_security_gateway.foo.project -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go new file mode 100644 index 000000000000..28086bc32df4 --- /dev/null +++ b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go @@ -0,0 +1,85 @@ +package beyondcorp_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context), + }, + { + ResourceName: "google_beyondcorp_application.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"application_id", "security_gateways_id"}, + }, + { + Config: testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_beyondcorp_application.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: 
"google_beyondcorp_application.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"application_id", "security_gateways_id"}, + }, + }, + }) +} + +func testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_beyondcorp_security_gateway" "default" { + security_gateway_id = "default%{random_suffix}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +resource "google_beyondcorp_application" "example" { + security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id + application_id = "google%{random_suffix}" + endpoint_matchers { + hostname = "google.com" + } +} +`, context) +} + +func testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_beyondcorp_security_gateway" "default" { + security_gateway_id = "default%{random_suffix}" + display_name = "My Security Gateway resource" + hubs { region = "us-central1" } +} + +resource "google_beyondcorp_application" "example" { + security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id + display_name = "Updated Name" + application_id = "google%{random_suffix}" + endpoint_matchers { + hostname = "google.com" + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go deleted file mode 100644 index 2175e9a51a16..000000000000 --- a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_security_gateway_application_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package beyondcorp_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - 
"github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context), - }, - { - ResourceName: "google_beyondcorp_security_gateway_application.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_id", "security_gateway_id"}, - }, - { - Config: testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_beyondcorp_security_gateway_application.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_beyondcorp_security_gateway_application.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_id", "security_gateway_id"}, - }, - }, - }) -} - -func testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "default-sg%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_security_gateway_application" "example" { - security_gateway_id = 
google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "google-sga%{random_suffix}" - endpoint_matchers { - hostname = "google.com" - } -} -`, context) -} - -func testAccBeyondcorpSecurityGatewayApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "default-sg%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_security_gateway_application" "example" { - security_gateway_id = google_beyondcorp_security_gateway.default.security_gateway_id - display_name = "Updated Name" - application_id = "google-sga%{random_suffix}" - endpoint_matchers { - hostname = "*.google.com" - } - endpoint_matchers { - hostname = "google.com" - ports = [443, 80] - } - upstreams { - network { - name = "projects/${data.google_project.project.project_id}/global/networks/default" - } - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go deleted file mode 100644 index 1bbc9c66d5f0..000000000000 --- a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets.go +++ /dev/null @@ -1,161 +0,0 @@ -package bigquery - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleBigqueryDatasets() *schema.Resource { - dsSchema := map[string]*schema.Schema{ - "project": { - Type: schema.TypeString, - Optional: true, - Description: "The ID of the project in which the datasets are located. 
If it is not provided, the provider project is used.", - }, - "datasets": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "labels": { - Type: schema.TypeMap, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: "The labels associated with this dataset. You can use these to organize and group your datasets.", - }, - "friendly_name": { - Type: schema.TypeString, - Computed: true, - Description: "A user-friendly name for the dataset.", - }, - "dataset_id": { - Type: schema.TypeString, - Computed: true, - Description: "A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_).", - }, - "location": { - Type: schema.TypeString, - Computed: true, - Description: "The geographic location where the dataset resides.", - }, - }, - }, - }, - } - - return &schema.Resource{ - Read: DataSourceGoogleBigQueryDatasetsRead, - Schema: dsSchema, - } -} - -func DataSourceGoogleBigQueryDatasetsRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - - if err != nil { - return fmt.Errorf("Error fetching project: %s", err) - } - - params := make(map[string]string) - datasets := make([]map[string]interface{}, 0) - - for { - url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets") - if err != nil { - return err - } - - url, err = transport_tpg.AddQueryParams(url, params) - if err != nil { - return err - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return fmt.Errorf("Error retrieving datasets: %s", err) - } - - pageDatasets := 
flattenDataSourceGoogleBigQueryDatasetsList(res["datasets"]) - datasets = append(datasets, pageDatasets...) - - pToken, ok := res["nextPageToken"] - if ok && pToken != nil && pToken.(string) != "" { - params["pageToken"] = pToken.(string) - } else { - break - } - } - - if err := d.Set("datasets", datasets); err != nil { - return fmt.Errorf("Error retrieving datasets: %s", err) - } - - id := fmt.Sprintf("projects/%s/datasets", project) - d.SetId(id) - - return nil -} - -func flattenDataSourceGoogleBigQueryDatasetsList(res interface{}) []map[string]interface{} { - if res == nil { - return make([]map[string]interface{}, 0) - } - - ls := res.([]interface{}) - - datasets := make([]map[string]interface{}, 0, len(ls)) - - for _, raw := range ls { - output := raw.(map[string]interface{}) - - var mLabels map[string]interface{} - var mDatasetID string - var mFriendlyName string - var mLocation string - - if oLabels, ok := output["labels"].(map[string]interface{}); ok { - mLabels = oLabels - } else { - mLabels = make(map[string]interface{}) // Initialize as an empty map if labels are missing - } - - if oFriendlyName, ok := output["friendlyName"].(string); ok { - mFriendlyName = oFriendlyName - } - - if oDatasetReference, ok := output["datasetReference"].(map[string]interface{}); ok { - if datasetID, ok := oDatasetReference["datasetId"].(string); ok { - mDatasetID = datasetID - } - } - - if oLocation, ok := output["location"].(string); ok { - mLocation = oLocation - } - - datasets = append(datasets, map[string]interface{}{ - "labels": mLabels, - "friendly_name": mFriendlyName, - "dataset_id": mDatasetID, - "location": mLocation, - }) - } - - return datasets -} diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go deleted file mode 100644 index 74146ee7a310..000000000000 --- 
a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_datasets_test.go +++ /dev/null @@ -1,106 +0,0 @@ -package bigquery_test - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDataSourceGoogleBigqueryDatasets_basic(t *testing.T) { - t.Parallel() - - randomSuffix := acctest.RandString(t, 10) - projectID := envvar.GetTestProjectFromEnv() - - expectedDatasetFoo := map[string]string{ - "dataset_id": fmt.Sprintf("tf_test_foo_%s", randomSuffix), - "friendly_name": "Foo", - "location": "US", - "labels.%": "1", - "labels.goog-terraform-provisioned": "true", - } - - expectedDatasetBar := map[string]string{ - "dataset_id": fmt.Sprintf("tf_test_bar_%s", randomSuffix), - "friendly_name": "bar", - "location": "EU", - "labels.%": "1", - "labels.goog-terraform-provisioned": "true", - } - - nonExpectedDataset := map[string]string{ - "dataset_id": "non_existent_dataset", - "friendly_name": "I do not exist, and should throw an error", - "location": "NON_EXIST", - "labels.%": "8", - "labels.goog-terraform-provisioned": "Nah", - } - - context := map[string]interface{}{ - "random_suffix": randomSuffix, - "project_id": projectID, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleBigqueryDatasets_basic(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckTypeSetElemNestedAttrs("data.google_bigquery_datasets.example", "datasets.*", expectedDatasetFoo), - resource.TestCheckTypeSetElemNestedAttrs("data.google_bigquery_datasets.example", "datasets.*", expectedDatasetBar), - // this check is intended to throw an error, see ExpectError below - 
resource.TestCheckTypeSetElemNestedAttrs("data.google_bigquery_datasets.example", "datasets.*", nonExpectedDataset), - ), - ExpectError: regexp.MustCompile(".*no TypeSet element \"datasets.*\", with nested attrs.*non_existent_dataset.*I do not exist, and should throw an error.*"), - }, - }, - }) -} - -func testAccDataSourceGoogleBigqueryDatasets_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_bigquery_dataset" "foo" { - dataset_id = "tf_test_foo_%{random_suffix}" - friendly_name = "Foo" - description = "This is a test description" - location = "US" - default_table_expiration_ms = 3600000 - - access { - role = "OWNER" - user_by_email = google_service_account.bqowner.email - } -} - -resource "google_bigquery_dataset" "bar" { - dataset_id = "tf_test_bar_%{random_suffix}" - friendly_name = "bar" - description = "This is a test description" - location = "EU" - default_table_expiration_ms = 3600000 - - access { - role = "OWNER" - user_by_email = google_service_account.bqowner.email - } -} - -resource "google_service_account" "bqowner" { - account_id = "tf-test-%{random_suffix}" -} - -data "google_bigquery_datasets" "example" { - project = "%{project_id}" - depends_on = [ - google_bigquery_dataset.foo, - google_bigquery_dataset.bar, - ] -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go deleted file mode 100644 index c08d51788af8..000000000000 --- a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table.go +++ /dev/null @@ -1,51 +0,0 @@ -package bigquery - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleBigQueryTable() *schema.Resource { - dsSchema := 
tpgresource.DatasourceSchemaFromResourceSchema(ResourceBigQueryTable().Schema) - - tpgresource.AddRequiredFieldsToSchema(dsSchema, "dataset_id") - tpgresource.AddRequiredFieldsToSchema(dsSchema, "table_id") - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: dataSourceBigQueryTableRead, - Schema: dsSchema, - } -} - -func dataSourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error fetching project: %s", err) - } - - datasetID := d.Get("dataset_id").(string) - tableID := d.Get("table_id").(string) - - id := fmt.Sprintf("projects/%s/datasets/%s/tables/%s", project, datasetID, tableID) - d.SetId(id) - - err = resourceBigQueryTableRead(d, meta) - if err != nil { - return fmt.Errorf("Error retrieving table: %s", err) - } - - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go deleted file mode 100644 index 906978facf6b..000000000000 --- a/mmv1/third_party/terraform/services/bigquery/data_source_google_bigquery_table_test.go +++ /dev/null @@ -1,190 +0,0 @@ -package bigquery_test - -import ( - "encoding/json" - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDataSourceGoogleBigqueryTable_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - expectedID := fmt.Sprintf("projects/%s/datasets/%s/tables/%s", 
envvar.GetTestProjectFromEnv(), fmt.Sprintf("tf_test_ds_%s", context["random_suffix"]), fmt.Sprintf("tf_test_table_%s", context["random_suffix"])) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleBigqueryTable_basic(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_bigquery_table.example", "table_id", fmt.Sprintf("tf_test_table_%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_bigquery_table.example", "dataset_id", fmt.Sprintf("tf_test_ds_%s", context["random_suffix"])), - resource.TestCheckResourceAttrSet("data.google_bigquery_table.example", "schema"), - resource.TestCheckResourceAttr("data.google_bigquery_table.example", "id", expectedID), - resource.TestCheckResourceAttrWith("data.google_bigquery_table.example", "schema", func(schema string) error { - var parsedSchema []map[string]interface{} - - if err := json.Unmarshal([]byte(schema), &parsedSchema); err != nil { - return fmt.Errorf("failed to parse schema JSON: %w", err) - } - - if len(parsedSchema) > 0 { - if parsedSchema[0]["name"] != "name" { - return fmt.Errorf("expected fields[0].name to be 'name', got '%v'", parsedSchema[0]["name"]) - } - if parsedSchema[0]["type"] != "STRING" { - return fmt.Errorf("expected fields[0].type to be 'STRING', got '%v'", parsedSchema[0]["type"]) - } - if parsedSchema[0]["mode"] != "NULLABLE" { - return fmt.Errorf("expected fields[0].mode to be 'NULLABLE', got '%v'", parsedSchema[0]["mode"]) - } - } - - if len(parsedSchema) > 2 { - if parsedSchema[2]["name"] != "address" { - return fmt.Errorf("expected fields[2].name to be 'address', got '%v'", parsedSchema[2]["name"]) - } - if subFields, ok := parsedSchema[2]["fields"].([]interface{}); ok && len(subFields) > 1 { - 
subField := subFields[1].(map[string]interface{}) - if subField["name"] != "zip" { - return fmt.Errorf("expected fields[2].fields[1].name to be 'zip', got '%v'", subField["name"]) - } - } - } - - if len(parsedSchema) > 4 { - if parsedSchema[4]["name"] != "policy_tag_test" { - return fmt.Errorf("expected fields[4].name to be 'policy_tag_test', got '%v'", parsedSchema[4]["name"]) - } - if policyTags, ok := parsedSchema[4]["policyTags"].(map[string]interface{}); ok { - if names, ok := policyTags["names"].([]interface{}); ok && len(names) > 0 { - if !regexp.MustCompile("^projects/[^/]+/locations/us-central1/taxonomies/[^/]+/policyTags/[^/]+$").MatchString(names[0].(string)) { - return fmt.Errorf("policy tag does not match expected pattern") - } - } - } - } - - return nil - }), - ), - }, - }, - }) -} - -func testAccDataSourceGoogleBigqueryTable_basic(context map[string]interface{}) string { - return acctest.Nprintf(` - - resource "google_data_catalog_policy_tag" "test" { - taxonomy = google_data_catalog_taxonomy.test.id - display_name = "Low security" - description = "A policy tag normally associated with low security items" - } - - resource "google_data_catalog_taxonomy" "test" { - region = "us-central1" - display_name = "taxonomy_%{random_suffix}" - description = "A collection of policy tags" - activated_policy_types = ["FINE_GRAINED_ACCESS_CONTROL"] - } - - resource "google_bigquery_dataset" "test" { - dataset_id = "tf_test_ds_%{random_suffix}" - friendly_name = "testing" - description = "This is a test description" - location = "us-central1" - default_table_expiration_ms = 3600000 - } - - resource "google_bigquery_table" "test" { - dataset_id = google_bigquery_dataset.test.dataset_id - table_id = "tf_test_table_%{random_suffix}" - deletion_protection = false - depends_on = [google_data_catalog_policy_tag.test] - schema = < 0 { - autogeneratedFieldsJson, err := json.Marshal(autogeneratedFields) - if err != nil { - return fmt.Errorf("error marshalling autogenerated 
schema fields: %w", err) - } - if err := d.Set("generated_schema_columns", string(autogeneratedFieldsJson)); err != nil { - return fmt.Errorf("error setting generated_schema_columns: %w", err) - } - } else { - d.Set("generated_schema_columns", "") - } - } else { - // If not ignoring, ensure the field is cleared - d.Set("generated_schema_columns", "") - } - schema, err := flattenSchema(schemaFiltered) + schema, err := flattenSchema(res.Schema) if err != nil { return err } @@ -2158,28 +2052,9 @@ type TableReference struct { tableID string } -func addAutoGenSchemaFields(d *schema.ResourceData, table *bigquery.Table) error { - // When ignore_auto_generated_schema is true, we must include the autogenerated fields - // in the update payload to avoid the API thinking we're trying to delete them. - if ignore, enabled := d.Get("ignore_auto_generated_schema").(bool); enabled && ignore { - // Only proceed if the table has a schema to begin with. - if table.Schema != nil { - if autogenStr, ok := d.Get("generated_schema_columns").(string); ok && autogenStr != "" { - var autogenFields []*bigquery.TableFieldSchema - if err := json.Unmarshal([]byte(autogenStr), &autogenFields); err != nil { - return fmt.Errorf("failed to unmarshal autogenerated schema fields: %w", err) - } - table.Schema.Fields = append(table.Schema.Fields, autogenFields...) - log.Printf("[DEBUG] Appended %d autogenerated fields to schema for update", len(autogenFields)) - } - } - } - return nil -} - func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error { // If only client-side fields were modified, short-circuit the Update function to avoid sending an update API request. 
- clientSideFields := map[string]bool{"deletion_protection": true, "ignore_schema_changes": true, "ignore_auto_generated_schema": true, "table_metadata_view": true} + clientSideFields := map[string]bool{"deletion_protection": true, "table_metadata_view": true} clientSideOnly := true for field := range ResourceBigQueryTable().Schema { if d.HasChange(field) && !clientSideFields[field] { @@ -2202,10 +2077,6 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error return err } - if err := addAutoGenSchemaFields(d, table); err != nil { - return err - } - if table.ExternalDataConfiguration != nil && table.ExternalDataConfiguration.Schema != nil { log.Printf("[INFO] Removing ExternalDataConfiguration.Schema when updating BigQuery table %s", d.Id()) table.ExternalDataConfiguration.Schema = nil @@ -2231,11 +2102,8 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error tableID: tableID, } - // If we are supposed to ignore server generated schema columns, we don't need to drop them - if !d.Get("ignore_auto_generated_schema").(bool) { - if err = resourceBigQueryTableColumnDrop(config, userAgent, table, tableReference, tableMetadataView); err != nil { - return err - } + if err = resourceBigQueryTableColumnDrop(config, userAgent, table, tableReference, tableMetadataView); err != nil { + return err } if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, tableID, table).Do(); err != nil { @@ -3052,15 +2920,12 @@ func flattenRangePartitioning(rp *bigquery.RangePartitioning) []map[string]inter return []map[string]interface{}{result} } -func expandView(d *schema.ResourceData) *bigquery.ViewDefinition { - v, _ := d.GetOk("view") - raw := v.([]interface{})[0].(map[string]interface{}) +func expandView(configured interface{}) *bigquery.ViewDefinition { + raw := configured.([]interface{})[0].(map[string]interface{}) vd := &bigquery.ViewDefinition{Query: raw["query"].(string)} - configValue := 
d.GetRawConfig().GetAttr("view").Index(cty.NumberIntVal(0)).AsValueMap() - useLegacySQLValue := configValue["use_legacy_sql"] - if !useLegacySQLValue.IsNull() { - vd.UseLegacySql = useLegacySQLValue.RawEquals(cty.True) + if v, ok := raw["use_legacy_sql"]; ok { + vd.UseLegacySql = v.(bool) vd.ForceSendFields = append(vd.ForceSendFields, "UseLegacySql") } @@ -3505,9 +3370,9 @@ func flattenSerDeInfo(si *bigquery.SerDeInfo) []map[string]interface{} { func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl index 97c634f5784c..720a051337e3 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_meta.yaml.tmpl @@ -82,7 +82,6 @@ fields: - field: 'external_data_configuration.parquet_options.enum_as_string' - field: 'external_data_configuration.reference_file_schema_uri' - field: 'external_data_configuration.schema' - json: true - field: 'external_data_configuration.source_format' - field: 'external_data_configuration.source_uris' - field: 'friendly_name' @@ -105,7 +104,6 @@ fields: - field: 'require_partition_filter' - field: 'resource_tags' - field: 'schema' - json: true {{- if ne $.TargetVersionName "ga" }} - field: 'schema_foreign_type_info.type_system' {{- end }} diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go 
b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index 08f8863cfff8..2160f6b3d066 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -30,7 +30,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -39,52 +39,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, - }, - }, - }) -} - -func TestAccBigQueryTable_IgnoreSchemaDataPoliciesChanges(t *testing.T) { - t.Parallel() - - projectID := envvar.GetTestProjectFromEnv() - random_suffix := acctest.RandString(t, 10) - datasetID := fmt.Sprintf("tf_test_dataset_%s", random_suffix) - tableID := fmt.Sprintf("tf_test_table_%s", random_suffix) - dataPolicyID1 := fmt.Sprintf("tf_test_data_policy_%s", random_suffix) - dataPolicyName1 := fmt.Sprintf("projects/%s/locations/us-central1/dataPolicies/%s", projectID, dataPolicyID1) - dataPolicyID2 := fmt.Sprintf("tf_test_data_policy_%s", acctest.RandString(t, 10)) - dataPolicyName2 := fmt.Sprintf("projects/%s/locations/us-central1/dataPolicies/%s", projectID, dataPolicyID2) - dataCatTaxonomy := fmt.Sprintf("tf_test_taxonomy_%s", random_suffix) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName1), - }, - { - ResourceName: "google_bigquery_table.test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "ignore_schema_changes"}, - }, - { - Config: testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName2), - PlanOnly: true, - ExpectNonEmptyPlan: false, - }, - { - Config: testAccBigQueryTableUpdated(datasetID, tableID), - }, - { - ResourceName: "google_bigquery_table.test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "ignore_schema_changes"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -108,7 +63,7 @@ func TestAccBigQueryTable_TableMetadataView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "last_modified_time", "table_metadata_view"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "last_modified_time", "table_metadata_view"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -117,7 +72,7 @@ func TestAccBigQueryTable_TableMetadataView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "last_modified_time", "table_metadata_view"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "last_modified_time", "table_metadata_view"}, }, }, }) @@ -141,7 +96,7 @@ func TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) { ResourceName: 
"google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableBasicSchema(datasetID, tableID), @@ -150,7 +105,7 @@ func TestAccBigQueryTable_OnlyDeletionProtectionUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -174,7 +129,7 @@ func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableTimePartitioningWithExpirationMs(datasetID, tableID, 2000), @@ -183,7 +138,7 @@ func TestAccBigQueryTable_OnlyNestedFieldUpdate(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -207,7 +162,7 @@ func TestAccBigQueryTable_DropColumns(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableTimePartitioningDropColumnsUpdate(datasetID, tableID), @@ -216,7 +171,7 @@ func TestAccBigQueryTable_DropColumns(t *testing.T) { 
ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -242,7 +197,7 @@ func TestAccBigQueryTable_Kms(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -266,7 +221,7 @@ func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -275,7 +230,7 @@ func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -299,7 +254,7 @@ func TestAccBigQueryTable_MonthlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -308,7 +263,7 @@ func TestAccBigQueryTable_MonthlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -332,7 +287,7 @@ func TestAccBigQueryTable_YearlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableUpdated(datasetID, tableID), @@ -341,7 +296,7 @@ func TestAccBigQueryTable_YearlyTimePartitioning(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -366,7 +321,7 @@ func TestAccBigQueryTable_HivePartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -391,7 +346,7 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"}, }, { Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "new-label"), @@ -400,7 +355,7 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) { ResourceName: resourceName, ImportState: true, 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"}, }, }, }) @@ -426,7 +381,7 @@ func TestAccBigQueryTable_AvroPartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -452,7 +407,7 @@ func TestAccBigQueryBigLakeManagedTable(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -520,7 +475,7 @@ func TestAccBigQueryExternalDataTable_json(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection"}, }, { Config: testAccBigQueryTableJson(datasetID, tableID, bucketName, "UTF-16BE"), @@ -547,7 +502,7 @@ func TestAccBigQueryTable_RangePartitioning(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -571,7 +526,7 @@ func TestAccBigQueryTable_PrimaryKey(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -598,7 +553,7 @@ func TestAccBigQueryTable_ForeignKey(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -625,7 +580,7 @@ func TestAccBigQueryTable_updateTableConstraints(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableTableConstraintsUpdate(projectID, datasetID, tableID_pk, tableID_fk), @@ -634,7 +589,7 @@ func TestAccBigQueryTable_updateTableConstraints(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -658,7 +613,7 @@ func TestAccBigQueryTable_View(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -682,7 +637,7 @@ func TestAccBigQueryTable_updateView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { 
Config: testAccBigQueryTableWithNewSqlView(datasetID, tableID), @@ -691,7 +646,7 @@ func TestAccBigQueryTable_updateView(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -715,7 +670,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description2"), @@ -724,7 +679,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -751,13 +706,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Basic(t *testing. 
ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { Config: testAccBigQueryTableWithMatViewDailyTimePartitioning_basic(datasetID, tableID, materialized_viewID, queryNew), @@ -766,13 +721,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Basic(t *testing. ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, }, }) @@ -802,13 +757,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { 
ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { Config: testAccBigQueryTableWithMatViewDailyTimePartitioning(datasetID, tableID, materialized_viewID, enable_refresh, refresh_interval_ms, query), @@ -817,13 +772,13 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, }, }) @@ -850,13 +805,13 @@ func TestAccBigQueryTable_MaterializedView_NonIncremental_basic(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, }, { ResourceName: "google_bigquery_table.mv_test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", 
"last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "require_partition_filter", "time_partitioning.0.require_partition_filter"}, }, }, }) @@ -1208,45 +1163,6 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_InvalidSchemas(t *testing.T }) } -func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionIDAndHivePartitioning(t *testing.T) { - t.Parallel() - - bucketName := acctest.TestBucketName(t) - objectName := fmt.Sprintf("country_partitioned=US/tf_test_%s.csv", acctest.RandString(t, 10)) - - datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) - tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) - connectionID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) - - projectID := envvar.GetTestProjectFromEnv() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionIdAndHivePartitioning(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), - }, - { - ResourceName: "google_bigquery_table.test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "schema"}, - }, - { - Config: testAccBigQueryTableFromGCSWithSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), - }, - { - ResourceName: "google_bigquery_table.test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", 
"last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns", "schema"}, - }, - }, - }) -} - func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConnectionID(t *testing.T) { t.Parallel() @@ -1271,7 +1187,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConn ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { Config: testAccBigQueryTableFromGCSWithSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1280,7 +1196,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConn ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, }, }) @@ -1310,7 +1226,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionId(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1319,7 +1235,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: 
"google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { Config: testAccBigQueryTableFromGCSWithSchemaWithConnectionId2(datasetID, tableID, connectionID, projectID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1328,7 +1244,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateToConnectionID(t *tes ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, }, }) @@ -1355,7 +1271,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateAllowQuotedNewlines(t ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, { Config: testAccBigQueryTableFromGCSWithSchema_UpdatAllowQuotedNewlines(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_SIMPLE_CSV_SCHEMA), @@ -1364,7 +1280,7 @@ func TestAccBigQueryExternalDataTable_CSV_WithSchema_UpdateAllowQuotedNewlines(t ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection"}, }, }, }) @@ -1392,7 +1308,7 @@ func 
TestAccBigQueryDataTable_bigtable(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1420,7 +1336,7 @@ func TestAccBigQueryDataTable_bigtable_options(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableFromBigtable(context), @@ -1448,7 +1364,7 @@ func TestAccBigQueryDataTable_sheet(t *testing.T) { ResourceName: "google_bigquery_table.table", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1472,7 +1388,7 @@ func TestAccBigQueryDataTable_jsonEquivalency(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, }, { Config: testAccBigQueryTable_jsonEqModeRemoved(datasetID, tableID), @@ -1481,7 +1397,7 @@ func TestAccBigQueryDataTable_jsonEquivalency(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, + 
ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, }, }, }) @@ -1531,7 +1447,7 @@ func TestAccBigQueryDataTable_expandArray(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, }, { Config: testAccBigQueryTable_arrayExpanded(datasetID, tableID), @@ -1540,7 +1456,7 @@ func TestAccBigQueryDataTable_expandArray(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"etag", "last_modified_time", "deletion_protection", "labels", "terraform_labels"}, }, }, }) @@ -1564,7 +1480,7 @@ func TestAccBigQueryTable_allowDestroy(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "labels", "terraform_labels", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "labels", "terraform_labels"}, }, { Config: testAccBigQueryTable_noAllowDestroy(datasetID, tableID), @@ -1596,7 +1512,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: 
testAccBigQueryTable_emptySchema(datasetID, tableID), @@ -1605,7 +1521,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1630,7 +1546,7 @@ func TestAccBigQueryTable_Update_SchemaWithoutPolicyTagsToWithPolicyTags(t *test ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableBasicSchemaWithPolicyTags(datasetID, tableID, projectID), @@ -1639,7 +1555,7 @@ func TestAccBigQueryTable_Update_SchemaWithoutPolicyTagsToWithPolicyTags(t *test ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1664,7 +1580,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToNoPolicyTag(t *testing.T) ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableBasicSchema(datasetID, tableID), @@ -1673,7 +1589,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToNoPolicyTag(t *testing.T) ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + 
ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1698,7 +1614,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTag(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableBasicSchemaWithEmptyPolicyTags(datasetID, tableID), @@ -1707,7 +1623,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTag(t *testing ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1732,7 +1648,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTagNames(t *te ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableBasicSchemaWithEmptyPolicyTagNames(datasetID, tableID), @@ -1741,7 +1657,7 @@ func TestAccBigQueryTable_Update_SchemaWithPolicyTagsToEmptyPolicyTagNames(t *te ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1840,7 +1756,7 @@ func TestAccBigQueryTable_TableReplicationInfo_WithoutReplicationInterval(t *tes ResourceName: "google_bigquery_table.replica_mv", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", 
"ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1875,7 +1791,7 @@ func TestAccBigQueryTable_TableReplicationInfo_WithReplicationInterval(t *testin ResourceName: "google_bigquery_table.replica_mv", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1906,7 +1822,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTableWithResourceTagsUpdate(context), @@ -1915,7 +1831,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, // testAccBigQueryTableWithResourceTagsDestroy must be called at the end of this test to clear the resource tag bindings of the table before deletion. 
{ @@ -1925,7 +1841,7 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1953,7 +1869,7 @@ func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccBigQueryTable_externalCatalogTableOptions_update(context), @@ -1962,7 +1878,7 @@ func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1989,7 +1905,7 @@ func TestAccBigQueryTable_foreignTypeInfo(t *testing.T) { ResourceName: "google_bigquery_table.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -2065,102 +1981,6 @@ EOH `, datasetID, tableID) } -func testAccBigQueryTableDataPolicies(datasetID, tableID, dataPolicyID1, dataPolicyID2, dataCatTaxonomy, dataPolicyName string) string { - return fmt.Sprintf(` -resource "google_bigquery_dataset" "test" { - location = "us-central1" - dataset_id = "%s" -} - -resource "google_bigquery_datapolicy_data_policy" "data_policy1" { - location = "us-central1" - data_policy_id = "%s" - policy_tag = 
google_data_catalog_policy_tag.policy_tag.name - data_policy_type = "DATA_MASKING_POLICY" - data_masking_policy { - predefined_expression = "SHA256" - } -} - -resource "google_bigquery_datapolicy_data_policy" "data_policy2" { - location = "us-central1" - data_policy_id = "%s" - policy_tag = google_data_catalog_policy_tag.policy_tag.name - data_policy_type = "DATA_MASKING_POLICY" - data_masking_policy { - predefined_expression = "FIRST_FOUR_CHARACTERS" - } -} - -resource "google_data_catalog_policy_tag" "policy_tag" { - taxonomy = google_data_catalog_taxonomy.taxonomy.id - display_name = "Low security" - description = "A policy tag normally associated with low security items" -} - -resource "google_data_catalog_taxonomy" "taxonomy" { - region = "us-central1" - display_name = "%s" - description = "A collection of policy tags" - activated_policy_types = ["FINE_GRAINED_ACCESS_CONTROL"] -} - -resource "google_bigquery_table" "test" { - depends_on = [google_bigquery_datapolicy_data_policy.data_policy1, google_bigquery_datapolicy_data_policy.data_policy2] - deletion_protection = false - table_id = "%s" - dataset_id = google_bigquery_dataset.test.dataset_id - - ignore_schema_changes = [ - "dataPolicies" - ] - - schema = <[^/]+)/instances/(?P[^/]+)/tables/(?P
[^/]+)"}, d, config, d.Id()) + m, err := tpgresource.GetImportIdQualifiers([]string{"projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P
[^/]+)"}, d, config, d.Id()) if err != nil { return err } @@ -77,7 +77,7 @@ func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Conf return fmt.Errorf("Error setting project: %s", err) } - if err := d.Set("instance_name", values["instance_name"]); err != nil { + if err := d.Set("instance", values["instance"]); err != nil { return fmt.Errorf("Error setting instance: %s", err) } @@ -86,7 +86,7 @@ func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Conf } // Explicitly set the id so imported resources have the same ID format as non-imported ones. - d.SetId(fmt.Sprintf("projects/%s/instances/%s/tables/%s", project, values["instance_name"], values["table"])) + d.SetId(fmt.Sprintf("projects/%s/instances/%s/tables/%s", project, values["instance"], values["table"])) return nil } @@ -133,13 +133,13 @@ func (u *BigtableTableIamUpdater) SetResourceIamPolicy(policy *cloudresourcemana } func (u *BigtableTableIamUpdater) GetResourceId() string { - return fmt.Sprintf("projects/%s/instances/%s/tables/%s", u.project, u.instanceName, u.table) + return fmt.Sprintf("projects/%s/instances/%s/tables/%s", u.project, u.instance, u.table) } func (u *BigtableTableIamUpdater) GetMutexKey() string { - return fmt.Sprintf("iam-bigtable-instance-%s-%s-%s", u.project, u.instanceName, u.table) + return fmt.Sprintf("iam-bigtable-instance-%s-%s-%s", u.project, u.instance, u.table) } func (u *BigtableTableIamUpdater) DescribeResource() string { - return fmt.Sprintf("Bigtable Table %s/%s-%s", u.project, u.instanceName, u.table) + return fmt.Sprintf("Bigtable Table %s/%s-%s", u.project, u.instance, u.table) } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go index b34ff48e2388..4a26b6e82a8b 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go +++ 
b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go @@ -355,9 +355,9 @@ func resourceBigtableAuthorizedViewDestroy(d *schema.ResourceData, meta interfac func resourceBigtableAuthorizedViewImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index c51d6b8c68ff..c3afbf79c873 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -152,14 +152,6 @@ func ResourceBigtableInstance() *schema.Resource { Computed: true, Description: `The state of the cluster`, }, - "node_scaling_factor": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Default: "NodeScalingFactor1X", - ValidateFunc: validation.StringInSlice([]string{"NodeScalingFactor1X", "NodeScalingFactor2X"}, false), - Description: `The node scaling factor of this cluster. One of "NodeScalingFactor1X" or "NodeScalingFactor2X". 
Defaults to "NodeScalingFactor1X".`, - }, }, }, }, @@ -529,24 +521,13 @@ func flattenBigtableCluster(c *bigtable.ClusterInfo) map[string]interface{} { storageType = "HDD" } - var nodeScalingFactor string - switch c.NodeScalingFactor { - case bigtable.NodeScalingFactor1X: - nodeScalingFactor = "NodeScalingFactor1X" - case bigtable.NodeScalingFactor2X: - nodeScalingFactor = "NodeScalingFactor2X" - default: - nodeScalingFactor = "NodeScalingFactor1X" - } - cluster := map[string]interface{}{ - "zone": c.Zone, - "num_nodes": c.ServeNodes, - "cluster_id": c.Name, - "storage_type": storageType, - "kms_key_name": c.KMSKeyName, - "state": c.State, - "node_scaling_factor": nodeScalingFactor, + "zone": c.Zone, + "num_nodes": c.ServeNodes, + "cluster_id": c.Name, + "storage_type": storageType, + "kms_key_name": c.KMSKeyName, + "state": c.State, } if c.AutoscalingConfig != nil { cluster["autoscaling_config"] = make([]map[string]interface{}, 1) @@ -629,21 +610,12 @@ func expandBigtableClusters(clusters []interface{}, instanceID string, config *t storageType = bigtable.HDD } - var nodeScalingFactor bigtable.NodeScalingFactor - switch cluster["node_scaling_factor"].(string) { - case "NodeScalingFactor1X": - nodeScalingFactor = bigtable.NodeScalingFactor1X - case "NodeScalingFactor2X": - nodeScalingFactor = bigtable.NodeScalingFactor2X - } - cluster_config := bigtable.ClusterConfig{ - InstanceID: instanceID, - Zone: zone, - ClusterID: cluster["cluster_id"].(string), - StorageType: storageType, - KMSKeyName: cluster["kms_key_name"].(string), - NodeScalingFactor: nodeScalingFactor, + InstanceID: instanceID, + Zone: zone, + ClusterID: cluster["cluster_id"].(string), + StorageType: storageType, + KMSKeyName: cluster["kms_key_name"].(string), } autoscaling_configs := cluster["autoscaling_config"].([]interface{}) if len(autoscaling_configs) > 0 { @@ -782,7 +754,7 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo return err } - // Clusters can't have 
their zone, storage_type, kms_key_name, or node_scaling_factor updated, + // Clusters can't have their zone, storage_type or kms_key_name updated, // ForceNew if it's changed. This will show a diff with the old state on // the left side and the unmodified new state on the right and the ForceNew // attributed to the _old state index_ even if the diff appears to have moved. @@ -829,9 +801,9 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo func resourceBigtableInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go index 8562a35c72d6..7490932c314b 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_internal_test.go @@ -167,28 +167,24 @@ func TestUnitBigtable_flattenBigtableCluster(t *testing.T) { "storage_target": 60, }, }, - // unspecified node scaling factor in input will default to 1X - "node_scaling_factor": "NodeScalingFactor1X", }, }, "HDD manual scaling": { clusterInfo: &bigtable.ClusterInfo{ - StorageType: bigtable.HDD, - Zone: "zone2", - ServeNodes: 7, - Name: "hdd-cluster", - KMSKeyName: "KMS", - State: "READY", - NodeScalingFactor: bigtable.NodeScalingFactor2X, + StorageType: bigtable.HDD, + Zone: "zone2", + ServeNodes: 7, + Name: "hdd-cluster", + KMSKeyName: "KMS", + State: "READY", }, want: map[string]interface{}{ - "zone": "zone2", - "num_nodes": 7, - "cluster_id": 
"hdd-cluster", - "storage_type": "HDD", - "kms_key_name": "KMS", - "state": "READY", - "node_scaling_factor": "NodeScalingFactor2X", + "zone": "zone2", + "num_nodes": 7, + "cluster_id": "hdd-cluster", + "storage_type": "HDD", + "kms_key_name": "KMS", + "state": "READY", }, }, } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go index 9ff69fb1f1ec..52aef858126e 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance_test.go @@ -196,7 +196,7 @@ func TestAccBigtableInstance_kms(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back }, - // TODO: Verify that the instance can be recreated due to `kms_key_name` change. + // TODO(kevinsi4508): Verify that the instance can be recreated due to `kms_key_name` change. { Config: testAccBigtableInstance_kms(pid, instanceName, kms2.CryptoKey.Name, 3), PlanOnly: true, @@ -546,101 +546,6 @@ func TestAccBigtableInstance_forceDestroyBackups(t *testing.T) { }) } -func TestAccBigtableInstance_createWithNodeScalingFactorDefault(t *testing.T) { - // bigtable instance does not use the shared HTTP client, this test creates an instance - acctest.SkipIfVcr(t) - t.Parallel() - - instanceName := fmt.Sprintf("tf-test-nsf-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigtableInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - // Create config with nothing specified for node scaling factor. - // Ensure that we get 1X back. 
- Config: testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName, 2, ""), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.num_nodes", "2"), - resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.node_scaling_factor", "NodeScalingFactor1X"), - ), - }, - { - ResourceName: "google_bigtable_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back - }, - }, - }) -} - -func TestAccBigtableInstance_createWithNodeScalingFactorThenUpdateViaForceNew(t *testing.T) { - // bigtable instance does not use the shared HTTP client, this test creates an instance - acctest.SkipIfVcr(t) - t.Parallel() - - instanceName := fmt.Sprintf("tf-test-nsf-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigtableInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - // Create config with node scaling factor as 2x. 
- Config: testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName, 2, "NodeScalingFactor2X"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.num_nodes", "2"), - resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.node_scaling_factor", "NodeScalingFactor2X"), - ), - }, - { - ResourceName: "google_bigtable_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back - }, - { - // Updating the node scaling factor only possible without delete protection, as we need ForceNew - Config: testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName, 2, "NodeScalingFactor1X"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.num_nodes", "2"), - resource.TestCheckResourceAttr("google_bigtable_instance.instance", "cluster.0.node_scaling_factor", "NodeScalingFactor1X"), - ), - }, - { - ResourceName: "google_bigtable_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, // we don't read instance type back - }, - }, - }) -} - -func testAccBigtableInstance_nodeScalingFactor_allowDestroy(instanceName string, numNodes int, nodeScalingFactor string) string { - nodeScalingFactorAttribute := "" - if nodeScalingFactor != "" { - nodeScalingFactorAttribute = fmt.Sprintf("node_scaling_factor = \"%s\"", nodeScalingFactor) - } - return fmt.Sprintf(` -resource "google_bigtable_instance" "instance" { - name = "%s" - cluster { - cluster_id = "%s" - zone = "us-central1-b" - num_nodes = %d - storage_type = "SSD" - %s - } - deletion_protection = false -} -`, instanceName, instanceName, numNodes, nodeScalingFactorAttribute) -} - func testAccBigtableInstance_multipleClustersSameID(instanceName string) 
string { return fmt.Sprintf(` resource "google_bigtable_instance" "instance" { diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_logical_view_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_logical_view_test.go index a6e3c39c6301..2328899a7dee 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_logical_view_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_logical_view_test.go @@ -19,14 +19,14 @@ func TestAccBigtableLogicalView_update(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - lvName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + mvName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccBigtableLogicalView_update(instanceName, tableName, lvName, "col1", true), + Config: testAccBigtableLogicalView_update(instanceName, tableName, mvName, "col1"), }, { ResourceName: "google_bigtable_logical_view.logical_view", @@ -34,7 +34,7 @@ func TestAccBigtableLogicalView_update(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccBigtableLogicalView_update(instanceName, tableName, lvName, "col2", false), + Config: testAccBigtableLogicalView_update(instanceName, tableName, mvName, "col2"), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ @@ -51,7 +51,7 @@ func TestAccBigtableLogicalView_update(t *testing.T) { }) } -func testAccBigtableLogicalView_update(instanceName, tableName, lvName, colName string, dp bool) string { +func testAccBigtableLogicalView_update(instanceName, tableName, mvName, colName string) string { return fmt.Sprintf(` resource "google_bigtable_instance" "instance" { name = "%s" @@ -75,7 +75,6 @@ resource 
"google_bigtable_table" "table" { resource "google_bigtable_logical_view" "logical_view" { logical_view_id = "%s" instance = google_bigtable_instance.instance.name - deletion_protection = %v query = <[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } @@ -737,15 +687,3 @@ func getType(input interface{}) (bigtable.Type, error) { } return output, nil } - -func getRowKeySchema(input interface{}) (*bigtable.StructType, error) { - rks, err := getType(input) - if err != nil { - return nil, err - } - structRks, ok := rks.(bigtable.StructType) - if !ok { - return nil, fmt.Errorf("only struct type is accepted as row key schema") - } - return &structRks, nil -} diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go index dd31e17eb757..e729f0a49a81 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go @@ -140,10 +140,10 @@ resource "google_service_account" "test-account2" { } resource "google_bigtable_table_iam_binding" "binding" { - instance_name = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - members = [ + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + members = [ "serviceAccount:${google_service_account.test-account1.email}", ] } @@ -163,10 +163,10 @@ resource "google_service_account" "test-account2" { } resource "google_bigtable_table_iam_binding" "binding" { - instance_name = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - members = [ + 
instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + members = [ "serviceAccount:${google_service_account.test-account1.email}", "serviceAccount:${google_service_account.test-account2.email}", ] @@ -182,10 +182,10 @@ resource "google_service_account" "test-account" { } resource "google_bigtable_table_iam_member" "member" { - instance_name = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - member = "serviceAccount:${google_service_account.test-account.email}" + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + member = "serviceAccount:${google_service_account.test-account.email}" } `, instance, cluster, cluster, account, role) } @@ -205,14 +205,14 @@ data "google_iam_policy" "policy" { } resource "google_bigtable_table_iam_policy" "policy" { - instance_name = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - policy_data = data.google_iam_policy.policy.policy_data + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + policy_data = data.google_iam_policy.policy.policy_data } data "google_bigtable_table_iam_policy" "policy" { - instance_name = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name } `, instance, cluster, cluster, account, role) diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml index fb0dfb987c44..18341ecb1a02 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_meta.yaml @@ -13,6 +13,4 @@ fields: - field: 'instance_name' - field: 'name' - field: 'project' - - 
field: 'row_key_schema' - json: true - field: 'split_keys' diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go index 2cdb5930380e..477dd587894a 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_test.go @@ -147,97 +147,6 @@ func TestAccBigtableTable_familyType(t *testing.T) { }) } -func TestAccBigtableTable_testTableWithRowKeySchema(t *testing.T) { - // bigtable instance does not use the shared HTTP client, this test creates an instance - acctest.SkipIfVcr(t) - t.Parallel() - - instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - family := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigtableTableDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccBigtableTable_rowKeySchema(instanceName, tableName, family, `{ - "structType": { - "fields": [{ - "fieldName": "myfield", - "type": { - "stringType": { "encoding": { "utf8Bytes": { } } } - } - }], - "encoding": { "orderedCodeBytes": { } } - } - }`), - Check: resource.ComposeTestCheckFunc( - testAccBigtableRowKeySchemaExists(t, "google_bigtable_table.table", true), - ), - }, - { - ResourceName: "google_bigtable_table.table", - ImportState: true, - ImportStateVerify: true, - }, - { - // In-place modification is not accepted - Config: testAccBigtableTable_rowKeySchema(instanceName, tableName, family, `{ - "structType": { - "fields": [{ - "fieldName": "newfieldname", - "type": { - "stringType": { "encoding": { "utf8Bytes": { } } } - } - }], - "encoding": { "orderedCodeBytes": { } } - } - }`), - 
ExpectError: regexp.MustCompile(".*Row key schema in-place modification is not allowed.*"), - }, - { - // Removing the schema is ok - Config: testAccBigtableTable_family(instanceName, tableName, family), - Check: resource.ComposeTestCheckFunc( - testAccBigtableRowKeySchemaExists(t, "google_bigtable_table.table", false), - ), - }, - { - ResourceName: "google_bigtable_table.table", - ImportState: true, - ImportStateVerify: true, - }, - // Set the schema to a new one is ok - { - Config: testAccBigtableTable_rowKeySchema(instanceName, tableName, family, `{ - "structType": { - "fields": [ - { - "fieldName": "mystringfield", - "type": { - "stringType": { "encoding": { "utf8Bytes": { } } } - } - }, - { - "fieldName": "myintfield", - "type": { - "int64Type": { "encoding": { "bigEndianBytes": { } } } - } - } - ], - "encoding": { "delimitedBytes": { "delimiter": "Iw==" } } - } - }`), - Check: resource.ComposeTestCheckFunc( - testAccBigtableRowKeySchemaExists(t, "google_bigtable_table.table", true), - ), - }, - }, - }) -} - func TestAccBigtableTable_deletion_protection_protected(t *testing.T) { // bigtable instance does not use the shared HTTP client, this test creates an instance acctest.SkipIfVcr(t) @@ -623,45 +532,6 @@ func TestAccBigtableTable_automated_backups(t *testing.T) { }) } -func TestAccBigtableTable_automated_backups_explicitly_disabled_on_create(t *testing.T) { - // bigtable instance does not use the shared HTTP client, this test creates an instance - acctest.SkipIfVcr(t) - t.Parallel() - - instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - family := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigtableTableDestroyProducer(t), - Steps: []resource.TestStep{ - // Creating a table with 
automated backup explicitly disabled - { - Config: testAccBigtableTable_automated_backups(instanceName, tableName, "0", "0", family), - Check: resource.ComposeTestCheckFunc(verifyBigtableAutomatedBackupsEnablementState(t, false)), - }, - { - ResourceName: "google_bigtable_table.table", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"automated_backup_policy"}, // ImportStateVerify doesn't use CustomizeDiff function - }, - // it is possible to delete the table when automated backup is disabled - { - Config: testAccBigtableTable_destroyTable(instanceName), - }, - { - ResourceName: "google_bigtable_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "instance_type"}, - }, - }, - }) -} - func TestAccBigtableTable_familyMany(t *testing.T) { // bigtable instance does not use the shared HTTP client, this test creates an instance acctest.SkipIfVcr(t) @@ -783,35 +653,6 @@ func testAccBigtableColumnFamilyExists(t *testing.T, table_name_space, family st } } -func testAccBigtableRowKeySchemaExists(t *testing.T, table_name_space string, expected_has_schema bool) resource.TestCheckFunc { - ctx := context.Background() - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[table_name_space] - if !ok { - return fmt.Errorf("Table not found during schema check: %v", table_name_space) - } - - config := acctest.GoogleProviderConfig(t) - c, err := config.BigTableClientFactory(config.UserAgent).NewAdminClient(config.Project, rs.Primary.Attributes["instance_name"]) - if err != nil { - return fmt.Errorf("Error starting admin client %s", err) - } - defer c.Close() - - table, err := c.TableInfo(ctx, rs.Primary.Attributes["name"]) - if err != nil { - return fmt.Errorf("Error retrieving table. 
Could not find %s in %s", rs.Primary.Attributes["name"], rs.Primary.Attributes["instance_name"]) - } - - actual_has_schema := (table.RowKeySchema != nil) - if actual_has_schema != expected_has_schema { - return fmt.Errorf("expecting table to have row key schema to be %v, got %v", expected_has_schema, actual_has_schema) - } - - return nil - } -} - func testAccBigtableChangeStreamDisabled(t *testing.T) resource.TestCheckFunc { var ctx = context.Background() return func(s *terraform.State) error { @@ -966,35 +807,6 @@ EOF `, instanceName, instanceName, tableName, family, familyType) } -func testAccBigtableTable_rowKeySchema(instanceName, tableName, family, rowKeySchema string) string { - return fmt.Sprintf(` -resource "google_bigtable_instance" "instance" { - name = "%s" - - cluster { - cluster_id = "%s" - zone = "us-central1-b" - } - - instance_type = "DEVELOPMENT" - deletion_protection = false -} - -resource "google_bigtable_table" "table" { - name = "%s" - instance_name = google_bigtable_instance.instance.name - - column_family { - family = "%s" - } - - row_key_schema = <[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } @@ -492,42 +492,11 @@ func ResourceCloudFunctionsFunction() *schema.Resource { }, }, }, - - "automatic_update_policy": { - Type: schema.TypeList, - Optional: true, - Computed: true, - ConflictsWith: []string{"on_deploy_update_policy"}, - MaxItems: 1, - Description: `Security patches are applied automatically to the runtime without requiring the function to be redeployed.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{}, - }, - }, - - "on_deploy_update_policy": { - Type: schema.TypeList, - Optional: true, - ConflictsWith: []string{"automatic_update_policy"}, - MaxItems: 1, - Description: `Security patches are only applied 
when a function is redeployed.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "runtime_version": { - Type: schema.TypeString, - Computed: true, - Description: `The runtime version which was used during latest function deployment.`, - }, - }, - }, - }, - "status": { Type: schema.TypeString, Computed: true, Description: `Describes the current stage of a deployment.`, }, - "version_id": { Type: schema.TypeString, Computed: true, @@ -621,14 +590,6 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro "You must specify a trigger when deploying a new function.") } - if v, ok := d.GetOk("automatic_update_policy"); ok { - function.AutomaticUpdatePolicy = expandAutomaticUpdatePolicy(v.([]interface{})) - function.OnDeployUpdatePolicy = nil - } else if v, ok := d.GetOk("on_deploy_update_policy"); ok { - function.OnDeployUpdatePolicy = expandOnDeployUpdatePolicy(v.([]interface{})) - function.AutomaticUpdatePolicy = nil - } - if v, ok := d.GetOk("ingress_settings"); ok { function.IngressSettings = v.(string) } @@ -847,25 +808,6 @@ func resourceCloudFunctionsRead(d *schema.ResourceData, meta interface{}) error if err := d.Set("version_id", strconv.FormatInt(function.VersionId, 10)); err != nil { return fmt.Errorf("Error setting version_id: %s", err) } - // check the on_deploy_update_policy first as it's mutually exclusive to automatice_update_policy, and the latter is system default - if function.OnDeployUpdatePolicy != nil { - if err := d.Set("on_deploy_update_policy", flattenOnDeployUpdatePolicy(function.OnDeployUpdatePolicy)); err != nil { - return fmt.Errorf("Error setting on_deploy_update_policy: %s", err) - } - function.AutomaticUpdatePolicy = nil - d.Set("automatic_update_policy", nil) - } else { - d.Set("on_deploy_update_policy", nil) - } - - if function.AutomaticUpdatePolicy != nil { - if err := d.Set("automatic_update_policy", flattenAutomaticUpdatePolicy(function.AutomaticUpdatePolicy)); err != nil { - return 
fmt.Errorf("Error setting automatic_update_policy: %s", err) - } - d.Set("on_deploy_update_policy", nil) - } else { - d.Set("automatic_update_policy", nil) - } return nil } @@ -1022,22 +964,6 @@ func resourceCloudFunctionsUpdate(d *schema.ResourceData, meta interface{}) erro updateMaskArr = append(updateMaskArr, "buildServiceAccount") } - if d.HasChange("automatic_update_policy") { - function.AutomaticUpdatePolicy = expandAutomaticUpdatePolicy(d.Get("automatic_update_policy").([]interface{})) - if function.AutomaticUpdatePolicy != nil { - function.OnDeployUpdatePolicy = nil - } - updateMaskArr = append(updateMaskArr, "automatic_update_policy") - } - - if d.HasChange("on_deploy_update_policy") { - function.OnDeployUpdatePolicy = expandOnDeployUpdatePolicy(d.Get("on_deploy_update_policy").([]interface{})) - if function.OnDeployUpdatePolicy != nil { - function.AutomaticUpdatePolicy = nil - } - updateMaskArr = append(updateMaskArr, "on_deploy_update_policy") - } - if len(updateMaskArr) > 0 { log.Printf("[DEBUG] Send Patch CloudFunction Configuration request: %#v", function) updateMask := strings.Join(updateMaskArr, ",") @@ -1306,42 +1232,3 @@ func flattenSecretVersion(secretVersions []*cloudfunctions.SecretVersion) []map[ } return result } - -func expandAutomaticUpdatePolicy(configured []interface{}) *cloudfunctions.AutomaticUpdatePolicy { - if len(configured) == 0 { - return nil - } - return &cloudfunctions.AutomaticUpdatePolicy{} -} - -func flattenAutomaticUpdatePolicy(policy *cloudfunctions.AutomaticUpdatePolicy) []map[string]interface{} { - result := make([]map[string]interface{}, 0, 1) - if policy == nil { - return nil - } - // Have to append an empty element for empty message type - result = append(result, map[string]interface{}{}) - return result -} - -func expandOnDeployUpdatePolicy(configured []interface{}) *cloudfunctions.OnDeployUpdatePolicy { - if len(configured) == 0 { - return nil - } - return &cloudfunctions.OnDeployUpdatePolicy{} -} - -func 
flattenOnDeployUpdatePolicy(policy *cloudfunctions.OnDeployUpdatePolicy) []map[string]interface{} { - result := make([]map[string]interface{}, 0, 1) - if policy == nil { - return nil - } - - result = append(result, map[string]interface{}{ - "runtime_version": policy.RuntimeVersion, - }) - - log.Printf("flatten on_deploy_update_policy to: %s", result) - - return result -} diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml index ffb9b5a0a060..c0a8e21b27b8 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_meta.yaml @@ -51,5 +51,3 @@ fields: - field: 'version_id' - field: 'vpc_connector' - field: 'vpc_connector_egress_settings' - - field: 'automatic_update_policy' - - field: 'on_deploy_update_policy.runtime_version' diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl index 0f3790c0e92f..156116cb011b 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.tmpl @@ -628,70 +628,6 @@ func TestAccCloudFunctionsFunction_buildServiceAccount(t *testing.T) { }) } -func TestAccCloudFunctionsFunction_abiuCRUD(t *testing.T) { - t.Parallel() - - var function cloudfunctions.CloudFunction - - funcResourceName := "google_cloudfunctions_function.function" - functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt(t)) - zipFilePath := acctest.CreateZIPArchiveForCloudFunctionSource(t, testHTTPTriggerPath) - defer 
os.Remove(zipFilePath) // clean up - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudFunctionsFunction_abiuAutomatic(functionName, bucketName, zipFilePath), - Check: resource.ComposeTestCheckFunc( - testAccCloudFunctionsFunctionExists( - t, funcResourceName, &function), - resource.TestCheckResourceAttrSet(funcResourceName, - "automatic_update_policy.#"), - ), - }, - { - ResourceName: funcResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"build_environment_variables", "labels", "terraform_labels"}, - }, - { - Config: testAccCloudFunctionsFunction_abiuOndeploy(functionName, bucketName, zipFilePath), - Check: resource.ComposeTestCheckFunc( - testAccCloudFunctionsFunctionExists( - t, funcResourceName, &function), - resource.TestCheckResourceAttrSet(funcResourceName, - "on_deploy_update_policy.#"), - ), - }, - { - ResourceName: funcResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"build_environment_variables", "labels", "terraform_labels"}, - }, - { - Config: testAccCloudFunctionsFunction_basic(functionName, bucketName, zipFilePath), - Check: resource.ComposeTestCheckFunc( - testAccCloudFunctionsFunctionExists( - t, funcResourceName, &function), - resource.TestCheckResourceAttrSet(funcResourceName, - "automatic_update_policy.#"), - ), - }, - { - ResourceName: funcResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"build_environment_variables", "labels", "terraform_labels"}, - }, - }, - }) -} - func testAccCheckCloudFunctionsFunctionDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -1567,87 +1503,3 @@ resource "google_cloudfunctions_function" "function" { } `, bucketName, 
zipFilePath, saName, serviceAccount, functionName) } - -func testAccCloudFunctionsFunction_abiuAutomatic(functionName string, bucketName string, zipFilePath string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" - uniform_bucket_level_access = true -} - -resource "google_storage_bucket_object" "archive" { - name = "index.zip" - bucket = google_storage_bucket.bucket.name - source = "%s" -} - -resource "google_cloudfunctions_function" "function" { - name = "%s" - runtime = "nodejs20" - description = "test function" - docker_registry = "ARTIFACT_REGISTRY" - available_memory_mb = 128 - source_archive_bucket = google_storage_bucket.bucket.name - source_archive_object = google_storage_bucket_object.archive.name - trigger_http = true - timeout = 61 - entry_point = "helloGET" - ingress_settings = "ALLOW_INTERNAL_ONLY" - labels = { - my-label = "my-label-value" - } - environment_variables = { - TEST_ENV_VARIABLE = "test-env-variable-value" - } - build_environment_variables = { - TEST_ENV_VARIABLE = "test-build-env-variable-value" - } - automatic_update_policy {} - max_instances = 10 - min_instances = 3 -} -`, bucketName, zipFilePath, functionName) -} - -func testAccCloudFunctionsFunction_abiuOndeploy(functionName string, bucketName string, zipFilePath string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" - uniform_bucket_level_access = true -} - -resource "google_storage_bucket_object" "archive" { - name = "index.zip" - bucket = google_storage_bucket.bucket.name - source = "%s" -} - -resource "google_cloudfunctions_function" "function" { - name = "%s" - runtime = "nodejs20" - description = "test function" - docker_registry = "ARTIFACT_REGISTRY" - available_memory_mb = 128 - source_archive_bucket = google_storage_bucket.bucket.name - source_archive_object = google_storage_bucket_object.archive.name - trigger_http = true - timeout = 61 - entry_point = 
"helloGET" - ingress_settings = "ALLOW_INTERNAL_ONLY" - labels = { - my-label = "my-label-value" - } - environment_variables = { - TEST_ENV_VARIABLE = "test-env-variable-value" - } - build_environment_variables = { - TEST_ENV_VARIABLE = "test-build-env-variable-value" - } - on_deploy_update_policy {} - max_instances = 10 - min_instances = 3 -} -`, bucketName, zipFilePath, functionName) -} diff --git a/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go b/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go rename to mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_lookup.go.tmpl diff --git a/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl b/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl index f299956ee9d5..6d03d0d1c2cc 100644 --- a/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudidentity/resource_cloud_identity_group_membership_test.go.tmpl @@ -561,178 +561,3 @@ func testAccCheckCloudIdentityGroupMembershipDestroyProducer(t *testing.T) func( return nil } } - -// Test setting create_ignore_already_exists on an existing resource -func testAccCloudIdentityGroupMembership_existingResourceCreateIgnoreAlreadyExists(t *testing.T) { - context := map[string]interface{}{ - "org_domain": envvar.GetTestOrgDomainFromEnv(t), - "cust_id": envvar.GetTestCustIdFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - id := "groups/groupId/memberships/membershipId" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - 
CheckDestroy: testAccCheckCloudIdentityGroupMembershipDestroyProducer(t), - Steps: []resource.TestStep{ - // The first step creates a new resource with create_ignore_already_exists=false - { - Config: testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context, false), - Check: resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), - }, - { - ResourceName: "google_cloud_identity_group_membership.acceptance", - ImportStateId: id, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"create_ignore_already_exists"}, // Import leaves this field out when false - }, - // The second step updates the resource to have create_ignore_already_exists=true - { - Config: testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context, true), - Check: resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), - }, - }, - }) -} - -// Test the option to ignore ALREADY_EXISTS error from creating a Source Repository. 
-func testAccCloudIdentityGroupMembership_createIgnoreAlreadyExists(t *testing.T) { - context := map[string]interface{}{ - "org_domain": envvar.GetTestOrgDomainFromEnv(t), - "cust_id": envvar.GetTestCustIdFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - id := "groups/groupId/memberships/membershipId" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudIdentityGroupMembershipDestroyProducer(t), - Steps: []resource.TestStep{ - // The first step creates a group membership - { - Config: testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context, false), - Check: resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), - }, - { - ResourceName: "google_cloud_identity_group_membership.acceptance", - ImportStateId: id, - ImportState: true, - ImportStateVerify: true, - }, - // The second step creates a new resource that duplicates with the existing group membership - { - Config: testAccCloudIdentityGroupMembershipDuplicateIgnoreAlreadyExists(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_cloud_identity_group_membership.acceptance", "id", id), - resource.TestCheckResourceAttr("google_cloud_identity_group_membership.duplicate", "id", id), - ), - }, - }, - }) -} - -func testAccCloudIdentityGroupMembershipCreateIgnoreAlreadyExists(context map[string]interface{}, ignore_already_exists bool) string { - context["create_ignore_already_exists"] = fmt.Sprintf("%t", ignore_already_exists) - return acctest.Nprintf(` -resource "google_cloud_identity_group" "group" { - display_name = "tf-test-my-identity-group%{random_suffix}" - - parent = "customers/%{cust_id}" - - group_key { - id = "tf-test-my-identity-group%{random_suffix}@%{org_domain}" - } - - labels = { - "cloudidentity.googleapis.com/groups.discussion_forum" = "" - } -} -resource 
"google_cloud_identity_group" "child-group" { - display_name = "tf-test-my-identity-group%{random_suffix}-child" - - parent = "customers/%{cust_id}" - - group_key { - id = "tf-test-my-identity-group%{random_suffix}-child@%{org_domain}" - } - - labels = { - "cloudidentity.googleapis.com/groups.discussion_forum" = "" - } -} - -resource "google_cloud_identity_group_membership" "acceptance" { - group = google_cloud_identity_group.group.id - - preferred_member_key { - id = google_cloud_identity_group.child-group.group_key[0].id - } - - roles { - name = "MEMBER" - } - - create_ignore_already_exists = %{create_ignore_already_exists} -} -`, context) -} - -func testAccCloudIdentityGroupMembershipDuplicateIgnoreAlreadyExists(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_identity_group" "group" { - display_name = "tf-test-my-identity-group%{random_suffix}" - - parent = "customers/%{cust_id}" - - group_key { - id = "tf-test-my-identity-group%{random_suffix}@%{org_domain}" - } - - labels = { - "cloudidentity.googleapis.com/groups.discussion_forum" = "" - } -} -resource "google_cloud_identity_group" "child-group" { - display_name = "tf-test-my-identity-group%{random_suffix}-child" - - parent = "customers/%{cust_id}" - - group_key { - id = "tf-test-my-identity-group%{random_suffix}-child@%{org_domain}" - } - - labels = { - "cloudidentity.googleapis.com/groups.discussion_forum" = "" - } -} - -resource "google_cloud_identity_group_membership" "acceptance" { - group = google_cloud_identity_group.group.id - - preferred_member_key { - id = google_cloud_identity_group.child-group.group_key[0].id - } - - roles { - name = "MEMBER" - } -} - -resource "google_cloud_identity_group_membership" "duplicate" { - group = google_cloud_identity_group.group.id - - preferred_member_key { - id = google_cloud_identity_group.child-group.group_key[0].id - } - - roles { - name = "MEMBER" - } - - create_ignore_already_exists = true -} -`, context) -} diff 
--git a/mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go b/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go similarity index 53% rename from mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go rename to mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go index b8a916b47a25..c674b097723d 100644 --- a/mmv1/third_party/terraform/services/storageinsights/data_source_storage_insights_dataset_config.go +++ b/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go @@ -1,4 +1,4 @@ -package storageinsights +package cloudrun import ( "fmt" @@ -8,27 +8,27 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -func DataSourceGoogleStorageInsightsDatasetConfig() *schema.Resource { +func DataSourceGoogleCloudRunService() *schema.Resource { - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageInsightsDatasetConfig().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "location", "dataset_config_id") + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceCloudRunService().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "location") tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") return &schema.Resource{ - Read: dataSourceGoogleStorageInsightsDatasetConfigRead, + Read: dataSourceGoogleCloudRunServiceRead, Schema: dsSchema, } } -func dataSourceGoogleStorageInsightsDatasetConfigRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceGoogleCloudRunServiceRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/datasetConfigs/{{dataset_config_id}}") + id, err := tpgresource.ReplaceVars(d, config, "locations/{{location}}/namespaces/{{project}}/services/{{name}}") 
if err != nil { return fmt.Errorf("Error constructing id: %s", err) } d.SetId(id) - err = resourceStorageInsightsDatasetConfigRead(d, meta) + err = resourceCloudRunServiceRead(d, meta) if err != nil { return err } diff --git a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl index e2370c664c30..4ba8b05e14da 100644 --- a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl @@ -1594,177 +1594,6 @@ resource "google_cloud_run_service" "default" { `, name, project) } -func TestAccCloudRunService_cloudRunServiceWithMinInstance(t *testing.T) { - acctest.SkipIfVcr(t) - t.Parallel() - - project := envvar.GetTestProjectFromEnv() - name := "tftest-cloudrun-" + acctest.RandString(t, 6) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunService_cloudRunServiceWithMinInstance(name, project), - }, - { - ResourceName: "google_cloud_run_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, - }, - { - Config: testAccCloudRunService_cloudRunServiceUpdateWithMinInstance(name, project,), - }, - { - ResourceName: "google_cloud_run_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, - }, - }, - }) - } - -func testAccCloudRunService_cloudRunServiceWithMinInstance(name, project string) string { - return fmt.Sprintf(` -resource 
"google_cloud_run_service" "default" { - name = "%s" - location = "us-central1" - - metadata { - namespace = "%s" - annotations = { - generated-by = "magic-modules" - "run.googleapis.com/minScale": "2" - } - } - - template { - spec { - containers { - image = "gcr.io/cloudrun/hello" - } - } - } -} -`, name, project) -} - -func testAccCloudRunService_cloudRunServiceUpdateWithMinInstance(name, project string) string { - return fmt.Sprintf(` -resource "google_cloud_run_service" "default" { - name = "%s" - location = "us-central1" - - metadata { - namespace = "%s" - annotations = { - generated-by = "magic-modules" - "run.googleapis.com/minScale": "5" - } - } - - template { - spec { - containers { - image = "gcr.io/cloudrun/hello" - } - } - } -} -`, name, project) -} - -func TestAccCloudRunService_cloudRunServiceWithManualScaling(t *testing.T) { - acctest.SkipIfVcr(t) - t.Parallel() - - project := envvar.GetTestProjectFromEnv() - name := "tftest-cloudrun-" + acctest.RandString(t, 6) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunService_cloudRunServiceWithManualScaling(name, project), - }, - { - ResourceName: "google_cloud_run_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, - }, - { - Config: testAccCloudRunService_cloudRunServiceUpdateWithManualScaling(name, project,), - }, - { - ResourceName: "google_cloud_run_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"metadata.0.resource_version", "metadata.0.annotations", "metadata.0.labels", "metadata.0.terraform_labels", "status.0.conditions"}, - }, - }, - }) - } - -func 
testAccCloudRunService_cloudRunServiceWithManualScaling(name, project string) string { - return fmt.Sprintf(` -resource "google_cloud_run_service" "default" { - name = "%s" - location = "us-central1" - - metadata { - namespace = "%s" - annotations = { - generated-by = "magic-modules" - "run.googleapis.com/launch-stage": "BETA" - "run.googleapis.com/scalingMode": "manual" - "run.googleapis.com/manualInstanceCount": "2" - } - } - - template { - spec { - containers { - image = "gcr.io/cloudrun/hello" - } - } - } -} -`, name, project) -} - -func testAccCloudRunService_cloudRunServiceUpdateWithManualScaling(name, project string) string { - return fmt.Sprintf(` -resource "google_cloud_run_service" "default" { - name = "%s" - location = "us-central1" - - metadata { - namespace = "%s" - annotations = { - generated-by = "magic-modules" - "run.googleapis.com/launch-stage": "BETA" - "run.googleapis.com/scalingMode": "manual" - "run.googleapis.com/manualInstanceCount": "5" - - } - } - - template { - spec { - containers { - image = "gcr.io/cloudrun/hello" - } - } - } -} -`, name, project) -} - {{ if ne $.TargetVersionName `ga` -}} func TestAccCloudRunService_cloudRunServiceIap_update(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go deleted file mode 100644 index 3bd61fb84bec..000000000000 --- a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool.go +++ /dev/null @@ -1,52 +0,0 @@ -package cloudrunv2 - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleCloudRunV2WorkerPool() *schema.Resource { - dsSchema := 
tpgresource.DatasourceSchemaFromResourceSchema(ResourceCloudRunV2WorkerPool().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") - tpgresource.AddOptionalFieldsToSchema(dsSchema, "location") - - // Set 'Optional' schema elements - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: dataSourceGoogleCloudRunV2WorkerPoolRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleCloudRunV2WorkerPoolRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/workerPools/{{name}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - - d.SetId(id) - - err = resourceCloudRunV2WorkerPoolRead(d, meta) - if err != nil { - return err - } - - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - - if err := tpgresource.SetDataSourceAnnotations(d); err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - return nil -} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go b/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go deleted file mode 100644 index d7d716b59556..000000000000 --- a/mmv1/third_party/terraform/services/cloudrunv2/data_source_google_cloud_run_v2_worker_pool_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package cloudrunv2_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDataSourceGoogleCloudRunV2WorkerPool_basic(t *testing.T) { - t.Parallel() - - project := envvar.GetTestProjectFromEnv() - - name := fmt.Sprintf("tf-test-cloud-run-v2-wp-%d", acctest.RandInt(t)) - location := "us-central1" - 
id := fmt.Sprintf("projects/%s/locations/%s/workerPools/%s", project, location, name) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleCloudRunV2WorkerPool_basic(name, location), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_cloud_run_v2_worker_pool.hello", "id", id), - resource.TestCheckResourceAttr("data.google_cloud_run_v2_worker_pool.hello", "name", name), - resource.TestCheckResourceAttr("data.google_cloud_run_v2_worker_pool.hello", "location", location), - ), - }, - }, - }) -} - -func testAccDataSourceGoogleCloudRunV2WorkerPool_basic(name, location string) string { - return fmt.Sprintf(` -resource "google_cloud_run_v2_worker_pool" "hello" { - name = "%s" - location = "%s" - deletion_protection = false - launch_stage = "BETA" - - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } - - labels = { - "key" = "value" - } - - annotations = { - "key" = "value" - } -} - -data "google_cloud_run_v2_worker_pool" "hello" { - name = google_cloud_run_v2_worker_pool.hello.name - location = google_cloud_run_v2_worker_pool.hello.location -} -`, name, location) -} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index d58b6d7fede3..4f9b46d483a1 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -6,7 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" + 
"github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccCloudRunV2Job_cloudrunv2JobFullUpdate(t *testing.T) { @@ -211,7 +211,7 @@ resource "google_compute_network" "custom_test" { func TestAccCloudRunV2Job_cloudrunv2JobWithDirectVPCUpdate(t *testing.T) { t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) context := map[string]interface{}{ "job_name": jobName, "project": envvar.GetTestProjectFromEnv(), @@ -312,7 +312,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGcsUpdate(t *testing.T) { acctest.SkipIfVcr(t) t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) context := map[string]interface{}{ "job_name": jobName, } @@ -323,7 +323,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGcsUpdate(t *testing.T) { CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccCloudRunV2Job_cloudrunv2BasicJob(context), + Config: testAccCloudRunV2Job_cloudrunv2JobWithNoVolume(context), }, { ResourceName: "google_cloud_run_v2_job.default", @@ -344,7 +344,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithGcsUpdate(t *testing.T) { }) } -func testAccCloudRunV2Job_cloudrunv2BasicJob(context map[string]interface{}) string { +func testAccCloudRunV2Job_cloudrunv2JobWithNoVolume(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_cloud_run_v2_job" "default" { name = "%{job_name}" @@ -407,7 +407,7 @@ func TestAccCloudRunV2Job_cloudrunv2JobWithNfsUpdate(t *testing.T) { acctest.SkipIfVcr(t) t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) context := map[string]interface{}{ "job_name": jobName, } @@ -418,7 +418,7 @@ func 
TestAccCloudRunV2Job_cloudrunv2JobWithNfsUpdate(t *testing.T) { CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccCloudRunV2Job_cloudrunv2BasicJob(context), + Config: testAccCloudRunV2Job_cloudrunv2JobWithNoVolume(context), }, { ResourceName: "google_cloud_run_v2_job.default", @@ -473,420 +473,11 @@ func testAccCloudRunV2Job_cloudrunv2JobWithNfsVolume(context map[string]interfac `, context) } - -func TestAccCloudRunV2Job_cloudrunv2JobTCPProbesUpdate(t *testing.T) { - t.Parallel() - - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ - "job_name": jobName, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2Job_cloudrunv2JobWithEmptyTCPStartupProbe(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Job_cloudrunv2JobUpdateWithTCPStartupProbe(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - }, - }) -} - -func TestAccCloudRunV2Job_cloudrunv2JobHTTPProbesUpdate(t *testing.T) { - t.Parallel() - - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ - "job_name": jobName, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), - Steps: 
[]resource.TestStep{ - { - Config: testAccCloudRunV2Job_cloudrunv2JobUpdateWithEmptyHTTPStartupProbe(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Job_cloudrunv2JobUpdateWithHTTPStartupProbe(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - }, - }) -} - -func TestAccCloudRunV2Job_cloudrunv2JobGRPCProbesUpdate(t *testing.T) { - t.Parallel() - - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ - "job_name": jobName, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2Job_cloudRunJobUpdateWithEmptyGRPCStartupProbe(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Job_cloudRunJobUpdateWithGRPCStartupProbe(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2Job_cloudrunv2JobWithEmptyTCPStartupProbe(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { 
- containers { - image = "us-docker.pkg.dev/cloudrun/container/job" - ports { - container_port = 8080 - } - startup_probe { - tcp_socket {} - } - } - } - } -} -`, context) -} - -func testAccCloudRunV2Job_cloudrunv2JobUpdateWithTCPStartupProbe(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - startup_probe { - initial_delay_seconds = 2 - period_seconds = 1 - timeout_seconds = 5 - failure_threshold = 2 - tcp_socket { - port = 8080 - } - } - } - } - } -} -`, context) -} - -func testAccCloudRunV2Job_cloudrunv2JobUpdateWithEmptyHTTPStartupProbe(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - startup_probe { - http_get {} - } - } - } - } -} -`, context) -} - -func testAccCloudRunV2Job_cloudrunv2JobUpdateWithHTTPStartupProbe(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - startup_probe { - initial_delay_seconds = 3 - period_seconds = 2 - timeout_seconds = 6 - failure_threshold = 3 - http_get { - path = "/some-path" - port = 8080 - http_headers { - name = "User-Agent" - value = "magic-modules" - } - http_headers { - name = "Some-Name" - } - } - } - } - } - } -} -`, context) -} - -func testAccCloudRunV2Job_cloudRunJobUpdateWithEmptyGRPCStartupProbe(context map[string]interface{}) string { - return 
acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - startup_probe { - grpc {} - } - } - } - } -} -`, context) -} - -func testAccCloudRunV2Job_cloudRunJobUpdateWithGRPCStartupProbe(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - startup_probe { - grpc { - port = 8080 - service = "grpc.health.v1.Health" - } - } - } - } - } -} -`, context) -} - -func TestAccCloudRunV2Job_cloudrunv2JobDependsOnUpdate(t *testing.T) { - t.Parallel() - - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ - "job_name": jobName, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2Job_cloudRunJobWithoutDependsOn(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Job_cloudRunJobWithDependsOn(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Job_cloudRunJobWithDependsOnUpdate(context), - }, - { - 
ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2Job_cloudRunJobWithoutDependsOn(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - name = "foo" - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - startup_probe { - grpc {} - } - } - } - } -} -`, context) -} - - - -func testAccCloudRunV2Job_cloudRunJobWithDependsOn(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - name = "foo" - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - startup_probe { - grpc {} - } - } - containers { - name = "bar" - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - depends_on = [ - "foo" - ] - } - } - } -} -`, context) -} - -func testAccCloudRunV2Job_cloudRunJobWithDependsOnUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - deletion_protection = false - - template { - template { - containers { - name = "baz" - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - startup_probe { - grpc {} - } - } - containers { - name = "bar" - image = "us-docker.pkg.dev/cloudrun/container/hello" - ports { - container_port = 8080 - } - depends_on = [ - "baz" - ] - } - } - } -} -`, context) -} - - {{ if ne $.TargetVersionName `ga` -}} func 
TestAccCloudRunV2Job_cloudrunv2JobWithStartExecutionTokenUpdate(t *testing.T) { t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) context1 := map[string]interface{}{ "job_name": jobName, "token": "token1", @@ -944,7 +535,7 @@ func testAccCloudRunV2Job_cloudrunv2JobWithStartExecutionToken(context map[strin func TestAccCloudRunV2Job_cloudrunv2JobWithRunExecutionTokenUpdate(t *testing.T) { t.Parallel() - jobName := fmt.Sprintf("tf-test-cloudrun-job%s", acctest.RandString(t, 10)) + jobName := fmt.Sprintf("tf-test-cloudrun-service%s", acctest.RandString(t, 10)) context1 := map[string]interface{}{ "job_name": jobName, "token": "token1", @@ -999,74 +590,3 @@ func testAccCloudRunV2Job_cloudrunv2JobWithRunExecutionToken(context map[string] `, context) } {{- end }} - - -func TestAccCloudRunV2Job_cloudrunv2JobWithGpuUpdate(t *testing.T) { - acctest.SkipIfVcr(t) - t.Parallel() - - jobName := fmt.Sprintf("tf-test-cloudrun-job-gpu%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ - "job_name": jobName, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2JobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2Job_cloudrunv2BasicJob(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "launch_stage", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Job_cloudrunv2JobWithGpu(context), - }, - { - ResourceName: "google_cloud_run_v2_job.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "launch_stage", "deletion_protection"}, - }, - }, - }) -} - -func 
testAccCloudRunV2Job_cloudrunv2JobWithGpu(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_cloud_run_v2_job" "default" { - name = "%{job_name}" - location = "us-central1" - launch_stage = "BETA" - deletion_protection = false - template { - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/job" - resources { - limits = { - "cpu" = "4" - "memory" = "16Gi" - "nvidia.com/gpu" = "1" - } - } - } - node_selector { - accelerator = "nvidia-l4" - } - gpu_zonal_redundancy_disabled = true - } - } - lifecycle { - ignore_changes = [ - launch_stage, - ] - } - } -`, context) -} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index fd20578a8d20..0cf551030454 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -63,10 +63,6 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" - scaling { - min_instance_count = 1 - max_instance_count = 3 - } template { labels = { label-1 = "value-1" @@ -74,6 +70,10 @@ resource "google_cloud_run_v2_service" "default" { timeout = "300s" service_account = google_service_account.service_account.email execution_environment = "EXECUTION_ENVIRONMENT_GEN2" + scaling { + max_instance_count = 3 + min_instance_count = 1 + } annotations = { generated-by = "magic-modules" } @@ -131,10 +131,7 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-update" client_version = "client-version-update" - scaling { - min_instance_count = 1 - max_instance_count = 2 - } + template { labels = { label-1 = "value-update" @@ -142,6 +139,10 @@ resource "google_cloud_run_v2_service" "default" { timeout = "500s" service_account = 
google_service_account.service_account.email execution_environment = "EXECUTION_ENVIRONMENT_GEN1" + scaling { + max_instance_count = 2 + min_instance_count = 1 + } annotations = { generated-by = "magic-modules" } @@ -252,10 +253,6 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" - scaling { - min_instance_count = 1 - max_instance_count = 3 - } template { labels = { label-1 = "value-1" @@ -263,6 +260,10 @@ resource "google_cloud_run_v2_service" "default" { timeout = "300s" service_account = google_service_account.service_account.email execution_environment = "EXECUTION_ENVIRONMENT_GEN2" + scaling { + max_instance_count = 3 + min_instance_count = 1 + } annotations = { generated-by = "magic-modules" } @@ -1326,9 +1327,6 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" - scaling { - max_instance_count = 1 - } template { containers { image = "us-docker.pkg.dev/cloudrun/container/hello" @@ -1340,102 +1338,8 @@ resource "google_cloud_run_v2_service" "default" { startup_cpu_boost = true } } - } -} -`, context) -} - -func TestAccCloudRunV2Service_cloudrunv2MultiRegionService(t *testing.T) { - t.Parallel() - context := map[string]interface{} { - "random_suffix" : acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase { - PreCheck: func() { acctest.AccTestPreCheck(t)}, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2ServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegion(context), - }, - { - ResourceName: "google_cloud_run_v2_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - { - Config: 
testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegionUpdate(context), - }, - { - ResourceName: "google_cloud_run_v2_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegion(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_service" "default" { - name = "tf-test-cloudrun-service%{random_suffix}" - description = "Multi-Region Service" - location = "global" - deletion_protection = false - launch_stage = "GA" - annotations = { - generated-by = "magic-modules" - } - multi_region_settings { - regions = [ - "us-central1", - "us-east1", - "us-west1", - ] - } - ingress = "INGRESS_TRAFFIC_ALL" - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - } - } -} -`, context) -} - -func testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegionUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_service" "default" { - name = "tf-test-cloudrun-service%{random_suffix}" - description = "Multi-Region Service" - location = "global" - deletion_protection = false - launch_stage = "GA" - annotations = { - generated-by = "magic-modules" - } - multi_region_settings { - regions = [ - "us-central1", - "us-east1", - ] - } - ingress = "INGRESS_TRAFFIC_ALL" - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" + scaling { + max_instance_count = 1 } } } @@ -1458,9 +1362,6 @@ resource "google_cloud_run_v2_service" "default" { } client = "client-1" client_version = "client-version-1" - scaling { - 
max_instance_count = 1 - } template { containers { image = "us-docker.pkg.dev/cloudrun/container/hello" @@ -1477,6 +1378,9 @@ resource "google_cloud_run_v2_service" "default" { accelerator = "nvidia-l4" } gpu_zonal_redundancy_disabled = true + scaling { + max_instance_count = 1 + } } } `, context) @@ -1542,6 +1446,7 @@ resource "google_cloud_run_v2_service" "default" { image_uri = "us-docker.pkg.dev/cloudrun/container/hello" base_image = "us-central1-docker.pkg.dev/serverless-runtimes/google-22-full/runtimes/nodejs22" enable_automatic_updates = true + worker_pool = "worker-pool" environment_variables = { FOO_KEY = "FOO_VALUE" BAR_KEY = "BAR_VALUE" @@ -1607,6 +1512,7 @@ resource "google_cloud_run_v2_service" "default" { image_uri = "gcr.io/cloudrun/hello:latest" base_image = "us-central1-docker.pkg.dev/serverless-runtimes/google-22-full/runtimes/nodejs20" enable_automatic_updates = false + worker_pool = "worker-pool-2" environment_variables = { FOO_KEY_FOO = "FOO_VALUE_FOO" BAR_KEY_BAR = "BAR_VALUE_BAR" @@ -1652,88 +1558,6 @@ resource "google_project_iam_member" "logs_writer" { `, context) } -func TestAccCloudRunV2Service_cloudrunv2ServiceWithManualScaling(t *testing.T) { - t.Parallel() - context := map[string]interface{} { - "random_suffix" : acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase { - PreCheck: func() { acctest.AccTestPreCheck(t)}, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2ServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2Service_cloudrunv2ServiceWithManualScaling(context), - }, - { - ResourceName: "google_cloud_run_v2_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2Service_cloudrunv2ServiceUpdateWithManualScaling(context), - }, - { - 
ResourceName: "google_cloud_run_v2_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2Service_cloudrunv2ServiceWithManualScaling(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_service" "default" { - name = "tf-test-cloudrun-manual-scaling-service%{random_suffix}" - description = "description creating" - location = "us-central1" - deletion_protection = false - annotations = { - generated-by = "magic-modules" - } - ingress = "INGRESS_TRAFFIC_ALL" - launch_stage = "BETA" - scaling { - scaling_mode = "MANUAL" - manual_instance_count = 2 - } - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - } - } -} -`, context) -} - -func testAccCloudRunV2Service_cloudrunv2ServiceUpdateWithManualScaling(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_service" "default" { - name = "tf-test-cloudrun-manual-scaling-service%{random_suffix}" - description = "description creating" - location = "us-central1" - deletion_protection = false - annotations = { - generated-by = "magic-modules" - } - ingress = "INGRESS_TRAFFIC_ALL" - launch_stage = "BETA" - scaling { - scaling_mode = "MANUAL" - manual_instance_count = 10 - } - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - } - } -} -`, context) -} - {{ if ne $.TargetVersionName `ga` -}} func TestAccCloudRunV2Service_cloudrunv2ServiceIapUpdate(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl deleted file mode 100644 index cd1640ba5ccd..000000000000 --- 
a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl +++ /dev/null @@ -1,670 +0,0 @@ -package cloudrunv2_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFullUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFull(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels"}, - }, - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFullUpdate(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "deletion_protection"}, - }, - }, - }) -} - - -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFull(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - description = "description creating" - location = "us-central1" - launch_stage = "BETA" - annotations = { - generated-by = "magic-modules" - } - - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - template { - labels = { - label-1 = 
"value-1" - } - service_account = google_service_account.service_account.email - annotations = { - generated-by = "magic-modules" - } - containers { - name = "container-1" - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - env { - name = "SOURCE" - value = "remote" - } - env { - name = "TARGET" - value = "home" - } - resources { - limits = { - cpu = "4" - memory = "2Gi" - } - } - } - } -} - -resource "google_service_account" "service_account" { - account_id = "tf-test-my-account%{random_suffix}" - display_name = "Test Service Account" -} -`, context) -} -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolFullUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - description = "description updating" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - annotations = { - generated-by = "magic-modules-files" - } - - binary_authorization { - use_default = true - breakglass_justification = "Some justification" - } - labels = { - label-1 = "value-update" - } - client = "client-update" - client_version = "client-version-update" - - template { - revision = "tf-test-cloudrun-worker-pool%{random_suffix}-001" - labels = { - label-1 = "value-update" - } - service_account = google_service_account.service_account.email - annotations = { - generated-by = "magic-modules" - } - containers { - name = "container-update" - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - args = ["arg1", "arg2"] - command = ["/bin/sh", "-c"] - env { - name = "SOURCE_UPDATE" - value = "remote-update" - } - env { - name = "TARGET_UPDATE" - value = "home-update" - } - resources { - limits = { - cpu = "2" - memory = "8Gi" - } - } - working_dir = "/home" - } - } - instance_splits { - type = "INSTANCE_SPLIT_ALLOCATION_TYPE_REVISION" - revision = "tf-test-cloudrun-worker-pool%{random_suffix}-001" - } -} - -resource 
"google_service_account" "service_account" { - account_id = "tf-test-my-account%{random_suffix}" - display_name = "Test Service Account" -} -`, context) -} -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolGcsVolume(t *testing.T) { - acctest.SkipIfVcr(t) - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolGcsVolume(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolGcsVolume(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - description = "description creating" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - - annotations = { - generated-by = "magic-modules" - } - - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - template { - labels = { - label-1 = "value-1" - } - service_account = google_service_account.service_account.email - annotations = { - generated-by = "magic-modules" - } - volumes { - name = "gcs" - gcs { - bucket = "gcp-public-data-landsat" - read_only = true -{{ if ne $.TargetVersionName `ga` -}} - mount_options = ["log-severity=info"] -{{ end }} - } - } - containers { - name = "container-1" - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - env { - name = "SOURCE" - value = 
"remote" - } - env { - name = "TARGET" - value = "home" - } - volume_mounts { - name = "gcs" - mount_path = "/mnt/landsat" - } - resources { - limits = { - cpu = "4" - memory = "2Gi" - } - } - } - } -} - -resource "google_service_account" "service_account" { - account_id = "tf-test-my-account%{random_suffix}" - display_name = "Test Service Account" -} -`, context) -} - -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithDirectVPCUpdate(t *testing.T) { - t.Parallel() - - workerPoolName := fmt.Sprintf("tf-test-cloudrun-worker-pool%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ - "service_name": workerPoolName, - "project": envvar.GetTestProjectFromEnv(), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPC(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPCAndNamedBinAuthPolicyUpdate(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPC(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "%{service_name}" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - vpc_access { - egress = "ALL_TRAFFIC" - 
network_interfaces { - network = "default" - } - } - } -} -`, context) -} - -func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolWithDirectVPCAndNamedBinAuthPolicyUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "%{service_name}" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - binary_authorization { - policy = "projects/%{project}/platforms/cloudRun/policies/my-policy" - breakglass_justification = "Some justification" - } - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - vpc_access { - network_interfaces { - subnetwork = "default" - tags = ["foo", "bar"] - } - } - } -} -`, context) -} - -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolCustomAudienceUpdate(t *testing.T) { - t.Parallel() - - workerPoolName := fmt.Sprintf("tf-test-cloudrun-worker-pool%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithCustomAudience(workerPoolName, "test"), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "launch_stage", "deletion_protection"}, - }, - { - Config: testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithCustomAudience(workerPoolName, "test_update"), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "launch_stage", "deletion_protection"}, - }, - { - Config: 
testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithoutCustomAudience(workerPoolName), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "launch_stage", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithoutCustomAudience(workerPoolName string) string { - return fmt.Sprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "%s" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} -`, workerPoolName) -} - -func testAccCloudRunV2WorkerPool_cloudRunWorkerPoolUpdateWithCustomAudience(workerPoolName string, customAudience string) string { - return fmt.Sprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "%s" - location = "us-central1" - deletion_protection = false - custom_audiences = ["%s"] - launch_stage = "BETA" - - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} -`, workerPoolName, customAudience) -} - - -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolAttributionLabel(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "add_attribution": "true", - "attribution_strategy": "CREATION_ONLY", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabel(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.%", "1"), - 
resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.user_label", "foo"), - - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.%", "2"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.user_label", "foo"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.goog-terraform-provisioned", "true"), - - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.%", "2"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.user_label", "foo"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.goog-terraform-provisioned", "true"), - ), - }, - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabelUpdate(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.%", "1"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "labels.user_label", "bar"), - - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.%", "2"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.user_label", "bar"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "terraform_labels.goog-terraform-provisioned", "true"), - - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.%", "2"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.user_label", "bar"), - resource.TestCheckResourceAttr("google_cloud_run_v2_worker_pool.default", "effective_labels.goog-terraform-provisioned", "true"), - ), - }, - }, - }) -} - -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabel(context map[string]interface{}) 
string { - return acctest.Nprintf(` -provider "google" { - add_terraform_attribution_label = %{add_attribution} - terraform_attribution_label_addition_strategy = "%{attribution_strategy}" -} - -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - - labels = { - user_label = "foo" - } - - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} -`, context) -} - -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithAttributionLabelUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -provider "google" { - add_terraform_attribution_label = %{add_attribution} - terraform_attribution_label_addition_strategy = "%{attribution_strategy}" -} - -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - - - labels = { - user_label = "bar" - } - - template { - containers { - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} -`, context) -} - -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualScaling(t *testing.T) { - t.Parallel() - context := map[string]interface{} { - "random_suffix" : acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase { - PreCheck: func() { acctest.AccTestPreCheck(t)}, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualScaling(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", 
"deletion_protection"}, - }, - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolUpdateWithManualScaling(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - - }, - }) -} - -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualScaling(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - description = "description creating" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - annotations = { - generated-by = "magic-modules" - } - scaling { - manual_instance_count = 5 - } - - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - template { - containers { - name = "container-1" - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} - -`, context) -} -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolUpdateWithManualScaling(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - description = "description creating" - location = "us-central1" - deletion_protection = false - annotations = { - generated-by = "magic-modules" - } - - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - launch_stage = "BETA" - scaling { - manual_instance_count = 2 - } - template { - containers { - name = "container-1" - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} - -`, context) -} - -func TestAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualInstanceCountZero(t *testing.T) { - t.Parallel() - context := map[string]interface{} { - 
"random_suffix" : acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase { - PreCheck: func() { acctest.AccTestPreCheck(t)}, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckCloudRunV2WorkerPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualInstanceCountZero(context), - }, - { - ResourceName: "google_cloud_run_v2_worker_pool.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, - }, - }, - }) -} - -func testAccCloudRunV2WorkerPool_cloudrunv2WorkerPoolWithManualInstanceCountZero(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_cloud_run_v2_worker_pool" "default" { - name = "tf-test-cloudrun-worker-pool%{random_suffix}" - description = "description creating" - location = "us-central1" - deletion_protection = false - launch_stage = "BETA" - annotations = { - generated-by = "magic-modules" - } - scaling { - manual_instance_count = 0 - } - - labels = { - label-1 = "value-1" - } - client = "client-1" - client_version = "client-version-1" - template { - containers { - name = "container-1" - image = "us-docker.pkg.dev/cloudrun/container/worker-pool" - } - } -} - -`, context) -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go.tmpl similarity index 83% rename from mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go rename to mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go.tmpl index eb7d61fe65ec..98d3a3df2c6e 100644 --- a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go +++ 
b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go.tmpl @@ -3,7 +3,6 @@ package cloudtasks_test import ( "fmt" "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -128,17 +127,17 @@ func TestAccCloudTasksQueue_HttpTargetOIDC_update(t *testing.T) { Config: testAccCloudTasksQueue_HttpTargetOIDC(name, serviceAccountID), }, { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccCloudTasksQueue_basic(name), }, { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -157,49 +156,18 @@ func TestAccCloudTasksQueue_HttpTargetOAuth_update(t *testing.T) { { Config: testAccCloudTasksQueue_HttpTargetOAuth(name, serviceAccountID), }, - { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccCloudTasksQueue_basic(name), - }, - { - ResourceName: "google_cloud_tasks_queue.default", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccCloudTasksQueue_paused(t *testing.T) { - t.Parallel() - - name := "cloudtasksqueuetest-" + acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccCloudTasksQueue_full(name), - }, { ResourceName: "google_cloud_tasks_queue.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"app_engine_routing_override.0.service", "app_engine_routing_override.0.version", "app_engine_routing_override.0.instance"}, }, { 
- Config: testAccCloudTasksQueue_paused(name), + Config: testAccCloudTasksQueue_basic(name), }, { ResourceName: "google_cloud_tasks_queue.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"app_engine_routing_override.0.service", "app_engine_routing_override.0.version", "app_engine_routing_override.0.instance", "desired_state"}, }, }, }) @@ -224,7 +192,6 @@ func testAccCloudTasksQueue_full(name string) string { resource "google_cloud_tasks_queue" "default" { name = "%s" location = "us-central1" - desired_state = "RUNNING" app_engine_routing_override { service = "worker" @@ -365,6 +332,7 @@ resource "google_service_account" "test" { `, name, serviceAccountID) } + func testAccCloudTasksQueue_HttpTargetOAuth(name, serviceAccountID string) string { return fmt.Sprintf(` resource "google_cloud_tasks_queue" "default" { @@ -411,36 +379,3 @@ resource "google_service_account" "test" { `, name, serviceAccountID) } - -func testAccCloudTasksQueue_paused(name string) string { - return fmt.Sprintf(` -resource "google_cloud_tasks_queue" "default" { - name = "%s" - location = "us-central1" - desired_state = "PAUSED" - - app_engine_routing_override { - service = "main" - version = "2.0" - instance = "beta" - } - - rate_limits { - max_concurrent_dispatches = 4 - max_dispatches_per_second = 3 - } - - retry_config { - max_attempts = 6 - max_retry_duration = "5s" - max_backoff = "4s" - min_backoff = "3s" - max_doublings = 2 - } - - stackdriver_logging_config { - sampling_ratio = 0.1 - } -} -`, name) -} diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go b/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go index e8bf78d776c6..c73feb1b1e63 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_environment_test.go @@ -15,8 +15,7 
@@ func TestAccDataSourceComposerEnvironment_basic(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -80,7 +79,6 @@ func testAccCheckGoogleComposerEnvironmentMeta(n string) resource.TestCheckFunc func testAccDataSourceComposerEnvironment_basic(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" {} resource "google_composer_environment" "test" { name = "tf-test-composer-env-%{random_suffix}" region = "us-central1" @@ -90,7 +88,6 @@ resource "google_composer_environment" "test" { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link zone = "us-central1-a" - service_account = google_service_account.test.name } software_config { image_version = "composer-1-airflow-2" @@ -99,7 +96,6 @@ resource "google_composer_environment" "test" { labels = { my-label = "my-label-value" } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -120,14 +116,5 @@ data "google_composer_environment" "test" { name = google_composer_environment.test.name region = google_composer_environment.test.region } -resource "google_service_account" "test" { - account_id = "%{service_account}" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} `, context) } diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go 
b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go.tmpl similarity index 74% rename from mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go rename to mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go.tmpl index d2c6b856a2aa..ea8e664c2773 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_config_map_test.go.tmpl @@ -14,7 +14,6 @@ func TestAccDataSourceComposerUserWorkloadsConfigMap_basic(t *testing.T) { context := map[string]interface{}{ "env_name": fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)), "config_map_name": fmt.Sprintf("tf-test-composer-config-map-%d", acctest.RandInt(t)), - "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -34,18 +33,13 @@ func TestAccDataSourceComposerUserWorkloadsConfigMap_basic(t *testing.T) { func testAccDataSourceComposerUserWorkloadsConfigMap_basic(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" {} resource "google_composer_environment" "test" { name = "%{env_name}" config { software_config { image_version = "composer-3-airflow-2" } - node_config { - service_account = google_service_account.test.name - } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "test" { environment = google_composer_environment.test.name @@ -59,14 +53,5 @@ data "google_composer_user_workloads_config_map" "test" { name = google_composer_user_workloads_config_map.test.name environment = google_composer_environment.test.name } -resource "google_service_account" "test" { - account_id = "%{service_account}" - display_name = "Test Service 
Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} `, context) } diff --git a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go.tmpl similarity index 75% rename from mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go rename to mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go.tmpl index 3e7b477cada6..2098a4aeb89e 100644 --- a/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go +++ b/mmv1/third_party/terraform/services/composer/data_source_google_composer_user_workloads_secret_test.go.tmpl @@ -15,9 +15,8 @@ func TestAccDataSourceComposerUserWorkloadsSecret_basic(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "env_name": fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)), - "secret_name": fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)), - "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), + "env_name": fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)), + "secret_name": fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)), } acctest.VcrTest(t, resource.TestCase{ @@ -55,7 +54,7 @@ func checkSecretDataSourceMatchesResource() resource.TestCheckFunc { } // ignore diff if it's due to secrets being masked. 
if strings.HasPrefix(k, "data.") { - if _, ok := dsAttr[k]; !ok { + if _, ok := dsAttr[k]; !ok{ errMsg += fmt.Sprintf("%s is defined in resource and not in datasource\n", k) } if dsAttr[k] == "**********" { @@ -77,18 +76,13 @@ func checkSecretDataSourceMatchesResource() resource.TestCheckFunc { func testAccDataSourceComposerUserWorkloadsSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" {} resource "google_composer_environment" "test" { name = "%{env_name}" config { software_config { image_version = "composer-3-airflow-2" } - node_config { - service_account = google_service_account.test.name - } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_secret" "test" { environment = google_composer_environment.test.name @@ -102,14 +96,5 @@ data "google_composer_user_workloads_secret" "test" { name = google_composer_user_workloads_secret.test.name environment = google_composer_environment.test.name } -resource "google_service_account" "test" { - account_id = "%{service_account}" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} `, context) } diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl b/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl index bbdbcb9e3368..d37923fa8bb2 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.tmpl @@ -1528,9 +1528,7 @@ func resourceComposerEnvironmentUpdate(d *schema.ResourceData, meta interface{}) patchObj.Config.RecoveryConfig = config.RecoveryConfig } err = 
resourceComposerEnvironmentPatchField("config.RecoveryConfig.ScheduledSnapshotsConfig", userAgent, patchObj, d, tfConfig) - // Empty ScheduledSnapshotsConfig and config with scheduled snapshots explicitly disabled (and nothing else configured) represent in fact the same configuration. - // If applying a change fails specifically because it does not bring any actual modification, this error should be silently ignored. - if err != nil && !strings.Contains(err.Error(), "No change in configuration."){ + if err != nil { return err } } @@ -3142,4 +3140,4 @@ func gscBucketNameDiffSuppress(_, old, new string, _ *schema.ResourceData) bool return true } return false -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.tmpl similarity index 61% rename from mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go rename to mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.tmpl index 932b63a01db4..6126b86df3fd 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.tmpl @@ -2,15 +2,15 @@ package composer_test import ( "fmt" - "log" - "regexp" - "strings" - "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-provider-google/google/services/composer" tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" + "github.com/hashicorp/terraform-provider-google/google/services/composer" + "testing" + + "log" + "regexp" + "strings" "github.com/hashicorp/go-multierror" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -47,380 +47,395 @@ func bootstrapComposerServiceAgents(t 
*testing.T) { }) } -// Checks private environment creation for composer 2. -func TestAccComposerEnvironmentComposer2_private(t *testing.T) { +// Checks environment creation with minimum required information. +func TestAccComposerEnvironment_basic(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_basic(envName, network, subnetwork), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.airflow_uri"), + resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.gke_cluster"), + resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_count"), + resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_config.0.zone"), + resource.TestCheckResourceAttrSet("google_composer_environment.test", "config.0.node_config.0.machine_type")), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), + ResourceName: "google_composer_environment.test", + 
ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_basic(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -// Checks environment creation with minimum required information. -func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { - acctest.SkipIfVcr(t) + +// Checks private environment creation for composer 1 and 2. 
+func TestAccComposerEnvironmentComposer1_private(t *testing.T) { t.Parallel() - kms := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-composer2-key1") - pid := envvar.GetTestProjectFromEnv() - bootstrapComposerServiceAgents(t) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironmentComposer1_private(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { +func TestAccComposerEnvironmentComposer2_private(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironmentComposer2_private(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { +// Checks environment creation with minimum required information. +func TestAccComposerEnvironment_privateWithWebServerControl(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_privateWithWebServerControl(envName, network, subnetwork), }, { - Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork, serviceAccount), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateId: 
fmt.Sprintf("projects/%s/locations/%s/environments/%s", envvar.GetTestProjectFromEnv(), "us-central1", envName), ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_privateWithWebServerControlUpdated(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_ComposerV2(t *testing.T) { +func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) { t.Parallel() - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_databaseCfg(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get 
destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_databaseCfgUpdated(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { +func TestAccComposerEnvironment_withEncryptionConfigComposer1(t *testing.T) { t.Parallel() + kms := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-composer1-key1") + pid := envvar.GetTestProjectFromEnv() + bootstrapComposerServiceAgents(t) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, 
ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_encryptionCfg(pid, "1", "1", envName, kms.CryptoKey.Name, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { +func TestAccComposerEnvironment_withEncryptionConfigComposer2(t *testing.T) { + acctest.SkipIfVcr(t) t.Parallel() + kms := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-composer2-key1") + pid := envvar.GetTestProjectFromEnv() + bootstrapComposerServiceAgents(t) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), + Config: 
testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_encryptionCfg(pid, "2", "2", envName, kms.CryptoKey.Name, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { +func TestAccComposerEnvironment_withMaintenanceWindow(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), }, { - ResourceName: 
"google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { +func TestAccComposerEnvironment_maintenanceWindowUpdate(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork), }, { - Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), 
}, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { +func TestAccComposerEnvironment_ComposerV2(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This 
is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2ImageVersion(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -428,67 +443,106 @@ func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall 
rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2ResilienceMode(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + + + +func TestAccComposerEnvironment_ComposerV2HighResilience(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2WithTriggerer(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -496,10 +550,10 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), }, { - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -508,224 +562,220 @@ func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposer2Environment_withNodeConfig(t *testing.T) { +func TestAccComposerEnvironment_UpdateComposerV2(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_updateComposerV2(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentAirflow2_withRecoveryConfig(t *testing.T) { +func TestAccComposerEnvironment_composerV2PrivateServiceConnect(t *testing.T) { t.Parallel() + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_airflow2RecoveryCfg(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork, serviceAccount), - }, - { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -// Checks behavior of config for creation for attributes that must -// be updated during create. -func TestAccComposerEnvironment_fixPyPiPackages(t *testing.T) { +func TestAccComposerEnvironment_composerV1MasterAuthNetworks(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_fixPyPiPackages(envName, network, subnetwork, serviceAccount), - ExpectError: regexp.MustCompile("Failed to install Python packages"), + Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), }, { - Config: testAccComposerEnvironment_fixPyPiPackagesUpdate(envName, network, subnetwork, serviceAccount), + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. 
+ // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - ResourceName: "google_composer_environment.test", - ImportState: true, - ImportStateVerify: true, + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func testAccComposerEnvironmentDestroyProducer(t *testing.T) func(s *terraform.State) error { - return func(s *terraform.State) error { - config := acctest.GoogleProviderConfig(t) - - for _, rs := range s.RootModule().Resources { - if rs.Type != "google_composer_environment" { - continue - } - - idTokens := strings.Split(rs.Primary.ID, "/") - if len(idTokens) != 6 { - return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}", rs.Primary.ID) - } - envName := &composer.ComposerEnvironmentName{ - Project: idTokens[1], - Region: idTokens[3], - Environment: idTokens[5], - } - - _, err := config.NewComposerClient(config.UserAgent).Projects.Locations.Environments.Get(envName.ResourceName()).Do() - if err == nil { - return fmt.Errorf("environment %s still exists", envName.ResourceName()) - } - } - - return nil - } -} - -// Checks environment creation with custom bucket -func TestAccComposerEnvironment_customBucket(t *testing.T) { +func TestAccComposerEnvironment_composerV2MasterAuthNetworks(t *testing.T) { t.Parallel() - bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironment_customBucketWithUrl(t *testing.T) { +func TestAccComposerEnvironment_composerV1MasterAuthNetworksUpdate(t *testing.T) { t.Parallel() - bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: 
testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_MasterAuthNetworks("1", "1", envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. - // TODO: Remove this check if firewall rules bug gets fixed by Composer. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("1", "1", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -// Checks Composer 3 environment creation with new fields. 
-func TestAccComposerEnvironmentComposer3_basic(t *testing.T) { +func TestAccComposerEnvironment_composerV2MasterAuthNetworksUpdate(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironment_MasterAuthNetworks("2", "2", envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO(dzarmola): Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_MasterAuthNetworksUpdate("2", "2", envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + +func TestAccComposer2Environment_withNodeConfig(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) @@ -734,39 +784,36 @@ func TestAccComposerEnvironmentComposer3_basic(t *testing.T) { serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), + Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + Config: testAccComposer2Environment_nodeCfg(envName, network, subnetwork, serviceAccount), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -// Checks Composer 3 specific updatable fields. 
-func TestAccComposerEnvironmentComposer3_update(t *testing.T) { +func TestAccComposerEnvironmentAirflow2_withRecoveryConfig(t *testing.T) { t.Parallel() - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -774,10 +821,15 @@ func TestAccComposerEnvironmentComposer3_update(t *testing.T) { CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_airflow2RecoveryCfg(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, { - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -790,59 +842,66 @@ func TestAccComposerEnvironmentComposer3_update(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(t *testing.T) { +func TestAccComposerEnvironment_withSoftwareConfig(t *testing.T) { t.Parallel() - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network 
+ "-1" - networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork, serviceAccount), - ExpectError: regexp.MustCompile("Conflicting configuration arguments"), + Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, - ExpectNonEmptyPlan: true, - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_softwareCfg(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer3_databaseRetention(t *testing.T) { +func TestAccComposerEnvironmentAirflow2_withSoftwareConfig(t *testing.T) { t.Parallel() - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_airflow2SoftwareCfg(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -853,59 +912,56 @@ func TestAccComposerEnvironmentComposer3_databaseRetention(t *testing.T) { // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ - PlanOnly: true, + PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer3_withNetworkAttachment(t *testing.T) { +// Checks behavior of config for creation for attributes that must +// be updated during create. +func TestAccComposerEnvironment_withUpdateOnCreate(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) - fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. 
// TODO: Remove this check if firewall rules bug gets fixed by Composer. { - PlanOnly: true, - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork, serviceAccount), - Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - ExpectNonEmptyPlan: true, + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_updateOnlyFields(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing.T) { +func TestAccComposerEnvironment_fixPyPiPackages(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) - fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ @@ -914,10 +970,65 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing. 
CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_fixPyPiPackages(envName, network, subnetwork, serviceAccount), + ExpectError: regexp.MustCompile("Failed to install Python packages"), + }, + { + Config: testAccComposerEnvironment_fixPyPiPackagesUpdate(envName, network, subnetwork, serviceAccount), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, }, + }, + }) +} + +func testAccComposerEnvironmentDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_composer_environment" { + continue + } + + idTokens := strings.Split(rs.Primary.ID, "/") + if len(idTokens) != 6 { + return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}", rs.Primary.ID) + } + envName := &composer.ComposerEnvironmentName{ + Project: idTokens[1], + Region: idTokens[3], + Environment: idTokens[5], + } + + _, err := config.NewComposerClient(config.UserAgent).Projects.Locations.Environments.Get(envName.ResourceName()).Do() + if err == nil { + return fmt.Errorf("environment %s still exists", envName.ResourceName()) + } + } + + return nil + } +} + +// Checks environment creation with custom bucket +func TestAccComposerEnvironment_customBucket(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -929,34 +1040,28 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing. // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork, serviceAccount), + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - ExpectNonEmptyPlan: true, }, }, }) } -func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testing.T) { +func TestAccComposerEnvironment_customBucketWithUrl(t *testing.T) { t.Parallel() + bucketName := fmt.Sprintf("%s-%d", testComposerBucketPrefix, acctest.RandInt(t)) envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) - fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -968,33 +1073,29 @@ func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testi // TODO: Remove this check if firewall rules bug gets fixed by Composer. { PlanOnly: true, - Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork, serviceAccount), + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), - ExpectNonEmptyPlan: true, }, }, }) } -// Checks Composer 3 specific updatable fields. -func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { +// Checks Composer 3 environment creation with new fields. 
+func TestAccComposerEnvironmentComposer3_basic(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), }, { ResourceName: "google_composer_environment.test", @@ -1007,7 +1108,7 @@ func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, @@ -1015,28 +1116,27 @@ func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { } // Checks Composer 3 specific updatable fields. 
-func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { +func TestAccComposerEnvironmentComposer3_update(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), }, { - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), }, { - ResourceName: "google_composer_environment.test", - ImportState: true, + ResourceName: "google_composer_environment.test", + ImportState: true, ImportStateVerify: true, }, // This is a terrible clean-up step in order to get destroy to succeed, @@ -1045,114 +1145,347 @@ func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { { PlanOnly: true, ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork, serviceAccount), + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { +func TestAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, 
acctest.RandInt(t)) network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) subnetwork := network + "-1" - errorRegExp, _ := regexp.Compile(".*upgrade to composer 3 is not yet supported.*") - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork, serviceAccount), - }, - { - Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork, serviceAccount), - ExpectError: errorRegExp, + Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork), + ExpectError: regexp.MustCompile("Conflicting configuration arguments"), }, // This is a terrible clean-up step in order to get destroy to succeed, // due to dangling firewall rules left by the Composer Environment blocking network deletion. // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
{ PlanOnly: true, - ExpectNonEmptyPlan: false, - Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork, serviceAccount), + ExpectNonEmptyPlan: true, + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer2_usesUnsupportedField_expectError(t *testing.T) { +func TestAccComposerEnvironmentComposer3_databaseRetention(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - errorRegExp, _ := regexp.Compile(".*error in configuration, .* should only be used in Composer 3.*") + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer2_usesUnsupportedField(envName), - ExpectError: errorRegExp, + Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironmentComposer3_databaseRetention(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), }, }, }) } -func TestAccComposerEnvironmentComposer3_usesUnsupportedField_expectError(t *testing.T) { +func TestAccComposerEnvironmentComposer3_withNetworkAttachment(t *testing.T) { t.Parallel() envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - errorRegExp, _ := regexp.Compile(".*error in configuration, .* should not be used in Composer 3.*") + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComposerEnvironmentComposer3_usesUnsupportedField(envName), - ExpectError: errorRegExp, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + ExpectNonEmptyPlan: true, }, }, }) } -func testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} +func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing.T) { + t.Parallel() -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up 
step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. + { + PlanOnly: true, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + ExpectNonEmptyPlan: true, + }, + }, + }) } -resource "google_storage_bucket" "test" { - name = "%s" - location = "us-central1" - force_destroy = true -} +func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testing.T) { + t.Parallel() -resource "google_composer_environment" "test" { - name = "%s" + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking 
network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. + { + PlanOnly: true, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +// Checks Composer 3 specific updatable fields. +func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. + { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + +// Checks Composer 3 specific updatable fields. 
+func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironmentComposer3_update(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + errorRegExp, _ := regexp.Compile(".*upgrade to composer 3 is not yet supported.*") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + ExpectError: errorRegExp, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer2_usesUnsupportedField_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + errorRegExp, _ := regexp.Compile(".*error in configuration, .* should only be used in Composer 3.*") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer2_usesUnsupportedField(envName), + ExpectError: errorRegExp, + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer3_usesUnsupportedField_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + errorRegExp, _ := regexp.Compile(".*error in configuration, .* should not be used in Composer 3.*") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_usesUnsupportedField(envName), + ExpectError: errorRegExp, + }, + }, + }) +} + +func testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "test" { + name = "%s" + location = "us-central1" + force_destroy = true +} + +resource "google_composer_environment" "test" { + name = "%s" region = "us-central1" config { node_config { network = google_compute_network.test.self_link subnetwork = 
google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" } @@ -1162,9 +1495,8 @@ resource "google_composer_environment" "test" { } } storage_config { - bucket = google_storage_bucket.test.name - } - depends_on = [google_project_iam_member.composer-worker] + bucket = google_storage_bucket.test.name + } } // use a separate network to avoid conflicts with other tests running in parallel @@ -1180,23 +1512,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, bucketName, envName, network, subnetwork) +`, bucketName, envName, network, subnetwork) } -func testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_customBucketWithUrl(bucketName, envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_storage_bucket" "test" { name = "%s" location = "us-central1" @@ -1210,7 +1530,6 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name } software_config { image_version = "composer-2-airflow-2" @@ -1219,7 +1538,6 @@ resource "google_composer_environment" "test" { storage_config { bucket = google_storage_bucket.test.url } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with 
other tests running in parallel @@ -1235,23 +1553,49 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, bucketName, envName, network, subnetwork) +`, bucketName, envName, network, subnetwork) } -func testAccComposerEnvironmentComposer2_private(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_basic(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" + machine_type = "n1-standard-1" + ip_allocation_policy { + use_ip_aliases = true + cluster_ipv4_cidr_block = "10.0.0.0/16" + } + } + software_config { + image_version = "composer-1-airflow-2.3" + } + } +} -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, network, subnetwork) } +func testAccComposerEnvironmentComposer1_private(name, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -1260,22 +1604,21 @@ resource 
"google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name + zone = "us-central1-a" enable_ip_masq_agent = true ip_allocation_policy { - cluster_ipv4_cidr_block = "10.56.0.0/14" + use_ip_aliases = true + cluster_ipv4_cidr_block = "10.0.0.0/16" } } software_config { - image_version = "composer-2-airflow-2" + image_version = "composer-1-airflow-2" } private_environment_config { - connection_type = "VPC_PEERING" enable_private_endpoint = true enable_privately_used_public_ips = true - } + } } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -1292,95 +1635,184 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link private_ip_google_access = true } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironment_encryptionCfg(pid, compVersion, airflowVersion, name, kmsKey, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer2_private(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" { - project_id = "%s" -} -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + enable_ip_masq_agent = true + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.56.0.0/14" + } + } + software_config { + image_version = "composer-2-airflow-2" + } + private_environment_config { + connection_type = "VPC_PEERING" + enable_private_endpoint = true + enable_privately_used_public_ips = 
true + } + } } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false } -resource "google_kms_crypto_key_iam_member" "iam" { - crypto_key_id = "%s" - role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" - member = "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com" +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link + private_ip_google_access = true } +`, name, network, subnetwork) +} + +func testAccComposerEnvironment_privateWithWebServerControl(name, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" + config { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name + zone = "us-central1-a" + ip_allocation_policy { + use_ip_aliases = true + cluster_ipv4_cidr_block = "10.56.0.0/14" + services_ipv4_cidr_block = "10.122.0.0/20" + } + } + private_environment_config { + enable_private_endpoint = false + web_server_ipv4_cidr_block = "172.30.240.0/24" + cloud_sql_ipv4_cidr_block = "10.32.0.0/12" + master_ipv4_cidr_block = "172.17.50.0/28" } - software_config { - image_version = "composer-%s-airflow-%s" + image_version = "composer-1-airflow-2" } - - encryption_config { - kms_key_name = "%s" + web_server_network_access_control { + allowed_ip_range { + value = "192.168.0.1" + description = "my range1" + } + allowed_ip_range { + value = 
"0.0.0.0/0" + } } } - depends_on = [google_project_iam_member.composer-worker, google_kms_crypto_key_iam_member.iam] } + // use a separate network to avoid conflicts with other tests running in parallel // that use the default network/subnet resource "google_compute_network" "test" { name = "%s" auto_create_subnetworks = false } + resource "google_compute_subnetwork" "test" { - name = "%s" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.test.self_link + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link + private_ip_google_access = true } -`, - pid, serviceAccount, kmsKey, name, compVersion, airflowVersion, kmsKey, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_privateWithWebServerControlUpdated(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" + ip_allocation_policy { + use_ip_aliases = true + cluster_ipv4_cidr_block = "10.56.0.0/14" + services_ipv4_cidr_block = "10.122.0.0/20" + } + } + private_environment_config { + enable_private_endpoint = false + web_server_ipv4_cidr_block = "172.30.240.0/24" + cloud_sql_ipv4_cidr_block = "10.32.0.0/12" + master_ipv4_cidr_block = "172.17.50.0/28" + } + software_config { + image_version = "composer-1-airflow-2" + } + web_server_network_access_control { + allowed_ip_range { + value = "192.168.0.1" + description = "my range1" + } + allowed_ip_range { + 
value = "0.0.0.0/0" + } + } + } } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link + private_ip_google_access = true +} +`, name, network, subnetwork) } +func testAccComposerEnvironment_databaseCfg(name, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { node_config { - service_account = google_service_account.test.name + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" } - maintenance_window { - start_time = "2019-08-01T01:00:00Z" - end_time = "2019-08-01T07:00:00Z" - recurrence = "FREQ=WEEKLY;BYDAY=TU,WE" + database_config { + machine_type = "db-n1-standard-4" + } + software_config { + image_version = "composer-1-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet resource "google_compute_network" "test" { name = "%s" auto_create_subnetworks = false @@ -1392,40 +1824,31 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } - -`, serviceAccount, envName, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork, serviceAccount string) string { +func 
testAccComposerEnvironment_databaseCfgUpdated(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { node_config { - service_account = google_service_account.test.name + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" } - maintenance_window { - start_time = "2019-08-01T01:00:00Z" - end_time = "2019-08-01T07:00:00Z" - recurrence = "FREQ=DAILY" + database_config { + machine_type = "db-n1-standard-8" + } + software_config { + image_version = "composer-1-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet resource "google_compute_network" "test" { name = "%s" auto_create_subnetworks = false @@ -1437,72 +1860,68 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } - -`, serviceAccount, envName, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_encryptionCfg(pid, compVersion, airflowVersion, name, kmsKey, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" +data 
"google_project" "project" { + project_id = "%s" } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" +resource "google_kms_crypto_key_iam_member" "iam" { + crypto_key_id = "%s" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com" } - resource "google_composer_environment" "test" { + depends_on = [google_kms_crypto_key_iam_member.iam] name = "%s" - region = "us-east1" - + region = "us-central1" config { node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" - } - } + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + } software_config { - image_version = "composer-2-airflow-2" + image_version = "composer-%s-airflow-%s" } - workloads_config { - scheduler { - cpu = 1.25 - memory_gb = 2.5 - storage_gb = 5.4 - count = 2 - } - web_server { - cpu = 1.75 - memory_gb = 3.0 - storage_gb = 4.4 - } - worker { - cpu = 0.5 - memory_gb = 2.0 - storage_gb = 3.4 - min_count = 2 - max_count = 5 - } + encryption_config { + kms_key_name = "%s" } - environment_size = "ENVIRONMENT_SIZE_MEDIUM" - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } +} +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} +resource "google_compute_subnetwork" "test" { + name = 
"%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, + pid, kmsKey, name, compVersion, airflowVersion, kmsKey, network, subnetwork) +} + + +func testAccComposerEnvironment_maintenanceWindow(envName, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + maintenance_window { + start_time = "2019-08-01T01:00:00Z" + end_time = "2019-08-01T07:00:00Z" + recurrence = "FREQ=WEEKLY;BYDAY=TU,WE" } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -1513,36 +1932,50 @@ resource "google_compute_network" "test" { resource "google_compute_subnetwork" "test" { name = "%s" ip_cidr_range = "10.2.0.0/16" - region = "us-east1" - network = google_compute_network.test.self_link - private_ip_google_access = true + region = "us-central1" + network = google_compute_network.test.self_link } -`, serviceAccount, envName, network, subnetwork) +`, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2(envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_maintenanceWindowUpdate(envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + maintenance_window { + start_time = "2019-08-01T01:00:00Z" + end_time = "2019-08-01T07:00:00Z" + recurrence = "FREQ=DAILY" + } + } +} -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = 
"serviceAccount:${google_service_account.test.email}" +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link } +`, envName, network, subnetwork) +} + +func testAccComposerEnvironment_composerV2WithDisabledTriggerer(envName, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-east1" config { node_config { - service_account = google_service_account.test.name network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link ip_allocation_policy { @@ -1552,45 +1985,29 @@ resource "google_composer_environment" "test" { software_config { image_version = "composer-2-airflow-2" - cloud_data_lineage_integration { - enabled = true - } } workloads_config { scheduler { - cpu = 1.25 - memory_gb = 2.5 + cpu = 1.25 + memory_gb = 2.5 storage_gb = 5.4 - count = 2 + count = 2 } web_server { - cpu = 1.75 - memory_gb = 3.0 + cpu = 1.75 + memory_gb = 3.0 storage_gb = 4.4 } worker { - cpu = 0.5 - memory_gb = 2.0 + cpu = 0.5 + memory_gb = 2.0 storage_gb = 3.4 - min_count = 2 - max_count = 5 - } - triggerer { - cpu = 0.5 - memory_gb = 2.0 - count = 1 + min_count = 2 + max_count = 5 } } - database_config { - zone = "us-east1-c" - } environment_size = "ENVIRONMENT_SIZE_MEDIUM" - data_retention_config { - task_logs_retention_config { - storage_mode = "CLOUD_LOGGING_ONLY" - } - } private_environment_config { enable_private_endpoint = true cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" @@ -1598,7 +2015,7 @@ resource "google_composer_environment" "test" { cloud_sql_ipv4_cidr_block = "10.3.224.0/20" } } - depends_on = [google_project_iam_member.composer-worker] + } resource "google_compute_network" "test" { @@ -1614,40 +2031,73 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, serviceAccount, envName, network, subnetwork) 
+`, envName, network, subnetwork) } -func testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_composerV2(envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} - -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.0.0.0/16" + } + } - software_config { - image_version = "composer-2.10.0-airflow-2.10.2" + software_config { + image_version = "composer-2-airflow-2" + cloud_data_lineage_integration { + enabled = true + } + } + + workloads_config { + scheduler { + cpu = 1.25 + memory_gb = 2.5 + storage_gb = 5.4 + count = 2 + } + web_server { + cpu = 1.75 + memory_gb = 3.0 + storage_gb = 4.4 + } + worker { + cpu = 0.5 + memory_gb = 2.0 + storage_gb = 3.4 + min_count = 2 + max_count = 5 + } + triggerer { + cpu = 0.5 + memory_gb = 2.0 + count = 1 + } + } + database_config { + zone = "us-east1-c" + } + environment_size = "ENVIRONMENT_SIZE_MEDIUM" + data_retention_config { + task_logs_retention_config { + storage_mode = "CLOUD_LOGGING_ONLY" + } + } + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = 
"172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } } - } - depends_on = [google_project_iam_member.composer-worker] + } resource "google_compute_network" "test" { @@ -1663,40 +2113,25 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, serviceAccount, envName, network, subnetwork) +`, envName, network, subnetwork) } -func testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_composerOldVersion(envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} - -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + } - software_config { - image_version = "composer-2.10.1-airflow-2.10.2" + software_config { + image_version = "composer-2.10.0-airflow-2.10.2" + } } - } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -1712,69 +2147,25 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, serviceAccount, envName, network, subnetwork) +`, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork, serviceAccount string) string { +func 
testAccComposerEnvironment_composerNewVersion(envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} - -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - } - - software_config { - image_version = "composer-2-airflow-2" - } - - workloads_config { - scheduler { - cpu = 1.25 - memory_gb = 2.5 - storage_gb = 5.4 - count = 2 + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link } - web_server { - cpu = 1.75 - memory_gb = 3.0 - storage_gb = 4.4 - } - worker { - cpu = 0.5 - memory_gb = 2.0 - storage_gb = 3.4 - min_count = 2 - max_count = 5 + + software_config { + image_version = "composer-2.10.1-airflow-2.10.2" } } - environment_size = "ENVIRONMENT_SIZE_MEDIUM" - resilience_mode = "HIGH_RESILIENCE" - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" - } - } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -1790,42 +2181,92 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, serviceAccount, envName, network, subnetwork) +`, envName, network, subnetwork) } -func testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, 
subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_composerV2HighResilience(envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} +resource "google_composer_environment" "test" { + name = "%s" + region = "us-east1" -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + } + + software_config { + image_version = "composer-2-airflow-2" + } + + workloads_config { + scheduler { + cpu = 1.25 + memory_gb = 2.5 + storage_gb = 5.4 + count = 2 + } + web_server { + cpu = 1.75 + memory_gb = 3.0 + storage_gb = 4.4 + } + worker { + cpu = 0.5 + memory_gb = 2.0 + storage_gb = 3.4 + min_count = 2 + max_count = 5 + } + } + environment_size = "ENVIRONMENT_SIZE_MEDIUM" + resilience_mode = "HIGH_RESILIENCE" + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } + } } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" + +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false } +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-east1" + network = google_compute_network.test.self_link + private_ip_google_access = true +} + +`, envName, network, subnetwork) +} + +func testAccComposerEnvironment_composerV2PrivateServiceConnect(envName, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" - config { - node_config { - 
network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - } + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + } - software_config { - image_version = "composer-2-airflow-2" - } + software_config { + image_version = "composer-2-airflow-2" + } private_environment_config { - cloud_composer_connection_subnetwork = google_compute_subnetwork.test.self_link + cloud_composer_connection_subnetwork = google_compute_subnetwork.test.self_link + } } - } - depends_on = [google_project_iam_member.composer-worker] + } resource "google_compute_network" "test" { @@ -1841,23 +2282,11 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, serviceAccount, envName, network, subnetwork) +`, envName, network, subnetwork) } -func testAccComposerEnvironment_MasterAuthNetworks(compVersion, airflowVersion, envName, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_MasterAuthNetworks(compVersion, airflowVersion, envName, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -1866,7 +2295,6 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name } software_config { @@ -1884,7 +2312,6 @@ resource 
"google_composer_environment" "test" { } } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -1899,69 +2326,55 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } -`, serviceAccount, envName, compVersion, airflowVersion, network, subnetwork) -} - -func testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" +`, envName, compVersion, airflowVersion, network, subnetwork) } -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} +func testAccComposerEnvironment_updateComposerV2StandardResilience(envName, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - } - - software_config { - image_version = "composer-2-airflow-2" - } - - workloads_config { - scheduler { - cpu = 1.25 - memory_gb = 2.5 - storage_gb = 5.4 - count = 2 - } - web_server { - cpu = 1.75 - memory_gb = 3.0 - storage_gb = 4.4 - } - worker { - cpu = 0.5 - memory_gb = 2.0 - storage_gb = 3.4 - min_count = 2 - max_count = 5 - } - } - environment_size = "ENVIRONMENT_SIZE_MEDIUM" - resilience_mode = "STANDARD_RESILIENCE" - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + 
config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + } + + software_config { + image_version = "composer-2-airflow-2" + } + + workloads_config { + scheduler { + cpu = 1.25 + memory_gb = 2.5 + storage_gb = 5.4 + count = 2 + } + web_server { + cpu = 1.75 + memory_gb = 3.0 + storage_gb = 4.4 + } + worker { + cpu = 0.5 + memory_gb = 2.0 + storage_gb = 3.4 + min_count = 2 + max_count = 5 + } + } + environment_size = "ENVIRONMENT_SIZE_MEDIUM" + resilience_mode = "STANDARD_RESILIENCE" + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" + } } - } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -1977,23 +2390,12 @@ resource "google_compute_subnetwork" "test" { private_ip_google_access = true } -`, serviceAccount, envName, network, subnetwork) +`, envName, network, subnetwork) } -func testAccComposerEnvironment_MasterAuthNetworksUpdate(compVersion, airflowVersion, envName, network, subnetwork, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} +func testAccComposerEnvironment_MasterAuthNetworksUpdate(compVersion, airflowVersion, envName, network, subnetwork string) string { + return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2002,7 +2404,6 @@ resource "google_composer_environment" "test" { node_config { network = 
google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name } software_config { @@ -2017,7 +2418,6 @@ resource "google_composer_environment" "test" { } } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network" "test" { @@ -2032,84 +2432,70 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link } -`, serviceAccount, envName, compVersion, airflowVersion, network, subnetwork) +`, envName, compVersion, airflowVersion, network, subnetwork) } -func testAccComposerEnvironment_updateComposerV2(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_updateComposerV2(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} - -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-east1" - config { - node_config { - network = google_compute_network.test.self_link - subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name - ip_allocation_policy { - cluster_ipv4_cidr_block = "10.0.0.0/16" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.0.0.0/16" + } } - } - software_config { - image_version = "composer-2-airflow-2" - cloud_data_lineage_integration { - enabled = false + software_config { + image_version = "composer-2-airflow-2" + cloud_data_lineage_integration { + enabled = false + } } - } - 
workloads_config { - scheduler { - cpu = 2.25 - memory_gb = 3.5 - storage_gb = 6.4 - count = 3 - } - web_server { - cpu = 2.75 - memory_gb = 4.0 - storage_gb = 5.4 - } - worker { - cpu = 1.5 - memory_gb = 3.0 - storage_gb = 4.4 - min_count = 3 - max_count = 6 + workloads_config { + scheduler { + cpu = 2.25 + memory_gb = 3.5 + storage_gb = 6.4 + count = 3 + } + web_server { + cpu = 2.75 + memory_gb = 4.0 + storage_gb = 5.4 + } + worker { + cpu = 1.5 + memory_gb = 3.0 + storage_gb = 4.4 + min_count = 3 + max_count = 6 + } + triggerer { + cpu = 0.75 + memory_gb = 2 + count = 1 + } } - triggerer { - cpu = 0.75 - memory_gb = 2 - count = 1 + environment_size = "ENVIRONMENT_SIZE_LARGE" + data_retention_config { + task_logs_retention_config { + storage_mode = "CLOUD_LOGGING_AND_CLOUD_STORAGE" + } } - } - environment_size = "ENVIRONMENT_SIZE_LARGE" - data_retention_config { - task_logs_retention_config { - storage_mode = "CLOUD_LOGGING_AND_CLOUD_STORAGE" + private_environment_config { + enable_private_endpoint = true + cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" + master_ipv4_cidr_block = "172.16.194.0/23" + cloud_sql_ipv4_cidr_block = "10.3.224.0/20" } - } - private_environment_config { - enable_private_endpoint = true - cloud_composer_network_ipv4_cidr_block = "10.3.192.0/24" - master_ipv4_cidr_block = "172.16.194.0/23" - cloud_sql_ipv4_cidr_block = "10.3.224.0/20" - } - } - depends_on = [google_project_iam_member.composer-worker] + } + } resource "google_compute_network" "test" { @@ -2124,7 +2510,7 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link private_ip_google_access = true } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } func testAccComposer2Environment_nodeCfg(environment, network, subnetwork, serviceAccount string) string { @@ -2177,20 +2563,8 @@ resource "google_project_iam_member" "composer-worker" { `, environment, network, subnetwork, serviceAccount) } -func 
testAccComposerEnvironment_airflow2RecoveryCfg(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironment_airflow2RecoveryCfg(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2199,7 +2573,6 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.self_link subnetwork = google_compute_subnetwork.test.self_link - service_account = google_service_account.test.name ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" } @@ -2218,7 +2591,7 @@ resource "google_composer_environment" "test" { } } } - depends_on = [google_project_iam_member.composer-worker] + } resource "google_compute_network" "test" { @@ -2233,23 +2606,11 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link private_ip_google_access = true } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentUpdate_airflow2RecoveryCfg(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = 
"serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2261,7 +2622,6 @@ resource "google_composer_environment" "test" { ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" } - service_account = google_service_account.test.name } software_config { @@ -2277,7 +2637,7 @@ resource "google_composer_environment" "test" { } } } - depends_on = [google_project_iam_member.composer-worker] + } resource "google_compute_network" "test" { @@ -2292,7 +2652,145 @@ resource "google_compute_subnetwork" "test" { network = google_compute_network.test.self_link private_ip_google_access = true } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) +} + +func testAccComposerEnvironment_softwareCfg(name, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" + } + software_config { + image_version = "composer-1-airflow-1" + python_version = "3" + } + } +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, network, subnetwork) +} + +func testAccComposerEnvironment_updateOnlyFields(name, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = 
"us-central1-a" + } + software_config { + image_version = "composer-1-airflow-2" + pypi_packages = { + numpy = "" + } + } + } +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, network, subnetwork) +} + +func testAccComposerEnvironment_airflow2SoftwareCfg(name, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" + } + software_config { + image_version = "composer-1-airflow-2" + scheduler_count = 2 + } + } +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, network, subnetwork) +} + +func testAccComposerEnvironmentUpdate_airflow2SoftwareCfg(name, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.self_link + subnetwork = google_compute_subnetwork.test.self_link + zone = "us-central1-a" + } + software_config { + image_version = "composer-1-airflow-2" + scheduler_count = 3 + } + } +} + +// use a separate network to avoid conflicts with other tests running in 
parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, network, subnetwork) } func testAccComposerEnvironment_fixPyPiPackages(environment, network, subnetwork, serviceAccount string) string { @@ -2452,32 +2950,16 @@ resource "google_project_iam_member" "composer-worker" { `, environment, network, subnetwork, serviceAccount) } -func testAccComposerEnvironmentComposer2_empty(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer2_empty(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { - node_config { - service_account = google_service_account.test.name - } software_config { image_version = "composer-2-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -2493,23 +2975,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironmentComposer3_empty(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_empty(name, network, subnetwork string) string { return 
fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2520,10 +2990,8 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id - service_account = google_service_account.test.name } } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -2539,11 +3007,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } func testAccComposerEnvironmentComposer2_usesUnsupportedField(name string) string { - return fmt.Sprintf(` +return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2558,7 +3026,7 @@ resource "google_composer_environment" "test" { } func testAccComposerEnvironmentComposer3_usesUnsupportedField(name string) string { - return fmt.Sprintf(` +return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2574,20 +3042,8 @@ resource "google_composer_environment" "test" { `, name) } -func testAccComposerEnvironmentComposer3_basic(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_basic(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test 
Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2596,7 +3052,6 @@ resource "google_composer_environment" "test" { composer_internal_ipv4_cidr_block = "100.64.128.0/20" network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id - service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" @@ -2612,7 +3067,6 @@ resource "google_composer_environment" "test" { enable_private_environment = true enable_private_builds_only = true } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -2628,23 +3082,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } -func testAccComposerEnvironmentComposer3_update(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_update(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2653,7 +3095,6 @@ resource "google_composer_environment" "test" { network = google_compute_network.test_1.id subnetwork = 
google_compute_subnetwork.test_1.id composer_internal_ipv4_cidr_block = "100.64.128.0/20" - service_account = google_service_account.test.name } software_config { web_server_plugins_mode = "DISABLED" @@ -2670,7 +3111,6 @@ resource "google_composer_environment" "test" { enable_private_environment = false enable_private_builds_only = false } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -2698,36 +3138,22 @@ resource "google_compute_subnetwork" "test_1" { region = "us-central1" network = google_compute_network.test_1.self_link } -`, serviceAccount, name, network, subnetwork, network+"-update", subnetwork+"update") +`, name, network, subnetwork, network + "-update", subnetwork + "update") } -func testAccComposerEnvironmentComposer3_withNetworkAttachment(name, networkAttachment, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_withNetworkAttachment(name, networkAttachment, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { node_config { composer_network_attachment = google_compute_network_attachment.test.id - service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network_attachment" "test" { @@ -2766,23 +3192,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = 
google_compute_network.test.self_link } -`, serviceAccount, name, networkAttachment, network, subnetwork, network, subnetwork) +`, name, networkAttachment, network, subnetwork, network, subnetwork) } -func testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(name, networkAttachment, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(name, networkAttachment, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2790,13 +3204,11 @@ resource "google_composer_environment" "test" { node_config { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id - service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network_attachment" "test" { @@ -2835,23 +3247,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, name, networkAttachment, network, subnetwork, network, subnetwork) +`, name, networkAttachment, network, subnetwork, network, subnetwork) } -func testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(name, networkAttachment, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(name, networkAttachment, network, subnetwork string) string { return 
fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2860,13 +3260,11 @@ resource "google_composer_environment" "test" { network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id composer_network_attachment = google_compute_network_attachment.test.id - service_account = google_service_account.test.name } software_config { image_version = "composer-3-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_compute_network_attachment" "test" { @@ -2889,23 +3287,11 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, name, networkAttachment, network, subnetwork) +`, name, networkAttachment, network, subnetwork) } -func testAccComposerEnvironmentComposer3_databaseRetention(name, network, subnetwork, serviceAccount string) string { +func testAccComposerEnvironmentComposer3_databaseRetention(name, network, subnetwork string) string { return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "test" { name = "%s" region = "us-central1" @@ -2916,7 +3302,6 @@ resource "google_composer_environment" "test" { node_config { 
network = google_compute_network.test.id subnetwork = google_compute_subnetwork.test.id - service_account = google_service_account.test.name } data_retention_config { airflow_metadata_retention_config { @@ -2925,7 +3310,6 @@ resource "google_composer_environment" "test" { } } } - depends_on = [google_project_iam_member.composer-worker] } // use a separate network to avoid conflicts with other tests running in parallel @@ -2941,7 +3325,7 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, serviceAccount, name, network, subnetwork) +`, name, network, subnetwork) } // WARNING: This is not actually a check and is a terrible clean-up step because Composer Environments diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go.tmpl similarity index 67% rename from mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go rename to mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go.tmpl index fc646087ad52..37feb346b799 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go +++ b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_config_map_test.go.tmpl @@ -15,8 +15,7 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -34,9 +33,9 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx }, { Config: 
testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_update(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_composer_user_workloads_config_map.config_map", "data.db_host", "dbhost:5432"), - resource.TestCheckNoResourceAttr("google_composer_user_workloads_config_map.config_map", "data.api_host"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_composer_user_workloads_config_map.config_map", "data.db_host", "dbhost:5432"), + resource.TestCheckNoResourceAttr("google_composer_user_workloads_config_map.config_map", "data.api_host"), ), }, { @@ -52,8 +51,7 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "service_account": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -71,8 +69,8 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx }, { Config: testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_delete(context), - Check: resource.ComposeTestCheckFunc( - testAccComposerUserWorkloadsConfigMapDestroyed(t), + Check: resource.ComposeTestCheckFunc( + testAccComposerUserWorkloadsConfigMapDestroyed(t), ), }, }, @@ -81,30 +79,14 @@ func TestAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicEx func testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_basic(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%{service_account}" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = 
"serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "environment" { name = "tf-test-test-environment%{random_suffix}" region = "us-central1" config { - node_config { - service_account = google_service_account.test.name - } software_config { image_version = "composer-3-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "config_map" { @@ -120,30 +102,14 @@ resource "google_composer_user_workloads_config_map" "config_map" { func testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_update(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%{service_account}" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "environment" { name = "tf-test-test-environment%{random_suffix}" region = "us-central1" config { - node_config { - service_account = google_service_account.test.name - } software_config { image_version = "composer-3-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } resource "google_composer_user_workloads_config_map" "config_map" { @@ -159,30 +125,14 @@ resource "google_composer_user_workloads_config_map" "config_map" { func testAccComposerUserWorkloadsConfigMap_composerUserWorkloadsConfigMapBasicExample_delete(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%{service_account}" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { 
- project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - resource "google_composer_environment" "environment" { name = "tf-test-test-environment%{random_suffix}" region = "us-central1" config { - node_config { - service_account = google_service_account.test.name - } software_config { image_version = "composer-3-airflow-2" } } - depends_on = [google_project_iam_member.composer-worker] } `, context) } diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go deleted file mode 100644 index 367bf3609477..000000000000 --- a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go +++ /dev/null @@ -1,230 +0,0 @@ -package composer_test - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-provider-google/google/services/composer" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" -) - -const testComposerUserWorkloadsSecretPrefix = "tf-test-composer-secret" - -func TestAccComposerUserWorkloadsSecret_basic(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), serviceAccount), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.username"), - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), - ), - }, - { - ResourceName: "google_composer_user_workloads_secret.test", - ImportState: true, - }, - }, - }) -} - -func TestAccComposerUserWorkloadsSecret_update(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) - serviceAccount := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), serviceAccount), - }, - { - Config: testAccComposerUserWorkloadsSecret_update(envName, secretName, serviceAccount), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.email"), - resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), - resource.TestCheckNoResourceAttr("google_composer_user_workloads_secret.test", "data.username"), - ), - }, - }, - }) -} - -func TestAccComposerUserWorkloadsSecret_delete(t *testing.T) { - t.Parallel() - - envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) - secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) - serviceAccount := 
fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), serviceAccount), - }, - { - Config: testAccComposerUserWorkloadsSecret_delete(envName, serviceAccount), - Check: resource.ComposeTestCheckFunc( - testAccComposerUserWorkloadsSecretDestroyed(t), - ), - }, - }, - }) -} - -func testAccComposerUserWorkloadsSecret_basic(envName, secretName, project, region, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - -resource "google_composer_environment" "test" { - name = "%s" - config { - node_config { - service_account = google_service_account.test.name - } - software_config { - image_version = "composer-3-airflow-2" - } - } - depends_on = [google_project_iam_member.composer-worker] -} -resource "google_composer_user_workloads_secret" "test" { - environment = google_composer_environment.test.name - name = "%s" - project = "%s" - region = "%s" - data = { - username: base64encode("username"), - password: base64encode("password"), - } -} -`, serviceAccount, envName, secretName, project, region) -} - -func testAccComposerUserWorkloadsSecret_update(envName, secretName, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - 
display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - -resource "google_composer_environment" "test" { - name = "%s" - config { - node_config { - service_account = google_service_account.test.name - } - software_config { - image_version = "composer-3-airflow-2" - } - } - depends_on = [google_project_iam_member.composer-worker] -} -resource "google_composer_user_workloads_secret" "test" { - environment = google_composer_environment.test.name - name = "%s" - data = { - email: base64encode("email"), - password: base64encode("password"), - } -} -`, serviceAccount, envName, secretName) -} - -func testAccComposerUserWorkloadsSecret_delete(envName, serviceAccount string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_service_account" "test" { - account_id = "%s" - display_name = "Test Service Account for Composer Environment" -} -resource "google_project_iam_member" "composer-worker" { - project = data.google_project.project.project_id - role = "roles/composer.worker" - member = "serviceAccount:${google_service_account.test.email}" -} - -resource "google_composer_environment" "test" { - name = "%s" - config { - node_config { - service_account = google_service_account.test.name - } - software_config { - image_version = "composer-3-airflow-2" - } - } - depends_on = [google_project_iam_member.composer-worker] -} -`, serviceAccount, envName) -} - -func testAccComposerUserWorkloadsSecretDestroyed(t *testing.T) func(s *terraform.State) error { - return func(s *terraform.State) error { - config := acctest.GoogleProviderConfig(t) - - for _, rs := range s.RootModule().Resources { - if rs.Type != "google_composer_user_workloads_secret" { - continue - } - - idTokens := strings.Split(rs.Primary.ID, "/") - if len(idTokens) 
!= 8 { - return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}/userWorkloadsSecrets/{name}", rs.Primary.ID) - } - secretName := &composer.UserWorkloadsSecretName{ - Project: idTokens[1], - Region: idTokens[3], - Environment: idTokens[5], - Secret: idTokens[7], - } - - _, err := config.NewComposerClient(config.UserAgent).Projects.Locations.Environments.UserWorkloadsSecrets.Get(secretName.ResourceName()).Do() - if err == nil { - return fmt.Errorf("secret %s still exists", secretName.ResourceName()) - } - } - - return nil - } -} diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl new file mode 100644 index 000000000000..cb83e29ce0d0 --- /dev/null +++ b/mmv1/third_party/terraform/services/composer/resource_composer_user_workloads_secret_test.go.tmpl @@ -0,0 +1,179 @@ +package composer_test + +import ( + "fmt" + "testing" + "strings" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/services/composer" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +const testComposerUserWorkloadsSecretPrefix = "tf-test-composer-secret" + +func TestAccComposerUserWorkloadsSecret_basic(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + 
Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.username"), + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), + ), + }, + { + ResourceName: "google_composer_user_workloads_secret.test", + ImportState: true, + }, + }, + }) +} + +func TestAccComposerUserWorkloadsSecret_update(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + }, + { + Config: testAccComposerUserWorkloadsSecret_update(envName, secretName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.email"), + resource.TestCheckResourceAttrSet("google_composer_user_workloads_secret.test", "data.password"), + resource.TestCheckNoResourceAttr("google_composer_user_workloads_secret.test", "data.username"), + ), + }, + }, + }) +} + +func TestAccComposerUserWorkloadsSecret_delete(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + secretName := fmt.Sprintf("%s-%d", testComposerUserWorkloadsSecretPrefix, acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerUserWorkloadsSecret_basic(envName, secretName, envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv()), + }, + { + Config: testAccComposerUserWorkloadsSecret_delete(envName), + Check: resource.ComposeTestCheckFunc( + testAccComposerUserWorkloadsSecretDestroyed(t), + ), + }, + }, + }) +} + +func testAccComposerUserWorkloadsSecret_basic(envName, secretName, project, region string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + config { + software_config { + image_version = "composer-3-airflow-2" + } + } +} +resource "google_composer_user_workloads_secret" "test" { + environment = google_composer_environment.test.name + name = "%s" + project = "%s" + region = "%s" + data = { + username: base64encode("username"), + password: base64encode("password"), + } +} +`, envName, secretName, project, region) +} + +func testAccComposerUserWorkloadsSecret_update(envName, secretName string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + config { + software_config { + image_version = "composer-3-airflow-2" + } + } +} +resource "google_composer_user_workloads_secret" "test" { + environment = google_composer_environment.test.name + name = "%s" + data = { + email: base64encode("email"), + password: base64encode("password"), + } +} +`, envName, secretName) +} + +func testAccComposerUserWorkloadsSecret_delete(envName string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + config { + software_config { + image_version = "composer-3-airflow-2" + } + } +} +`, envName) +} + +func testAccComposerUserWorkloadsSecretDestroyed(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + for _, rs := range 
s.RootModule().Resources { + if rs.Type != "google_composer_user_workloads_secret" { + continue + } + + idTokens := strings.Split(rs.Primary.ID, "/") + if len(idTokens) != 8 { + return fmt.Errorf("Invalid ID %q, expected format projects/{project}/regions/{region}/environments/{environment}/userWorkloadsSecrets/{name}", rs.Primary.ID) + } + secretName := &composer.UserWorkloadsSecretName{ + Project: idTokens[1], + Region: idTokens[3], + Environment: idTokens[5], + Secret: idTokens[7], + } + + _, err := config.NewComposerClient(config.UserAgent).Projects.Locations.Environments.UserWorkloadsSecrets.Get(secretName.ResourceName()).Do() + if err == nil { + return fmt.Errorf("secret %s still exists", secretName.ResourceName()) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go b/mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go rename to mmv1/third_party/terraform/services/compute/compute_instance_helpers_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go b/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go index bd4fd55f0b58..56b6b27a1d08 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group_test.go @@ -61,11 +61,6 @@ func testAccDataSourceComputeNetworkEndpointGroupCheck(data_source_name string, ) } } - - if v, ok := ds_attr["generated_id"]; !ok || v == "" { - return fmt.Errorf("generated_id is not set") - } - return nil } } diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go.tmpl 
similarity index 100% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_images.go.tmpl diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.tmpl index 9f28b771750d..cd846fe99d08 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.tmpl @@ -204,7 +204,7 @@ func dataSourceGoogleComputeInstanceRead(d *schema.ResourceData, meta interface{ } if err := d.Set("key_revocation_action_type", instance.KeyRevocationActionType); err != nil { return fmt.Errorf("Error setting key_revocation_action_type: %s", err) - } + } if err := d.Set("creation_timestamp", instance.CreationTimestamp); err != nil { return fmt.Errorf("Error setting creation_timestamp: %s", err) } diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go.tmpl index 2603417e79f5..1bf600dceffa 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_test.go.tmpl @@ -8,10 +8,10 @@ import ( "strings" "testing" - 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccDataSourceGoogleComputeInstanceGroup_basic(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go.tmpl index 5b7c9cd9b767..2a738c5614e0 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_test.go.tmpl @@ -223,4 +223,4 @@ resource "google_compute_network_attachment" "net_attar_default" { connection_preference = "ACCEPT_AUTOMATIC" } `, instanceName, instanceName, instanceName, instanceName) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go index 4558e116d16a..161365db1943 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go @@ -2,127 +2,82 @@ package compute import ( "fmt" - "regexp" - "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/tpgresource" 
transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -var ( - computeInterconnectLocationIdTemplate = "projects/%s/global/interconnectlocations/%s" - computeInterconnectLocationLinkRegex = regexp.MustCompile(`projects/(.+)/global/interconnectlocations/(.+)$`) -) - -type ComputeInterconnectLocationId struct { - Project string - Name string -} - -func (s ComputeInterconnectLocationId) CanonicalId() string { - return fmt.Sprintf(computeInterconnectLocationIdTemplate, s.Project, s.Name) -} - -// ParseComputeInterconnectLocationId parses IDs of the form: -// - projects/{project}/global/interconnectlocations/{name} -// - {project}/{name} -// - {name} (requires config.Project) -func ParseComputeInterconnectLocationId(id string, config *transport_tpg.Config) (*ComputeInterconnectLocationId, error) { - var parts []string - if computeInterconnectLocationLinkRegex.MatchString(id) { - parts = computeInterconnectLocationLinkRegex.FindStringSubmatch(id) - return &ComputeInterconnectLocationId{ - Project: parts[1], - Name: parts[2], - }, nil - } else { - parts = strings.Split(id, "/") - } - - if len(parts) == 2 { - return &ComputeInterconnectLocationId{ - Project: parts[0], - Name: parts[1], - }, nil - } else if len(parts) == 1 { - if config.Project == "" { - return nil, fmt.Errorf("The default project for the provider must be set when using the `{name}` id format.") - } - return &ComputeInterconnectLocationId{ - Project: config.Project, - Name: parts[0], - }, nil - } - - return nil, fmt.Errorf("Invalid interconnect location id. 
Expecting resource link, `{project}/{name}` or `{name}` format.") -} - func DataSourceGoogleComputeInterconnectLocation() *schema.Resource { return &schema.Resource{ Read: dataSourceGoogleComputeInterconnectLocationRead, - Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, }, - "project": { Type: schema.TypeString, Optional: true, Computed: true, }, - - "self_link": { + "description": { Type: schema.TypeString, Computed: true, }, - - "description": { + "self_link": { Type: schema.TypeString, Computed: true, }, - "peeringdb_facility_id": { Type: schema.TypeString, Computed: true, }, - "address": { Type: schema.TypeString, Computed: true, }, - "facility_provider": { Type: schema.TypeString, Computed: true, }, - "facility_provider_facility_id": { Type: schema.TypeString, Computed: true, }, - + "status": { + Type: schema.TypeString, + Computed: true, + }, "continent": { Type: schema.TypeString, Computed: true, }, - "city": { Type: schema.TypeString, Computed: true, }, - "availability_zone": { Type: schema.TypeString, Computed: true, }, - - "status": { - Type: schema.TypeString, + "supports_pzs": { + Type: schema.TypeBool, + Computed: true, + }, + "available_features": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "available_link_types": { + Type: schema.TypeList, Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, }, }, } @@ -142,48 +97,26 @@ func dataSourceGoogleComputeInterconnectLocationRead(d *schema.ResourceData, met name := d.Get("name").(string) - id := fmt.Sprintf("projects/%s/global/interconnectlocations/%s", project, name) - location, err := config.NewComputeClient(userAgent).InterconnectLocations.Get(project, name).Do() if err != nil { - return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("InterconnectLocation Not Found : %s", name), id) + return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("Interconnect 
Location %q not found", name), "") } d.SetId(location.Name) - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - if err := d.Set("self_link", location.SelfLink); err != nil { - return fmt.Errorf("Error setting self_link: %s", err) - } - if err := d.Set("description", location.Description); err != nil { - return fmt.Errorf("Error setting description: %s", err) - } - if err := d.Set("peeringdb_facility_id", location.PeeringdbFacilityId); err != nil { - return fmt.Errorf("Error setting peeringdb_facility_id: %s", err) - } - if err := d.Set("address", location.Address); err != nil { - return fmt.Errorf("Error setting address: %s", err) - } - if err := d.Set("facility_provider", location.FacilityProvider); err != nil { - return fmt.Errorf("Error setting facility_provider: %s", err) - } - if err := d.Set("facility_provider_facility_id", location.FacilityProviderFacilityId); err != nil { - return fmt.Errorf("Error setting facility_provider_facility_id: %s", err) - } - if err := d.Set("continent", location.Continent); err != nil { - return fmt.Errorf("Error setting continent: %s", err) - } - if err := d.Set("city", location.City); err != nil { - return fmt.Errorf("Error setting city: %s", err) - } - if err := d.Set("availability_zone", location.AvailabilityZone); err != nil { - return fmt.Errorf("Error setting availability_zone: %s", err) - } - if err := d.Set("status", location.Status); err != nil { - return fmt.Errorf("Error setting status: %s", err) - } + d.Set("project", project) + d.Set("description", location.Description) + d.Set("self_link", location.SelfLink) + d.Set("peeringdb_facility_id", location.PeeringdbFacilityId) + d.Set("address", location.Address) + d.Set("facility_provider", location.FacilityProvider) + d.Set("facility_provider_facility_id", location.FacilityProviderFacilityId) + d.Set("status", location.Status) + d.Set("continent", location.Continent) + d.Set("city", location.City) + 
d.Set("availability_zone", location.AvailabilityZone) + d.Set("supports_pzs", location.SupportsPzs) + d.Set("available_features", location.AvailableFeatures) + d.Set("available_link_types", location.AvailableLinkTypes) - d.SetId(id) return nil } diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go index 0d5f07d12ac5..9e1e1f1a758d 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go @@ -1,99 +1,32 @@ -package compute_test +package compute import ( - "fmt" "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/services/compute" - "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) -var interconnectLoc = "z2z-us-west8-zone2-ncphxk-z" - -func testAccDataSourceCheckInterconnectLocation() func(s *terraform.State) error { - return func(s *terraform.State) error { - data_source_name := "data.google_compute_interconnect_location.my_location" - ds, ok := s.RootModule().Resources[data_source_name] - if !ok { - return fmt.Errorf("root module has no resource called %s", data_source_name) - } - - ds_attr := ds.Primary.Attributes - expected := map[string]string{ - "name": interconnectLoc, - "description": "Zakim-to-Zakim location", - "facility_provider": "Google", - } - for attr, expect_value := range expected { - if ds_attr[attr] != expect_value { - return fmt.Errorf("%s is %s; want %s", attr, ds_attr[attr], expect_value) - } - } - - if ds_attr["self_link"] == "" { - 
return fmt.Errorf("self_link is not set") - } - - return nil - } -} - func TestAccDataSourceGoogleComputeInterconnectLocation_basic(t *testing.T) { - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + t.Parallel() + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, Steps: []resource.TestStep{ { - Config: testAccDataSourceGoogleComputeInterconnectLocationConfig(interconnectLoc), + Config: testAccDataSourceGoogleComputeInterconnectLocation_basic(), Check: resource.ComposeTestCheckFunc( - testAccDataSourceCheckInterconnectLocation(), + resource.TestCheckResourceAttrSet("data.google_compute_interconnect_location.iad_zone1", "self_link"), ), }, }, }) } -func testAccDataSourceGoogleComputeInterconnectLocationConfig(locationName string) string { - return fmt.Sprintf(` -data "google_compute_interconnect_location" "my_location" { - name = "%s" -} -`, locationName) +func testAccDataSourceGoogleComputeInterconnectLocation_basic() string { + return ` +data "google_compute_interconnect_location" "iad_zone1" { + name = "iad-zone1-1" } - -func TestParseComputeInterconnectLocationId(t *testing.T) { - config := &transport.Config{Project: "my-project"} - - cases := []struct { - id string - wantProj string - wantName string - wantErr bool - }{ - {"projects/my-project/global/interconnectlocations/z2z-us-west8-zone2-ncphxk-z", "my-project", interconnectLoc, false}, - {"my-project/z2z-us-west8-zone2-ncphxk-z", "my-project", interconnectLoc, false}, - {interconnectLoc, "my-project", interconnectLoc, false}, - {"invalid/format/extra", "", "", true}, - } - - for _, tc := range cases { - got, err := compute.ParseComputeInterconnectLocationId(tc.id, config) - if tc.wantErr { - if err == nil { - t.Errorf("ParseComputeInterconnectLocationId(%q) expected error, got nil", tc.id) - } - continue - } - if err != nil { - 
t.Errorf("ParseComputeInterconnectLocationId(%q) unexpected error: %v", tc.id, err) - continue - } - if got.Project != tc.wantProj || got.Name != tc.wantName { - t.Errorf("ParseComputeInterconnectLocationId(%q) = (%q, %q), want (%q, %q)", tc.id, got.Project, got.Name, tc.wantProj, tc.wantName) - } - } +` } diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go index 9c7ebcd5fd7d..a7c2ca017023 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go @@ -113,20 +113,20 @@ func dataSourceGoogleComputeInterconnectLocationsRead(d *schema.ResourceData, me var locations []map[string]interface{} for _, location := range list.Items { locations = append(locations, map[string]interface{}{ - "name": location.Name, - "description": location.Description, - "self_link": location.SelfLink, - "peeringdb_facility_id": location.PeeringdbFacilityId, - "address": location.Address, - "facility_provider": location.FacilityProvider, - "facility_provider_facility_id": location.FacilityProviderFacilityId, - "status": location.Status, - "continent": location.Continent, - "city": location.City, - "availability_zone": location.AvailabilityZone, - "supports_pzs": location.SupportsPzs, - "available_features": location.AvailableFeatures, - "available_link_types": location.AvailableLinkTypes, + "name": location.Name, + "description": location.Description, + "self_link": location.SelfLink, + "peeringdb_facility_id": location.PeeringdbFacilityId, + "address": location.Address, + "facility_provider": location.FacilityProvider, + "facility_provider_facility_id": location.FacilityProviderFacilityId, + "status": location.Status, + "continent": location.Continent, + "city": location.City, + 
"availability_zone": location.AvailabilityZone, + "supports_pzs": location.SupportsPzs, + "available_features": location.AvailableFeatures, + "available_link_types": location.AvailableLinkTypes, }) } if err := d.Set("locations", locations); err != nil { diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go index 67d00491521e..ff1280eed849 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go @@ -1,18 +1,17 @@ -package compute_test +package compute import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) func TestAccDataSourceGoogleComputeInterconnectLocations_basic(t *testing.T) { t.Parallel() - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, Steps: []resource.TestStep{ { Config: testAccDataSourceGoogleComputeInterconnectLocations_basic(), diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go.tmpl index 5c06a0c68572..0800230929f7 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go +++ 
b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network.go.tmpl @@ -31,8 +31,8 @@ func DataSourceGoogleComputeNetwork() *schema.Resource { // Deprecated in favor of network_id "numeric_id": { - Type: schema.TypeString, - Computed: true, + Type: schema.TypeString, + Computed: true, Deprecated: "`numeric_id` is deprecated and will be removed in a future major release. Use `network_id` instead.", }, diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go deleted file mode 100644 index 94043e6780e8..000000000000 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment.go +++ /dev/null @@ -1,70 +0,0 @@ -package compute - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleComputeNetworkAttachment() *schema.Resource { - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceComputeNetworkAttachment().Schema) - - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "region") - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: dataSourceComputeNetworkAttachmentRead, - Schema: dsSchema, - } -} - -func dataSourceComputeNetworkAttachmentRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error fetching project: %s", err) - } - - name := d.Get("name").(string) - region := d.Get("region").(string) - - id := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", project, region, name) - d.SetId(id) - - err = resourceComputeNetworkAttachmentRead(d, meta) - if err != nil { 
- return fmt.Errorf("Error reading Network Attachment %q: %s", id, err) - } - - // normalize fields to ensure they are in the correct format - // the API returns a full URL here for fields such as `network` and `region` and not just the resource name - if v, ok := d.Get("network").(string); ok && v != "" { - d.Set("network", tpgresource.GetResourceNameFromSelfLink(v)) - } - - if v, ok := d.Get("region").(string); ok && v != "" { - d.Set("region", tpgresource.GetResourceNameFromSelfLink(v)) - } - - if v, ok := d.Get("subnetworks").([]interface{}); ok && len(v) > 0 { - var subnetworks []string - for _, s := range v { - subnetworks = append(subnetworks, tpgresource.GetResourceNameFromSelfLink(s.(string))) - } - if err := d.Set("subnetworks", subnetworks); err != nil { - return fmt.Errorf("Error setting subnetworks: %s", err) - } - } - - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go deleted file mode 100644 index 9b584cd54ed6..000000000000 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_network_attachment_test.go +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 -package compute_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDataSourceComputeNetworkAttachment_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkAttachment_basic(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "name", fmt.Sprintf("tf-test-basic-network-attachment-%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "region", "us-central1"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "description", "my basic network attachment"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "connection_preference", "ACCEPT_AUTOMATIC"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "subnetworks.#", "1"), - ), - }, - }, - }) -} - -func TestAccDataSourceComputeNetworkAttachment_full(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceComputeNetworkAttachment_full(context), - Check: 
resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "name", fmt.Sprintf("tf-test-basic-network-attachment-%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "region", "us-central1"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "description", "basic network attachment description"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "connection_preference", "ACCEPT_MANUAL"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "subnetworks.#", "1"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "subnetworks.0", fmt.Sprintf("tf-test-basic-subnetwork1-%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_accept_lists.#", "2"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_accept_lists.0", fmt.Sprintf("tf-test-prj-accept1-%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_accept_lists.1", fmt.Sprintf("tf-test-prj-accept2-%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_reject_lists.#", "2"), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_reject_lists.0", fmt.Sprintf("tf-test-prj-reject1-%s", context["random_suffix"])), - resource.TestCheckResourceAttr("data.google_compute_network_attachment.default", "producer_reject_lists.1", fmt.Sprintf("tf-test-prj-reject2-%s", context["random_suffix"])), - ), - }, - }, - }) -} - -func testAccComputeNetworkAttachment_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "default" { - name = 
"tf-test-basic-network%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - name = "tf-test-basic-subnetwork%{random_suffix}" - region = "us-central1" - - network = google_compute_network.default.id - ip_cidr_range = "10.0.0.0/16" -} - -resource "google_compute_network_attachment" "default" { - name = "tf-test-basic-network-attachment-%{random_suffix}" - region = "us-central1" - description = "my basic network attachment" - - subnetworks = [google_compute_subnetwork.default.id] - connection_preference = "ACCEPT_AUTOMATIC" -} - -data "google_compute_network_attachment" "default" { - name = google_compute_network_attachment.default.name - region = google_compute_network_attachment.default.region - project = google_compute_network_attachment.default.project - depends_on = [ - google_compute_network.default, - google_compute_subnetwork.default, - google_compute_network_attachment.default, - ] -} -`, context) -} - -func testAccDataSourceComputeNetworkAttachment_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network_attachment" "default" { - name = "tf-test-basic-network-attachment-%{random_suffix}" - region = "us-central1" - description = "basic network attachment description" - connection_preference = "ACCEPT_MANUAL" - - subnetworks = [ - google_compute_subnetwork.net1.self_link - ] - - producer_accept_lists = [ - google_project.accepted_producer_project1.project_id, - google_project.accepted_producer_project2.project_id - ] - - producer_reject_lists = [ - google_project.rejected_producer_project1.project_id, - google_project.rejected_producer_project2.project_id - ] -} - -resource "google_compute_network" "default" { - name = "tf-test-basic-network-%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "net1" { - name = "tf-test-basic-subnetwork1-%{random_suffix}" - region = "us-central1" - - network = 
google_compute_network.default.id - ip_cidr_range = "10.0.0.0/16" -} - -resource "google_compute_subnetwork" "net2" { - name = "tf-test-basic-subnetwork2-%{random_suffix}" - region = "us-central1" - - network = google_compute_network.default.id - ip_cidr_range = "10.1.0.0/16" -} - -resource "google_project" "rejected_producer_project1" { - project_id = "tf-test-prj-reject1-%{random_suffix}" - name = "tf-test-prj-reject1-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "rejected_producer_project2" { - project_id = "tf-test-prj-reject2-%{random_suffix}" - name = "tf-test-prj-reject2-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "accepted_producer_project1" { - project_id = "tf-test-prj-accept1-%{random_suffix}" - name = "tf-test-prj-accept1-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "accepted_producer_project2" { - project_id = "tf-test-prj-accept2-%{random_suffix}" - name = "tf-test-prj-accept2-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -data "google_compute_network_attachment" "default" { - name = google_compute_network_attachment.default.name - region = google_compute_network_attachment.default.region - project = google_compute_network_attachment.default.project - depends_on = [ - google_compute_network_attachment.default, - google_compute_network.default, - google_compute_subnetwork.net1, - google_compute_subnetwork.net2, - google_project.accepted_producer_project1, - google_project.accepted_producer_project2, - google_project.rejected_producer_project1, - google_project.rejected_producer_project2, - ] -} -`, context) -} diff --git 
a/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go.tmpl index 24bbde48d011..0f30c8d653fe 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group_test.go.tmpl @@ -2,10 +2,10 @@ package compute_test import ( "fmt" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceRegionInstanceGroup(t *testing.T) { @@ -14,7 +14,7 @@ func TestAccDataSourceRegionInstanceGroup(t *testing.T) { t.Parallel() name := "tf-test-" + acctest.RandString(t, 6) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.tmpl index bfd165e60c5c..c86f5215b5a4 100644 --- 
a/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.tmpl @@ -3,9 +3,9 @@ package compute import ( "fmt" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceGoogleComputeResourcePolicy() *schema.Resource { diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go index 1c4444fdb391..117928d98c5c 100644 --- a/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_subnetworks.go @@ -52,12 +52,6 @@ func DataSourceGoogleComputeSubnetworks() *schema.Resource { "network_self_link": { Type: schema.TypeString, Computed: true, - // TODO: remove in next major release (7.0.0) also from docs and implementation below - Deprecated: "Use `network_name` instead. 
This field will be removed in a future major release.", - }, - "network_name": { - Type: schema.TypeString, - Computed: true, }, "private_ip_google_access": { Type: schema.TypeBool, @@ -107,7 +101,6 @@ func dataSourceGoogleComputeSubnetworksRead(d *schema.ResourceData, meta interfa "name": subnet.Name, "network_self_link": filepath.Base(subnet.Network), "network": subnet.Network, - "network_name": filepath.Base(subnet.Network), "private_ip_google_access": subnet.PrivateIpGoogleAccess, "self_link": subnet.SelfLink, }) diff --git a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl deleted file mode 100644 index b08deb9e3c65..000000000000 --- a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl +++ /dev/null @@ -1,187 +0,0 @@ -package compute - -import ( - "context" - "fmt" - "strconv" - - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-log/tflog" - {{ if eq $.TargetVersionName `ga` }} - "google.golang.org/api/compute/v1" - {{- else }} - compute "google.golang.org/api/compute/v0.beta" - {{- end }} - - "github.com/hashicorp/terraform-provider-google/google/fwmodels" - "github.com/hashicorp/terraform-provider-google/google/fwresource" - "github.com/hashicorp/terraform-provider-google/google/fwtransport" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -// Ensure the implementation satisfies the expected interfaces. -var ( - _ datasource.DataSource = &ComputeNetworkFWDataSource{} - _ datasource.DataSourceWithConfigure = &ComputeNetworkFWDataSource{} -) - -// NewComputeNetworkFWDataSource is a helper function to simplify the provider implementation. 
-func NewComputeNetworkFWDataSource() datasource.DataSource { - return &ComputeNetworkFWDataSource{} -} - -// ComputeNetworkFWDataSource is the data source implementation. -type ComputeNetworkFWDataSource struct { - client *compute.Service - providerConfig *transport_tpg.Config -} - -type ComputeNetworkModel struct { - Id types.String `tfsdk:"id"` - Project types.String `tfsdk:"project"` - Name types.String `tfsdk:"name"` - Description types.String `tfsdk:"description"` - NetworkId types.Int64 `tfsdk:"network_id"` - NumericId types.String `tfsdk:"numeric_id"` - GatewayIpv4 types.String `tfsdk:"gateway_ipv4"` - InternalIpv6Range types.String `tfsdk:"internal_ipv6_range"` - SelfLink types.String `tfsdk:"self_link"` - // NetworkProfile types.String `tfsdk:"network_profile"` - // SubnetworksSelfLinks types.List `tfsdk:"subnetworks_self_links"` -} - -// Metadata returns the data source type name. -func (d *ComputeNetworkFWDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_fw_compute_network" -} - -func (d *ComputeNetworkFWDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - p, ok := req.ProviderData.(*transport_tpg.Config) - if !ok { - resp.Diagnostics.AddError( - "Unexpected Data Source Configure Type", - fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - return - } - - d.client = p.NewComputeClient(p.UserAgent) - if resp.Diagnostics.HasError() { - return - } - d.providerConfig = p -} - -// Schema defines the schema for the data source. 
-func (d *ComputeNetworkFWDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "A data source to get network details.", - - Attributes: map[string]schema.Attribute{ - "project": schema.StringAttribute{ - Description: `The project name.`, - MarkdownDescription: `The project name.`, - Optional: true, - }, - "name": schema.StringAttribute{ - Description: `The name of the Compute network.`, - MarkdownDescription: `The name of the Compute network.`, - Required: true, - }, - "description": schema.StringAttribute{ - Description: `The description of the network.`, - MarkdownDescription: `The description of the network.`, - Computed: true, - }, - "network_id": schema.Int64Attribute{ - Description: `The network ID.`, - MarkdownDescription: `The network ID.`, - Computed: true, - }, - "numeric_id": schema.StringAttribute{ - Description: `The numeric ID of the network. Deprecated in favor of network_id.`, - MarkdownDescription: `The numeric ID of the network. Deprecated in favor of network_id.`, - Computed: true, - DeprecationMessage: "`numeric_id` is deprecated and will be removed in a future major release. Use `network_id` instead.", - }, - "gateway_ipv4": schema.StringAttribute{ - Description: `The gateway address for default routing out of the network.`, - MarkdownDescription: `The gateway address for default routing out of the network.`, - Computed: true, - }, - "internal_ipv6_range": schema.StringAttribute{ - Description: `The internal ipv6 address range of the network.`, - MarkdownDescription: `The internal ipv6 address range of the network.`, - Computed: true, - }, - "self_link": schema.StringAttribute{ - Description: `The network self link.`, - MarkdownDescription: `The network self link.`, - Computed: true, - }, - // This is included for backwards compatibility with the original, SDK-implemented data source. 
- "id": schema.StringAttribute{ - Description: "Project identifier", - MarkdownDescription: "Project identifier", - Computed: true, - }, - }, - } -} - -// Read refreshes the Terraform state with the latest data. -func (d *ComputeNetworkFWDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data ComputeNetworkModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Use provider_meta to set User-Agent - d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) - - project := fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - // GET Request - clientResp, err := d.client.Networks.Get(project.ValueString(), data.Name.ValueString()).Do() - if err != nil { - fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceComputeNetwork %q", data.Name.ValueString()), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - } - - tflog.Trace(ctx, "read compute network data source") - - // Put data in model - id := fmt.Sprintf("projects/%s/global/networks/%s", project.ValueString(), clientResp.Name) - data.Id = types.StringValue(id) - data.Description = types.StringValue(clientResp.Description) - data.NetworkId = types.Int64Value(int64(clientResp.Id)) - data.NumericId = types.StringValue(strconv.Itoa(int(clientResp.Id))) - data.GatewayIpv4 = types.StringValue(clientResp.GatewayIPv4) - data.InternalIpv6Range = types.StringValue(clientResp.InternalIpv6Range) - data.SelfLink = types.StringValue(clientResp.SelfLink) - - // Save data into 
Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} diff --git a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go deleted file mode 100644 index ae82326c793d..000000000000 --- a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package compute_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" -) - -func TestAccDataSourceGoogleFWNetwork(t *testing.T) { - t.Parallel() - - networkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataSourceGoogleNetworkFWConfig(networkName), - Check: resource.ComposeTestCheckFunc( - testAccDataSourceGoogleFWNetworkCheck("data.google_fw_compute_network.my_network", "google_compute_network.foobar"), - ), - }, - }, - }) -} - -func testAccDataSourceGoogleFWNetworkCheck(data_source_name string, resource_name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - ds, ok := s.RootModule().Resources[data_source_name] - if !ok { - return fmt.Errorf("root module has no resource called %s", data_source_name) - } - - rs, ok := s.RootModule().Resources[resource_name] - if !ok { - return fmt.Errorf("can't find %s in state", resource_name) - } - - ds_attr := ds.Primary.Attributes - rs_attr := rs.Primary.Attributes - network_attrs_to_test := []string{ - "id", - "name", - "network_id", - "numeric_id", - "description", - "internal_ipv6_range", - } 
- - for _, attr_to_check := range network_attrs_to_test { - if ds_attr[attr_to_check] != rs_attr[attr_to_check] { - return fmt.Errorf( - "%s is %s; want %s", - attr_to_check, - ds_attr[attr_to_check], - rs_attr[attr_to_check], - ) - } - } - - if !tpgresource.CompareSelfLinkOrResourceName("", ds_attr["self_link"], rs_attr["self_link"], nil) && ds_attr["self_link"] != rs_attr["self_link"] { - return fmt.Errorf("self link does not match: %s vs %s", ds_attr["self_link"], rs_attr["self_link"]) - } - - return nil - } -} - -func testAccDataSourceGoogleNetworkFWConfig(name string) string { - return fmt.Sprintf(` -resource "google_compute_network" "foobar" { - name = "%s" - description = "my-description" - enable_ula_internal_ipv6 = true - auto_create_subnetworks = false -} - -data "google_fw_compute_network" "my_network" { - name = google_compute_network.foobar.name -} -`, name) -} diff --git a/mmv1/third_party/terraform/services/compute/image.go b/mmv1/third_party/terraform/services/compute/image.go index 7b020823db98..e35942046e24 100644 --- a/mmv1/third_party/terraform/services/compute/image.go +++ b/mmv1/third_party/terraform/services/compute/image.go @@ -13,9 +13,8 @@ import ( ) const ( - resolveImageFamilyRegex = "[-_a-zA-Z0-9]*" - resolveImageImageRegex = "[-_a-zA-Z0-9]*" - resolveImageUniverseRegex = "[-_a-zA-Z0-9.]*" + resolveImageFamilyRegex = "[-_a-zA-Z0-9]*" + resolveImageImageRegex = "[-_a-zA-Z0-9]*" ) var ( @@ -29,7 +28,6 @@ var ( resolveImageFamily = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageFamilyRegex)) resolveImageImage = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageImageRegex)) resolveImageLink = regexp.MustCompile(fmt.Sprintf("^https://www.googleapis.com/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", verify.ProjectRegex, resolveImageImageRegex)) - resolveImageUniverseLink = regexp.MustCompile(fmt.Sprintf("^https://compute.%s/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", resolveImageUniverseRegex, verify.ProjectRegex, 
resolveImageImageRegex)) windowsSqlImage = regexp.MustCompile("^sql-(?:server-)?([0-9]{4})-([a-z]+)-windows-(?:server-)?([0-9]{4})(?:-r([0-9]+))?-dc-v[0-9]+$") canonicalUbuntuLtsImage = regexp.MustCompile("^ubuntu-(minimal-)?([0-9]+)(?:.*(arm64|amd64))?.*$") @@ -111,8 +109,6 @@ func ResolveImage(c *transport_tpg.Config, project, name, userAgent string) (str switch { case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz return name, nil - case resolveImageUniverseLink.MatchString(name): // https://compute.xyz/compute/[a-z0-9]+/projects/xyz/global/images/xyz - return name, nil case resolveImageProjectImage.MatchString(name): // projects/xyz/global/images/xyz res := resolveImageProjectImage.FindStringSubmatch(name) if err := sanityTestRegexMatches(2, res, "project image", name); err != nil { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go index dd2ff2fea9f3..f515811428fd 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go @@ -6,7 +6,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeBackendBucket_basicModified(t *testing.T) { @@ -206,35 +205,6 @@ func TestAccComputeBackendBucket_withCdnCacheMode_update(t *testing.T) { }) } -func TestAccComputeBackendBucket_withTags(t *testing.T) { - t.Parallel() - - org := envvar.GetTestOrgFromEnv(t) - - backendName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - storageName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-bb-tagkey", "organizations/"+org, 
make(map[string]interface{})) - sharedTagkey, _ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-bb-tagvalue", sharedTagkey, org) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeBackendBucketDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeBackendBucket_withTags(backendName, storageName, tagKeyResult["name"], tagValueResult["name"]), - }, - { - ResourceName: "google_compute_backend_bucket.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"params"}, - }, - }, - }) -} - func testAccComputeBackendBucket_basic(backendName, storageName string) string { return fmt.Sprintf(` resource "google_compute_backend_bucket" "foobar" { @@ -444,22 +414,3 @@ resource "google_storage_bucket" "bucket" { } `, backendName, default_ttl, storageName) } - -func testAccComputeBackendBucket_withTags(backendName, storageName string, tagKey string, tagValue string) string { - return fmt.Sprintf(` -resource "google_compute_backend_bucket" "foobar" { - name = "%s" - bucket_name = google_storage_bucket.bucket_one.name - params { - resource_manager_tags = { - "%s" = "%s" - } - } -} - -resource "google_storage_bucket" "bucket_one" { - name = "%s" - location = "EU" -} -`, backendName, tagKey, tagValue, storageName) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl index 5bb447cf1b70..c7a428667fc3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl @@ -2,15 +2,10 @@ package compute_test import ( "fmt" -{{ if ne $.TargetVersionName `ga` -}} - 
"regexp" -{{- end }} "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeBackendService_basic(t *testing.T) { @@ -151,18 +146,17 @@ func TestAccComputeBackendService_withBackendAndIAP(t *testing.T) { }) } -func TestAccComputeBackendService_withBackendAndPreferenceInternalManaged(t *testing.T) { +func TestAccComputeBackendService_withBackendAndPreference(t *testing.T) { t.Parallel() - im_suffix := fmt.Sprintf("im-%s", acctest.RandString(t, 10)) - + randomSuffix := acctest.RandString(t, 10) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeBackendService_withBackendAndPreference(im_suffix, "INTERNAL_MANAGED", "DEFAULT", 10), + Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_MANAGED", "DEFAULT", 10), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -170,29 +164,15 @@ func TestAccComputeBackendService_withBackendAndPreferenceInternalManaged(t *tes ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withBackendAndPreference(im_suffix, "INTERNAL_MANAGED", "PREFERRED", 20), + Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_MANAGED", "PREFERRED", 20), }, { ResourceName: "google_compute_backend_service.lipsum", ImportState: true, ImportStateVerify: true, }, - }, - }) -} - -func TestAccComputeBackendService_withBackendAndPreferenceInternalSelfManaged(t *testing.T) { - t.Parallel() - - ism_suffix := fmt.Sprintf("ism-%s", acctest.RandString(t, 10)) - - 
acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ { - Config: testAccComputeBackendService_withBackendAndPreference(ism_suffix, "INTERNAL_SELF_MANAGED", "DEFAULT", 10), + Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_SELF_MANAGED", "DEFAULT", 10), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -200,28 +180,15 @@ func TestAccComputeBackendService_withBackendAndPreferenceInternalSelfManaged(t ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withBackendAndPreference(ism_suffix, "INTERNAL_SELF_MANAGED", "PREFERRED", 20), + Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "INTERNAL_SELF_MANAGED", "PREFERRED", 20), }, { ResourceName: "google_compute_backend_service.lipsum", ImportState: true, ImportStateVerify: true, }, - }, - }) -} - -func TestAccComputeBackendService_withBackendAndPreferenceExternalManaged(t *testing.T) { - t.Parallel() - em_suffix := fmt.Sprintf("em-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeBackendService_withBackendAndPreference(em_suffix, "EXTERNAL_MANAGED", "DEFAULT", 10), + { + Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "EXTERNAL_MANAGED", "DEFAULT", 10), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -229,7 +196,7 @@ func TestAccComputeBackendService_withBackendAndPreferenceExternalManaged(t *tes ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withBackendAndPreference(em_suffix, "EXTERNAL_MANAGED", 
"PREFERRED", 20), + Config: testAccComputeBackendService_withBackendAndPreference(randomSuffix, "EXTERNAL_MANAGED", "PREFERRED", 20), }, { ResourceName: "google_compute_backend_service.lipsum", @@ -822,23 +789,8 @@ func TestAccComputeBackendService_withLogConfig(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - }, - }) -} - -func TestAccComputeBackendService_withLogConfigMode(t *testing.T) { - t.Parallel() - - serviceName := fmt.Sprintf("tf-test-lc-%s", acctest.RandString(t, 10)) - checkName := fmt.Sprintf("tf-test-lc-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ { - Config: testAccComputeBackendService_withLogConfigMode(serviceName, checkName, "INCLUDE_ALL_OPTIONAL", true), + Config: testAccComputeBackendService_withLogConfig3(serviceName, checkName, "INCLUDE_ALL_OPTIONAL", true), }, { ResourceName: "google_compute_backend_service.foobar", @@ -846,7 +798,7 @@ func TestAccComputeBackendService_withLogConfigMode(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccComputeBackendService_withLogConfigMode(serviceName, checkName, "EXCLUDE_ALL_OPTIONAL", true), + Config: testAccComputeBackendService_withLogConfig3(serviceName, checkName, "EXCLUDE_ALL_OPTIONAL", true), }, { ResourceName: "google_compute_backend_service.foobar", @@ -1120,79 +1072,6 @@ func TestAccComputeBackendService_backendServiceMaxDuration(t *testing.T) { }) } -{{ if ne $.TargetVersionName `ga` -}} -func TestAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(t *testing.T) { - t.Parallel() - - namePrefix := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - 
CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_DISABLED", 0.5), - }, - { - ResourceName: "google_compute_backend_service.nptlbtp", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_SPILL_CROSS_ZONE", 0.6), - }, - { - ResourceName: "google_compute_backend_service.nptlbtp", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_STAY_WITHIN_ZONE", 0.2), - }, - { - ResourceName: "google_compute_backend_service.nptlbtp", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, "ZONAL_AFFINITY_STAY_WITHIN_ZONE", 1.001), - ExpectError: regexp.MustCompile("Must be less than or equal to 1.0"), - }, - }, - }) -} -{{- end }} - -func TestAccComputeBackendService_resourceManagerTags(t *testing.T) { - t.Parallel() - - org := envvar.GetTestOrgFromEnv(t) - - serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-bs-tagkey", "organizations/"+org, make(map[string]interface{})) - sharedTagkey,_ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-bs-tagvalue", sharedTagkey, org) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeBackendService_withTags(serviceName, checkName, tagKeyResult["name"], 
tagValueResult["name"]), - }, - { - ResourceName: "google_compute_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"params"}, - }, - }, - }) -} - func testAccComputeBackendService_trafficDirectorBasic(serviceName, checkName string) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { @@ -2353,7 +2232,7 @@ resource "google_compute_http_health_check" "zero" { `, serviceName, enabled, checkName) } -func testAccComputeBackendService_withLogConfigMode(serviceName, checkName, mode string, enabled bool) string { +func testAccComputeBackendService_withLogConfig3(serviceName, checkName, mode string, enabled bool) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { name = "%s" @@ -2807,219 +2686,3 @@ resource "google_compute_health_check" "health_check" { } `, suffix, timeout, loadBalancingScheme, preference, suffix, suffix, suffix) } - -func TestAccComputeBackendService_updateCanaryMigration(t *testing.T) { - t.Parallel() - - serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeBackendService_basic(serviceName, checkName), - }, - { - ResourceName: "google_compute_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeBackendService_withCanaryMigration( - serviceName, checkName, "updated-to-prepare", "PREPARE"), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_backend_service.foobar", 
plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeBackendService_withCanaryMigrationPercentage( - serviceName, checkName, "updated-to-percentage", 50), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_backend_service.foobar", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeBackendService_withCanaryMigration( - serviceName, checkName, "update-to-all", "TEST_ALL_TRAFFIC"), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_backend_service.foobar", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeBackendService_withCanaryMigration(serviceName, checkName, description, migrationState string) string { - return fmt.Sprintf(` -resource "google_compute_backend_service" "foobar" { - name = "%s" - description = "%s" - health_checks = [google_compute_http_health_check.zero.self_link] - external_managed_migration_state = "%s" -} - -resource "google_compute_http_health_check" "zero" { - name = "%s" - request_path = "/" - check_interval_sec = 1 - timeout_sec = 1 -} -`, serviceName, description, migrationState, checkName) -} - -func testAccComputeBackendService_withCanaryMigrationWithWait(serviceName, checkName, description, migrationState string) string { - return fmt.Sprintf(` -resource "time_sleep" "six_minutes_delay" { - create_duration = "370s" # litte more than 6 minutes (360 seconds = 6 minutes) -} - -resource "google_compute_backend_service" "foobar" { - name = "%s" - description = "%s" - 
health_checks = [google_compute_http_health_check.zero.self_link] - external_managed_migration_state = "%s" - depends_on = [ - time_sleep.six_minutes_delay - ] -} - -resource "google_compute_http_health_check" "zero" { - name = "%s" - request_path = "/" - check_interval_sec = 1 - timeout_sec = 1 -} -`, serviceName, description, migrationState, checkName) -} - -func testAccComputeBackendService_withCanaryMigrationPercentage(serviceName, checkName, description string, percentage int64) string { - return fmt.Sprintf(` -resource "time_sleep" "six_minutes_delay" { - create_duration = "370s" # litte more than 6 minutes (360 seconds = 6 minutes) -} - -resource "google_compute_backend_service" "foobar" { - name = "%s" - description = "%s" - health_checks = [google_compute_http_health_check.zero.self_link] - external_managed_migration_state = "TEST_BY_PERCENTAGE" - external_managed_migration_testing_percentage = %d - depends_on = [ - time_sleep.six_minutes_delay - ] -} - -resource "google_compute_http_health_check" "zero" { - name = "%s" - request_path = "/" - check_interval_sec = 1 - timeout_sec = 1 -} -`, serviceName, description, percentage, checkName) -} - -{{ if ne $.TargetVersionName `ga` -}} -func testAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(namePrefix, spillover string, ratio float64) string { - return fmt.Sprintf(` -resource "google_compute_backend_service" "nptlbtp" { - provider = google-beta - name = "%s-backend" - description = "Hello World 1234" - protocol = "TCP" - health_checks = [google_compute_health_check.default.self_link] - - backend { - group = google_compute_network_endpoint_group.lb-neg.self_link - balancing_mode = "CONNECTION" - max_connections_per_endpoint = 1000 - } - - network_pass_through_lb_traffic_policy { - zonal_affinity { - spillover = "%s" - spillover_ratio = %f - } - } -} - -resource "google_compute_network_endpoint_group" "lb-neg" { - provider = google-beta - name = "%s-neg" - network = 
google_compute_network.default.self_link - subnetwork = google_compute_subnetwork.default.self_link - default_port = "90" - zone = "us-central1-a" -} - -resource "google_compute_network" "default" { - provider = google-beta - name = "%s-network" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - provider = google-beta - name = "%s-subnetwork" - ip_cidr_range = "10.0.0.0/16" - region = "us-central1" - network = google_compute_network.default.self_link -} - -resource "google_compute_health_check" "default" { - provider = google-beta - name = "%s-health-check" - tcp_health_check { - port = "110" - } -} -`, namePrefix, spillover, ratio, namePrefix, namePrefix, namePrefix, namePrefix) -} -{{- end }} - -func testAccComputeBackendService_withTags(serviceName, checkName string, tagKey string, tagValue string) string { - return fmt.Sprintf(` -resource "google_compute_backend_service" "foobar" { - name = "%s" - health_checks = [google_compute_http_health_check.zero.self_link] - params { - resource_manager_tags = { - "%s" = "%s" - } - } -} - -resource "google_compute_http_health_check" "zero" { - name = "%s" - request_path = "/" - check_interval_sec = 1 - timeout_sec = 1 -} -`, serviceName, tagKey, tagValue, checkName) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl index 930038af39ef..1daf655331c6 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_disk_async_replication.go.tmpl @@ -155,7 +155,7 @@ func resourceDiskAsyncReplicationCreate(d *schema.ResourceData, meta interface{} return retry.NonRetryableError(err) } if diskStatus.ResourceStatus == nil { - return retry.RetryableError(fmt.Errorf("no resource status for disk: %s", resourceId)) + return 
retry.NonRetryableError(fmt.Errorf("no resource status for disk: %s", resourceId)) } if secondaryState, ok := diskStatus.ResourceStatus.AsyncSecondaryDisks[secondaryDisk]; ok { if secondaryState.State != "ACTIVE" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go b/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go index 838b0010bf8a..15971a309b88 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_disk_sweeper.go @@ -33,74 +33,64 @@ func testSweepDisk(region string) error { zones := []string{"us-central1-a", "us-central1-b", "us-central1-c", "us-central1-f", "us-east1-b", "us-east1-c", "us-east1-d", "us-west1-a", "us-west1-b", "us-west1-c"} for _, zone := range zones { servicesUrl := "https://compute.googleapis.com/compute/v1/projects/" + config.Project + "/zones/" + zone + "/disks" - // Page zero's URL is the raw list URL. Successive pages will return the token for the next page. 
- pageUrl := servicesUrl - for { + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: servicesUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", servicesUrl, err) + return nil + } - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: config.Project, - RawURL: pageUrl, - UserAgent: config.UserAgent, - }) - if err != nil { - log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", pageUrl, err) - return nil - } + resourceList, ok := res["items"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } - resourceList, ok := res["items"] - if !ok { - log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Count items that weren't sweeped. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["id"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource id was nil", resourceName) return nil } - rl := resourceList.([]interface{}) - - log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) - // Count items that weren't sweeped. - nonPrefixCount := 0 - for _, ri := range rl { - obj := ri.(map[string]interface{}) - if obj["id"] == nil { - log.Printf("[INFO][SWEEPER_LOG] %s resource id was nil", resourceName) - return nil - } - - id := obj["name"].(string) - // Increment count and skip if resource is not sweepable. 
- prefixes := []string{ - "pvc-", // b/291168201 - } - if !sweeper.IsSweepableTestResource(id) && !sweeper.HasAnyPrefix(id, prefixes) { - nonPrefixCount++ - continue - } - - deleteUrl := servicesUrl + "/" + id - // Don't wait on operations as we may have a lot to delete - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: config.Project, - RawURL: deleteUrl, - UserAgent: config.UserAgent, - }) - if err != nil { - log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) - } else { - log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, id) - } + id := obj["name"].(string) + // Increment count and skip if resource is not sweepable. + prefixes := []string{ + "pvc-", // b/291168201 } - - if nonPrefixCount > 0 { - log.Printf("[INFO][SWEEPER_LOG] %d items without tf-test prefix remain for zone %s", nonPrefixCount, zone) + if !sweeper.IsSweepableTestResource(id) && !sweeper.HasAnyPrefix(id, prefixes) { + nonPrefixCount++ + continue } - if res["nextPageToken"] == nil || res["nextPageToken"].(string) == "" { - break + deleteUrl := servicesUrl + "/" + id + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, id) } - pageUrl, err = transport_tpg.AddQueryParams(servicesUrl, map[string]string{"pageToken": res["nextPageToken"].(string)}) + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items without tf-test prefix remain for zone %s", nonPrefixCount, zone) } } diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl index 9fb63e9d07ef..d67dfa0bd1f2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_disk_test.go.tmpl @@ -2193,12 +2193,12 @@ func testAccComputeDisk_resourceManagerTags(context map[string]interface{}) stri return acctest.Nprintf(` resource "google_tags_tag_key" "tag_key" { parent = "projects/%{project_id}" - short_name = "test-%{random_suffix}" + short_name = "test" } resource "google_tags_tag_value" "tag_value" { parent = "tagKeys/${google_tags_tag_key.tag_key.name}" - short_name = "name-%{random_suffix}" + short_name = "name" } resource "google_compute_disk" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go.tmpl similarity index 70% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go.tmpl index baf97eb84e78..3ee224c40dec 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_rule_test.go.tmpl @@ -66,85 +66,85 @@ func TestAccComputeFirewallPolicyRule_update(t *testing.T) { } func TestAccComputeFirewallPolicyRule_multipleRules(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", 
envvar.GetTestOrgFromEnv(t)), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewallPolicyRule_multiple(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule2", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_multipleAdd(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule3", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_multipleRemove(context), - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFirewallPolicyRule_multiple(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule2", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_multipleAdd(context), + }, + { + ResourceName: 
"google_compute_firewall_policy_rule.fw_policy_rule3", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_multipleRemove(context), + }, + }, + }) } func TestAccComputeFirewallPolicyRule_securityProfileGroup_update(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewallPolicyRule_securityProfileGroup_basic(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_securityProfileGroup_update(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy", "target_resources"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeFirewallPolicyRule_securityProfileGroup_basic(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + 
// Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeFirewallPolicyRule_securityProfileGroup_update(context), + }, + { + ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy", "target_resources"}, + }, + }, + }) } func TestAccComputeFirewallPolicyRule_basic(t *testing.T) { @@ -173,82 +173,6 @@ func TestAccComputeFirewallPolicyRule_basic(t *testing.T) { }) } -func TestAccComputeFirewallPolicyRule_disabled_enabled(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewallPolicyRule_disabled(context, true), - }, - { - ResourceName: "google_compute_firewall_policy_rule.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeFirewallPolicyRule_disabled(context, false), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_firewall_policy_rule.default", "disabled", "false"), - ), - }, - { - ResourceName: "google_compute_firewall_policy_rule.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - }, - }) -} - -func TestAccComputeFirewallPolicyRule_secureTags(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - "project_name": 
envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeFirewallPolicyRuleDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewallPolicyRule_secureTags(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, - }, - { - Config: testAccComputeFirewallPolicyRule_secureTagsUpdate(context), - }, - { - ResourceName: "google_compute_firewall_policy_rule.fw_policy_rule", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, - }, - }, - }) -} - func testAccComputeFirewallPolicyRule_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_folder" "folder" { @@ -299,7 +223,7 @@ resource "google_compute_firewall_policy_rule" "fw_policy_rule" { } func testAccComputeFirewallPolicyRule_securityProfileGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_folder" "folder" { display_name = "tf-test-folder-%{random_suffix}" parent = "%{org_name}" @@ -349,7 +273,7 @@ resource "google_compute_firewall_policy_rule" "fw_policy_rule1" { } func testAccComputeFirewallPolicyRule_securityProfileGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_folder" "folder" { display_name = "tf-test-folder-%{random_suffix}" parent = "%{org_name}" @@ -854,152 +778,3 @@ resource "google_compute_firewall_policy_rule" "fw_policy_rule3" { } `, context) } - -func 
testAccComputeFirewallPolicyRule_disabled(context map[string]interface{}, disabled bool) string { - context["disabled"] = fmt.Sprintf("%t", disabled) - return acctest.Nprintf(` -resource "google_folder" "default" { - display_name = "tf-test-folder-%{random_suffix}" - parent = "%{org_name}" - deletion_protection = false -} - -resource "google_compute_firewall_policy" "default" { - parent = google_folder.default.name - short_name = "tf-test-policy-%{random_suffix}" - description = "Resource created for Terraform acceptance testing" -} - -resource "google_compute_firewall_policy_rule" "default" { - firewall_policy = google_compute_firewall_policy.default.name - description = "Resource created for Terraform acceptance testing" - priority = 9000 - enable_logging = true - action = "allow" - direction = "EGRESS" - disabled = %{disabled} - - match { - dest_ip_ranges = ["35.235.240.0/20"] - - layer4_configs { - ip_protocol = "tcp" - ports = [22] - } - } -} -`, context) -} - -func testAccComputeFirewallPolicyRule_secureTags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_folder" "folder" { - display_name = "tf-test-folder-%{random_suffix}" - parent = "%{org_name}" - deletion_protection = false -} - -resource "google_compute_firewall_policy" "fw_policy" { - parent = google_folder.folder.name - short_name = "tf-test-policy-%{random_suffix}" - description = "Resource created for Terraform acceptance testing" -} - -resource "google_compute_firewall_policy_rule" "fw_policy_rule" { - firewall_policy = google_compute_firewall_policy.fw_policy.id - description = "Resource created for Terraform acceptance testing" - priority = 9000 - enable_logging = true - action = "allow" - direction = "INGRESS" - disabled = false - tls_inspect = false - - match { - src_ip_ranges = ["11.100.0.1/32"] - - src_secure_tags { - name = google_tags_tag_value.basic_value.id - } - - layer4_configs { - ip_protocol = "tcp" - ports = [8080] - } - } -} - -resource 
"google_tags_tag_key" "basic_key" { - description = "For keyname resources." - parent = "organizations/%{org_id}" - purpose = "GCE_FIREWALL" - short_name = "tf-test-tagkey-%{random_suffix}" - - purpose_data = { - organization = "auto" - } -} - -resource "google_tags_tag_value" "basic_value" { - description = "For valuename resources." - parent = google_tags_tag_key.basic_key.id - short_name = "tf-test-tagvalue-%{random_suffix}" -} -`, context) -} - -func testAccComputeFirewallPolicyRule_secureTagsUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_folder" "folder" { - display_name = "tf-test-folder-%{random_suffix}" - parent = "%{org_name}" - deletion_protection = false -} - -resource "google_compute_firewall_policy" "fw_policy" { - parent = google_folder.folder.name - short_name = "tf-test-policy-%{random_suffix}" - description = "Resource created for Terraform acceptance testing" -} - -resource "google_compute_firewall_policy_rule" "fw_policy_rule" { - firewall_policy = google_compute_firewall_policy.fw_policy.id - description = "Resource created for Terraform acceptance testing" - priority = 9000 - enable_logging = true - action = "allow" - direction = "INGRESS" - disabled = false - tls_inspect = false - - target_secure_tags { - name = google_tags_tag_value.basic_value.id - } - - match { - src_ip_ranges = ["11.100.0.1/32"] - - layer4_configs { - ip_protocol = "tcp" - ports = [8080] - } - } -} - -resource "google_tags_tag_key" "basic_key" { - description = "For keyname resources." - parent = "organizations/%{org_id}" - purpose = "GCE_FIREWALL" - short_name = "tf-test-tagkey-%{random_suffix}" - purpose_data = { - organization = "auto" - } -} - -resource "google_tags_tag_value" "basic_value" { - description = "For valuename resources." 
- parent = google_tags_tag_key.basic_key.id - short_name = "tf-test-tagvalue-%{random_suffix}" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go.tmpl index 76ccb0720cbf..5d45e5016768 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_test.go.tmpl @@ -2,9 +2,9 @@ package compute_test import ( "fmt" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -50,7 +50,7 @@ func TestAccComputeFirewallPolicy_update(t *testing.T) { } func testAccComputeFirewallPolicy_basic(org, policyName, folderName string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_folder" "folder" { display_name = "%s" parent = "%s" @@ -66,7 +66,7 @@ resource "google_compute_firewall_policy" "default" { } func testAccComputeFirewallPolicy_update(org, policyName, folderName string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_folder" "folder" { display_name = "%s" parent = "%s" diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl similarity index 52% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go rename to 
mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl index 7423fee3b20f..b7d42e56ca08 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_policy_with_rules_test.go.tmpl @@ -1,5 +1,5 @@ package compute_test - +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -19,7 +19,7 @@ func TestAccComputeFirewallPolicyWithRules_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), CheckDestroy: testAccCheckComputeFirewallPolicyWithRulesDestroyProducer(t), Steps: []resource.TestStep{ { @@ -45,99 +45,73 @@ func TestAccComputeFirewallPolicyWithRules_update(t *testing.T) { func testAccComputeFirewallPolicyWithRules_full(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { + provider = google-beta } resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules" { - short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" + short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" description = "Terraform test" - parent = "organizations/%{org_id}" + parent = "organizations/%{org_id}" + provider = google-beta rule { - description = "tcp rule" - priority = 1000 - enable_logging = true - action = "allow" - direction = "EGRESS" - target_resources = [google_compute_network.network.self_link] - + description = "tcp rule" + priority = 1000 + enable_logging = true + action = "allow" + direction = "EGRESS" match { - dest_ip_ranges = ["11.100.0.1/32"] - dest_fqdns = ["www.yyy.com", "www.zzz.com"] - dest_region_codes = ["HK", "IN"] - dest_threat_intelligences = ["iplist-search-engines-crawlers", "iplist-tor-exit-nodes"] - 
dest_address_groups = [google_network_security_address_group.address_group_1.id] - layer4_config { ip_protocol = "tcp" ports = [8080, 7070] } + dest_ip_ranges = ["11.100.0.1/32"] + dest_fqdns = ["www.yyy.com", "www.zzz.com"] + dest_region_codes = ["HK", "IN"] + dest_threat_intelligences = ["iplist-search-engines-crawlers", "iplist-tor-exit-nodes"] + dest_address_groups = [google_network_security_address_group.address_group_1.id] } + target_resources = ["https://www.googleapis.com/compute/beta/projects/${data.google_project.project.project_id}/global/networks/default"] } - rule { description = "udp rule" priority = 2000 enable_logging = false action = "deny" direction = "INGRESS" - disabled = true - match { - src_ip_ranges = ["0.0.0.0/0"] - src_fqdns = ["www.abc.com", "www.def.com"] - src_region_codes = ["US", "CA"] - src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] - src_address_groups = [google_network_security_address_group.address_group_1.id] - layer4_config { ip_protocol = "udp" } - } - } - - rule { - description = "security profile group rule" - rule_name = "tcp rule" - priority = 3000 - enable_logging = false - action = "apply_security_profile_group" - direction = "INGRESS" - target_service_accounts = ["test@google.com"] - security_profile_group = "//networksecurity.googleapis.com/${google_network_security_security_profile_group.security_profile_group_1.id}" - tls_inspect = true - - match { src_ip_ranges = ["0.0.0.0/0"] - - layer4_config { - ip_protocol = "tcp" - } + src_fqdns = ["www.abc.com", "www.def.com"] + src_region_codes = ["US", "CA"] + src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] + src_address_groups = [google_network_security_address_group.address_group_1.id] } + disabled = true } - rule { - description = "secure tags" - rule_name = "secure tags" - priority = 4000 + description = "security profile group rule" + rule_name = "tcp rule" + priority = 3000 enable_logging = false - 
action = "allow" + action = "apply_security_profile_group" direction = "INGRESS" - match { - src_ip_ranges = ["0.0.0.0/0"] - - src_secure_tag { - name = google_tags_tag_value.basic_value.id - } - layer4_config { ip_protocol = "tcp" } + src_ip_ranges = ["0.0.0.0/0"] } + target_service_accounts = ["test@google.com"] + security_profile_group = "//networksecurity.googleapis.com/${google_network_security_security_profile_group.security_profile_group_1.id}" + tls_inspect = true } } resource "google_network_security_address_group" "address_group_1" { + provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "organizations/%{org_id}" description = "Global address group" @@ -148,6 +122,7 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_network_security_security_profile_group" "security_profile_group_1" { + provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -155,46 +130,26 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - name = "tf-test-tf-security-profile%{random_suffix}" - type = "THREAT_PREVENTION" - parent = "organizations/%{org_id}" - location = "global" -} - -resource "google_tags_tag_key" "basic_key" { - description = "For keyname resources." + provider = google-beta + name = "tf-test-tf-security-profile%{random_suffix}" + type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" - purpose = "GCE_FIREWALL" - short_name = "tf-test-tagkey-%{random_suffix}" - - purpose_data = { - organization = "auto" - } -} - -resource "google_tags_tag_value" "basic_value" { - description = "For valuename resources." 
- parent = google_tags_tag_key.basic_key.id - short_name = "tf-test-tagvalue-%{random_suffix}" -} - -resource "google_compute_network" "network" { - name = "tf-network%{random_suffix}" - auto_create_subnetworks = false + location = "global" } - `, context) } func testAccComputeFirewallPolicyWithRules_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { + provider = google-beta } resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules" { - short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" + short_name = "tf-test-tf-fw-org-policy-with-rules%{random_suffix}" description = "Terraform test - update" - parent = "organizations/%{org_id}" + parent = "organizations/%{org_id}" + provider = google-beta rule { description = "tcp rule - update" @@ -203,64 +158,39 @@ resource "google_compute_firewall_policy_with_rules" "firewall-policy-with-rules enable_logging = false action = "deny" direction = "INGRESS" - match { - src_ip_ranges = ["11.100.0.1/32", "0.0.0.0/0"] - src_fqdns = ["www.yyy.com"] - src_region_codes = ["HK"] - src_threat_intelligences = ["iplist-search-engines-crawlers"] - layer4_config { ip_protocol = "udp" ports = [8080] } + src_ip_ranges = ["11.100.0.1/32", "0.0.0.0/0"] + src_fqdns = ["www.yyy.com"] + src_region_codes = ["HK"] + src_threat_intelligences = ["iplist-search-engines-crawlers"] } } - - rule { - description = "udp rule" - priority = 3000 - enable_logging = false - action = "deny" - direction = "INGRESS" - disabled = false - - match { - src_ip_ranges = ["0.0.0.0/0"] - src_fqdns = ["www.abc.com", "www.xyz.com"] - src_region_codes = ["US", "CA", "FR"] - src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] - src_address_groups = [google_network_security_address_group.address_group_1.id] - - layer4_config { - ip_protocol = "all" - } - } - } - rule { - description = "secure tags" - rule_name = "secure tags" - priority = 4000 - 
enable_logging = false - action = "allow" - direction = "INGRESS" - - target_secure_tag { - name = google_tags_tag_value.basic_value.id - } - - match { - src_ip_ranges = ["0.0.0.0/0"] - - layer4_config { - ip_protocol = "tcp" + description = "udp rule" + priority = 3000 + enable_logging = false + action = "deny" + direction = "INGRESS" + match { + layer4_config { + ip_protocol = "all" + } + src_ip_ranges = ["0.0.0.0/0"] + src_fqdns = ["www.abc.com", "www.xyz.com"] + src_region_codes = ["US", "CA", "FR"] + src_threat_intelligences = ["iplist-known-malicious-ips", "iplist-public-clouds"] + src_address_groups = [google_network_security_address_group.address_group_1.id] } + disabled = false } - } } resource "google_network_security_address_group" "address_group_1" { + provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "organizations/%{org_id}" description = "Global address group" @@ -271,6 +201,7 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_network_security_security_profile_group" "security_profile_group_1" { + provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -278,27 +209,12 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { - name = "tf-test-tf-security-profile%{random_suffix}" - type = "THREAT_PREVENTION" - parent = "organizations/%{org_id}" - location = "global" -} - -resource "google_tags_tag_key" "basic_key" { - description = "For keyname resources." 
+ provider = google-beta + name = "tf-test-tf-security-profile%{random_suffix}" + type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" - purpose = "GCE_FIREWALL" - short_name = "tf-test-tagkey-%{random_suffix}" - - purpose_data = { - organization = "auto" - } -} - -resource "google_tags_tag_value" "basic_value" { - description = "For valuename resources." - parent = google_tags_tag_key.basic_key.id - short_name = "tf-test-tagvalue-%{random_suffix}" + location = "global" } `, context) } +{{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go.tmpl similarity index 88% rename from mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go.tmpl index 1f1e6fb690b5..d0576311d631 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_test.go.tmpl @@ -7,7 +7,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeFirewall_update(t *testing.T) { @@ -134,7 +133,7 @@ func TestAccComputeFirewall_noSource(t *testing.T) { CheckDestroy: testAccCheckComputeFirewallDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeFirewall_noSource(networkName, firewallName), + Config: testAccComputeFirewall_noSource(networkName, firewallName), ExpectError: regexp.MustCompile("one of source_tags, source_ranges, or source_service_accounts must be defined"), }, }, @@ -314,68 +313,6 @@ func TestAccComputeFirewall_moduleOutput(t *testing.T) { }) } -func TestAccComputeFirewall_resourceManagerTags(t *testing.T) { - t.Parallel() - - org := 
envvar.GetTestOrgFromEnv(t) - - networkName := fmt.Sprintf("tf-test-firewall-%s", acctest.RandString(t, 10)) - firewallName := fmt.Sprintf("tf-test-firewall-%s", acctest.RandString(t, 10)) - - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-firewall-tagkey", "organizations/"+org, make(map[string]interface{})) - sharedTagkey, _ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-firewall-tagvalue", sharedTagkey, org) - - context := map[string]interface{}{ - "network_name": networkName, - "firewall_name": firewallName, - "tag_key_id": tagKeyResult["name"], - "tag_value_id": tagValueResult["name"], - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeFirewallDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFirewall_resourceManagerTags(context), - }, - { - ResourceName: "google_compute_firewall.acc_firewall_with_resource_manager_tags", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"params"}, // we don't read tags back. 
The whole params block is input only - }, - }, - }) -} - -func testAccComputeFirewall_resourceManagerTags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "foobar" { - name = "%{network_name}" - auto_create_subnetworks = false -} - -resource "google_compute_firewall" "acc_firewall_with_resource_manager_tags" { - name = "%{firewall_name}" - description = "Resource created for Terraform acceptance testing" - network = google_compute_network.foobar.name - source_tags = ["foo"] - - allow { - protocol = "icmp" - } - params { - resource_manager_tags = { - "%{tag_key_id}" = "%{tag_value_id}" - } - } - } -`, context) -} - func testAccComputeFirewall_basic(network, firewall string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -419,6 +356,7 @@ resource "google_compute_firewall" "foobar" { `, network, firewall) } + func testAccComputeFirewall_localRangesUpdate(network, firewall string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -486,6 +424,7 @@ resource "google_compute_firewall" "foobar" { `, network, firewall) } + func testAccComputeFirewall_priority(network, firewall string, priority int) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl deleted file mode 100644 index 411c3d35dcd0..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_future_reservation_test.go.tmpl +++ /dev/null @@ -1,101 +0,0 @@ -{{- if ne $.TargetVersionName "ga" -}} -package compute_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-provider-google/google/acctest" - 
"github.com/hashicorp/terraform-plugin-testing/plancheck" -) - -func TestAccComputeFutureReservation_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project": envvar.GetTestProjectFromEnv(), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeFutureReservation_full(context), - }, - { - ResourceName: "google_compute_future_reservation.gce_future_reservation", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_delete_auto_created_reservations"}, - }, - { - Config: testAccComputeFutureReservation_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_future_reservation.gce_future_reservation", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_future_reservation.gce_future_reservation", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_delete_auto_created_reservations"}, - }, - }, - }) -} - -func testAccComputeFutureReservation_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_future_reservation" "gce_future_reservation" { - provider = google-beta - name = "tf-fr%{random_suffix}" - name_prefix = "fr-%{random_suffix}" - project = "%{project}" - planning_status = "DRAFT" - auto_delete_auto_created_reservations = true - description = "test future reservation" - time_window { - start_time = "2025-11-01T00:00:00Z" - end_time = "2025-11-02T00:00:00Z" - } - - specific_sku_properties { - total_count = "1" - instance_properties { - machine_type = "e2-standard-2" - } - } -} -`, context) -} - -func testAccComputeFutureReservation_update(context map[string]interface{}) string { - 
return acctest.Nprintf(` -resource "google_compute_future_reservation" "gce_future_reservation" { - provider = google-beta - name = "tf-fr%{random_suffix}" - name_prefix = "fru-%{random_suffix}" - project = "%{project}" - planning_status = "SUBMITTED" - auto_delete_auto_created_reservations = false - description = "test updated future reservation" - time_window { - start_time = "2025-11-01T00:00:00Z" - end_time = "2025-11-02T00:00:00Z" - } - specific_sku_properties { - total_count = "1" - instance_properties { - machine_type = "e2-standard-2" - } - } -} -`, context) -} - -{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go.tmpl similarity index 93% rename from mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go.tmpl index 4748818ec51a..7c4eb0fde00e 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_global_address_test.go.tmpl @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" @@ -25,9 +25,9 @@ func TestAccComputeGlobalAddress_update(t *testing.T) { Config: testAccComputeGlobalAddress_update1(context), }, { - ResourceName: "google_compute_global_address.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_global_address.foobar", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { @@ -39,9 +39,9 @@ func 
TestAccComputeGlobalAddress_update(t *testing.T) { }, }, { - ResourceName: "google_compute_global_address.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_global_address.foobar", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl index e71c21ef61ae..fd8366e75dee 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_global_forwarding_rule_test.go.tmpl @@ -6,7 +6,6 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/services/compute" ) @@ -368,74 +367,6 @@ func TestUnitComputeGlobalForwardingRule_InternalIpDiffSuppress(t *testing.T) { } } -func TestAccComputeGlobalForwardingRule_updateCanaryMigration(t *testing.T) { - t.Parallel() - - fr := fmt.Sprintf("fr-canary-mgiration-%s", acctest.RandString(t, 10)) - proxy := fmt.Sprintf("pr-canary-mgiration-%s", acctest.RandString(t, 10)) - urlmap := fmt.Sprintf("um-canary-mgiration-%s", acctest.RandString(t, 10)) - backendservice := fmt.Sprintf("bs-canary-mgiration-%s", acctest.RandString(t, 10)) - address := fmt.Sprintf("addr-canary-mgiration-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - CheckDestroy: testAccCheckComputeGlobalForwardingRuleDestroyProducer(t), - 
Steps: []resource.TestStep{ - { - Config: testAccComputeGlobalForwardingRule_basic(fr, proxy, urlmap, backendservice, address), - }, - { - ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeGlobalForwardingRule_withCanaryMigration(fr, "PREPARE", proxy, urlmap, backendservice, address), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_global_forwarding_rule.forwarding_rule", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeGlobalForwardingRule_withCanaryMigrationPercentage(fr, proxy, urlmap, backendservice, address, 50), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_global_forwarding_rule.forwarding_rule", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeGlobalForwardingRule_withCanaryMigration(fr, "TEST_ALL_TRAFFIC", proxy, urlmap, backendservice, address), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_global_forwarding_rule.forwarding_rule", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_global_forwarding_rule.forwarding_rule", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - func testAccComputeGlobalForwardingRule_httpProxy(fr, targetProxy, proxy, proxy2, backend, hc, urlmap string) string { return fmt.Sprintf(` resource "google_compute_global_forwarding_rule" "forwarding_rule" { @@ -981,113 +912,3 @@ resource "google_compute_instance_template" 
"instance_template" { `, fr, proxy, backend, hc, urlmap, igm, it) } {{- end }} - - - -func testAccComputeGlobalForwardingRule_basic(fr, proxy, urlmap, backendservice, address string) string { - return fmt.Sprintf(` -resource "google_compute_global_forwarding_rule" "forwarding_rule" { - name = "%s" - ip_protocol = "TCP" - port_range = "80" - load_balancing_scheme = "EXTERNAL" - target = google_compute_target_http_proxy.my_target_http_proxy.id - ip_address = google_compute_global_address.my_global_ip.address -} - -resource "google_compute_target_http_proxy" "my_target_http_proxy" { - name = "%s" - url_map = google_compute_url_map.my_url_map.id -} - -resource "google_compute_url_map" "my_url_map" { - name = "%s" - default_service = google_compute_backend_service.my_backend_service.id -} - -resource "google_compute_backend_service" "my_backend_service" { - name = "%s" - protocol = "HTTP" - load_balancing_scheme = "EXTERNAL" -} - -resource "google_compute_global_address" "my_global_ip" { - name = "%s" -} -`, fr, proxy, urlmap, backendservice, address) -} - -func testAccComputeGlobalForwardingRule_withCanaryMigration(fr, bucket_migration_state, proxy, urlmap, backendservice, address string) string { - return fmt.Sprintf(` -resource "google_compute_global_forwarding_rule" "forwarding_rule" { - name = "%s" - ip_protocol = "TCP" - port_range = "80" - load_balancing_scheme = "EXTERNAL" - target = google_compute_target_http_proxy.my_target_http_proxy.id - ip_address = google_compute_global_address.my_global_ip.address - external_managed_backend_bucket_migration_state = "%s" -} - -resource "google_compute_target_http_proxy" "my_target_http_proxy" { - name = "%s" - url_map = google_compute_url_map.my_url_map.id -} - -resource "google_compute_url_map" "my_url_map" { - name = "%s" - default_service = google_compute_backend_service.my_backend_service.id -} - -resource "google_compute_backend_service" "my_backend_service" { - name = "%s" - protocol = "HTTP" - load_balancing_scheme 
= "EXTERNAL" -} - -resource "google_compute_global_address" "my_global_ip" { - name = "%s" -} -`, fr, bucket_migration_state, proxy, urlmap, backendservice, address) -} - - -func testAccComputeGlobalForwardingRule_withCanaryMigrationPercentage(fr, proxy, urlmap, backendservice, address string, percentage int64) string { - return fmt.Sprintf(` -resource "time_sleep" "six_minutes_delay" { - create_duration = "370s" # litte more than 6 minutes (360 seconds = 6 minutes) -} - -resource "google_compute_global_forwarding_rule" "forwarding_rule" { - name = "%s" - ip_protocol = "TCP" - port_range = "80" - load_balancing_scheme = "EXTERNAL" - target = google_compute_target_http_proxy.my_target_http_proxy.id - ip_address = google_compute_global_address.my_global_ip.address - external_managed_backend_bucket_migration_state = "TEST_BY_PERCENTAGE" - external_managed_backend_bucket_migration_testing_percentage = %d - depends_on = [time_sleep.six_minutes_delay] -} - -resource "google_compute_target_http_proxy" "my_target_http_proxy" { - name = "%s" - url_map = google_compute_url_map.my_url_map.id -} - -resource "google_compute_url_map" "my_url_map" { - name = "%s" - default_service = google_compute_backend_service.my_backend_service.id -} - -resource "google_compute_backend_service" "my_backend_service" { - name = "%s" - protocol = "HTTP" - load_balancing_scheme = "EXTERNAL" -} - -resource "google_compute_global_address" "my_global_ip" { - name = "%s" -} -`, fr, percentage, proxy, urlmap, backendservice, address) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go.tmpl index 73a249356d51..b60be89689e2 
100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_global_network_endpoint_test.go.tmpl @@ -1,12 +1,11 @@ package compute_test - import ( "fmt" "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccComputeGlobalNetworkEndpoint_networkEndpointsBasic(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl index f53433d9e468..42baf71b8e3c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_health_check_test.go.tmpl @@ -39,62 +39,6 @@ func TestAccComputeHealthCheck_tcp_update(t *testing.T) { }) } -{{ if ne $.TargetVersionName `ga` -}} -func TestAccComputeHealthCheck_grpcWithTls_create(t *testing.T) { - t.Parallel() - - hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeHealthCheckDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeHealthCheck_grpcWithTls(hckName), - }, - { - ResourceName: "google_compute_health_check.foobar", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -{{- end }} - -{{ if ne $.TargetVersionName `ga` -}} -func TestAccComputeHealthCheck_grpcWithTls_update(t *testing.T) { - t.Parallel() - - hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - 
PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeHealthCheckDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeHealthCheck_grpcWithTls(hckName), - }, - { - ResourceName: "google_compute_health_check.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeHealthCheck_grpcWithTls_update(hckName), - }, - { - ResourceName: "google_compute_health_check.foobar", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -{{- end }} - func TestAccComputeHealthCheck_ssl_port_spec(t *testing.T) { t.Parallel() @@ -192,6 +136,26 @@ func TestAccComputeHealthCheck_typeTransition(t *testing.T) { }) } +func TestAccComputeHealthCheck_tcpAndSsl_shouldFail(t *testing.T) { + // No HTTP interactions, is a unit test + acctest.SkipIfVcr(t) + t.Parallel() + + hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeHealthCheckDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeHealthCheck_tcpAndSsl_shouldFail(hckName), + ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), + }, + }, + }) +} + {{ if ne $.TargetVersionName `ga` -}} func TestAccComputeHealthCheck_logConfigDisabled(t *testing.T) { t.Parallel() @@ -248,43 +212,6 @@ resource "google_compute_health_check" "foobar" { `, hckName) } -{{ if ne $.TargetVersionName `ga` -}} -func testAccComputeHealthCheck_grpcWithTls(hckName string) string { - return fmt.Sprintf(` -resource "google_compute_health_check" "foobar" { - provider = "google-beta" - check_interval_sec = 3 - 
description = "Resource created for Terraform acceptance testing" - healthy_threshold = 3 - name = "tf-test-health-test-%s" - timeout_sec = 2 - unhealthy_threshold = 3 - grpc_tls_health_check { - port = "443" - } -} -`, hckName) -} -{{- end }} - -{{ if ne $.TargetVersionName `ga` -}} -func testAccComputeHealthCheck_grpcWithTls_update(hckName string) string { - return fmt.Sprintf(` -resource "google_compute_health_check" "foobar" { - provider = "google-beta" - check_interval_sec = 3 - healthy_threshold = 10 - name = "tf-test-health-test-%s" - timeout_sec = 2 - unhealthy_threshold = 10 - grpc_tls_health_check { - port = "8080" - } -} -`, hckName) -} -{{- end }} - func testAccComputeHealthCheck_ssl(hckName string) string { return fmt.Sprintf(` resource "google_compute_health_check" "foobar" { @@ -415,6 +342,26 @@ resource "google_compute_health_check" "foobar" { `, hckName) } +func testAccComputeHealthCheck_tcpAndSsl_shouldFail(hckName string) string { + return fmt.Sprintf(` +resource "google_compute_health_check" "foobar" { + check_interval_sec = 3 + description = "Resource created for Terraform acceptance testing" + healthy_threshold = 3 + name = "health-test-%s" + timeout_sec = 2 + unhealthy_threshold = 3 + + tcp_health_check { + port = 443 + } + ssl_health_check { + port = 443 + } +} +`, hckName) +} + {{ if ne $.TargetVersionName `ga` -}} func testAccComputeHealthCheck_logConfigDisabled(hckName string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 718a857dab8d..d85a74e24e3f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -53,17 +53,6 @@ func IpCidrRangeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { return false } -func DisksForceAttachDiffSuppress(_, old, new 
string, _ *schema.ResourceData) bool { - if (new == "false" && old == "") || (new == "" && old == "false") { - return true - } - - if new == old { - return true - } - return false -} - var ( advancedMachineFeaturesKeys = []string{ "advanced_machine_features.0.enable_nested_virtualization", @@ -85,7 +74,6 @@ var ( "boot_disk.0.initialize_params", "boot_disk.0.mode", "boot_disk.0.source", - "boot_disk.0.force_attach", } initializeParamsKeys = []string{ @@ -155,9 +143,8 @@ func ValidateSubnetworkProjectFunc(d tpgresource.TerraformResourceDiff) error { return nil } - project := tpgresource.GetProjectFromRegionalSelfLink(subnetwork.(string)) - if project != subnetworkProject.(string) { - return fmt.Errorf("project %s in subnetwork's self_link %q must match subnetwork_project %q", project, subnetwork, subnetworkProject) + if tpgresource.GetProjectFromRegionalSelfLink(subnetwork.(string)) != subnetworkProject.(string) { + return fmt.Errorf("project in subnetwork's self_link %q must match subnetwork_project %q", subnetwork, subnetworkProject) } } return nil @@ -561,16 +548,6 @@ func ResourceComputeInstance() *schema.Resource { DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `The name or self_link of the disk attached to this instance.`, }, - - "force_attach": { - Type: schema.TypeBool, - Optional: true, - Default: false, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - DiffSuppressFunc: DisksForceAttachDiffSuppress, - Description: `Whether to force attach the regional disk even if it's currently attached to another instance. If you try to force attach a zonal disk to an instance, you will receive an error. 
Setting this parameter cause VM recreation.`, - }, }, }, }, @@ -885,15 +862,6 @@ func ResourceComputeInstance() *schema.Resource { Computed: true, Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource.`, }, - - "force_attach": { - Type: schema.TypeBool, - Optional: true, - Default: false, - ForceNew: true, - DiffSuppressFunc: DisksForceAttachDiffSuppress, - Description: `Whether to force attach the regional disk even if it's currently attached to another instance. If you try to force attach a zonal disk to an instance, you will receive an error. Setting this parameter cause VM recreation.`, - }, }, }, }, @@ -2122,10 +2090,6 @@ func resourceComputeInstanceRead(d *schema.ResourceData, meta interface{}) error } } - if v, ok := d.GetOk(fmt.Sprintf("attached_disk.%d.force_attach", adIndex)); ok { - di["force_attach"] = v.(bool) - } - // We want the disks to remain in the order we set in the config, so if a disk // is present in the config, make sure it's at the correct index. Otherwise, append it. 
if inConfig { @@ -3266,11 +3230,6 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d *schema.ResourceDat disk.DiskEncryptionKey.KmsKeyServiceAccount = kmsServiceAccount.(string) } } - - if forceAttach, ok := diskConfig["force_attach"]; ok { - disk.ForceAttach = forceAttach.(bool) - } - return disk, nil } @@ -3436,9 +3395,9 @@ func resourceComputeInstanceDelete(d *schema.ResourceData, meta interface{}) err func resourceComputeInstanceImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } @@ -3618,10 +3577,6 @@ func expandBootDisk(d *schema.ResourceData, config *transport_tpg.Config, projec disk.Mode = v.(string) } - if v, ok := d.GetOk("boot_disk.0.force_attach"); ok { - disk.ForceAttach = v.(bool) - } - return disk, nil } @@ -3632,7 +3587,6 @@ func flattenBootDisk(d *schema.ResourceData, disk *compute.AttachedDisk, config "mode": disk.Mode, "source": tpgresource.ConvertSelfLinkToV1(disk.Source), "guest_os_features": flattenComputeInstanceGuestOsFeatures(disk.GuestOsFeatures), - "force_attach": d.Get("boot_disk.0.force_attach"), // disk_encryption_key_raw is not returned from the API, so copy it from what the user // originally specified to avoid diffs. 
"disk_encryption_key_raw": d.Get("boot_disk.0.disk_encryption_key_raw"), @@ -3641,9 +3595,6 @@ func flattenBootDisk(d *schema.ResourceData, disk *compute.AttachedDisk, config if _,ok := d.GetOk("boot_disk.0.interface"); ok { result["interface"] = disk.Interface } - if v, ok := d.GetOk("boot_disk.0.force_attach"); ok { - result["force_attach"] = v.(bool) - } diskDetails, err := getDisk(disk.Source, d, config) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl index 589e37d1ec69..7b4849dbe01c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image_test.go.tmpl @@ -428,7 +428,7 @@ resource "google_compute_instance" "vm1" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2204-lts" + image = "ubuntu-os-cloud/ubuntu-2004-lts" } } @@ -485,7 +485,7 @@ resource "google_compute_instance" "vm2" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2204-lts" + image = "ubuntu-os-cloud/ubuntu-2004-lts" } } @@ -541,7 +541,7 @@ resource "google_compute_instance" "vm3" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2204-lts" + image = "ubuntu-os-cloud/ubuntu-2004-lts" } } @@ -600,7 +600,7 @@ resource "google_compute_instance" "vm4" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2204-lts" + image = "ubuntu-os-cloud/ubuntu-2004-lts" } } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl index 15a420339f63..35b9553f27d3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template_test.go.tmpl @@ -566,37 +566,6 @@ func TestAccComputeInstanceFromTemplate_confidentialInstanceConfigMain(t *testin }) } -func TestAccComputeInstanceFromTemplate_DiskForceAttach(t *testing.T) { - t.Parallel() - - var instance compute.Instance - instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - templateName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - resourceName := "google_compute_instance_from_template.foobar" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceFromTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstanceFromTemplate_DiskForceAttach_zonal(instanceName, templateName), - ExpectError: regexp.MustCompile("Force attaching zonal disks is not supported"), - }, - { - Config: testAccComputeInstanceFromTemplate_DiskForceAttach(instanceName, templateName), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists(t, resourceName, &instance), - - // Check that fields were set based on the template - resource.TestCheckResourceAttr(resourceName, "boot_disk.0.force_attach", "true"), - resource.TestCheckResourceAttr(resourceName, "attached_disk.0.force_attach", "true"), - ), - }, - }, - }) -} - func testAccComputeInstanceFromTemplate_basic(instance, template string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -1731,7 +1700,7 @@ resource "google_compute_instance_from_template" "inst" { func testAccComputeInstanceFromTemplate_confidentialInstanceConfigEnable(templateDisk string, image string, template string, instance string, template2 string, instance2 string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image1" { - family = "ubuntu-2204-lts" + family = 
"ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -1824,7 +1793,7 @@ resource "google_compute_instance_from_template" "inst2" { func testAccComputeInstanceFromTemplate_confidentialInstanceConfigNoConfigSevSnp(templateDisk string, image string, template string, instance string, template2 string, instance2 string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image1" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -2246,138 +2215,3 @@ resource "google_compute_instance_from_template" "foobar" { } `, suffix, suffix, template, template) } - -func testAccComputeInstanceFromTemplate_DiskForceAttach_zonal(instance, template string) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_disk" "foobar" { - name = "%s" - image = data.google_compute_image.my_image.self_link - size = 10 - type = "pd-ssd" - zone = "us-central1-a" -} - -resource "google_compute_instance_template" "foobar" { - name = "%s" - machine_type = "n1-standard-1" // can't be e2 because of local-ssd - - disk { - source = google_compute_disk.foobar.name - auto_delete = false - boot = true - } - - network_interface { - network = "default" - } - - metadata = { - foo = "bar" - } - - scheduling { - automatic_restart = true - } - - can_ip_forward = true -} - -resource "google_compute_instance_from_template" "foobar" { - name = "%s" - zone = "us-central1-a" - - source_instance_template = google_compute_instance_template.foobar.self_link - - // Overrides - boot_disk { - source = google_compute_disk.foobar.name - force_attach = true - } - attached_disk { - source = google_compute_disk.foobar.name - force_attach = true - } -} -`, template, template, instance) -} - -func testAccComputeInstanceFromTemplate_DiskForceAttach(instance, template string) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - 
family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_region_disk" "foobar" { - name = "%s-1" - size = 10 - type = "pd-ssd" - region = "us-central1" - replica_zones = ["us-central1-a", "us-central1-b"] -} - -resource "google_compute_region_disk" "foobaz" { - name = "%s-2" - size = 10 - type = "pd-ssd" - region = "us-central1" - replica_zones = ["us-central1-a", "us-central1-b"] -} - -resource "google_compute_instance_template" "foobar" { - name = "%s" - machine_type = "n1-standard-1" // can't be e2 because of local-ssd - - disk { - source = google_compute_region_disk.foobar.self_link - auto_delete = false - boot = true - } - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - disk_size_gb = 100 - boot = false - disk_type = "pd-ssd" - type = "PERSISTENT" - } - - network_interface { - network = "default" - } - - metadata = { - foo = "bar" - } - - scheduling { - automatic_restart = true - } - - can_ip_forward = true -} - -resource "google_compute_instance_from_template" "foobar" { - name = "%s" - zone = "us-central1-a" - - source_instance_template = google_compute_instance_template.foobar.self_link - - // Overrides - boot_disk { - source = google_compute_region_disk.foobar.self_link - force_attach = true - } - attached_disk { - source = google_compute_region_disk.foobaz.self_link - force_attach = true - } -} -`, template, template, template, instance) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl index af13d60d0d6b..b5782de796cb 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl @@ -455,9 +455,9 @@ func resourceComputeInstanceGroupDelete(d *schema.ResourceData, meta interface{} func resourceComputeInstanceGroupImportState(d 
*schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl index 85ac3d6b44f1..a442c66ce425 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.tmpl @@ -1,7 +1,6 @@ package compute import ( - "context" "fmt" "log" "strings" @@ -39,7 +38,6 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { CustomizeDiff: customdiff.All( tpgresource.DefaultProviderProject, tpgresource.DefaultProviderZone, - customdiff.ForceNewIfChange("resource_policies.0.workload_policy", ForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty), ), Schema: map[string]*schema.Schema{ "base_instance_name": { @@ -585,31 +583,11 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { }, }, }, - "resource_policies": { - Type: schema.TypeList, - Optional: true, - Description: `Resource policies for this managed instance group.`, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "workload_policy": { - Type: schema.TypeString, - Optional: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, - Description: `The URL of the workload policy that is specified for this managed instance group. 
It can be a full or partial URL.`, - }, - }, - }, - }, }, UseJSONNumber: true, } } -func ForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty(_ context.Context, old, new, _ interface{}) bool { - return (old.(string) != "") && (new.(string) == "") -} - func parseUniqueId(s string) (string, string) { splits:= strings.SplitN(s, "?uniqueId=", 2) if len(splits) == 2 { @@ -698,7 +676,6 @@ func resourceComputeInstanceGroupManagerCreate(d *schema.ResourceData, meta inte InstanceLifecyclePolicy: expandInstanceLifecyclePolicy(d.Get("instance_lifecycle_policy").([]interface{})), AllInstancesConfig: expandAllInstancesConfig(nil, d.Get("all_instances_config").([]interface{})), StatefulPolicy: expandStatefulPolicy(d), - ResourcePolicies: expandResourcePolicies(d.Get("resource_policies").([]interface{})), {{- if ne $.TargetVersionName "ga" }} Params: expandInstanceGroupManagerParams(d), {{- end }} @@ -946,9 +923,6 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf if err = d.Set("status", flattenStatus(manager.Status)); err != nil { return fmt.Errorf("Error setting status in state: %s", err.Error()) } - if err = d.Set("resource_policies", flattenResourcePolicies(manager.ResourcePolicies)); err != nil { - return fmt.Errorf("Error setting resource_policies in state: %s", err.Error()) - } // If unset in state set to default value if d.Get("wait_for_instances_status").(string) == "" { @@ -1053,11 +1027,6 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte change = true } - if d.HasChange("resource_policies") { - updatedManager.ResourcePolicies = expandResourcePolicies(d.Get("resource_policies").([]interface{})) - change = true - } - if change { op, err := config.NewComputeClient(userAgent).InstanceGroupManagers.Patch(project, zone, d.Get("name").(string), updatedManager).Do() if err != nil { @@ -1324,20 +1293,6 @@ func expandVersions(configured []interface{}) []*compute.InstanceGroupManagerVer return versions } -func 
expandResourcePolicies(configured []interface{}) *compute.InstanceGroupManagerResourcePolicies { - resourcePolicies := &compute.InstanceGroupManagerResourcePolicies{} - - if len(configured) > 0 { - data := configured[0].(map[string]interface{}) - resourcePolicies.WorkloadPolicy = data["workload_policy"].(string) - resourcePolicies.ForceSendFields = []string{"WorkloadPolicy"} - } else { - resourcePolicies.NullFields = []string{"WorkloadPolicy"} - } - - return resourcePolicies -} - func expandFixedOrPercent(configured []interface{}) *compute.FixedOrPercent { fixedOrPercent := &compute.FixedOrPercent{} @@ -1679,17 +1634,6 @@ func flattenStatusAllInstancesConfig(allInstancesConfig *compute.InstanceGroupMa return results } -func flattenResourcePolicies(resourcePolicies *compute.InstanceGroupManagerResourcePolicies) []map[string]interface{} { - results := []map[string]interface{}{} - if resourcePolicies != nil { - data := map[string]interface{}{ - "workload_policy": resourcePolicies.WorkloadPolicy, - } - results = append(results, data) - } - return results -} - func resourceInstanceGroupManagerStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { if err := d.Set("wait_for_instances", false); err != nil { return nil, fmt.Errorf("Error setting wait_for_instances: %s", err) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl index d292fa273c84..ad8a8daa10d0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.tmpl @@ -1,9 +1,7 @@ package compute_test import ( - "context" "fmt" - "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -12,55 +10,8 @@ import ( {{- if ne $.TargetVersionName "ga" }} 
"github.com/hashicorp/terraform-provider-google/google/envvar" {{- end }} - tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" ) -func TestForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty(t *testing.T) { - cases := map[string]struct { - Old, New interface{} - ExpectForceNew bool - }{ - "empty to empty": { - Old: "", - New: "", - ExpectForceNew: false, - }, - "empty to non-empty": { - Old: "", - New: "some-policy", - ExpectForceNew: false, - }, - "non-empty to non-empty": { - Old: "old-policy", - New: "new-policy", - ExpectForceNew: false, - }, - "non-empty to empty (should force new)": { - Old: "existing-policy", - New: "", - ExpectForceNew: true, - }, - "whitespace to empty (should force new)": { - Old: " ", - New: "", - ExpectForceNew: true, - }, - "policy name to empty (should force new)": { - Old: "projects/my-project/regions/us-central1/resourcePolicies/my-policy", - New: "", - ExpectForceNew: true, - }, - } - - for tn, tc := range cases { - result := tpgcompute.ForceNewResourcePoliciesWorkloadPolicyIfNewIsEmpty(context.Background(), tc.Old, tc.New, nil) - if result != tc.ExpectForceNew { - t.Errorf("bad: %s, %q => %q expect ForceNew to return %t but got %t", tn, tc.Old, tc.New, tc.ExpectForceNew, result) - } - } -} - - func TestAccInstanceGroupManager_basic(t *testing.T) { t.Parallel() @@ -521,76 +472,6 @@ func TestAccInstanceGroupManager_waitForStatus(t *testing.T) { }) } -func TestAccInstanceGroupManager_addingResourcePoliciesToMigWithNonZeroTargetSize_fails(t *testing.T) { - t.Parallel() - - suffix := acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckInstanceGroupManagerDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccInstanceGroupManager_withTargetSize(suffix), - }, - { - ResourceName: 
"google_compute_instance_group_manager.igm-workload-policy", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"status"}, - }, - { - Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdateError(suffix), - ExpectError: regexp.MustCompile("Workload policy update is not allowed when the MIG has instances."), - }, - }, - }) -} - -func TestAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(t *testing.T) { - t.Parallel() - - suffix := acctest.RandString(t, 10) - workloadPolicyResourceName := "workload_policy" - workloadPolicyResourceUpdate := "workload_policy_2" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckInstanceGroupManagerDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(suffix, workloadPolicyResourceName), - }, - { - ResourceName: "google_compute_instance_group_manager.igm-workload-policy", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"status"}, - }, - { - Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(suffix, workloadPolicyResourceUpdate), - }, - { - ResourceName: "google_compute_instance_group_manager.igm-workload-policy", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"status"}, - }, - { - Config: testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate2(suffix), - }, - { - ResourceName: "google_compute_instance_group_manager.igm-workload-policy", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"status"}, - }, - }, - }) -} - {{ if ne $.TargetVersionName `ga` -}} func TestAccInstanceGroupManager_resourceManagerTags(t *testing.T) { t.Parallel() @@ -2113,220 +1994,6 @@ resource "google_compute_per_instance_config" "per-instance" { `, template, 
target, igm, perInstanceConfig) } -func testAccInstanceGroupManager_withTargetSize(suffix string) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_instance_template" "igm-basic" { - name = "igm-instance-template-%s" - machine_type = "a4-highgpu-8g" - can_ip_forward = false - tags = ["foo", "bar"] - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - boot = true - disk_type = "hyperdisk-balanced" - } - - network_interface { - network = "default" - } - - service_account { - scopes = ["userinfo-email", "compute-ro", "storage-ro"] - } -} - -resource "google_compute_instance_group_manager" "igm-workload-policy" { - description = "Terraform test instance group manager" - name = "igm-basic-workload-policy-%s" - - version { - name = "prod" - instance_template = google_compute_instance_template.igm-basic.self_link - } - - base_instance_name = "tf-test-igm-no-tp" - zone = "us-central1-b" - target_size = 2 -} -`, suffix, suffix) -} - -func testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdateError(suffix string) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_resource_policy" "workload_policy_error" { - name = "tf-test-gce-policy-%s" - region = "us-central1" - workload_policy { - type = "HIGH_THROUGHPUT" - } -} - -resource "google_compute_instance_template" "igm-basic" { - name = "igm-instance-template-%s" - machine_type = "a4-highgpu-8g" - can_ip_forward = false - tags = ["foo", "bar"] - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - boot = true - disk_type = "hyperdisk-balanced" - } - - network_interface { - network = "default" - } - - service_account { - scopes = ["userinfo-email", "compute-ro", "storage-ro"] - } -} - -resource 
"google_compute_instance_group_manager" "igm-workload-policy" { - description = "Terraform test instance group manager" - name = "igm-basic-workload-policy-%s" - - version { - name = "prod" - instance_template = google_compute_instance_template.igm-basic.self_link - } - - base_instance_name = "tf-test-igm-no-tp" - zone = "us-central1-b" - target_size = 2 - - resource_policies { - workload_policy = google_compute_resource_policy.workload_policy_error.self_link - } -} -`, suffix, suffix, suffix) -} - -func testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate(suffix, workloadPolicy string) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_resource_policy" "workload_policy" { - name = "tf-test-gce-policy-%s" - region = "us-central1" - workload_policy { - type = "HIGH_THROUGHPUT" - } -} - -resource "google_compute_resource_policy" "workload_policy_2" { - name = "tf-test-gce-policy-%s-2" - region = "us-central1" - workload_policy { - type = "HIGH_THROUGHPUT" - } -} - - -resource "google_compute_instance_template" "igm-basic" { - name = "igm-instance-template-%s" - machine_type = "a4-highgpu-8g" - can_ip_forward = false - tags = ["foo", "bar"] - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - boot = true - disk_type = "hyperdisk-balanced" - } - - network_interface { - network = "default" - } - - service_account { - scopes = ["userinfo-email", "compute-ro", "storage-ro"] - } -} - -resource "google_compute_instance_group_manager" "igm-workload-policy" { - description = "Terraform test instance group manager" - name = "igm-basic-workload-policy-%s" - - version { - name = "prod" - instance_template = google_compute_instance_template.igm-basic.self_link - } - - base_instance_name = "tf-test-igm-no-tp" - zone = "us-central1-b" - target_size = 0 - - resource_policies { - workload_policy = 
google_compute_resource_policy.%s.self_link - } -} -`, suffix, suffix, suffix, suffix, workloadPolicy) -} - -func testAccInstanceGroupManager_resourcePoliciesWorkloadPolicyUpdate2(suffix string) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_instance_template" "igm-basic" { - name = "igm-instance-template-%s" - machine_type = "a4-highgpu-8g" - can_ip_forward = false - tags = ["foo", "bar"] - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - boot = true - disk_type = "hyperdisk-balanced" - } - - network_interface { - network = "default" - } - - service_account { - scopes = ["userinfo-email", "compute-ro", "storage-ro"] - } -} - -resource "google_compute_instance_group_manager" "igm-workload-policy" { - description = "Terraform test instance group manager" - name = "igm-basic-workload-policy-%s" - - version { - name = "prod" - instance_template = google_compute_instance_template.igm-basic.self_link - } - - base_instance_name = "tf-test-igm-no-tp" - zone = "us-central1-b" - target_size = 0 -} -`, suffix, suffix) -} - - {{ if ne $.TargetVersionName `ga` -}} func testAccInstanceGroupManager_resourceManagerTags(template_name, tag_name, igm_name, project_id string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go.tmpl similarity index 95% rename from mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go.tmpl index 58d1a7abb5f9..7567a28d28e8 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_settings_test.go.tmpl @@ -12,7 +12,7 @@ func TestAccComputeInstanceSettings_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -55,7 +55,7 @@ func testAccComputeInstanceSettings_basic(context map[string]interface{}) string return acctest.Nprintf(` resource "google_compute_instance_settings" "gce_instance_settings" { - zone = "us-east5-c" + zone = "us-east7-b" metadata { items = { foo = "baz" @@ -70,7 +70,7 @@ func testAccComputeInstanceSettings_update(context map[string]interface{}) strin return acctest.Nprintf(` resource "google_compute_instance_settings" "gce_instance_settings" { - zone = "us-east5-c" + zone = "us-east7-b" metadata { items = { foo = "bar" @@ -86,7 +86,7 @@ func testAccComputeInstanceSettings_delete(context map[string]interface{}) strin return acctest.Nprintf(` resource "google_compute_instance_settings" "gce_instance_settings" { - zone = "us-east5-c" + zone = "us-east7-b" metadata { items = { baz = "qux" diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index ba87f811432b..cca279377579 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -6,7 +6,6 @@ import ( "fmt" "reflect" "strings" - "strconv" "time" "github.com/hashicorp/errwrap" @@ -151,14 +150,6 @@ func ResourceComputeInstanceTemplate() *schema.Resource { Description: `Name of the disk. 
When not provided, this defaults to the name of the instance.`, }, - "architecture": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - Description: `The architecture of the image. Allowed values are ARM64 or X86_64.`, - }, - "disk_size_gb": { Type: schema.TypeInt, Optional: true, @@ -210,16 +201,6 @@ func ResourceComputeInstanceTemplate() *schema.Resource { Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, }, - "guest_os_features": { - Type: schema.TypeList, - Optional: true, - ForceNew: true, - Description: `A list of features to enable on the guest operating system. Applicable only for bootable images.`, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "source_image": { Type: schema.TypeString, Optional: true, @@ -685,13 +666,6 @@ Google Cloud KMS. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key m }, }, - "numeric_id": { - Type: schema.TypeString, - ForceNew: true, - Computed: true, - Description: `The ID of the template in numeric format.`, - }, - "project": { Type: schema.TypeString, Optional: true, @@ -1376,6 +1350,9 @@ func buildDisks(d *schema.ResourceData, config *transport_tpg.Config) ([]*comput // Build the disk var disk compute.AttachedDisk + disk.Type = "PERSISTENT" + disk.Mode = "READ_WRITE" + disk.Interface = "SCSI" disk.Boot = i == 0 disk.AutoDelete = d.Get(prefix + ".auto_delete").(bool) @@ -1498,14 +1475,6 @@ func buildDisks(d *schema.ResourceData, config *transport_tpg.Config) ([]*comput disk.Type = v.(string) } - if v, ok := d.GetOk(prefix + ".guest_os_features"); ok { - disk.GuestOsFeatures = expandComputeInstanceGuestOsFeatures(v.([]interface{})) - } - - if v, ok := d.GetOk(prefix + ".architecture"); ok { - disk.Architecture = v.(string) - } - disks = append(disks, &disk) } @@ -1783,8 +1752,6 @@ func flattenDisk(disk *compute.AttachedDisk, configDisk map[string]any, defaultP diskMap["source"] = tpgresource.ConvertSelfLinkToV1(disk.Source) diskMap["mode"] = disk.Mode diskMap["type"] = disk.Type - diskMap["guest_os_features"] = flattenComputeInstanceGuestOsFeatures(disk.GuestOsFeatures) - diskMap["architecture"] = configDisk["architecture"] return diskMap, nil } @@ -1830,6 +1797,10 @@ func reorderDisks(configDisks []interface{}, apiDisks []map[string]interface{}) disksByDeviceName[v.(string)] = i } else if v := disk["type"]; v.(string) == "SCRATCH" { iface := disk["interface"].(string) + if iface == "" { + // apply-time default + iface = "SCSI" + } scratchDisksByInterface[iface] = append(scratchDisksByInterface[iface], i) } else if v := disk["source"]; v.(string) != "" { attachedDisksBySource[v.(string)] = i @@ -1967,10 +1938,6 @@ func resourceComputeInstanceTemplateRead(d *schema.ResourceData, meta interface{ } } - if err = d.Set("numeric_id", 
strconv.FormatUint(instanceTemplate.Id, 10)); err != nil { - return fmt.Errorf("Error setting numeric_id: %s", err) - } - {{ if ne $.TargetVersionName `ga` -}} if instanceTemplate.Properties.PartnerMetadata != nil { partnerMetadata, err := flattenPartnerMetadata(instanceTemplate.Properties.PartnerMetadata) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go index f917b6b601e9..1ff79ef03e2a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go @@ -19,9 +19,8 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { cDeviceName := map[string]interface{}{ "device_name": "disk-1", } - cScratchScsi := map[string]interface{}{ - "type": "SCRATCH", - "interface": "SCSI", + cScratch := map[string]interface{}{ + "type": "SCRATCH", } cSource := map[string]interface{}{ "source": "disk-source", @@ -79,7 +78,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, @@ -90,7 +89,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, @@ 
-101,7 +100,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratchScsi, cFallThrough, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl index 778e88ba42a3..f487b7f0d2e7 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl @@ -937,6 +937,10 @@ func TestAccComputeInstanceTemplate_performanceMonitoringUnit(t *testing.T) { "instance_name": fmt.Sprintf("tf-test-instance-template-%s", acctest.RandString(t, 10)), "performance_monitoring_unit": "STANDARD", } + context_2 := map[string]interface{}{ + "instance_name": context_1["instance_name"].(string), + "performance_monitoring_unit": "ENHANCED", + } context_3 := map[string]interface{}{ "instance_name": context_1["instance_name"].(string), "performance_monitoring_unit": "ARCHITECTURAL", @@ -959,6 +963,18 @@ func TestAccComputeInstanceTemplate_performanceMonitoringUnit(t *testing.T) { ImportState: true, ImportStateVerify: true, }, + { + Config: testAccComputeInstanceTemplate_performanceMonitoringUnit(context_2), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceTemplateExists(t, "google_compute_instance_template.foobar", &instanceTemplate), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ENHANCED"), + ), + }, + { + 
ResourceName: "google_compute_instance_template.foobar", + ImportState: true, + ImportStateVerify: true, + }, { Config: testAccComputeInstanceTemplate_performanceMonitoringUnit(context_3), Check: resource.ComposeTestCheckFunc( @@ -1759,36 +1775,6 @@ func TestAccComputeInstanceTemplate_migration(t *testing.T) { }) } -func TestAccComputeInstanceTemplate_GuestOsFeatures(t *testing.T) { - t.Parallel() - - var instanceTemplate compute.InstanceTemplate - context := map[string]interface{}{ - "template_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), - "guest_os_features": `["UEFI_COMPATIBLE", "VIRTIO_SCSI_MULTIQUEUE", "GVNIC", "IDPF"]`, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstanceTemplate_GuestOsFeatures(context), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceTemplateExists( - t, "google_compute_instance_template.foobar", &instanceTemplate), - resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.#", "4"), - resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.0", "UEFI_COMPATIBLE"), - resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.1", "VIRTIO_SCSI_MULTIQUEUE"), - resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.2", "GVNIC"), - resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "disk.0.guest_os_features.3", "IDPF"), - ), - }, - }, - }) -} - func TestAccComputeInstanceTemplate_withLabels(t *testing.T) { acctest.SkipIfVcr(t) t.Parallel() @@ -4173,7 +4159,7 @@ resource "google_compute_instance_template" "foobar" { func 
testAccComputeInstanceTemplateConfidentialInstanceConfigEnable(suffix string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -4231,7 +4217,7 @@ resource "google_compute_instance_template" "foobar2" { func testAccComputeInstanceTemplateConfidentialInstanceConfigNoEnable(suffix string, minCpuPlatform, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image2" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -4325,7 +4311,7 @@ resource "google_compute_instance_template" "foobar5" { func testAccComputeInstanceTemplateAdvancedMachineFeatures(suffix string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -4361,7 +4347,7 @@ resource "google_compute_instance_template" "foobar" { func testAccComputeInstanceTemplate_performanceMonitoringUnit(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -4387,7 +4373,7 @@ resource "google_compute_instance_template" "foobar" { func testAccComputeInstanceTemplate_enableUefiNetworking(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -5251,30 +5237,3 @@ resource "google_compute_instance_template" "foobar" { } `, context) } - -func testAccComputeInstanceTemplate_GuestOsFeatures(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_instance_template" "foobar" { - name = 
"%{template_name}" - machine_type = "e2-medium" - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - disk_size_gb = 10 - architecture = "X86_64" - boot = true - guest_os_features = %{guest_os_features} - } - - network_interface { - network = "default" - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index b3a76dd0f26f..27044f37e455 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -101,50 +101,6 @@ func TestMinCpuPlatformDiffSuppress(t *testing.T) { } } -func TestDisksForceAttachDiffSuppress(t *testing.T) { - cases := map[string]struct { - Old, New string - ExpectDiffSuppress bool - }{ - "force_attach unchanged": { - Old: "true", - New: "true", - ExpectDiffSuppress: true, - }, - "force_attach changed to true": { - Old: "false", - New: "true", - ExpectDiffSuppress: false, - }, - "force_attach changed to false": { - Old: "true", - New: "false", - ExpectDiffSuppress: false, - }, - "force_attach unchanged false": { - Old: "false", - New: "false", - ExpectDiffSuppress: true, - }, - "force_attach changed false from empty": { - Old: "", - New: "false", - ExpectDiffSuppress: true, - }, - "force_attach changed empty from false": { - Old: "false", - New: "", - ExpectDiffSuppress: true, - }, - } - - for tn, tc := range cases { - if tpgcompute.DisksForceAttachDiffSuppress("", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { - t.Errorf("bad: %s, %q => %q expect DiffSuppress to return %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress) - } - } -} - func TestCheckForCommonAliasIp(t *testing.T) { type testCase struct { old, new []*compute.AliasIpRange @@ -203,7 +159,7 @@ func computeInstanceImportStep(zone, instanceName string, additionalImportIgnore // 
metadata is only read into state if set in the config // importing doesn't know whether metadata.startup_script vs metadata_startup_script is set in the config, // it always takes metadata.startup-script - ignores := []string{"metadata.%", "metadata.startup-script", "metadata_startup_script", "boot_disk.0.initialize_params.0.resource_manager_tags.%", "params.0.resource_manager_tags.%"} + ignores := []string{"metadata.%", "metadata.startup-script", "metadata_startup_script", "boot_disk.0.initialize_params.0.resource_manager_tags.%", "params.0.resource_manager_tags.%"} return resource.TestStep{ ResourceName: "google_compute_instance.foobar", @@ -221,9 +177,9 @@ func TestAccComputeInstance_basic1(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -243,6 +199,9 @@ func TestAccComputeInstance_basic1(t *testing.T) { // instance resource without an explicit deletion_protection = true declaration. 
// Other tests check explicit true/false configs: TestAccComputeInstance_deletionProtectionExplicit[True | False] testAccCheckComputeInstanceHasConfiguredDeletionProtection(&instance, false), + + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"metadata.baz", "metadata.foo", "desired_status", "current_status", "labels", "terraform_labels"}), @@ -257,9 +216,9 @@ func TestAccComputeInstance_basic2(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -269,6 +228,8 @@ func TestAccComputeInstance_basic2(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic2(instanceName)), ), }, }, @@ -282,9 +243,9 @@ func TestAccComputeInstance_basic3(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic3(instanceName), @@ -294,6 
+255,8 @@ func TestAccComputeInstance_basic3(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic3(instanceName)), ), }, }, @@ -307,9 +270,9 @@ func TestAccComputeInstance_basic4(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic4(instanceName), @@ -319,6 +282,8 @@ func TestAccComputeInstance_basic4(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic4(instanceName)), ), }, }, @@ -332,9 +297,9 @@ func TestAccComputeInstance_basic5(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic5(instanceName), @@ -344,6 +309,8 @@ func TestAccComputeInstance_basic5(t *testing.T) { testAccCheckComputeInstanceTag(&instance, "foo"), 
testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), testAccCheckComputeInstanceDisk(&instance, instanceName, true, true), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic5(instanceName)), ), }, }, @@ -355,16 +322,16 @@ func TestAccComputeInstance_resourceManagerTags(t *testing.T) { var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - context := map[string]interface{}{ + context := map[string]interface{}{ "project": envvar.GetTestProjectFromEnv(), "random_suffix": acctest.RandString(t, 10), "instance_name": instanceName, } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_resourceManagerTags(context), @@ -377,6 +344,8 @@ func TestAccComputeInstance_resourceManagerTags(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_resourceManagerTagsUpdate(context)), ), }, }, @@ -409,6 +378,8 @@ func TestAccComputeInstance_diskResourcePolicies(t *testing.T) { Config: testAccComputeInstance_diskResourcePoliciesOnePolicyUpdate(context), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_diskResourcePoliciesOnePolicyUpdate(context)), ), }, { @@ -465,6 +436,8 @@ func TestAccComputeInstance_diskResourcePolicies_attachmentDiff(t *testing.T) { Config: 
testAccComputeInstance_diskResourcePoliciesOnePolicy(context_1), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_diskResourcePoliciesOnePolicy(context_1)), ), }, { @@ -494,6 +467,8 @@ func TestAccComputeInstance_machineTypeUrl(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "description", "old_desc"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType(instanceName, machineTypeUrl)), ), }, }, @@ -507,9 +482,9 @@ func TestAccComputeInstance_descriptionUpdate(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_description(instanceName), @@ -525,6 +500,8 @@ func TestAccComputeInstance_descriptionUpdate(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "description", "new_desc"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_descriptionUpdate(instanceName)), ), }, }, @@ -539,16 +516,19 @@ func TestAccComputeInstance_IP(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_ip(ipName, instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), - testAccCheckComputeInstanceAccessConfigHasNatIP(&instance)), + testAccCheckComputeInstanceAccessConfigHasNatIP(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_ip(ipName, instanceName)), + ), }, }, }) @@ -563,9 +543,9 @@ func TestAccComputeInstance_IPv6(t *testing.T) { var ptrName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_ipv6(ipName, instanceName, ptrName), @@ -573,6 +553,8 @@ func TestAccComputeInstance_IPv6(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceIpv6AccessConfigHasExternalIPv6(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_ipv6(ipName, instanceName, ptrName)), ), }, { @@ -600,6 +582,8 @@ func TestAccComputeInstance_ipv6ExternalReservation(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_ipv6ExternalReservation(instanceName)), ), }, 
computeInstanceImportStep("us-west2-a", instanceName, []string{}), @@ -608,52 +592,54 @@ func TestAccComputeInstance_ipv6ExternalReservation(t *testing.T) { } func TestAccComputeInstance_internalIPv6(t *testing.T) { - t.Parallel() - - var instance compute.Instance - var ipName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_internalIpv6(ipName, instanceName), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), - ), - }, - computeInstanceImportStep("us-west2-a", instanceName, []string{}), - }, - }) + t.Parallel() + + var instance compute.Instance + var ipName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_internalIpv6(ipName, instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_internalIpv6(ipName, instanceName)), + ), + }, + computeInstanceImportStep("us-west2-a", instanceName, []string{}), + }, + }) } func 
TestAccComputeInstance_internalIPv6PrefixLength(t *testing.T) { - t.Parallel() - - var instance compute.Instance - var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_internalIpv6PrefixLength("96", instanceName), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), - ), - }, - computeInstanceImportStep("us-west2-a", instanceName, []string{"allow_stopping_for_update"}), - }, - }) + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_internalIpv6PrefixLength("96", instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(&instance), + ), + }, + computeInstanceImportStep("us-west2-a", instanceName, []string{"allow_stopping_for_update"}), + }, + }) } func TestAccComputeInstance_PTRRecord(t *testing.T) { @@ -665,9 +651,9 @@ func TestAccComputeInstance_PTRRecord(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_PTRRecord(ptrName, instanceName), @@ -696,9 +682,9 @@ func TestAccComputeInstance_networkTier(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_networkTier(instanceName), @@ -707,6 +693,8 @@ func TestAccComputeInstance_networkTier(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceAccessConfigHasNatIP(&instance), testAccCheckComputeInstanceHasAssignedNatIP, + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_networkTier(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -737,9 +725,9 @@ func TestAccComputeInstance_diskEncryption(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_disks_encryption(bootEncryptionKey, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10)), @@ -747,6 +735,8 @@ func TestAccComputeInstance_diskEncryption(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceDiskEncryptionKey("google_compute_instance.foobar", &instance, bootEncryptionKeyHash, diskNameToEncryptionKey), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_disks_encryption(bootEncryptionKey, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10))), ), }, }, @@ -768,9 +758,9 @@ func TestAccComputeInstance_diskEncryptionRestart(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_disks_encryption_restart(bootEncryptionKey, diskNameToEncryptionKey, instanceName), @@ -786,6 +776,8 @@ func TestAccComputeInstance_diskEncryptionRestart(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDiskEncryptionKey("google_compute_instance.foobar", &instance, bootEncryptionKeyHash, diskNameToEncryptionKey), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_disks_encryption_restartUpdate(bootEncryptionKey, diskNameToEncryptionKey, instanceName)), ), }, }, @@ -815,20 +807,22 @@ func TestAccComputeInstance_kmsDiskEncryption(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@compute-system.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_disks_kms(bootKmsKeyName, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10)), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDiskKmsEncryptionKey("google_compute_instance.foobar", &instance, bootKmsKeyName, diskNameToEncryptionKey), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_disks_kms(bootKmsKeyName, diskNameToEncryptionKey, instanceName, acctest.RandString(t, 10))), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1066,9 +1060,9 @@ func TestAccComputeInstance_resourcePolicyUpdate(t *testing.T) { var scheduleName2 = fmt.Sprintf("tf-tests-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_instanceSchedule(instanceName, scheduleName1), @@ -1103,6 +1097,8 @@ func TestAccComputeInstance_resourcePolicyUpdate(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeResourcePolicy(&instance, "", 0), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_removeResourcePolicy(instanceName, scheduleName1, scheduleName2)), ), }, }, @@ -1117,9 +1113,9 @@ func TestAccComputeInstance_attachedDisk(t *testing.T) { var diskName = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk(diskName, instanceName), @@ -1127,6 +1123,8 @@ func TestAccComputeInstance_attachedDisk(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_attachedDisk(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1142,9 +1140,9 @@ func TestAccComputeInstance_attachedDisk_sourceUrl(t *testing.T) { var diskName = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk_sourceUrl(diskName, instanceName), @@ -1152,6 +1150,8 @@ func TestAccComputeInstance_attachedDisk_sourceUrl(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_attachedDisk_sourceUrl(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1167,9 +1167,9 @@ func TestAccComputeInstance_attachedDisk_modeRo(t *testing.T) { var diskName = 
fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk_modeRo(diskName, instanceName), @@ -1177,6 +1177,8 @@ func TestAccComputeInstance_attachedDisk_modeRo(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_attachedDisk_modeRo(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1184,52 +1186,6 @@ func TestAccComputeInstance_attachedDisk_modeRo(t *testing.T) { }) } -func TestAccComputeInstance_attachDisk_forceAttach(t *testing.T) { - t.Parallel() - - var instance compute.Instance - var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - var diskName = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) - var forceAttachSetToTrue = true - var forceAttachSetToFalse = false - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_attachedDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToTrue), - ExpectError: regexp.MustCompile("Force attaching zonal disks is not supported"), - }, - { - Config: testAccComputeInstance_attachedDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToFalse), - Check: resource.ComposeTestCheckFunc( - 
testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "attached_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), - ), - }, - { - Config: testAccComputeInstance_attachedDisk_forceAttach(diskName, instanceName, forceAttachSetToTrue), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "attached_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToTrue)), - ), - }, - { - Config: testAccComputeInstance_attachedDisk_forceAttach(diskName, instanceName, forceAttachSetToFalse), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "attached_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), - ), - }, - }, - }) -} - func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { t.Parallel() @@ -1239,9 +1195,9 @@ func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { var diskName2 = fmt.Sprintf("tf-testd-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_attachedDisk(diskName, instanceName), @@ -1277,6 +1233,8 @@ func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceDisk(&instance, diskName, false, false), + acctest.GetTestMetadataForTgc("compute", 
"google_compute_instance.foobar", + testAccComputeInstance_updateAttachedDiskEncryptionKey(diskName, instanceName)), ), }, }, @@ -1291,9 +1249,9 @@ func TestAccComputeInstance_bootDisk_source(t *testing.T) { var diskName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_source(diskName, instanceName), @@ -1301,6 +1259,8 @@ func TestAccComputeInstance_bootDisk_source(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceBootDisk(&instance, diskName), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootDisk_source(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1316,9 +1276,9 @@ func TestAccComputeInstance_bootDisk_sourceUrl(t *testing.T) { var diskName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_sourceUrl(diskName, instanceName), @@ -1326,6 +1286,8 @@ func TestAccComputeInstance_bootDisk_sourceUrl(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceBootDisk(&instance, diskName), + 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootDisk_sourceUrl(diskName, instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1341,9 +1303,9 @@ func TestAccComputeInstance_bootDisk_type(t *testing.T) { var diskType = "pd-ssd" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_type(instanceName, diskType), @@ -1351,6 +1313,8 @@ func TestAccComputeInstance_bootDisk_type(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceBootDiskType(t, instanceName, diskType), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootDisk_type(instanceName, diskType)), ), }, }, @@ -1364,60 +1328,18 @@ func TestAccComputeInstance_bootDisk_mode(t *testing.T) { var diskMode = "READ_WRITE" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_bootDisk_mode(instanceName, diskMode), - }, - computeInstanceImportStep("us-central1-a", instanceName, []string{}), - }, - }) -} - -func TestAccComputeInstance_bootDisk_forceAttach(t *testing.T) { - t.Parallel() - - var instance compute.Instance - var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - var diskName = fmt.Sprintf("tf-test-%s", 
acctest.RandString(t, 10)) - var forceAttachSetToTrue = true - var forceAttachSetToFalse = false - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_bootDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToTrue), - ExpectError: regexp.MustCompile("Force attaching zonal disks is not supported"), - }, - { - Config: testAccComputeInstance_bootDisk_forceAttach_zonal(diskName, instanceName, forceAttachSetToFalse), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), - ), - }, - { - Config: testAccComputeInstance_bootDisk_forceAttach(instanceName, forceAttachSetToTrue), Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToTrue)), - ), - }, - { - Config: testAccComputeInstance_bootDisk_forceAttach(instanceName, forceAttachSetToFalse), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.force_attach", fmt.Sprintf("%t", forceAttachSetToFalse)), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_bootDisk_mode(instanceName, diskMode)), ), }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), }, }) } @@ -1429,9 +1351,9 @@ func TestAccComputeInstance_with375GbScratchDisk(t *testing.T) { var instanceName = 
fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_with375GbScratchDisk(instanceName), @@ -1446,14 +1368,16 @@ func TestAccComputeInstance_with375GbScratchDisk(t *testing.T) { "interface": "SCSI", }, { - "interface": "NVME", + "interface": "NVME", "deviceName": "nvme-local-ssd", }, { - "interface": "SCSI", + "interface": "SCSI", "deviceName": "scsi-local-ssd", }, }), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_with375GbScratchDisk(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1471,9 +1395,9 @@ func TestAccComputeInstance_with18TbScratchDisk(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_with18TbScratchDisk(instanceName), @@ -1500,6 +1424,8 @@ func TestAccComputeInstance_with18TbScratchDisk(t *testing.T) { "interface": "NVME", }, }), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_with18TbScratchDisk(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1514,9 +1440,9 @@ func TestAccComputeInstance_forceNewAndChangeMetadata(t *testing.T) { var instanceName = 
fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -1532,6 +1458,8 @@ func TestAccComputeInstance_forceNewAndChangeMetadata(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMetadata( &instance, "qux", "true"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_forceNewAndChangeMetadata(instanceName)), ), }, }, @@ -1545,9 +1473,9 @@ func TestAccComputeInstance_update(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -1566,6 +1494,8 @@ func TestAccComputeInstance_update(t *testing.T) { testAccCheckComputeInstanceLabel(&instance, "only_me", "nothing_else"), testAccCheckComputeInstanceTag(&instance, "baz"), testAccCheckComputeInstanceAccessConfig(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_update(instanceName)), ), }, }, @@ -1579,9 +1509,9 @@ func TestAccComputeInstance_stopInstanceToUpdate(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ // Set fields that require stopping the instance { @@ -1607,6 +1537,8 @@ func TestAccComputeInstance_stopInstanceToUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_stopInstanceToUpdate3(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1621,9 +1553,9 @@ func TestAccComputeInstance_serviceAccount(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_serviceAccount(instanceName), @@ -1636,6 +1568,8 @@ func TestAccComputeInstance_serviceAccount(t *testing.T) { "https://www.googleapis.com/auth/devstorage.read_only"), testAccCheckComputeInstanceServiceAccount(&instance, "https://www.googleapis.com/auth/userinfo.email"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_serviceAccount(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1660,6 +1594,8 @@ func TestAccComputeInstance_noServiceAccount(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), 
testAccCheckComputeInstanceNoServiceAccount(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_noServiceAccount(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1684,6 +1620,8 @@ func TestAccComputeInstance_serviceAccountEmail_0scopes(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_serviceAccountEmail_0scopes(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1698,9 +1636,9 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_serviceAccount_update0(instanceName), @@ -1739,6 +1677,8 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 3), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_serviceAccount_update3(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1753,9 +1693,9 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { 
var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_serviceAccount_update01(instanceName), @@ -1784,6 +1724,8 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_serviceAccount_update01(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1798,9 +1740,9 @@ func TestAccComputeInstance_scheduling(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -1815,6 +1757,8 @@ func TestAccComputeInstance_scheduling(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_schedulingUpdated(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -1884,9 +1828,9 @@ 
func TestAccComputeInstance_advancedMachineFeatures(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_advancedMachineFeatures(instanceName), @@ -1901,6 +1845,8 @@ func TestAccComputeInstance_advancedMachineFeatures(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_advancedMachineFeaturesUpdated(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -1916,6 +1862,10 @@ func TestAccComputeInstance_performanceMonitoringUnit(t *testing.T) { "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), "performance_monitoring_unit": "STANDARD", } + context_2 := map[string]interface{}{ + "instance_name": context_1["instance_name"].(string), + "performance_monitoring_unit": "ENHANCED", + } context_3 := map[string]interface{}{ "instance_name": context_1["instance_name"].(string), "performance_monitoring_unit": "ARCHITECTURAL", @@ -1935,12 +1885,23 @@ func TestAccComputeInstance_performanceMonitoringUnit(t *testing.T) { ), }, computeInstanceImportStep("us-central1-a", context_1["instance_name"].(string), []string{"allow_stopping_for_update"}), + { + Config: testAccComputeInstance_performanceMonitoringUnit(context_2), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + 
resource.TestCheckResourceAttr("google_compute_instance.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ENHANCED"), + ), + }, + computeInstanceImportStep("us-central1-a", context_2["instance_name"].(string), []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_performanceMonitoringUnit(context_3), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ARCHITECTURAL"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_performanceMonitoringUnit(context_3)), ), }, }, @@ -1952,7 +1913,7 @@ func TestAccComputeInstance_enableUefiNetworking(t *testing.T) { var instance compute.Instance context_1 := map[string]interface{}{ - "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), "enable_uefi_networking": "true", } @@ -1967,6 +1928,8 @@ func TestAccComputeInstance_enableUefiNetworking(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "advanced_machine_features.0.enable_uefi_networking", "true"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_enableUefiNetworking(context_1)), ), }, computeInstanceImportStep("us-central1-a", context_1["instance_name"].(string), []string{}), @@ -1982,9 +1945,9 @@ func TestAccComputeInstance_soleTenantNodeAffinities(t *testing.T) { var groupName = fmt.Sprintf("tf-test-nodegroup-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - 
CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_withoutNodeAffinities(instanceName, templateName, groupName), @@ -2000,12 +1963,17 @@ func TestAccComputeInstance_soleTenantNodeAffinities(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: testAccComputeInstance_soleTenantNodeAffinitiesReduced(instanceName, templateName, groupName), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_soleTenantNodeAffinitiesReduced(instanceName, templateName, groupName)), + ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, }) } + func TestAccComputeInstance_reservationAffinities(t *testing.T) { t.Parallel() @@ -2013,9 +1981,9 @@ func TestAccComputeInstance_reservationAffinities(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-resaffinity-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_reservationAffinity_nonSpecificReservationConfig(instanceName, "NO_RESERVATION"), @@ -2038,6 +2006,8 @@ func TestAccComputeInstance_reservationAffinities(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasReservationAffinity(&instance, "SPECIFIC_RESERVATION", instanceName), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + 
testAccComputeInstance_reservationAffinity_specificReservationConfig(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2108,17 +2078,11 @@ func TestAccComputeInstance_hostErrorTimeoutSecconds(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.host_error_timeout_seconds", "0"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_hostErrorTimeoutSeconds(context_4)), ), }, computeInstanceImportStep(context_4["zone"].(string), context_4["instance_name"].(string), []string{}), - { - Config: testAccComputeInstance_hostErrorTimeoutSeconds(context_1), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.host_error_timeout_seconds", "90"), - ), - }, - computeInstanceImportStep(context_1["zone"].(string), context_1["instance_name"].(string), []string{}), }, }) } @@ -2131,9 +2095,9 @@ func TestAccComputeInstance_subnet_auto(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnet_auto(acctest.RandString(t, 10), instanceName), @@ -2141,6 +2105,8 @@ func TestAccComputeInstance_subnet_auto(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasSubnet(&instance), + 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_subnet_auto(acctest.RandString(t, 10), instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2155,9 +2121,9 @@ func TestAccComputeInstance_subnet_custom(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnet_custom(acctest.RandString(t, 10), instanceName), @@ -2165,6 +2131,8 @@ func TestAccComputeInstance_subnet_custom(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasSubnet(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_subnet_custom(acctest.RandString(t, 10), instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2184,9 +2152,9 @@ func TestAccComputeInstance_subnet_xpn(t *testing.T) { projectName := fmt.Sprintf("tf-test-xpn-%d", time.Now().Unix()) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnet_xpn(org, billingId, projectName, instanceName, acctest.RandString(t, 10)), @@ -2195,6 +2163,8 @@ func TestAccComputeInstance_subnet_xpn(t *testing.T) { t, 
"google_compute_instance.foobar", fmt.Sprintf("%s-service", projectName), &instance), testAccCheckComputeInstanceHasSubnet(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_subnet_xpn(org, billingId, projectName, instanceName, acctest.RandString(t, 10))), ), }, }, @@ -2208,9 +2178,9 @@ func TestAccComputeInstance_networkIPAuto(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_networkIPAuto(acctest.RandString(t, 10), instanceName), @@ -2218,6 +2188,8 @@ func TestAccComputeInstance_networkIPAuto(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAnyNetworkIP(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_networkIPAuto(acctest.RandString(t, 10), instanceName)), ), }, }, @@ -2231,9 +2203,9 @@ func TestAccComputeInstance_network_ip_custom(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) var ipAddress = "10.0.200.200" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_network_ip_custom(acctest.RandString(t, 10), instanceName, ipAddress), @@ -2241,6 +2213,8 @@ func 
TestAccComputeInstance_network_ip_custom(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkIP(&instance, ipAddress), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_network_ip_custom(acctest.RandString(t, 10), instanceName, ipAddress)), ), }, }, @@ -2256,15 +2230,17 @@ func TestAccComputeInstance_private_image_family(t *testing.T) { var familyName = fmt.Sprintf("tf-testf-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_private_image_family(diskName, familyName, instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_private_image_family(diskName, familyName, instanceName)), ), }, }, @@ -2280,9 +2256,9 @@ func TestAccComputeInstance_networkPerformanceConfig(t *testing.T) { var imageName = fmt.Sprintf("tf-testf-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_networkPerformanceConfig(imageName, diskName, instanceName), @@ -2290,6 +2266,8 @@ func TestAccComputeInstance_networkPerformanceConfig(t *testing.T) { 
testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkPerformanceConfig(&instance, "DEFAULT"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_networkPerformanceConfig(imageName, diskName, instanceName)), ), }, }, @@ -2303,9 +2281,9 @@ func TestAccComputeInstance_forceChangeMachineTypeManually(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic(instanceName), @@ -2329,15 +2307,17 @@ func TestAccComputeInstance_multiNic(t *testing.T) { subnetworkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_multiNic(instanceName, networkName, subnetworkName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMultiNic(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_multiNic(instanceName, networkName, subnetworkName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2352,9 +2332,9 @@ func TestAccComputeInstance_nictype_update(t *testing.T) { var 
instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_nictype(instanceName, "GVNIC"), @@ -2368,6 +2348,8 @@ func TestAccComputeInstance_nictype_update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_nictype(instanceName, "VIRTIO_NET")), ), }, }, @@ -2381,15 +2363,17 @@ func TestAccComputeInstance_guestAccelerator(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_guestAccelerator(instanceName, 1), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasGuestAccelerator(&instance, "nvidia-tesla-t4", 1), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_guestAccelerator(instanceName, 1)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"metadata.baz", "metadata.foo"}), @@ -2405,9 +2389,9 @@ func TestAccComputeInstance_guestAcceleratorSkip(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", 
acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_guestAccelerator(instanceName, 0), @@ -2428,9 +2412,9 @@ func TestAccComputeInstance_minCpuPlatform(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_minCpuPlatform(instanceName), @@ -2445,6 +2429,8 @@ func TestAccComputeInstance_minCpuPlatform(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMinCpuPlatform(&instance, ""), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_minCpuPlatform_remove(instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update"}), @@ -2459,9 +2445,9 @@ func TestAccComputeInstance_deletionProtectionExplicitFalse(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic_deletionProtectionFalse(instanceName), @@ -2469,6 +2455,8 @@ func TestAccComputeInstance_deletionProtectionExplicitFalse(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasConfiguredDeletionProtection(&instance, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_basic_deletionProtectionFalse(instanceName)), ), }, }, @@ -2482,9 +2470,9 @@ func TestAccComputeInstance_deletionProtectionExplicitTrueAndUpdateFalse(t *test var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic_deletionProtectionTrue(instanceName), @@ -2515,15 +2503,17 @@ func TestAccComputeInstance_primaryAliasIpRange(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_primaryAliasIpRange(instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAliasIpRange(&instance, "", "/24"), + acctest.GetTestMetadataForTgc("compute", 
"google_compute_instance.foobar", + testAccComputeInstance_primaryAliasIpRange(instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{}), @@ -2540,9 +2530,9 @@ func TestAccComputeInstance_secondaryAliasIpRange(t *testing.T) { subnetName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_secondaryAliasIpRange(networkName, subnetName, instanceName), @@ -2557,6 +2547,8 @@ func TestAccComputeInstance_secondaryAliasIpRange(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAliasIpRange(&instance, "", "10.0.1.0/24"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_secondaryAliasIpRangeUpdate(networkName, subnetName, instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), @@ -2601,6 +2593,8 @@ func TestAccComputeInstance_aliasIpRangeCommonAddresses(t *testing.T) { testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasAliasIpRange(&instance, "inst-test-secondary", "172.16.1.0/24"), testAccCheckComputeInstanceHasAliasIpRange(&instance, "inst-test-tertiary", "10.1.3.0/24"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + 
testAccComputeInstance_secondaryAliasIpRangeUpdateWithCommonAddressDifferentRanges(networkName, subnetName, instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), @@ -2615,15 +2609,17 @@ func TestAccComputeInstance_hostname(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_hostname(instanceName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_compute_instance.foobar", "hostname"), testAccCheckComputeInstanceLacksShieldedVmConfig(&instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_hostname(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -2638,9 +2634,9 @@ func TestAccComputeInstance_shieldedVmConfig(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_shieldedVmConfig(instanceName, true, true, true), @@ -2655,6 +2651,8 @@ func 
TestAccComputeInstance_shieldedVmConfig(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasShieldedVmConfig(&instance, true, true, false), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_shieldedVmConfig(instanceName, true, true, false)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -2674,9 +2672,9 @@ func TestAccComputeInstanceConfidentialInstanceConfigMain(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstanceConfidentialInstanceConfigEnable(instanceName, "SEV"), @@ -2707,6 +2705,8 @@ func TestAccComputeInstanceConfidentialInstanceConfigMain(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar5", &instance), testAccCheckComputeInstanceHasConfidentialInstanceConfig(&instance, false, "TDX"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar5", + testAccComputeInstanceConfidentialInstanceConfigEnableTdx(instanceName, "TDX")), ), }, }, @@ -2750,6 +2750,10 @@ func TestAccComputeInstance_confidentialHyperDiskBootDisk(t *testing.T) { computeInstanceImportStep(context_1["zone"].(string), context_1["instance_name"].(string), []string{"allow_stopping_for_update"}), { Config: testAccComputeInstanceConfidentialHyperDiskBootDisk(context_2), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", 
"google_compute_instance.foobar", + testAccComputeInstanceConfidentialHyperDiskBootDisk(context_2)), + ), }, computeInstanceImportStep(context_2["zone"].(string), context_2["instance_name"].(string), []string{"allow_stopping_for_update"}), }, @@ -2760,9 +2764,9 @@ func TestAccComputeInstance_hyperdiskBootDisk_provisioned_iops_throughput(t *tes t.Parallel() context := map[string]interface{}{ - "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), - "zone": "us-central1-a", - "provisioned_iops": 12000, + "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "zone": "us-central1-a", + "provisioned_iops": 12000, "provisioned_throughput": 200, } @@ -2773,6 +2777,10 @@ func TestAccComputeInstance_hyperdiskBootDisk_provisioned_iops_throughput(t *tes Steps: []resource.TestStep{ { Config: testAccComputeInstanceHyperDiskBootDiskProvisionedIopsThroughput(context), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstanceHyperDiskBootDiskProvisionedIopsThroughput(context)), + ), }, computeInstanceImportStep(context["zone"].(string), context["instance_name"].(string), []string{"allow_stopping_for_update"}), }, @@ -2785,9 +2793,9 @@ func TestAccComputeInstance_enableDisplay(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_enableDisplay(instanceName), @@ -2795,10 +2803,10 @@ func TestAccComputeInstance_enableDisplay(t *testing.T) { computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), { Config: 
testAccComputeInstance_enableDisplayUpdated(instanceName), - }, - computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), - { - Config: testAccComputeInstance_enableDisplay(instanceName), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_enableDisplayUpdated(instanceName)), + ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), }, @@ -2839,6 +2847,8 @@ func TestAccComputeInstance_desiredStatusTerminatedOnCreation(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, context_2["desired_status"].(string)), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_desiredStatusOnCreation(context_2)), ), }, }, @@ -2892,9 +2902,9 @@ func TestAccComputeInstance_desiredStatusUpdateBasic(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -2933,6 +2943,8 @@ func TestAccComputeInstance_desiredStatusUpdateBasic(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-medium", "RUNNING", false)), ), }, }, @@ -2946,9 
+2958,9 @@ func TestAccComputeInstance_desiredStatusTerminatedUpdateFields(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -2975,6 +2987,8 @@ func TestAccComputeInstance_desiredStatusTerminatedUpdateFields(t *testing.T) { testAccCheckComputeInstanceLabel(&instance, "only_me", "nothing_else"), testAccCheckComputeInstanceTag(&instance, "baz"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_desiredStatusTerminatedUpdate(instanceName)), ), }, }, @@ -2988,9 +3002,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_allowStoppingForU var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3022,9 +3036,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusNotSet_notAllowStoppingFo var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3049,9 +3063,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusRunning_notAllowStoppingF var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3076,9 +3090,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_allowStoppingF var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3095,6 +3109,8 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_allowStoppingF t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "TERMINATED", true)), ), }, }, @@ -3108,9 +3124,9 @@ func TestAccComputeInstance_updateRunning_desiredStatusTerminated_notAllowStoppi var instanceName = 
fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3140,9 +3156,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_allowStoppingFo var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3167,6 +3183,8 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_allowStoppingFo t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "", true)), ), }, }, @@ -3180,9 +3198,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusTerminated_allowStoppi var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: 
testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3220,9 +3238,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_notAllowStoppin var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3247,6 +3265,8 @@ func TestAccComputeInstance_updateTerminated_desiredStatusNotSet_notAllowStoppin t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasMachineType(&instance, "e2-standard-2"), testAccCheckComputeInstanceHasStatus(&instance, "TERMINATED"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_machineType_desiredStatus_allowStoppingForUpdate(instanceName, "e2-standard-2", "", false)), ), }, }, @@ -3260,9 +3280,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusTerminated_notAllowSto var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3300,9 +3320,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_allowStoppingF var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, 
resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3340,9 +3360,9 @@ func TestAccComputeInstance_updateTerminated_desiredStatusRunning_notAllowStoppi var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_basic2(instanceName), @@ -3413,6 +3433,8 @@ func TestAccComputeInstance_desiredStatus_suspended(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), // this mimics resume method behavior + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_desiredStatus_suspended(context_1)), ), }, }, @@ -3425,12 +3447,16 @@ func TestAccComputeInstance_resourcePolicyCollocate(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_resourcePolicyCollocate(instanceName, 
acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_resourcePolicyCollocate(instanceName, acctest.RandString(t, 10))), + ), }, computeInstanceImportStep("us-east4-b", instanceName, []string{"allow_stopping_for_update"}), }, @@ -3444,9 +3470,9 @@ func TestAccComputeInstance_resourcePolicySpread(t *testing.T) { var instance compute.Instance acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_resourcePolicySpread(instanceName, acctest.RandString(t, 10)), @@ -3455,6 +3481,8 @@ func TestAccComputeInstance_resourcePolicySpread(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasStatus(&instance, "RUNNING"), testAccCheckComputeInstanceHasAvailabilityDomain(&instance, 3), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_resourcePolicySpread(instanceName, acctest.RandString(t, 10))), ), }, computeInstanceImportStep("us-east4-b", instanceName, []string{"allow_stopping_for_update"}), @@ -3468,9 +3496,9 @@ func TestAccComputeInstance_subnetworkUpdate(t *testing.T) { suffix := fmt.Sprintf("%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_subnetworkUpdate(suffix, 
instanceName), @@ -3482,6 +3510,10 @@ func TestAccComputeInstance_subnetworkUpdate(t *testing.T) { computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update", "network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), { Config: testAccComputeInstance_subnetworkUpdate(suffix, instanceName), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_subnetworkUpdate(suffix, instanceName)), + ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update", "network_interface.0.alias_ip_range.0.ip_cidr_range", "network_interface.0.alias_ip_range.0.subnetwork_range_name", "network_interface.0.alias_ip_range.1.ip_cidr_range", "network_interface.0.alias_ip_range.1.subnetwork_range_name"}), }, @@ -3493,12 +3525,12 @@ func TestAccComputeInstance_subnetworkProjectMustMatchError(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) suffix := fmt.Sprintf("%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeInstance_subnetworkProjectExpectError(suffix, instanceName), + Config: testAccComputeInstance_subnetworkProjectExpectError(suffix, instanceName), ExpectError: regexp.MustCompile("must match subnetwork_project"), }, }, @@ -3538,6 +3570,8 @@ func TestAccComputeInstance_networkIpUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, 
"google_compute_instance.foobar", &instance), testAccCheckComputeInstanceHasNetworkIP(&instance, "10.3.0.5"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_networkIpUpdateWithComputeAddress(suffix, instanceName)), ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{}), @@ -3550,33 +3584,40 @@ func TestAccComputeInstance_queueCount(t *testing.T) { instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_queueCountSet(instanceName), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_queueCountSet(instanceName)), + ), }, computeInstanceImportStep("us-east1-d", instanceName, []string{"allow_stopping_for_update"}), }, }) } + func TestAccComputeInstance_spotVM(t *testing.T) { t.Parallel() var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_spotVM(instanceName), Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + 
testAccComputeInstance_spotVM(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3584,15 +3625,16 @@ func TestAccComputeInstance_spotVM(t *testing.T) { }) } + func TestAccComputeInstance_spotVM_update(t *testing.T) { t.Parallel() var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -3625,9 +3667,9 @@ func TestAccComputeInstance_maxRunDuration_update(t *testing.T) { expectedMaxRunDuration.Seconds = 60 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_standardVM_maxRunDuration(instanceName, "STOP"), @@ -3640,6 +3682,8 @@ func TestAccComputeInstance_maxRunDuration_update(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, "STOP"), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_standardVM_maxRunDurationUpdated(instanceName, "STOP")), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{"allow_stopping_for_update"}), @@ -3659,9 +3703,9 @@ func TestAccComputeInstance_standardVM_maxRunDuration_stopTerminationAction(t 
*t var instanceTerminationAction = "STOP" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction), @@ -3670,6 +3714,8 @@ func TestAccComputeInstance_standardVM_maxRunDuration_stopTerminationAction(t *t t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_standardVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3689,9 +3735,9 @@ func TestAccComputeInstance_localSsdVM_maxRunDuration_stopTerminationAction(t *t var instanceTerminationAction = "STOP" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_localSsdVM_maxRunDuration(instanceName, instanceTerminationAction), @@ -3700,6 +3746,8 @@ func TestAccComputeInstance_localSsdVM_maxRunDuration_stopTerminationAction(t *t t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), + 
acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_localSsdVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3719,9 +3767,9 @@ func TestAccComputeInstance_spotVM_maxRunDuration_deleteTerminationAction(t *tes var instanceTerminationAction = "DELETE" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_spotVM_maxRunDuration(instanceName, instanceTerminationAction), @@ -3730,6 +3778,8 @@ func TestAccComputeInstance_spotVM_maxRunDuration_deleteTerminationAction(t *tes t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceTerminationAction(&instance, instanceTerminationAction), testAccCheckComputeInstanceMaxRunDuration(&instance, expectedMaxRunDuration), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_spotVM_maxRunDuration(instanceName, instanceTerminationAction)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3749,9 +3799,9 @@ func TestAccComputeInstance_standardVM_maxRunDuration_deleteTerminationAction(t var instanceTerminationAction = "DELETE" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_standardVM_maxRunDuration(instanceName, 
instanceTerminationAction), @@ -3777,9 +3827,9 @@ func TestAccComputeInstance_spotVM_maxRunDuration_update(t *testing.T) { expectedMaxRunDuration.Nanos = 123 expectedMaxRunDuration.Seconds = 60 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -3813,9 +3863,9 @@ func TestAccComputeInstance_localSsdRecoveryTimeout(t *testing.T) { expectedLocalSsdRecoveryTimeout.Seconds = 3600 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_localSsdRecoveryTimeout(instanceName), @@ -3823,6 +3873,8 @@ func TestAccComputeInstance_localSsdRecoveryTimeout(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLocalSsdRecoveryTimeout(&instance, expectedLocalSsdRecoveryTimeout), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_localSsdRecoveryTimeout(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{}), @@ -3840,9 +3892,9 @@ func TestAccComputeInstance_localSsdRecoveryTimeout_update(t *testing.T) { expectedLocalSsdRecoveryTimeout.Nanos = 0 expectedLocalSsdRecoveryTimeout.Seconds = 3600 acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_scheduling(instanceName), @@ -3887,6 +3939,8 @@ func TestAccComputeInstance_partnerMetadata(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstancePartnerMetadata(&instance, expectedPartnerMetadata), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_partnerMetadata(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{fmt.Sprintf("partner_metadata.%s", namespace)}), @@ -3959,6 +4013,8 @@ func TestAccComputeInstance_partnerMetadata_deletePartnerMetadata(t *testing.T) Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_partnerMetadata_empty(instanceName)), ), }, computeInstanceImportStep("us-central1-a", instanceName, []string{fmt.Sprintf("partner_metadata.%s", namespace)}), @@ -3974,9 +4030,9 @@ func TestAccComputeInstance_metadataStartupScript_update(t *testing.T) { var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_metadataStartupScript(instanceName, "e2-medium", "abc"), @@ -3990,6 +4046,8 @@ func TestAccComputeInstance_metadataStartupScript_update(t *testing.T) { Check: 
resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_metadataStartupScript(instanceName, "e2-standard-4", "xyz")), ), }, }, @@ -4027,6 +4085,8 @@ func TestAccComputeInstance_metadataStartupScript_gracefulSwitch(t *testing.T) { &instance, "foo", "abc"), testAccCheckComputeInstanceMetadata( &instance, "startup-script", "echo hi > /test.txt"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_metadataStartupScript_gracefulSwitch(instanceName, "e2-medium", "abc")), ), }, }, @@ -4042,9 +4102,9 @@ func TestAccComputeInstance_regionBootDisk(t *testing.T) { var suffix = acctest.RandString(t, 10) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeInstance_regionBootDisk(instanceName, diskName, suffix), @@ -4052,7 +4112,10 @@ func TestAccComputeInstance_regionBootDisk(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.regional_vm_instance", &instance), testAccCheckComputeInstanceBootDisk(&instance, diskName), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.regional_vm_instance", + testAccComputeInstance_regionBootDisk(instanceName, diskName, suffix)), ), + }, }, }) @@ -4085,6 +4148,8 @@ func TestAccComputeInstance_creationOnlyAttributionLabel(t *testing.T) { t, "google_compute_instance.foobar", &instance), testAccCheckComputeInstanceLabel(&instance, "user_label", "bar"), testAccCheckComputeInstanceAttributionLabel(&instance, true), + acctest.GetTestMetadataForTgc("compute", 
"google_compute_instance.foobar", + testAccComputeInstance_attributionLabelUpdate(instanceName, "true", "CREATION_ONLY")), ), }, }, @@ -4198,6 +4263,8 @@ func TestAccComputeInstance_autoDeleteUpdate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.auto_delete", "true"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_autoDeleteUpdate(context_1)), ), }, }, @@ -4248,14 +4315,8 @@ func TestAccComputeInstance_keyRevocationActionType(t *testing.T) { testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "key_revocation_action_type", ""), - ), - }, - { - Config: testAccComputeInstance_keyRevocationActionType(context_2), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeInstanceExists( - t, "google_compute_instance.foobar", &instance), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "key_revocation_action_type", "STOP"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_keyRevocationActionType(context_3)), ), }, }, @@ -4361,6 +4422,8 @@ func TestAccComputeInstance_GracefulShutdownWithResetUpdate(t *testing.T) { testAccCheckComputeInstanceExists(t, "google_compute_instance.foobar", &instance), resource.TestCheckResourceAttr("google_compute_instance.foobar", "allow_stopping_for_update", "true"), resource.TestCheckResourceAttr("google_compute_instance.foobar", "scheduling.0.graceful_shutdown.0.max_duration.0.seconds", "100"), + acctest.GetTestMetadataForTgc("compute", "google_compute_instance.foobar", + testAccComputeInstance_GracefulShutdownUpdate(acceptableByApi_3)), ), }, { @@ -4636,7 +4699,7 @@ func testAccComputeInstance_nic_securityPolicyCreateWithTwoNicsAndTwoAccessConfi 
ImportStateVerify: true, }, { - Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoNicsAndTwoAccessConfigsUpdateTwoPoliciesRemoveAccessConfig(suffix, policyName, policyName2, instanceName, "google_compute_region_security_policy.policyforinstance.self_link", "google_compute_region_security_policy.policyforinstance.self_link"), + Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoNicsAndTwoAccessConfigsUpdateTwoPoliciesRemoveAccessConfig(suffix, policyName, policyName2, instanceName, "google_compute_region_security_policy.policyforinstance.self_link", "google_compute_region_security_policy.policyforinstance.self_link"), ExpectError: regexp.MustCompile(errorDeleteAccessConfigWithSecPolicy), }, { @@ -4742,7 +4805,7 @@ func testAccComputeInstance_nic_securityPolicyCreateWithAccessConfigUpdateAccess ImportStateVerify: true, }, { - Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoAccessConfigsUpdateAccessConfig(suffix, policyName, instanceName), + Config: testAccComputeInstance_nic_securityPolicyCreateWithTwoAccessConfigsUpdateAccessConfig(suffix, policyName, instanceName), ExpectError: regexp.MustCompile(errorDeleteAccessConfigWithSecPolicy), }, { @@ -4856,7 +4919,7 @@ func testAccCheckComputeInstanceUpdateMachineType(t *testing.T, n string) resour func TestAccComputeInstance_NetworkAttachment(t *testing.T) { t.Parallel() suffix := fmt.Sprintf("%s", acctest.RandString(t, 10)) - envRegion := envvar.GetTestRegionFromEnv() + envRegion := envvar.GetTestRegionFromEnv() var instance compute.Instance {{ if eq $.TargetVersionName `ga` }} @@ -4873,7 +4936,7 @@ func TestAccComputeInstance_NetworkAttachment(t *testing.T) { context := map[string]interface{}{ "suffix": (acctest.RandString(t, 10)), "network_attachment_name": testNetworkAttachmentName, - "region": envRegion, + "region": envRegion, } acctest.VcrTest(t, resource.TestCase{ @@ -4997,6 +5060,7 @@ func TestAccComputeInstance_guestOsFeatures(t *testing.T) { }) } + func 
testAccCheckComputeInstanceDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -5123,15 +5187,15 @@ func testAccCheckComputeInstanceIpv6AccessConfigHasExternalIPv6(instance *comput } func testAccCheckComputeInstanceIpv6AccessConfigHasInternalIPv6(instance *compute.Instance) resource.TestCheckFunc { - return func(s *terraform.State) error { - for _, i := range instance.NetworkInterfaces { - if i.Ipv6Address == "" { - return fmt.Errorf("no internal IPv6 address") - } - } + return func(s *terraform.State) error { + for _, i := range instance.NetworkInterfaces { + if i.Ipv6Address == "" { + return fmt.Errorf("no internal IPv6 address") + } + } - return nil - } + return nil + } } func testAccCheckComputeInstanceAccessConfigHasPTR(instance *compute.Instance) resource.TestCheckFunc { @@ -5173,7 +5237,7 @@ func testAccCheckComputeInstanceMaxRunDuration(instance *compute.Instance, insta } if !reflect.DeepEqual(*instance.Scheduling.MaxRunDuration, instanceMaxRunDurationWant) { - return fmt.Errorf("got the wrong instance max run duration action: have: %#v; want: %#v", instance.Scheduling.MaxRunDuration, instanceMaxRunDurationWant) + return fmt.Errorf("got the wrong instance max run duration action: have: %#v; want: %#v",instance.Scheduling.MaxRunDuration, instanceMaxRunDurationWant) } return nil @@ -5207,7 +5271,7 @@ func testAccCheckComputeInstanceLocalSsdRecoveryTimeout(instance *compute.Instan } if !reflect.DeepEqual(*instance.Scheduling.LocalSsdRecoveryTimeout, instanceLocalSsdRecoveryTiemoutWant) { - return fmt.Errorf("got the wrong instance local ssd recovery timeout action: have: %#v; want: %#v", instance.Scheduling.LocalSsdRecoveryTimeout, instanceLocalSsdRecoveryTiemoutWant) + return fmt.Errorf("got the wrong instance local ssd recovery timeout action: have: %#v; want: %#v",instance.Scheduling.LocalSsdRecoveryTimeout, instanceLocalSsdRecoveryTiemoutWant) } return nil @@ 
-5254,7 +5318,7 @@ func testAccCheckComputeInstanceTerminationAction(instance *compute.Instance, in } if instance.Scheduling.InstanceTerminationAction != instanceTerminationActionWant { - return fmt.Errorf("got the wrong instance termniation action: have: %s; want: %s", instance.Scheduling.InstanceTerminationAction, instanceTerminationActionWant) + return fmt.Errorf("got the wrong instance termniation action: have: %s; want: %s",instance.Scheduling.InstanceTerminationAction, instanceTerminationActionWant) } return nil @@ -5350,7 +5414,7 @@ func testAccCheckComputeInstanceScratchDisk(instance *compute.Instance, interfac if deviceName, ok := interfaces[i]["deviceName"]; ok { if disk.DeviceName != deviceName { return fmt.Errorf("Mismatched device name on scratch disk #%d, expected: %q, found: %q", - i, deviceName, disk.DeviceName) + i, deviceName, disk.DeviceName) } } @@ -5907,6 +5971,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -5968,6 +6033,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -5998,6 +6064,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -6416,6 +6483,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] deletion_protection = false @@ -6443,6 +6511,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] deletion_protection = true @@ -6626,7 +6695,7 @@ resource "google_compute_instance" "foobar" { } func testAccComputeInstance_internalIpv6(ip, instance string) string { - 
return fmt.Sprintf(` + return fmt.Sprintf(` data "google_compute_image" "my_image" { family = "debian-11" project = "debian-cloud" @@ -7580,106 +7649,6 @@ resource "google_compute_instance" "foobar" { `, disk, instance) } -func testAccComputeInstance_attachedDisk_forceAttach(disk, instance string, force_attach bool) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_region_disk" "regionaldisk" { - name = "%s-1" - size = 10 - type = "pd-ssd" - region = "us-central1" - replica_zones = ["us-central1-a", "us-central1-b"] -} - -resource "google_compute_region_disk" "regionaldisk2" { - name = "%s-2" - size = 10 - type = "pd-ssd" - region = "us-central1" - replica_zones = ["us-central1-a", "us-central1-b"] -} - -resource "google_compute_instance" "foobar" { - name = "%s" - machine_type = "e2-medium" - zone = "us-central1-a" - - boot_disk { - initialize_params { - image = data.google_compute_image.my_image.self_link - } - } - - attached_disk { - source = google_compute_region_disk.regionaldisk.self_link - force_attach = %t - } - - attached_disk { - source = google_compute_region_disk.regionaldisk2.self_link - force_attach = %t - } - - network_interface { - network = "default" - } -} -`, disk, disk, instance, force_attach, force_attach) -} - -func testAccComputeInstance_attachedDisk_forceAttach_zonal(disk, instance string, force_attach bool) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_disk" "foobar" { - name = "%s-1" - size = 10 - type = "pd-ssd" - zone = "us-central1-a" -} - -resource "google_compute_disk" "foobar2" { - name = "%s-2" - size = 10 - type = "pd-ssd" - zone = "us-central1-a" -} - -resource "google_compute_instance" "foobar" { - name = "%s" - machine_type = "e2-medium" - zone = "us-central1-a" - - boot_disk { - initialize_params { - image = 
data.google_compute_image.my_image.self_link - } - } - - attached_disk { - source = google_compute_disk.foobar.name - force_attach = %t - } - - attached_disk { - source = google_compute_disk.foobar2.name - force_attach = %t - } - - network_interface { - network = "default" - } -} -`, disk, disk, instance, force_attach, force_attach) -} - func testAccComputeInstance_bootDisk_source(disk, instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -7792,62 +7761,6 @@ resource "google_compute_instance" "foobar" { `, instance, diskMode) } -func testAccComputeInstance_bootDisk_forceAttach_zonal(disk, instance string, force_attach bool) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_disk" "foobar" { - name = "%s" - zone = "us-central1-a" - image = data.google_compute_image.my_image.self_link -} - -resource "google_compute_instance" "foobar" { - name = "%s" - machine_type = "e2-medium" - zone = "us-central1-a" - - boot_disk { - source = google_compute_disk.foobar.name - force_attach = %t - } - - network_interface { - network = "default" - } -} -`, disk, instance, force_attach) -} - -func testAccComputeInstance_bootDisk_forceAttach(instance string, force_attach bool) string { - return fmt.Sprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_instance" "foobar" { - name = "%s" - machine_type = "e2-medium" - zone = "us-central1-a" - - boot_disk { - initialize_params { - image = data.google_compute_image.my_image.self_link - } - force_attach = %t - } - - network_interface { - network = "default" - } -} -`, instance, force_attach) -} - func testAccComputeInstance_with375GbScratchDisk(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -8822,6 +8735,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = 
"e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -9239,6 +9153,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false boot_disk { initialize_params { @@ -9675,8 +9590,7 @@ resource "google_compute_instance" "foobar" { values = ["%[1]s"] } } -} -`, instanceName) +}`, instanceName) } {{ if ne $.TargetVersionName `ga` -}} @@ -9705,8 +9619,7 @@ resource "google_compute_instance" "foobar" { %{host_error_timeout_sec} automatic_restart = true } -} -`, context) +}`, context) } {{- end }} @@ -9746,7 +9659,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstanceConfidentialInstanceConfigEnable(instance string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -9861,7 +9774,7 @@ resource "google_compute_instance" "foobar4" { func testAccComputeInstanceConfidentialInstanceConfigNoEnable(instance string, minCpuPlatform, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image2" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -9971,6 +9884,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] desired_status = "RUNNING" @@ -10007,6 +9921,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] desired_status = "RUNNING" @@ -10173,6 +10088,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "%s" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -10234,6 +10150,7 @@ 
resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-medium" zone = "us-central1-a" + can_ip_forward = false tags = ["baz"] boot_disk { @@ -10297,6 +10214,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "c2-standard-4" zone = "us-east4-b" + can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -10324,6 +10242,7 @@ resource "google_compute_instance" "second" { name = "%s-2" machine_type = "c2-standard-4" zone = "us-east4-b" + can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -10355,6 +10274,7 @@ resource "google_compute_resource_policy" "foo" { collocation = "COLLOCATED" } } + `, instance, instance, suffix) } @@ -10369,6 +10289,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "e2-standard-4" zone = "us-east4-b" + can_ip_forward = false tags = ["foo", "bar"] //deletion_protection = false is implicit in this config due to default value @@ -10760,7 +10681,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_spotVM(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -10792,7 +10713,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_standardVM_maxRunDuration(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -10827,7 +10748,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_standardVM_maxRunDurationUpdated(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = 
"ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -10900,10 +10821,11 @@ resource "google_compute_instance" "foobar" { `, instance, instanceTerminationAction) } + func testAccComputeInstance_spotVM_maxRunDuration(instance string, instanceTerminationAction string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -10939,7 +10861,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_localSsdRecoveryTimeout(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -10973,7 +10895,7 @@ resource "google_compute_instance" "foobar" { func testAccComputeInstance_partnerMetadata_empty(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -10991,14 +10913,13 @@ resource "google_compute_instance" "foobar" { network_interface { network = "default" } -} -`, instance) +}`, instance) } func testAccComputeInstance_partnerMetadata(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -11028,8 +10949,7 @@ resource "google_compute_instance" "foobar" { } }) } -} -`, instance) +}`, instance) } {{- end }} @@ -11044,6 +10964,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "%s" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -11076,6 +10997,7 @@ resource "google_compute_instance" "foobar" { name = "%s" machine_type = "%s" zone = "us-central1-a" + can_ip_forward = false tags = ["foo", "bar"] boot_disk { @@ -12141,7 +12063,7 @@ func TestAccComputeInstance_bootDisk_storagePoolSpecified(t *testing.T) { ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv()), + Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, storagePoolNameLong, envvar.GetTestZoneFromEnv()), }, { ResourceName: "google_compute_instance.foobar", @@ -12163,7 +12085,7 @@ func TestAccComputeInstance_bootDisk_storagePoolSpecified_nameOnly(t *testing.T) ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv()), + Config: testAccComputeInstance_bootDisk_storagePoolSpecified(instanceName, "tf-bootstrap-storage-pool-hyperdisk-balanced-basic-2", envvar.GetTestZoneFromEnv()), }, { ResourceName: "google_compute_instance.foobar", @@ -12205,38 +12127,38 @@ resource "google_compute_instance" "foobar" { } func TestAccComputeInstance_bootAndAttachedDisk_interface(t *testing.T) { - t.Parallel() - - instanceName1 := fmt.Sprintf("tf-test-vm1-%s", acctest.RandString(t, 10)) - diskName1 := fmt.Sprintf("tf-test-disk1-%s", acctest.RandString(t, 10)) - instanceName2 := fmt.Sprintf("tf-test-vm2-%s", acctest.RandString(t, 10)) - diskName2 := fmt.Sprintf("tf-test-disk2-%s", acctest.RandString(t, 10)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName1, diskName1, envvar.GetTestZoneFromEnv(), "c3-standard-22", "NVME", false), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "NVME"), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", 
"c3-standard-22"), - ), - }, - //computeInstanceImportStep("us-central1-a", instanceName1, []string{"desired_status","allow_stopping_for_update"}), - { - Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName2, diskName2, envvar.GetTestZoneFromEnv(), "n2-standard-8", "SCSI", true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "SCSI"), - resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "n2-standard-8"), - ), - }, - //computeInstanceImportStep("us-central1-a", instanceName2, []string{"desired_status","allow_stopping_for_update"}), - }, - }) + t.Parallel() + + instanceName1 := fmt.Sprintf("tf-test-vm1-%s", acctest.RandString(t, 10)) + diskName1 := fmt.Sprintf("tf-test-disk1-%s", acctest.RandString(t, 10)) + instanceName2 := fmt.Sprintf("tf-test-vm2-%s", acctest.RandString(t, 10)) + diskName2 := fmt.Sprintf("tf-test-disk2-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName1, diskName1, envvar.GetTestZoneFromEnv(), "c3-standard-22", "NVME", false), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", "NVME"), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "c3-standard-22"), + ), + }, + //computeInstanceImportStep("us-central1-a", instanceName1, []string{"desired_status","allow_stopping_for_update"}), + { + Config: testAccComputeInstance_bootAndAttachedDisk_interface(instanceName2, diskName2, envvar.GetTestZoneFromEnv(), "n2-standard-8", "SCSI", true), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance.foobar", "boot_disk.0.interface", 
"SCSI"), + resource.TestCheckResourceAttr("google_compute_instance.foobar", "machine_type", "n2-standard-8"), + ), + }, + //computeInstanceImportStep("us-central1-a", instanceName2, []string{"desired_status","allow_stopping_for_update"}), + }, + }) } func testAccComputeInstance_bootAndAttachedDisk_interface(instanceName, diskName, zone, machineType, bootDiskInterface string, allowStoppingForUpdate bool) string { - return fmt.Sprintf(` + return fmt.Sprintf(` data "google_compute_image" "my_image" { family = "ubuntu-2204-lts" project = "ubuntu-os-cloud" @@ -12356,8 +12278,7 @@ resource "google_compute_instance" "foobar" { network_interface { network = "default" } -} -`, context) +}`, context) } func testAccComputeInstance_nicStackTypeUpdate_ipv6(context map[string]interface{}) string { @@ -12890,4 +12811,4 @@ resource "google_compute_instance" "foobar" { data "google_compute_default_service_account" "default" { } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl deleted file mode 100644 index fa2406ef7f8f..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_application_awareness_test.go.tmpl +++ /dev/null @@ -1,131 +0,0 @@ -{{ if ne $.TargetVersionName `ga` -}} -package compute_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccComputeInterconnect_computeInterconnectBasicTestExample_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - 
CheckDestroy: testAccCheckComputeInterconnectDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInterconnect_computeInterconnect_create(context), - }, - { - ResourceName: "google_compute_interconnect.example-interconnect", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, - }, - { - Config: testAccComputeInterconnect_computeInterconnect_enable_aai(context), - }, - { - ResourceName: "google_compute_interconnect.example-interconnect", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, - }, - }, - }) -} - -func testAccComputeInterconnect_computeInterconnect_create(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_compute_interconnect" "example-interconnect" { - name = "tf-test-example-interconnect%{random_suffix}" - customer_name = "internal_customer" # Special customer only available for Google testing. - interconnect_type = "DEDICATED" - link_type = "LINK_TYPE_ETHERNET_100G_LR" - location = "https://www.googleapis.com/compute/v1/projects/${data.google_project.project.name}/global/interconnectLocations/z2z-us-east4-zone1-pniada-a" # Special location only available for Google testing. - requested_link_count = 1 - admin_enabled = true - description = "example description" - macsec_enabled = false - noc_contact_email = "user@example.com" - labels = { - mykey = "myvalue" - } -} -`, context) -} - -func testAccComputeInterconnect_computeInterconnect_enable_aai(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" {} - -resource "google_compute_interconnect" "example-interconnect" { - name = "tf-test-example-interconnect%{random_suffix}" - customer_name = "internal_customer" # Special customer only available for Google testing. 
- interconnect_type = "DEDICATED" - link_type = "LINK_TYPE_ETHERNET_100G_LR" - location = "https://www.googleapis.com/compute/v1/projects/${data.google_project.project.name}/global/interconnectLocations/z2z-us-east4-zone1-pniada-a" # Special location only available for Google testing. - requested_link_count = 1 - admin_enabled = true - description = "example description" - macsec_enabled = false - noc_contact_email = "user@example.com" - labels = { - mykey = "myvalue" - } - aai_enabled = true - application_aware_interconnect { - profile_description = "application awareness config with BandwidthPercentage policy." - bandwidth_percentage_policy { - bandwidth_percentage { - traffic_class = "TC1" - percentage = 20 - } - bandwidth_percentage { - traffic_class = "TC2" - percentage = 20 - } - bandwidth_percentage { - traffic_class = "TC3" - percentage = 20 - } - bandwidth_percentage { - traffic_class = "TC4" - percentage = 20 - } - bandwidth_percentage { - traffic_class = "TC5" - percentage = 10 - } - bandwidth_percentage { - traffic_class = "TC6" - percentage = 10 - } - } - shape_average_percentage { - traffic_class = "TC1" - percentage = 30 - } - shape_average_percentage { - traffic_class = "TC2" - percentage = 25 - } - shape_average_percentage { - traffic_class = "TC3" - percentage = 25 - } - } -} -`, context) -} -{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go deleted file mode 100644 index 0b6c23691860..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_attachment_group_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package compute_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - 
"github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccComputeInterconnectAttachmentGroup_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "deletion_protection": false, - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInterconnectAttachmentGroupDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInterconnectAttachmentGroup_basic(context), - }, - { - ResourceName: "google_compute_interconnect_attachment_group.example-interconnect-attachment-group", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeInterconnectAttachmentGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_interconnect_attachment_group" "example-interconnect-attachment-group" { - name = "tf-test-example-interconnect-attachment-group%{random_suffix}" - intent { - availability_sla = "NO_SLA" - } -} -`, context) -} - -func TestAccComputeInterconnectAttachmentGroup_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "deletion_protection": false, - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInterconnectAttachmentGroup_basic(context), - }, - { - ResourceName: "google_compute_interconnect_attachment_group.example-interconnect-attachment-group", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeInterconnectAttachmentGroup_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - 
plancheck.ExpectResourceAction("google_compute_interconnect_attachment_group.example-interconnect-attachment-group", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_interconnect_attachment_group.example-interconnect-attachment-group", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeInterconnectAttachmentGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_interconnect_attachment_group" "example-interconnect-attachment-group" { - name = "tf-test-example-interconnect-attachment-group%{random_suffix}" - intent { - availability_sla = "NO_SLA" - } - description = "New description" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go deleted file mode 100644 index 89b7d56fcd60..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_interconnect_group_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package compute_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccComputeInterconnectGroup_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "deletion_protection": false, - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInterconnectGroup_basic(context), - }, - { - ResourceName: "google_compute_interconnect_group.example-interconnect-group", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func 
testAccComputeInterconnectGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_interconnect_group" "example-interconnect-group" { - name = "tf-test-example-interconnect-group%{random_suffix}" - intent { - topology_capability = "NO_SLA" - } -} -`, context) -} - -func TestAccComputeInterconnectGroup_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "deletion_protection": false, - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeInterconnectGroupDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeInterconnectGroup_basic(context), - }, - { - ResourceName: "google_compute_interconnect_group.example-interconnect-group", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeInterconnectGroup_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_interconnect_group.example-interconnect-group", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_interconnect_group.example-interconnect-group", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeInterconnectGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_interconnect_group" "example-interconnect-group" { - name = "tf-test-example-interconnect-group%{random_suffix}" - intent { - topology_capability = "NO_SLA" - } - description = "New description" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go deleted file mode 100644 index 
3aa877e8b1c7..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_attachment_test.go +++ /dev/null @@ -1,204 +0,0 @@ -package compute_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccComputeNetworkAttachment_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeNetworkAttachmentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkAttachment_full(context), - }, - { - ResourceName: "google_compute_network_attachment.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region"}, - }, - { - Config: testAccComputeNetworkAttachment_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_network_attachment.default", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_compute_network_attachment.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region"}, - }, - }, - }) -} - -func testAccComputeNetworkAttachment_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network_attachment" "default" { - name = "tf-test-basic-network-attachment%{random_suffix}" - region = "us-central1" - description = "basic network attachment description" - 
connection_preference = "ACCEPT_MANUAL" - - subnetworks = [ - google_compute_subnetwork.net1.self_link - ] - - producer_accept_lists = [ - google_project.accepted_producer_project1.project_id - ] - - producer_reject_lists = [ - google_project.rejected_producer_project1.project_id - ] -} - -resource "google_compute_network" "default" { - name = "tf-test-basic-network%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "net1" { - name = "tf-test-basic-subnetwork1-%{random_suffix}" - region = "us-central1" - - network = google_compute_network.default.id - ip_cidr_range = "10.0.0.0/16" -} - -resource "google_compute_subnetwork" "net2" { - name = "tf-test-basic-subnetwork2-%{random_suffix}" - region = "us-central1" - - network = google_compute_network.default.id - ip_cidr_range = "10.1.0.0/16" -} - -resource "google_project" "rejected_producer_project1" { - project_id = "tf-test-prj-reject1-%{random_suffix}" - name = "tf-test-prj-reject1-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "rejected_producer_project2" { - project_id = "tf-test-prj-reject2-%{random_suffix}" - name = "tf-test-prj-reject2-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "accepted_producer_project1" { - project_id = "tf-test-prj-accept1-%{random_suffix}" - name = "tf-test-prj-accept1-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "accepted_producer_project2" { - project_id = "tf-test-prj-accept2-%{random_suffix}" - name = "tf-test-prj-accept2-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} -`, context) -} - -func testAccComputeNetworkAttachment_update(context map[string]interface{}) string { - return 
acctest.Nprintf(` -resource "google_compute_network_attachment" "default" { - name = "tf-test-basic-network-attachment%{random_suffix}" - region = "us-central1" - description = "basic network attachment description" - connection_preference = "ACCEPT_MANUAL" - - subnetworks = [ - google_compute_subnetwork.net2.self_link - ] - - producer_accept_lists = [ - google_project.accepted_producer_project2.project_id - ] - - producer_reject_lists = [ - google_project.rejected_producer_project2.project_id - ] -} - -resource "google_compute_network" "default" { - name = "tf-test-basic-network%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "net1" { - name = "tf-test-basic-subnetwork1-%{random_suffix}" - region = "us-central1" - - network = google_compute_network.default.id - ip_cidr_range = "10.0.0.0/16" -} - -resource "google_compute_subnetwork" "net2" { - name = "tf-test-basic-subnetwork2-%{random_suffix}" - region = "us-central1" - - network = google_compute_network.default.id - ip_cidr_range = "10.1.0.0/16" -} - -resource "google_project" "rejected_producer_project1" { - project_id = "tf-test-prj-reject1-%{random_suffix}" - name = "tf-test-prj-reject1-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "rejected_producer_project2" { - project_id = "tf-test-prj-reject2-%{random_suffix}" - name = "tf-test-prj-reject2-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "accepted_producer_project1" { - project_id = "tf-test-prj-accept1-%{random_suffix}" - name = "tf-test-prj-accept1-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project" "accepted_producer_project2" { - project_id = "tf-test-prj-accept2-%{random_suffix}" - name = "tf-test-prj-accept2-%{random_suffix}" 
- org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go.tmpl index fd4d0954f704..388cdc74c2a1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_endpoint_group_test.go.tmpl @@ -3,8 +3,8 @@ package compute_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccComputeNetworkEndpointGroup_networkEndpointGroup(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go.tmpl similarity index 84% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go.tmpl index ded8779fd055..3ce9ff750231 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_rule_test.go.tmpl @@ -5,9 +5,9 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" 
"github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-testing/plancheck" ) func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { @@ -34,7 +34,7 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeNetworkFirewallPolicyRule_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -49,7 +49,7 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeNetworkFirewallPolicyRule_removeConfigs(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -64,7 +64,7 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeNetworkFirewallPolicyRule_start(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -82,139 +82,139 @@ func TestAccComputeNetworkFirewallPolicyRule_update(t *testing.T) { } func TestAccComputeNetworkFirewallPolicyRule_multipleRules(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project_name": envvar.GetTestProjectFromEnv(), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + 
"project_name": envvar.GetTestProjectFromEnv(), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_multiple(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule2", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeNetworkFirewallPolicyRule_multipleAdd(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_multiple(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule2", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeNetworkFirewallPolicyRule_multipleAdd(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", 
plancheck.ResourceActionUpdate), }, }, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule3", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeNetworkFirewallPolicyRule_multipleRemove(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule3", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeNetworkFirewallPolicyRule_multipleRemove(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule2", plancheck.ResourceActionDestroy), plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule3", plancheck.ResourceActionUpdate), }, }, - }, - }, - }) + }, + }, + }) } func TestAccComputeNetworkFirewallPolicyRule_addressGroupOrder(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project": envvar.GetTestProjectFromEnv(), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": envvar.GetTestProjectFromEnv(), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_addressGroupOrder(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.src_test", - ImportState: true, - ImportStateVerify: 
true, - // Referencing using ID causes import to fail - // Client-side reordering doesn't work with no state, so ignore on import - ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.src_address_groups"}, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.dest_test", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - // Client-side reordering doesn't work with no state, so ignore on import - ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.dest_address_groups"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_addressGroupOrder(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.src_test", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + // Client-side reordering doesn't work with no state, so ignore on import + ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.src_address_groups"}, + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.dest_test", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + // Client-side reordering doesn't work with no state, so ignore on import + ImportStateVerifyIgnore: []string{"firewall_policy", "match.0.dest_address_groups"}, + }, + }, + }) } func TestAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - "security_profile_group_prefix": "//", - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", 
envvar.GetTestOrgFromEnv(t)), + "security_profile_group_prefix": "//", + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_basic(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - { - Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_basic(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + { + Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, }, - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail - ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - }, - }) + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to 
fail + ImportStateVerifyIgnore: []string{"firewall_policy"}, + }, + }, + }) } func TestAccComputeNetworkFirewallPolicyRule_secureTags(t *testing.T) { @@ -239,12 +239,12 @@ func TestAccComputeNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, { Config: testAccComputeNetworkFirewallPolicyRule_secureTagsUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_network_firewall_policy_rule.primary", plancheck.ResourceActionUpdate), }, @@ -254,40 +254,43 @@ func TestAccComputeNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, }, }) } + func TestAccComputeNetworkFirewallSecurityProfileGroupDiffsuppress(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), - "security_profile_group_prefix": "/", - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_name": fmt.Sprintf("organizations/%s", envvar.GetTestOrgFromEnv(t)), + "security_profile_group_prefix": "/", + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: 
testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), - }, - { - ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", - ImportState: true, - ImportStateVerify: true, - // Referencing using ID causes import to fail + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context), + }, + { + ResourceName: "google_compute_network_firewall_policy_rule.fw_policy_rule1", + ImportState: true, + ImportStateVerify: true, + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy"}, - }, - }, - }) + }, + }, + }) } + + func testAccComputeNetworkFirewallPolicyRule_secureTags(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_network_security_address_group" "basic_global_networksecurity_address_group" { @@ -431,7 +434,7 @@ resource "google_tags_tag_value" "basic_value" { } func testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_compute_network" "network1" { name = "tf-test-%{random_suffix}" auto_create_subnetworks = false @@ -484,7 +487,7 @@ resource "google_compute_network_firewall_policy_rule" "fw_policy_rule1" { } func testAccComputeNetworkFirewallPolicyRule_securityProfileGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_network_security_security_profile" "security_profile" { name = "tf-test-my-sp%{random_suffix}" type = "THREAT_PREVENTION" @@ -960,8 +963,9 @@ resource "google_compute_network_firewall_policy_rule" "fw_policy_rule3" { `, context) } + func testAccComputeNetworkFirewallPolicyRule_addressGroupOrder(context map[string]interface{}) string 
{ - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_compute_network_firewall_policy" "policy" { name = "tf-test-policy-%{random_suffix}" description = "Resource created for Terraform acceptance testing" @@ -1027,4 +1031,4 @@ resource "google_compute_network_firewall_policy_rule" "dest_test" { } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl similarity index 93% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl index 43488d34c0bc..db0429635ab0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_firewall_policy_with_rules_test.go.tmpl @@ -1,11 +1,12 @@ package compute_test - +{{- if ne $.TargetVersionName "ga" }} import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + ) func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { @@ -13,12 +14,12 @@ func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), + "org_id": envvar.GetTestOrgFromEnv(t), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), CheckDestroy: 
testAccCheckComputeNetworkFirewallPolicyWithRulesDestroyProducer(t), Steps: []resource.TestStep{ { @@ -44,11 +45,13 @@ func TestAccComputeNetworkFirewallPolicyWithRules_update(t *testing.T) { func testAccComputeNetworkFirewallPolicyWithRules_full(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { + provider = google-beta } resource "google_compute_network_firewall_policy_with_rules" "network-firewall-policy-with-rules" { name = "tf-test-tf-fw-policy-with-rules%{random_suffix}" description = "Terraform test" + provider = google-beta rule { description = "tcp rule" @@ -112,6 +115,7 @@ resource "google_compute_network_firewall_policy_with_rules" "network-firewall-p } resource "google_network_security_address_group" "address_group_1" { + provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Global address group" @@ -122,6 +126,7 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { + provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -132,12 +137,14 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { + provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" } resource "google_network_security_security_profile_group" "security_profile_group_1" { + provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -145,6 +152,7 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { + provider = google-beta name = 
"tf-test-tf-security-profile%{random_suffix}" type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" @@ -156,11 +164,13 @@ resource "google_network_security_security_profile" "security_profile_1" { func testAccComputeNetworkFirewallPolicyWithRules_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { + provider = google-beta } resource "google_compute_network_firewall_policy_with_rules" "network-firewall-policy-with-rules" { name = "tf-test-tf-fw-policy-with-rules%{random_suffix}" description = "Terraform test - update" + provider = google-beta rule { description = "tcp rule - changed" @@ -203,6 +213,7 @@ resource "google_compute_network_firewall_policy_with_rules" "network-firewall-p } resource "google_network_security_address_group" "address_group_1" { + provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Global address group" @@ -213,6 +224,7 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { + provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -223,12 +235,14 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { + provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" } resource "google_network_security_security_profile_group" "security_profile_group_1" { + provider = google-beta name = "tf-test-tf-security-profile-group%{random_suffix}" parent = "organizations/%{org_id}" description = "my description" @@ -236,6 +250,7 @@ resource "google_network_security_security_profile_group" "security_profile_grou } resource "google_network_security_security_profile" "security_profile_1" { + provider = google-beta name = 
"tf-test-tf-security-profile%{random_suffix}" type = "THREAT_PREVENTION" parent = "organizations/%{org_id}" @@ -243,3 +258,4 @@ resource "google_network_security_security_profile" "security_profile_1" { } `, context) } +{{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl index 0b24e3fc61fc..5540d7f4fb4c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl @@ -113,14 +113,6 @@ func ResourceComputeNetworkPeering() *schema.Resource { Description: `Which IP version(s) of traffic and routes are allowed to be imported or exported between peer networks. The default value is IPV4_ONLY. Possible values: ["IPV4_ONLY", "IPV4_IPV6"]`, Default: "IPV4_ONLY", }, - - "update_strategy": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: verify.ValidateEnum([]string{"INDEPENDENT", "CONSENSUS"}), - Description: `The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. 
Possible values: ["INDEPENDENT", "CONSENSUS"]`, - Default: "INDEPENDENT", - }, }, UseJSONNumber: true, } @@ -220,10 +212,6 @@ func resourceComputeNetworkPeeringRead(d *schema.ResourceData, meta interface{}) if err := d.Set("stack_type", flattenNetworkPeeringStackType(peering.StackType, d, config)); err != nil { return fmt.Errorf("Error setting stack_type: %s", err) } - - if err := d.Set("update_strategy", flattenNetworkPeeringUpdateStrategy(peering.UpdateStrategy, d, config)); err != nil { - return fmt.Errorf("Error setting update_strategy: %s", err) - } return nil } @@ -324,18 +312,17 @@ func findPeeringFromNetwork(network *compute.Network, peeringName string) *compu return nil } func expandNetworkPeering(d *schema.ResourceData) *compute.NetworkPeering { - return &compute.NetworkPeering{ - ExchangeSubnetRoutes: true, - Name: d.Get("name").(string), - Network: d.Get("peer_network").(string), - ExportCustomRoutes: d.Get("export_custom_routes").(bool), - ImportCustomRoutes: d.Get("import_custom_routes").(bool), - ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), - ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), - StackType: d.Get("stack_type").(string), - UpdateStrategy: d.Get("update_strategy").(string), - ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, - } + return &compute.NetworkPeering{ + ExchangeSubnetRoutes: true, + Name: d.Get("name").(string), + Network: d.Get("peer_network").(string), + ExportCustomRoutes: d.Get("export_custom_routes").(bool), + ImportCustomRoutes: d.Get("import_custom_routes").(bool), + ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), + ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), + StackType: d.Get("stack_type").(string), + ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, + } } func 
flattenNetworkPeeringStackType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { @@ -347,15 +334,6 @@ func flattenNetworkPeeringStackType(v interface{}, d *schema.ResourceData, confi return v } -func flattenNetworkPeeringUpdateStrategy(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // To prevent the perma-diff caused by the absence of `update_strategy` in API responses for older resource. - if v == nil || tpgresource.IsEmptyValue(reflect.ValueOf(v)) { - return "INDEPENDENT" - } - - return v -} - func sortedNetworkPeeringMutexKeys(networkName, peerNetworkName *tpgresource.GlobalFieldValue) []string { // Whether you delete the peering from network A to B or the one from B to A, they // cannot happen at the same time. diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go index f97e7d71a5d0..928d3566a228 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go @@ -141,42 +141,6 @@ func TestAccComputeNetworkPeering_stackType(t *testing.T) { } -func TestAccComputeNetworkPeering_updateStrategy(t *testing.T) { - t.Parallel() - - primaryNetworkName := fmt.Sprintf("tf-test-network-1-%d", acctest.RandInt(t)) - peeringNetworkName := fmt.Sprintf("tf-test-network-2-%d", acctest.RandInt(t)) - peeringName := fmt.Sprintf("tf-test-peering-%d", acctest.RandInt(t)) - importId := fmt.Sprintf("%s/%s/%s", envvar.GetTestProjectFromEnv(), primaryNetworkName, peeringName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccComputeNetworkPeeringDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccComputeNetworkPeering_updateStrategyDefault(primaryNetworkName, peeringNetworkName, peeringName), - }, - { - ResourceName: "google_compute_network_peering.foo", - ImportState: true, - ImportStateVerify: true, - ImportStateId: importId, - }, - { - Config: testAccComputeNetworkPeering_updateStrategyUpdate(primaryNetworkName, peeringNetworkName, peeringName), - }, - { - ResourceName: "google_compute_network_peering.foo", - ImportState: true, - ImportStateVerify: true, - ImportStateId: importId, - }, - }, - }) - -} - func testAccComputeNetworkPeeringDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -313,44 +277,3 @@ resource "google_compute_network_peering" "foo" { } `, primaryNetworkName, peeringNetworkName, peeringName) } - -func testAccComputeNetworkPeering_updateStrategyDefault(primaryNetworkName, peeringNetworkName, peeringName string) string { - return fmt.Sprintf(` -resource "google_compute_network" "network1" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_network" "network2" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_network_peering" "foo" { - name = "%s" - network = google_compute_network.network1.self_link - peer_network = google_compute_network.network2.self_link -} -`, primaryNetworkName, peeringNetworkName, peeringName) -} - -func testAccComputeNetworkPeering_updateStrategyUpdate(primaryNetworkName, peeringNetworkName, peeringName string) string { - return fmt.Sprintf(` -resource "google_compute_network" "network1" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_network" "network2" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_network_peering" "foo" { - name = "%s" - network = google_compute_network.network1.self_link - peer_network = google_compute_network.network2.self_link - update_strategy = "CONSENSUS" -} -`, 
primaryNetworkName, peeringNetworkName, peeringName) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl index 7d39c62d604d..3632c5a084ce 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_test.go.tmpl @@ -78,41 +78,6 @@ func TestAccComputeNetwork_customSubnet(t *testing.T) { }) } -func TestAccComputeNetwork_mtuAndUpdate(t *testing.T) { - t.Parallel() - - var network compute.Network - suffixName := acctest.RandString(t, 10) - networkName := fmt.Sprintf("tf-test-network-routing-mode-%s", suffixName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeNetworkDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetwork_mtu(networkName, 1460), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeNetworkExists( - t, "google_compute_network.acc_network_mtu", &network), - testAccCheckComputeNetworkHasMtu( - t, "google_compute_network.acc_network_mtu", &network, 1460), - ), - }, - // Test updating the mtu field from 1460 to 1500. 
- { - Config: testAccComputeNetwork_mtu(networkName, 1500), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeNetworkExists( - t, "google_compute_network.acc_network_mtu", &network), - testAccCheckComputeNetworkHasMtu( - t, "google_compute_network.acc_network_mtu", &network, 1500), - ), - }, - }, - }) -} - func TestAccComputeNetwork_routingModeAndUpdate(t *testing.T) { t.Parallel() @@ -211,14 +176,6 @@ func TestAccComputeNetwork_bgpAlwaysCompareMedAndUpdate(t *testing.T) { t, "google_compute_network.acc_network_bgp_always_compare_med", &network), resource.TestCheckResourceAttr("google_compute_network.acc_network_bgp_always_compare_med", "bgp_always_compare_med", "true"), ), - }, - { - Config: testAccComputeNetwork_bgp_always_compare_med(networkName, false), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeNetworkExists( - t, "google_compute_network.acc_network_bgp_always_compare_med", &network), - resource.TestCheckResourceAttr("google_compute_network.acc_network_bgp_always_compare_med", "bgp_always_compare_med", "false"), - ), }, }, }) @@ -289,49 +246,6 @@ func TestAccComputeNetwork_networkProfile(t *testing.T) { }) } -func TestComputeNetworkProfileDiffSuppress(t *testing.T) { - cases := map[string]struct { - Old, New string - ExpectDiffSuppress bool - }{ - "old: no previous profile, new: partial profile URL": { - Old: "", - New: "projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - ExpectDiffSuppress: false, - }, - "old: no previous profile, new: full profile URL": { - Old: "", - New: "https://www.googleapis.com/compute/v1/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - ExpectDiffSuppress: false, - }, - "old: beta profile URL, new: partial profile URL": { - Old: "https://www.googleapis.com/compute/beta/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - New: "projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - ExpectDiffSuppress: true, - }, - "old: v1 
profile URL, new: partial profile URL": { - Old: "https://www.googleapis.com/compute/v1/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - New: "projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - ExpectDiffSuppress: true, - }, - "old: beta profile URL, new: v1 profile URL": { - Old: "https://www.googleapis.com/compute/beta/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - New: "https://www.googleapis.com/compute/v1/projects/dummy-project/global/networkProfiles/europe-west1-b-vpc-roce", - ExpectDiffSuppress: true, - }, - } - - for tn, tc := range cases { - tc := tc - t.Run(tn, func(t *testing.T) { - t.Parallel() - if tpgresource.CompareSelfLinkRelativePaths("", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { - t.Errorf("%q => %q expected DiffSuppress to return %t", tc.Old, tc.New, tc.ExpectDiffSuppress) - } - }) - } -} - func TestAccComputeNetwork_numericId(t *testing.T) { t.Parallel() suffixName := acctest.RandString(t, 10) @@ -517,40 +431,6 @@ func TestAccComputeNetwork_networkFirewallPolicyEnforcementOrderAndUpdate(t *tes }) } -func TestAccComputeNetwork_resourceManagerTags(t *testing.T) { - - t.Parallel() - - var network compute.Network - org := envvar.GetTestOrgFromEnv(t) - - suffixName := acctest.RandString(t, 10) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-networks-tagkey", "organizations/"+org, make(map[string]interface{})) - sharedTagkey,_ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-networks-tagvalue", sharedTagkey, org) - networkName := fmt.Sprintf("tf-test-network-resource-manager-tags-%s", suffixName) - context := map[string]interface{}{ - "network_name": networkName, - "tag_key_id": tagKeyResult["name"], - "tag_value_id": tagValueResult["name"], - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeNetworkDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeNetwork_resourceManagerTags(context), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeNetworkExists( - t, "google_compute_network.acc_network_with_resource_manager_tags", &network), - ), - }, - }, - }) -} - func testAccCheckComputeNetworkExists(t *testing.T, n string, network *compute.Network) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] @@ -650,35 +530,6 @@ func testAccCheckComputeNetworkIsCustomSubnet(t *testing.T, n string, network *c } } -func testAccCheckComputeNetworkHasMtu(t *testing.T, n string, network *compute.Network, mtu int32) resource.TestCheckFunc { - return func(s *terraform.State) error { - config := acctest.GoogleProviderConfig(t) - - rs, ok := s.RootModule().Resources[n] - if !ok { - return fmt.Errorf("Not found: %s", n) - } - - if rs.Primary.Attributes["mtu"] == "" { - return fmt.Errorf("Routing mode not found on resource") - } - - found, err := config.NewComputeClient(config.UserAgent).Networks.Get( - config.Project, network.Name).Do() - if err != nil { - return err - } - - foundMtu := found.Mtu - - if int64(mtu) != foundMtu { - return fmt.Errorf("Expected mtu %d to match actual routing mode %d", mtu, foundMtu) - } - - return nil - } -} - func testAccCheckComputeNetworkHasRoutingMode(t *testing.T, n string, network *compute.Network, routingMode string) resource.TestCheckFunc { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -793,15 +644,6 @@ resource "google_compute_network" "baz" { `, networkName) } -func testAccComputeNetwork_mtu(networkName string, mtu int32) string { - return fmt.Sprintf(` -resource "google_compute_network" "acc_network_mtu" { - name = "%s" - mtu = %d -} -`, networkName, mtu) -} - func testAccComputeNetwork_routing_mode(networkName, routingMode string) string { 
return fmt.Sprintf(` resource "google_compute_network" "acc_network_routing_mode" { @@ -811,6 +653,7 @@ resource "google_compute_network" "acc_network_routing_mode" { `, networkName, routingMode) } + func testAccComputeNetwork_best_bgp_path_selection_mode(networkName, bgpBestPathSelection string) string { return fmt.Sprintf(` resource "google_compute_network" "acc_network_bgp_best_path_selection_mode" { @@ -880,17 +723,3 @@ resource "google_compute_network" "acc_network_firewall_policy_enforcement_order } `, networkName, order) } - -func testAccComputeNetwork_resourceManagerTags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "acc_network_with_resource_manager_tags" { - name = "%{network_name}" - auto_create_subnetworks = false - params { - resource_manager_tags = { - "%{tag_key_id}" = "%{tag_value_id}" - } - } -} -`, context) -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_node_group_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl index 56bbcb25e466..cdfe1e9894a7 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl @@ -42,29 +42,6 @@ func TestAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyUpdateEx }) } -func TestAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(t *testing.T) { - 
context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeOrganizationSecurityPolicyDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(context), - }, - { - ResourceName: "google_compute_organization_security_policy.policy", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - func testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyPreUpdateExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_organization_security_policy" "policy" { @@ -83,15 +60,4 @@ resource "google_compute_organization_security_policy" "policy" { } `, context) } - -func testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_organization_security_policy" "policy" { - short_name = "tf-test%{random_suffix}" - parent = "organizations/%{org_id}" - description = "org security policy description" - type = "CLOUD_ARMOR" -} -`, context) -} {{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go.tmpl index 5b57791c967f..2943de437d62 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_per_instance_config_test.go.tmpl @@ -2,9 +2,9 @@ package compute_test import ( "fmt" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -688,9 +688,9 @@ func testAccComputePerInstanceConfigListInstances(t *testing.T, igmId string) (m url := fmt.Sprintf("%s%s/listManagedInstances", config.ComputeBasePath, igmId) res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - RawURL: url, + Config: config, + Method: "POST", + RawURL: url, UserAgent: config.UserAgent, }) if err != nil { @@ -715,9 +715,9 @@ func testAccComputePerInstanceConfigListNames(t *testing.T, igmId string) (map[s url := fmt.Sprintf("%s%s/listPerInstanceConfigs", config.ComputeBasePath, igmId) res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - RawURL: url, + Config: config, + Method: "POST", + RawURL: url, UserAgent: config.UserAgent, }) if err != nil { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go deleted file mode 100644 index a2e6cc2a26f9..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_preview_features_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package compute_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccComputePreviewFeature_update(t *testing.T) { - t.Parallel() - - // The specific feature name to test. - featureName := "alpha-api-access" - // The resource name in Terraform state. 
- resourceName := "google_compute_preview_feature.acceptance" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - // Step 1: Disable the "alpha-api-access" feature and verify its attributes. - { - Config: testAccComputePreviewFeature_disable(featureName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", featureName), - resource.TestCheckResourceAttr(resourceName, "activation_status", "DISABLED"), - ), - }, - // Step 2: Verify that the resource can be successfully imported. - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"rollout_operation"}, - }, - }, - }) -} - -func testAccComputePreviewFeature_disable(name string) string { - return fmt.Sprintf(` -resource "google_compute_preview_feature" "acceptance" { - name = "%s" - activation_status = "DISABLED" - - rollout_operation { - rollout_input { - predefined_rollout_plan = "ROLLOUT_PLAN_FAST_ROLLOUT" - } - } -} -`, name) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl index 4f10ee9db007..fdf16116aa12 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl @@ -184,8 +184,8 @@ func resourceComputeProjectMetadataItemDelete(d *schema.ResourceData, meta inter func resourceComputeProjectMetadataItemImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/meta-data/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/meta-data/(?P[^/]+)", + 
"(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go index 856a6801d044..9c4ca876645e 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go @@ -21,7 +21,6 @@ func TestAccComputePublicPrefixes(t *testing.T) { "public_delegated_prefixes_ipv6": testAccComputePublicDelegatedPrefix_publicDelegatedPrefixesIpv6Test, "public_advertised_prefixes_pdp_scope": testAccComputePublicAdvertisedPrefix_publicAdvertisedPrefixesPdpScopeTest, "public_delegated_prefix_ipv6_subnet_mode": testAccComputePublicDelegatedPrefix_publicDelegatedPrefixIpv6SubnetModeTest, - "public_delgated_prefix_with_sub_prefix": TestAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample, } for name, tc := range testCases { @@ -36,84 +35,6 @@ func TestAccComputePublicPrefixes(t *testing.T) { } } -func TestAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(t *testing.T) { - t.Parallel() - subPrefixResourceName := "google_compute_public_delegated_prefix.subprefix" - parentProject := "tf-static-byoip" - parentRegion := "us-central1" - parentName := "tf-test-delegation-mode-sub-pdp" - - context := map[string]interface{}{ - "parent_pdp_id": "projects/tf-static-byoip/regions/us-central1/publicDelegatedPrefixes/tf-test-delegation-mode-sub-pdp", - "project": "tf-static-byoip", - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputePublicDelegatedPrefixDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(context), - Check: resource.ComposeTestCheckFunc( - // First, a basic check that the sub-prefix was created - resource.TestCheckResourceAttrSet(subPrefixResourceName, "id"), - - // Now, the custom check function - testAccCheckParentHasSubPrefix(t, parentProject, parentRegion, parentName, subPrefixResourceName), - ), - }, - { - ResourceName: "google_compute_public_delegated_prefix.subprefix", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region"}, - }, - }, - }) -} - -func testAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(context map[string]interface{}) string { - return acctest.Nprintf(` - -resource "google_compute_public_delegated_prefix" "subprefix" { - name = "tf-test-sub-prefix-1%{random_suffix}" - description = "A nested address" - region = "us-central1" - ip_cidr_range = "2600:1901:4500:2::/64" - parent_prefix = "%{parent_pdp_id}" - mode = "DELEGATION" -} -`, context) -} - -func testAccCheckParentHasSubPrefix(t *testing.T, project, region, parentName, subPrefixResourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[subPrefixResourceName] - if !ok { - return fmt.Errorf("Not found: %s", subPrefixResourceName) - } - newSubPrefixName := rs.Primary.Attributes["name"] - - config := acctest.GoogleProviderConfig(t) - computeService := config.NewComputeClient(config.UserAgent) - - parent, err := computeService.PublicDelegatedPrefixes.Get(project, region, parentName).Do() - if err != nil { - return err - } - - for _, sub := range parent.PublicDelegatedSubPrefixs { - if sub.Name == newSubPrefixName { - return nil - } - } - - return fmt.Errorf("Sub-Prefix %q not found in parent %q's sub-prefix list", newSubPrefixName, parentName) - } -} - func testAccComputePublicAdvertisedPrefix_publicAdvertisedPrefixesPdpScopeTest(t *testing.T) { context := 
map[string]interface{}{ "description": envvar.GetTestPublicAdvertisedPrefixDescriptionFromEnv(t), diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go deleted file mode 100644 index 952893dde6bb..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go +++ /dev/null @@ -1,262 +0,0 @@ -package compute_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_full(context), - }, - { - ResourceName: "google_compute_region_backend_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"iap.0.oauth2_client_secret", "network", "region"}, - }, - { - Config: testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_region_backend_service.default", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: 
"google_compute_region_backend_service.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"iap.0.oauth2_client_secret", "network", "region"}, - }, - }, - }) -} - -func testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "default" { - name = "tf-test-rbs-net%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - name = "tf-test-rbs-subnet%{random_suffix}" - ip_cidr_range = "10.1.2.0/24" - region = "us-central1" - network = google_compute_network.default.id -} - -resource "google_compute_network_endpoint" "endpoint1" { - network_endpoint_group = google_compute_network_endpoint_group.neg.name - - instance = google_compute_instance.endpoint-instance1.name - ip_address = google_compute_instance.endpoint-instance1.network_interface[0].network_ip -} - -resource "google_compute_network_endpoint" "endpoint2" { - network_endpoint_group = google_compute_network_endpoint_group.neg.name - - instance = google_compute_instance.endpoint-instance2.name - ip_address = google_compute_instance.endpoint-instance2.network_interface[0].network_ip -} - -data "google_compute_image" "my_image" { - family = "debian-12" - project = "debian-cloud" -} - -resource "google_compute_instance" "endpoint-instance1" { - name = "tf-test-rbs-instance1-%{random_suffix}" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.my_image.self_link - } - } - - network_interface { - subnetwork = google_compute_subnetwork.default.id - access_config { - } - } -} - -resource "google_compute_instance" "endpoint-instance2" { - name = "tf-test-rbs-instance2-%{random_suffix}" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.my_image.self_link - } - } - - network_interface { - subnetwork = 
google_compute_subnetwork.default.id - access_config { - } - } -} - -resource "google_compute_network_endpoint_group" "neg" { - name = "tf-test-rbs-neg%{random_suffix}" - network_endpoint_type = "GCE_VM_IP" - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - zone = "us-central1-a" -} - -resource "google_compute_region_backend_service" "default" { - region = "us-central1" - name = "tf-test-region-service%{random_suffix}" - protocol = "UDP" - load_balancing_scheme = "EXTERNAL" - network = google_compute_network.default.id - backend { - group = google_compute_network_endpoint_group.neg.self_link - balancing_mode = "CONNECTION" - } - ha_policy { - fast_ip_move = "GARP_RA" - leader { - backend_group = google_compute_network_endpoint_group.neg.self_link - network_endpoint { - instance = google_compute_instance.endpoint-instance1.name - } - } - } - // Must explicitly disable connection draining to override default value. - connection_draining_timeout_sec = 0 - // Explicitly depend on the endpoints to prevent test flakes due to creating - // the BackendService before the endpoints have been added to the NEG. 
- depends_on = [ - google_compute_network_endpoint_group.neg, - google_compute_network_endpoint.endpoint1, - google_compute_network_endpoint.endpoint2 - ] -} -`, context) -} - -func testAccComputeRegionBackendService_regionBackendServiceHaPolicyManualLeader_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "default" { - name = "tf-test-rbs-net%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - name = "tf-test-rbs-subnet%{random_suffix}" - ip_cidr_range = "10.1.2.0/24" - region = "us-central1" - network = google_compute_network.default.id -} - -resource "google_compute_network_endpoint" "endpoint1" { - network_endpoint_group = google_compute_network_endpoint_group.neg.name - - instance = google_compute_instance.endpoint-instance1.name - ip_address = google_compute_instance.endpoint-instance1.network_interface[0].network_ip -} - -resource "google_compute_network_endpoint" "endpoint2" { - network_endpoint_group = google_compute_network_endpoint_group.neg.name - - instance = google_compute_instance.endpoint-instance2.name - ip_address = google_compute_instance.endpoint-instance2.network_interface[0].network_ip -} - -data "google_compute_image" "my_image" { - family = "debian-12" - project = "debian-cloud" -} - -resource "google_compute_instance" "endpoint-instance1" { - name = "tf-test-rbs-instance1-%{random_suffix}" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.my_image.self_link - } - } - - network_interface { - subnetwork = google_compute_subnetwork.default.id - access_config { - } - } -} - -resource "google_compute_instance" "endpoint-instance2" { - name = "tf-test-rbs-instance2-%{random_suffix}" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.my_image.self_link - } - } - - network_interface { - subnetwork = 
google_compute_subnetwork.default.id - access_config { - } - } -} - -resource "google_compute_network_endpoint_group" "neg" { - name = "tf-test-rbs-neg%{random_suffix}" - network_endpoint_type = "GCE_VM_IP" - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - zone = "us-central1-a" -} - -resource "google_compute_region_backend_service" "default" { - region = "us-central1" - name = "tf-test-region-service%{random_suffix}" - protocol = "UDP" - load_balancing_scheme = "EXTERNAL" - network = google_compute_network.default.id - backend { - group = google_compute_network_endpoint_group.neg.self_link - balancing_mode = "CONNECTION" - } - ha_policy { - fast_ip_move = "GARP_RA" - leader { - backend_group = google_compute_network_endpoint_group.neg.self_link - network_endpoint { - instance = google_compute_instance.endpoint-instance2.name - } - } - } - // Must explicitly disable connection draining to override default value. - connection_draining_timeout_sec = 0 - // Explicitly depend on the endpoints to prevent test flakes due to creating - // the BackendService before the endpoints have been added to the NEG. 
- depends_on = [ - google_compute_network_endpoint_group.neg, - google_compute_network_endpoint.endpoint1, - google_compute_network_endpoint.endpoint2 - ] -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index 591114bb6691..0618571eff17 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -7,7 +7,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeRegionBackendService_basic(t *testing.T) { @@ -386,18 +385,6 @@ func TestAccComputeRegionBackendService_subsettingUpdate(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - { - Config: testAccComputeRegionBackendService_imlbWithSubsettingSubsetSize(backendName, checkName, 3), - }, - { - ResourceName: "google_compute_region_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeRegionBackendService_imlbWithSubsettingSubsetSize(backendName, checkName, -1), - ExpectError: regexp.MustCompile("Must be greater than or equal to 1"), - }, }, }) } @@ -434,286 +421,6 @@ func TestAccComputeRegionBackendService_withLogConfig(t *testing.T) { }) } -func TestAccComputeRegionBackendService_zonalILB(t *testing.T) { - t.Parallel() - - serviceName := fmt.Sprintf("tf-test-ilb-bs-%s", acctest.RandString(t, 10)) - checkName := fmt.Sprintf("tf-test-ilb-hc-%s", acctest.RandString(t, 10)) - checkName2 := fmt.Sprintf("tf-test-ilb-hc2-%s", acctest.RandString(t, 10)) - negName := fmt.Sprintf("tf-test-ilb-neg-%s", acctest.RandString(t, 10)) - negName2 := 
fmt.Sprintf("tf-test-ilb-neg2-%s", acctest.RandString(t, 10)) - instanceName := fmt.Sprintf("tf-test-ilb-vm-%s", acctest.RandString(t, 10)) - instanceName2 := fmt.Sprintf("tf-test-ilb-vm2-%s", acctest.RandString(t, 10)) - - // subnetwork with random suffix - subnetName := fmt.Sprintf("tf-test-subnet-%s", acctest.RandString(t, 8)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - // STEP 1: base (self-link v1) - { - Config: testAccComputeRegionBackendService_zonalILB_withGroup( - testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), - serviceName, - "google_compute_network_endpoint_group.neg.id", - ), - }, - { - ResourceName: "google_compute_region_backend_service.default", - ImportState: true, - ImportStateVerify: true, - }, - - // STEP 2: same NEG with /compute/beta/ (apply OK) - { - Config: fmt.Sprintf(` -%s - -locals { - neg_beta = replace(google_compute_network_endpoint_group.neg.id, "/compute/v1/", "/compute/beta/") -} - -%s -`, testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), - testAccComputeRegionBackendService_zonalILB_withGroup("", serviceName, "local.neg_beta"), - ), - }, - { - ResourceName: "google_compute_region_backend_service.default", - ImportState: true, - ImportStateVerify: true, - }, - - // STEP 3: Invalid variation for API (UPPERCASE + "/") — tested only in PLAN - { - PlanOnly: true, // does not call the API; only exercises diff/canonicalization - Config: fmt.Sprintf(` -%s - -locals { - neg_slash_upper = "${google_compute_network_endpoint_group.neg.id}" -} - -%s -`, testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), - testAccComputeRegionBackendService_zonalILB_withGroup("", serviceName, "local.neg_slash_upper"), - ), - }, - 
- // STEP 4: Modified scenario (changes NEG/HC/VM) — continues validating real updates - { - Config: testAccComputeRegionBackendService_zonalILBModified(serviceName, checkName, negName, instanceName, checkName2, negName2, instanceName2, subnetName), - }, - { - ResourceName: "google_compute_region_backend_service.default", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_compute_network" "default" { - name = "tf-test-net" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - name = "%s" - ip_cidr_range = "10.10.0.0/16" - region = "us-central1" - network = google_compute_network.default.id -} - -resource "google_compute_region_health_check" "hc1" { - name = "%s" - region = "us-central1" - http_health_check { - port = 8080 - request_path = "/status" - } -} - -resource "google_compute_instance" "default" { - name = "%s" - zone = "us-central1-a" - machine_type = "e2-micro" - - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - } - } - - network_interface { - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - access_config {} - } -} - -resource "google_compute_network_endpoint_group" "neg" { - name = "%s" - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - zone = "us-central1-a" - network_endpoint_type = "GCE_VM_IP_PORT" -} - -resource "google_compute_network_endpoint" "endpoint" { - network_endpoint_group = google_compute_network_endpoint_group.neg.name - zone = "us-central1-a" - instance = google_compute_instance.default.name - ip_address = google_compute_instance.default.network_interface[0].network_ip - port = 8080 -} -`, subnetworkName, checkName, instanceName, negName) -} - -func testAccComputeRegionBackendService_zonalILB_withGroup(commonHCL 
string, serviceName string, groupExpr string) string { - header := commonHCL - return fmt.Sprintf(` -%s -resource "google_compute_region_backend_service" "default" { - name = "%s" - region = "us-central1" - protocol = "HTTP" - load_balancing_scheme = "INTERNAL_MANAGED" - health_checks = [google_compute_region_health_check.hc1.id] - - backend { - group = %s - balancing_mode = "RATE" - max_rate_per_endpoint = 100 - capacity_scaler = 1.0 - } - - session_affinity = "CLIENT_IP" - locality_lb_policy = "ROUND_ROBIN" -} -`, header, serviceName, groupExpr) -} - -func testAccComputeRegionBackendService_zonalILBModified(serviceName, checkName, negName, instanceName, checkName2, negName2, instanceName2, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_compute_network" "default" { - name = "tf-test-net" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - name = "%s" - ip_cidr_range = "10.10.0.0/16" - region = "us-central1" - network = google_compute_network.default.id -} - -resource "google_compute_region_health_check" "hc1" { - name = "%s" - region = "us-central1" - http_health_check { - port = 8080 - request_path = "/status" - } -} - -resource "google_compute_instance" "default" { - name = "%s" - zone = "us-central1-a" - machine_type = "e2-micro" - - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - } - } - - network_interface { - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - access_config {} - } -} - -resource "google_compute_network_endpoint_group" "neg" { - name = "%s" - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - zone = "us-central1-a" - network_endpoint_type = "GCE_VM_IP_PORT" -} - -resource "google_compute_network_endpoint" "endpoint" { - network_endpoint_group = google_compute_network_endpoint_group.neg.name - zone = "us-central1-a" - instance = google_compute_instance.default.name 
- ip_address = google_compute_instance.default.network_interface[0].network_ip - port = 8080 -} - -resource "google_compute_instance" "instance2" { - name = "%s" - zone = "us-central1-a" - machine_type = "e2-micro" - - boot_disk { - initialize_params { - image = "debian-cloud/debian-11" - } - } - - network_interface { - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - access_config {} - } -} - -resource "google_compute_region_health_check" "hc2" { - name = "%s" - region = "us-central1" - http_health_check { - port = 80 - } -} - -resource "google_compute_network_endpoint_group" "neg2" { - name = "%s" - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - zone = "us-central1-a" - network_endpoint_type = "GCE_VM_IP_PORT" -} - -resource "google_compute_network_endpoint" "endpoint2" { - network_endpoint_group = google_compute_network_endpoint_group.neg2.name - zone = "us-central1-a" - instance = google_compute_instance.instance2.name - ip_address = google_compute_instance.instance2.network_interface[0].network_ip - port = 8080 -} - -resource "google_compute_region_backend_service" "default" { - name = "%s" - region = "us-central1" - load_balancing_scheme = "INTERNAL_MANAGED" - health_checks = [google_compute_region_health_check.hc2.id] - - backend { - group = google_compute_network_endpoint_group.neg2.id - balancing_mode = "RATE" - max_rate_per_endpoint = 200 - capacity_scaler = 0.5 - } -} -`, subnetworkName, checkName, instanceName, negName, instanceName2, checkName2, negName2, serviceName) -} - func TestAccComputeRegionBackendService_withDynamicBackendCount(t *testing.T) { t.Parallel() @@ -757,35 +464,6 @@ func TestAccComputeRegionBackendService_withDynamicBackendCount(t *testing.T) { }) } -func TestAccComputeRegionBackendService_withTags(t *testing.T) { - t.Parallel() - - org := envvar.GetTestOrgFromEnv(t) - - serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 
10)) - checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-rbs-tagkey", "organizations/"+org, make(map[string]interface{})) - sharedTagkey,_ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-rbs-tagvalue", sharedTagkey, org) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionBackendService_withTags(serviceName, checkName, tagKeyResult["name"], tagValueResult["name"]), - }, - { - ResourceName: "google_compute_region_backend_service.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"params"}, - }, - }, - }) -} - func testAccComputeRegionBackendService_withDynamicBackendCount(serviceName, netName, hcName, igName string) string { return fmt.Sprintf(` locals { @@ -1664,32 +1342,6 @@ resource "google_compute_health_check" "health_check" { } `, serviceName, checkName) } - -func testAccComputeRegionBackendService_imlbWithSubsettingSubsetSize(serviceName, checkName string, subsetSize int64) string { - return fmt.Sprintf(` -resource "google_compute_region_backend_service" "foobar" { - name = "%s" - health_checks = [google_compute_region_health_check.zero.self_link] - protocol = "HTTP" - load_balancing_scheme = "INTERNAL_MANAGED" - subsetting { - policy = "CONSISTENT_HASH_SUBSETTING" - subset_size = %d - } -} - -resource "google_compute_region_health_check" "zero" { - name = "%s" - region = "us-central1" - check_interval_sec = 1 - timeout_sec = 1 - - http_health_check { - port = 80 - } -} -`, serviceName, subsetSize, checkName) -} {{- end }} {{ if ne $.TargetVersionName `ga` -}} @@ -1800,28 +1452,3 @@ resource "google_compute_region_health_check" 
"health_check" { } `, serviceName, checkName) } - -func testAccComputeRegionBackendService_withTags(serviceName, checkName string, tagKey string, tagValue string) string { - return fmt.Sprintf(` -resource "google_compute_region_backend_service" "foobar" { - name = "%s" - health_checks = [google_compute_health_check.zero.self_link] - region = "us-central1" - params { - resource_manager_tags = { - "%s" = "%s" - } - } -} - -resource "google_compute_health_check" "zero" { - name = "%s" - check_interval_sec = 1 - timeout_sec = 1 - - tcp_health_check { - port = "80" - } -} -`, serviceName, tagKey, tagValue, checkName) -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl index 7f9acb4fc8dd..159c6091fe2a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_disk_test.go.tmpl @@ -12,7 +12,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-plugin-testing/plancheck" {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/compute/v1" @@ -63,56 +62,6 @@ func TestAccComputeRegionDisk_basic(t *testing.T) { }) } -func TestAccComputeRegionDisk_hyperdisk(t *testing.T) { - t.Parallel() - - diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - var disk compute.Disk - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionDiskDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionDisk_hyperdisk(diskName, "self_link"), - Check: 
resource.ComposeTestCheckFunc( - testAccCheckComputeRegionDiskExists( - t, "google_compute_region_disk.regiondisk", &disk), - ), - }, - { - ResourceName: "google_compute_region_disk.regiondisk", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, - }, - { - Config: testAccComputeRegionDisk_hyperdiskUpdated(diskName, "name"), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - // Check that the update is done in-place - plancheck.ExpectResourceAction("google_compute_region_disk.regiondisk", plancheck.ResourceActionUpdate), - }, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "access_mode", "READ_WRITE_SINGLE"), - resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "provisioned_iops", "20000"), - resource.TestCheckResourceAttr("google_compute_region_disk.regiondisk", "provisioned_throughput", "250"), - testAccCheckComputeRegionDiskExists(t, "google_compute_region_disk.regiondisk", &disk), - ), - }, - { - ResourceName: "google_compute_region_disk.regiondisk", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, - }, - }, - }) -} - func TestAccComputeRegionDisk_basicUpdate(t *testing.T) { t.Parallel() @@ -450,68 +399,6 @@ func testAccCheckComputeRegionDiskInstances(n string, disk *compute.Disk) resour } } -func testAccComputeRegionDisk_hyperdisk(diskName, refSelector string) string { - return fmt.Sprintf(` -resource "google_compute_disk" "disk" { - name = "%s" - image = "debian-cloud/debian-11" - size = 50 - type = "pd-ssd" - zone = "us-central1-a" -} - -resource "google_compute_snapshot" "snapdisk" { - name = "%s" - source_disk = google_compute_disk.disk.name - zone = "us-central1-a" -} - -resource "google_compute_region_disk" "regiondisk" { - name = "%s" - snapshot = google_compute_snapshot.snapdisk.%s - type = 
"hyperdisk-balanced-high-availability" - size = 50 - replica_zones = ["us-central1-a", "us-central1-f"] - - access_mode = "READ_WRITE_MANY" - provisioned_iops = 10000 - provisioned_throughput = 190 -} -`, diskName, diskName, diskName, refSelector) -} - -func testAccComputeRegionDisk_hyperdiskUpdated(diskName, refSelector string) string { - return fmt.Sprintf(` -resource "google_compute_disk" "disk" { - name = "%s" - image = "debian-cloud/debian-11" - size = 50 - type = "pd-ssd" - zone = "us-central1-a" -} - -resource "google_compute_snapshot" "snapdisk" { - name = "%s" - source_disk = google_compute_disk.disk.name - zone = "us-central1-a" -} - -resource "google_compute_region_disk" "regiondisk" { - name = "%s" - snapshot = google_compute_snapshot.snapdisk.%s - type = "hyperdisk-balanced-high-availability" - region = "us-central1" - - replica_zones = ["us-central1-a", "us-central1-f"] - - size = 100 - access_mode = "READ_WRITE_SINGLE" - provisioned_iops = 20000 - provisioned_throughput = 250 -} -`, diskName, diskName, diskName, refSelector) -} - func testAccComputeRegionDisk_basic(diskName, refSelector string) string { return fmt.Sprintf(` resource "google_compute_disk" "disk" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl index 5e9a2d0629af..a0d12639dd3c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_health_check_test.go.tmpl @@ -43,70 +43,6 @@ func TestAccComputeRegionHealthCheck_tcp_update(t *testing.T) { }) } -{{ if ne $.TargetVersionName `ga` -}} -func TestAccComputeRegionHealthCheck_grpcWithTls_create(t *testing.T) { - t.Parallel() - - hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { 
acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeRegionHealthCheckDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionHealthCheck_grpcWithTls(hckName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet( - "google_compute_region_health_check.foobar", "health_check_id"), - ), - }, - { - ResourceName: "google_compute_region_health_check.foobar", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -{{- end }} - -{{ if ne $.TargetVersionName `ga` -}} -func TestAccComputeRegionHealthCheck_grpcWithTls_update(t *testing.T) { - t.Parallel() - - hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeRegionHealthCheckDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionHealthCheck_grpcWithTls(hckName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet( - "google_compute_region_health_check.foobar", "health_check_id"), - ), - }, - { - ResourceName: "google_compute_region_health_check.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeRegionHealthCheck_grpcWithTls_update(hckName), - }, - { - ResourceName: "google_compute_region_health_check.foobar", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -{{- end }} - func TestAccComputeRegionHealthCheck_ssl_port_spec(t *testing.T) { t.Parallel() @@ -209,6 +145,27 @@ func TestAccComputeRegionHealthCheck_typeTransition(t *testing.T) { }) } +func TestAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(t *testing.T) { + // This is essentially a unit test, no interactions + acctest.SkipIfVcr(t) + t.Parallel() + + hckName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 
10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionHealthCheckDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(hckName), + ExpectError: regexp.MustCompile("only one of\n`grpc_health_check,http2_health_check,http_health_check,https_health_check,ssl_health_check,tcp_health_check`\ncan be specified, but `ssl_health_check,tcp_health_check` were specified"), + + }, + }, + }) +} + func TestAccComputeRegionHealthCheck_logConfigDisabled(t *testing.T) { t.Parallel() @@ -281,43 +238,6 @@ resource "google_compute_region_health_check" "foobar" { `, hckName) } -{{ if ne $.TargetVersionName `ga` -}} -func testAccComputeRegionHealthCheck_grpcWithTls(hckName string) string { - return fmt.Sprintf(` -resource "google_compute_region_health_check" "foobar" { - provider = "google-beta" - check_interval_sec = 3 - description = "Resource created for Terraform acceptance testing" - healthy_threshold = 3 - name = "tf-test-health-test-%s" - timeout_sec = 2 - unhealthy_threshold = 3 - grpc_tls_health_check { - port = "443" - } -} -`, hckName) -} -{{- end }} - -{{ if ne $.TargetVersionName `ga` -}} -func testAccComputeRegionHealthCheck_grpcWithTls_update(hckName string) string { - return fmt.Sprintf(` -resource "google_compute_region_health_check" "foobar" { - provider = "google-beta" - check_interval_sec = 3 - healthy_threshold = 10 - name = "tf-test-health-test-%s" - timeout_sec = 2 - unhealthy_threshold = 10 - grpc_tls_health_check { - port = "8080" - } -} -`, hckName) -} -{{- end }} - func testAccComputeRegionHealthCheck_ssl(hckName string) string { return fmt.Sprintf(` resource "google_compute_region_health_check" "foobar" { @@ -447,3 +367,23 @@ resource "google_compute_region_health_check" "foobar" { } `, hckName) } + +func 
testAccComputeRegionHealthCheck_tcpAndSsl_shouldFail(hckName string) string { + return fmt.Sprintf(` +resource "google_compute_region_health_check" "foobar" { + check_interval_sec = 3 + description = "Resource created for Terraform acceptance testing" + healthy_threshold = 3 + name = "health-test-%s" + timeout_sec = 2 + unhealthy_threshold = 3 + + tcp_health_check { + port = 443 + } + ssl_health_check { + port = 443 + } +} +`, hckName) +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl index a8b5bd424eb7..bd355498bb1d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl @@ -124,14 +124,6 @@ func ResourceComputeRegionInstanceTemplate() *schema.Resource { Description: `Name of the disk. When not provided, this defaults to the name of the instance.`, }, - "architecture": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - Description: `The architecture of the image. Allowed values are ARM64 or X86_64.`, - }, - "disk_size_gb": { Type: schema.TypeInt, Optional: true, @@ -183,16 +175,6 @@ func ResourceComputeRegionInstanceTemplate() *schema.Resource { Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, }, - "guest_os_features": { - Type: schema.TypeList, - Optional: true, - ForceNew: true, - Description: `A list of features to enable on the guest operating system. 
Applicable only for bootable images.`, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "source_image": { Type: schema.TypeString, Optional: true, @@ -650,13 +632,6 @@ Google Cloud KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key m }, }, - "numeric_id": { - Type: schema.TypeString, - ForceNew: true, - Computed: true, - Description: `The ID of the template in numeric format.`, - }, - "project": { Type: schema.TypeString, Optional: true, @@ -1428,10 +1403,6 @@ func resourceComputeRegionInstanceTemplateRead(d *schema.ResourceData, meta inte } } - if err = d.Set("numeric_id", instanceTemplate["id"]); err != nil { - return fmt.Errorf("Error setting numeric_id: %s", err) - } - {{ if ne $.TargetVersionName `ga` -}} if instanceProperties.PartnerMetadata != nil { partnerMetadata, err := flattenPartnerMetadata(instanceProperties.PartnerMetadata) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl index bb235b036588..a643e4719836 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl @@ -22,9 +22,8 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { cDeviceName := map[string]interface{}{ "device_name": "disk-1", } - cScratchScsi := map[string]interface{}{ + cScratch := map[string]interface{}{ "type": "SCRATCH", - "interface": "SCSI", } cSource := map[string]interface{}{ "source": "disk-source", @@ -82,7 +81,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, + cBoot, cFallThrough, 
cDeviceName, cScratch, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, @@ -93,7 +92,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, @@ -104,7 +103,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratchScsi, cFallThrough, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl index eb7f8681125d..89a03ce2bcbe 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl @@ -809,6 +809,10 @@ func TestAccComputeRegionInstanceTemplate_performanceMonitoringUnit(t *testing.T "instance_name": fmt.Sprintf("tf-test-instance-template-%s", acctest.RandString(t, 10)), "performance_monitoring_unit": "STANDARD", } + context_2 := map[string]interface{}{ + "instance_name": context_1["instance_name"].(string), + "performance_monitoring_unit": "ENHANCED", + } context_3 := map[string]interface{}{ 
"instance_name": context_1["instance_name"].(string), "performance_monitoring_unit": "ARCHITECTURAL", @@ -831,6 +835,18 @@ func TestAccComputeRegionInstanceTemplate_performanceMonitoringUnit(t *testing.T ImportState: true, ImportStateVerify: true, }, + { + Config: testAccComputeRegionInstanceTemplate_performanceMonitoringUnit(context_2), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeRegionInstanceTemplateExists(t, "google_compute_region_instance_template.foobar", &instanceTemplate), + resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "advanced_machine_features.0.performance_monitoring_unit", "ENHANCED"), + ), + }, + { + ResourceName: "google_compute_region_instance_template.foobar", + ImportState: true, + ImportStateVerify: true, + }, { Config: testAccComputeRegionInstanceTemplate_performanceMonitoringUnit(context_3), Check: resource.ComposeTestCheckFunc( @@ -1643,36 +1659,6 @@ func TestAccComputeRegionInstanceTemplate_gracefulShutdown(t *testing.T) { } {{- end }} -func TestAccComputeRegionInstanceTemplate_GuestOsFeatures(t *testing.T) { - t.Parallel() - - var instanceTemplate compute.InstanceTemplate - context := map[string]interface{}{ - "template_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), - "guest_os_features": `["UEFI_COMPATIBLE", "VIRTIO_SCSI_MULTIQUEUE", "GVNIC", "IDPF"]`, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionInstanceTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionInstanceTemplate_GuestOsFeatures(context), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeRegionInstanceTemplateExists( - t, "google_compute_region_instance_template.foobar", &instanceTemplate), - resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.#", "4"), - 
resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.0", "UEFI_COMPATIBLE"), - resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.1", "VIRTIO_SCSI_MULTIQUEUE"), - resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.2", "GVNIC"), - resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "disk.0.guest_os_features.3", "IDPF"), - ), - }, - }, - }) -} - func testAccCheckComputeRegionInstanceTemplateDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -3374,7 +3360,7 @@ resource "google_compute_region_instance_template" "foobar" { func testAccComputeRegionInstanceTemplateConfidentialInstanceConfigEnable(suffix string, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -3434,7 +3420,7 @@ resource "google_compute_region_instance_template" "foobar2" { func testAccComputeRegionInstanceTemplateConfidentialInstanceConfigNoEnable(suffix string, minCpuPlatform, confidentialInstanceType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image2" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -3531,7 +3517,7 @@ resource "google_compute_region_instance_template" "foobar5" { func testAccComputeRegionInstanceTemplateAdvancedMachineFeatures(suffix string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -3568,7 +3554,7 @@ resource "google_compute_region_instance_template" "foobar" { func testAccComputeRegionInstanceTemplate_performanceMonitoringUnit(context map[string]interface{}) string { 
return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -3595,7 +3581,7 @@ resource "google_compute_region_instance_template" "foobar" { func testAccComputeRegionInstanceTemplate_enableUefiNetworking(context map[string]interface{}) string { return acctest.Nprintf(` data "google_compute_image" "my_image" { - family = "ubuntu-2204-lts" + family = "ubuntu-2004-lts" project = "ubuntu-os-cloud" } @@ -4622,34 +4608,6 @@ data "google_compute_default_service_account" "default" { `, context) } -func testAccComputeRegionInstanceTemplate_GuestOsFeatures(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_region_instance_template" "foobar" { - name = "%{template_name}" - machine_type = "e2-medium" - region = "us-central1" - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - disk_size_gb = 10 - boot = true - architecture = "X86_64" - guest_os_features = %{guest_os_features} - } - - network_interface { - network = "default" - } -} -`, context) -} - {{ if ne $.TargetVersionName `ga` -}} func testAccComputeRegionInstanceTemplate_gracefulShutdown(context map[string]interface{}) string { return acctest.Nprintf(` diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go.tmpl index f416cff53491..3e157ac6b7e3 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_rule_test.go.tmpl @@ -5,9 +5,9 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-testing/plancheck" ) func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { @@ -35,7 +35,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -50,7 +50,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_removeConfigs(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -65,7 +65,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_update(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_start(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -114,7 +114,7 @@ func 
TestAccComputeRegionNetworkFirewallPolicyRule_multipleRules(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_multipleAdd(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), }, @@ -129,7 +129,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_multipleRules(t *testing.T) { }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_multipleRemove(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule1", plancheck.ResourceActionUpdate), plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.fw_policy_rule2", plancheck.ResourceActionDestroy), @@ -164,12 +164,12 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_region_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, { Config: testAccComputeRegionNetworkFirewallPolicyRule_secureTagsUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ + ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_compute_region_network_firewall_policy_rule.primary", plancheck.ResourceActionUpdate), }, @@ -179,7 +179,7 @@ func TestAccComputeRegionNetworkFirewallPolicyRule_secureTags(t *testing.T) { ResourceName: "google_compute_region_network_firewall_policy_rule.primary", ImportState: true, ImportStateVerify: true, - // Referencing using ID causes import to fail + // 
Referencing using ID causes import to fail ImportStateVerifyIgnore: []string{"firewall_policy", "project"}, }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl similarity index 94% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl index e6a8b8702e96..153fd91ee8d5 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_network_firewall_policy_with_rules_test.go.tmpl @@ -1,5 +1,5 @@ package compute_test - +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -17,7 +17,7 @@ func TestAccComputeRegionNetworkFirewallPolicyWithRules_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), CheckDestroy: testAccCheckComputeRegionNetworkFirewallPolicyWithRulesDestroyProducer(t), Steps: []resource.TestStep{ { @@ -45,12 +45,14 @@ func TestAccComputeRegionNetworkFirewallPolicyWithRules_update(t *testing.T) { func testAccComputeRegionNetworkFirewallPolicyWithRules_full(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { + provider = google-beta } resource "google_compute_region_network_firewall_policy_with_rules" "region-network-firewall-policy-with-rules" { name = "tf-test-tf-region-fw-policy-with-rules%{random_suffix}" region = "us-west2" description = "Terraform test" + provider = google-beta rule { description 
= "tcp rule" @@ -98,6 +100,7 @@ resource "google_compute_region_network_firewall_policy_with_rules" "region-netw } resource "google_network_security_address_group" "address_group_1" { + provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Regional address group" @@ -108,6 +111,7 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { + provider = google-beta description = "Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -118,6 +122,7 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { + provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" @@ -128,12 +133,14 @@ resource "google_tags_tag_value" "secure_tag_value_1" { func testAccComputeRegionNetworkFirewallPolicyWithRules_update(context map[string]interface{}) string { return acctest.Nprintf(` data "google_project" "project" { + provider = google-beta } resource "google_compute_region_network_firewall_policy_with_rules" "region-network-firewall-policy-with-rules" { name = "tf-test-tf-fw-policy-with-rules%{random_suffix}" description = "Terraform test - update" region = "us-west2" + provider = google-beta rule { description = "tcp rule - changed" @@ -173,6 +180,7 @@ resource "google_compute_region_network_firewall_policy_with_rules" "region-netw } resource "google_network_security_address_group" "address_group_1" { + provider = google-beta name = "tf-test-tf-address-group%{random_suffix}" parent = "projects/${data.google_project.project.name}" description = "Regional address group" @@ -183,6 +191,7 @@ resource "google_network_security_address_group" "address_group_1" { } resource "google_tags_tag_key" "secure_tag_key_1" { + provider = google-beta description = 
"Tag key" parent = "projects/${data.google_project.project.name}" purpose = "GCE_FIREWALL" @@ -193,9 +202,11 @@ resource "google_tags_tag_key" "secure_tag_key_1" { } resource "google_tags_tag_value" "secure_tag_value_1" { + provider = google-beta description = "Tag value" parent = google_tags_tag_key.secure_tag_key_1.id short_name = "tf-test-tf-tag-value%{random_suffix}" } `, context) } +{{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go.tmpl index 2a98a49462f2..7de4e9725b1d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_per_instance_config_test.go.tmpl @@ -2,9 +2,9 @@ package compute_test import ( "fmt" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -407,6 +407,7 @@ resource "google_compute_region_instance_group_manager" "rigm" { `, context) } + func testAccComputeRegionPerInstanceConfig_removeInstanceOnDestroyBefore(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl index 32f060468122..5c8e00f02234 100644 --- 
a/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_security_policy_test.go.tmpl @@ -722,178 +722,6 @@ func testAccComputeRegionSecurityPolicy_withMultipleEnforceOnKeyConfigs_update(c `, context) } -func testAccComputeRegionSecurityPolicy_withMultipleEnforceOnKeyConfigs_ja4(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_compute_region_security_policy" "policy" { - name = "tf-test%{random_suffix}" - type = "CLOUD_ARMOR" - region = "us-west2" - - rules { - priority = "100" - action = "throttle" - rate_limit_options { - conform_action = "allow" - exceed_action = "deny(429)" - - rate_limit_threshold { - count = 10 - interval_sec = 60 - } - - enforce_on_key_configs { - enforce_on_key_type = "USER_IP" - } - - enforce_on_key_configs { - enforce_on_key_type = "TLS_JA4_FINGERPRINT" - } - - enforce_on_key_configs { - enforce_on_key_type = "REGION_CODE" - } - } - match { - config { - src_ip_ranges = [ - "*" - ] - } - versioned_expr = "SRC_IPS_V1" - } - } - - rules { - action = "allow" - priority = "2147483647" - preview = false - match { - versioned_expr = "SRC_IPS_V1" - config { - src_ip_ranges = ["*"] - } - } - description = "default rule" - } - } - `, context) -} - -func TestAccComputeRegionSecurityPolicy_regionSecurityPolicyRuleOrderingWithMultipleRules(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionSecurityPolicyDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_create(context), - }, - { - ResourceName: "google_compute_region_security_policy.policy", - ImportState: 
true, - ImportStateVerify: true, - }, - { - Config: testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_update(context), - }, - { - ResourceName: "google_compute_region_security_policy.policy", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - - -func testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_create(context map[string]interface{}) string { - return acctest.Nprintf(` - -resource "google_compute_region_security_policy" "policy" { - name = "tf-test-ordering%{random_suffix}" - description = "basic region security policy with multiple rules" - type = "CLOUD_ARMOR" - region = "us-central1" - - rules { - action = "deny" - priority = "3000" - match { - expr { - expression = "request.path.matches(\"/login.html\") && token.recaptcha_session.score < 0.2" - } - } - } - - rules { - action = "deny" - priority = "2147483647" - match { - versioned_expr = "SRC_IPS_V1" - config { - src_ip_ranges = ["*"] - } - } - description = "default rule" - } -} - - `, context) -} - - -func testAccComputeRegionSecurityPolicy_ruleOrderingWithMultipleRules_update(context map[string]interface{}) string { - return acctest.Nprintf(` - -resource "google_compute_region_security_policy" "policy" { - name = "tf-test-ordering%{random_suffix}" - description = "basic region security policy with multiple rules, updated" - type = "CLOUD_ARMOR" - region = "us-central1" - - rules { - action = "allow" - priority = "4000" - match { - expr { - expression = "request.path.matches(\"/login.html\") && token.recaptcha_session.score < 0.2" - } - } - } - - rules { - action = "allow" - priority = "5000" - match { - expr { - expression = "request.path.matches(\"/404.html\") && token.recaptcha_session.score > 0.4" - } - } - description = "new rule" - } - - rules { - action = "deny" - priority = "2147483647" - match { - versioned_expr = "SRC_IPS_V1" - config { - src_ip_ranges = ["*"] - } - } - description = "default rule" - } -} - `, context) -} - - {{- if ne 
$.TargetVersionName "ga" }} func TestAccComputeRegionSecurityPolicy_regionSecurityPolicyWithRulesNetworkMatch(t *testing.T) { t.Parallel() @@ -1115,76 +943,4 @@ func testAccComputeRegionSecurityPolicy_withNetworkMatch_update(context map[stri } `, context) } - -func TestAccComputeRegionSecurityPolicy_withAdvancedOptions(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRegionSecurityPolicyDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRegionSecurityPolicy_withAdvancedOptions(context), - }, - { - ResourceName: "google_compute_region_security_policy.policy", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeRegionSecurityPolicy_withAdvancedOptionsUpdate(context), - }, - { - ResourceName: "google_compute_region_security_policy.policy", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeRegionSecurityPolicy_withAdvancedOptions(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_region_security_policy" "policy" { - name = "tf-test%{random_suffix}" - description = "basic region security policy" - type = "CLOUD_ARMOR" - - advanced_options_config { - json_parsing = "STANDARD_WITH_GRAPHQL" - json_custom_config { - content_types = ["application/json"] - } - log_level = "VERBOSE" - user_ip_request_headers = ["x-forwarded-for"] - request_body_inspection_size = "8KB" - } -} -`, context) -} - -func testAccComputeRegionSecurityPolicy_withAdvancedOptionsUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_region_security_policy" "policy" { - name = "tf-test%{random_suffix}" - description = "basic region security policy" - type = 
"CLOUD_ARMOR" - - advanced_options_config { - json_parsing = "STANDARD" - json_custom_config { - content_types = ["text/json"] - } - log_level = "NORMAL" - user_ip_request_headers = ["x-real-ip"] - request_body_inspection_size = "16KB" - } -} -`, context) -} {{- end }} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go.tmpl index e2b982856093..ff6cd381af7f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_http_proxy_test.go.tmpl @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.tmpl index 2d34dbed13a5..7964c51e6b68 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.tmpl @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - 
"github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" @@ -152,4 +152,4 @@ resource "google_compute_region_health_check" "zero" { region = "us-central1" } `, target, backend, backend, hc) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go.tmpl index 2602db0de647..7a3bef847c29 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go.tmpl @@ -2,8 +2,8 @@ package compute_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -1204,4 +1204,4 @@ resource "google_compute_region_backend_service" "home" { timeout_sec = 10 } `, randomSuffix, randomSuffix, randomSuffix) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go index e7a98248d138..6b72e8d4417d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_reservation_test.go @@ -2,9 +2,7 @@ package compute_test import ( "fmt" - "regexp" "testing" - "time" 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" @@ -40,65 +38,6 @@ func TestAccComputeReservation_update(t *testing.T) { }) } -func TestAccComputeReservation_deleteAtTime(t *testing.T) { - acctest.SkipIfVcr(t) // timestamp - t.Parallel() - - reservationName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - deleteTime := time.Now().UTC().Add(24 * time.Hour) // Set delete_at_time to 24 hours in the future - deleteAtTimeRFC3339 := deleteTime.Format(time.RFC3339) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeReservationDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeReservation_deleteAtTime_deleteAfterDuration(reservationName, deleteAtTimeRFC3339, deleteTime.Unix()), - ExpectError: regexp.MustCompile("Conflicting configuration arguments"), - }, - { - Config: testAccComputeReservation_deleteAtTime(reservationName, deleteAtTimeRFC3339), - }, - { - ResourceName: "google_compute_reservation.reservation", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccComputeReservation_deleteAfterDuration(t *testing.T) { - acctest.SkipIfVcr(t) // timestamp - t.Parallel() - - reservationName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - deleteTime := time.Now().UTC().Add(24 * time.Hour) // Set delete_at_time to 24 hours in the future - deleteAtTimeRFC3339 := deleteTime.Format(time.RFC3339) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeReservationDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeReservation_deleteAtTime_deleteAfterDuration(reservationName, deleteAtTimeRFC3339, deleteTime.Unix()), - ExpectError: 
regexp.MustCompile("Conflicting configuration arguments"), - }, - { - Config: testAccComputeReservation_deleteAfterDuration(reservationName, deleteTime.Unix()), - }, - { - ResourceName: "google_compute_reservation.reservation", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"delete_after_duration"}, - }, - }, - }) -} - func testAccComputeReservation_basic(reservationName, count string) string { return fmt.Sprintf(` resource "google_compute_reservation" "reservation" { @@ -115,62 +54,3 @@ resource "google_compute_reservation" "reservation" { } `, reservationName, count) } - -func testAccComputeReservation_deleteAtTime(reservationName, time string) string { - return fmt.Sprintf(` -resource "google_compute_reservation" "reservation" { - name = "%s" - zone = "us-central1-a" - delete_at_time = "%s" - - specific_reservation { - count = 2 - instance_properties { - min_cpu_platform = "Intel Cascade Lake" - machine_type = "n2-standard-2" - } - } -} -`, reservationName, time) -} - -func testAccComputeReservation_deleteAfterDuration(reservationName string, duration int64) string { - return fmt.Sprintf(` -resource "google_compute_reservation" "reservation" { - name = "%s" - zone = "us-central1-a" - delete_after_duration { - seconds = %d - } - - specific_reservation { - count = 2 - instance_properties { - min_cpu_platform = "Intel Cascade Lake" - machine_type = "n2-standard-2" - } - } -} -`, reservationName, duration) -} - -func testAccComputeReservation_deleteAtTime_deleteAfterDuration(reservationName, time string, duration int64) string { - return fmt.Sprintf(` -resource "google_compute_reservation" "reservation" { - name = "%s" - zone = "us-central1-a" - delete_at_time = "%s" - delete_after_duration { - seconds = %d - } - - specific_reservation { - count = 2 - instance_properties { - min_cpu_platform = "Intel Cascade Lake" - machine_type = "n2-standard-2" - } - } -} -`, reservationName, time, duration) -} diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go index 7edb2cf77fd1..2d69d740b63f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_route_test.go @@ -6,7 +6,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeRoute_defaultInternetGateway(t *testing.T) { @@ -50,55 +49,6 @@ func TestAccComputeRoute_hopInstance(t *testing.T) { }) } -func TestAccComputeRoute_resourceManagerTags(t *testing.T) { - - org := envvar.GetTestOrgFromEnv(t) - - routeName := fmt.Sprintf("tf-test-route-resource-manager-tags-%s", acctest.RandString(t, 10)) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-nroute-tagkey", "organizations/"+org, make(map[string]interface{})) - sharedTagkey, _ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-route-tagvalue", sharedTagkey, org) - context := map[string]interface{}{ - "route_name": routeName, - "tag_key_id": tagKeyResult["name"], - "tag_value_id": tagValueResult["name"], - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRouteDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRoute_resourceManagerTags(context), - }, - { - ResourceName: "google_compute_route.acc_route_with_resource_manager_tags", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"params"}, // we don't read tags back. 
The whole params block is input only - }, - }, - }) -} - -func testAccComputeRoute_resourceManagerTags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_route" "acc_route_with_resource_manager_tags" { - name = "%{route_name}" - dest_range = "0.0.0.0/0" - network = "default" - next_hop_gateway = "default-internet-gateway" - priority = 100 - params { - resource_manager_tags = { - "%{tag_key_id}" = "%{tag_value_id}" - } - } -} -`, context) -} - func testAccComputeRoute_defaultInternetGateway(suffix string) string { return fmt.Sprintf(` resource "google_compute_route" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go.tmpl index f42099418d6d..de2498f991f8 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_address_test.go.tmpl @@ -113,7 +113,7 @@ func TestAccComputeRouterNatAddress_withAddressRemoved(t *testing.T) { ExternalProviders: map[string]resource.ExternalProvider{ "random": {}, }, - CheckDestroy: testAccCheckComputeRouterNatAddressDestroyProducer(t), + CheckDestroy: testAccCheckComputeRouterNatAddressDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeRouterNatAddressWithNatIps(routerName), diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go.tmpl similarity index 79% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go rename to 
mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go.tmpl index bc28459922b2..dc4ae4f9a2be 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_nat_test.go.tmpl @@ -31,31 +31,27 @@ func TestAccComputeRouterNat_basic(t *testing.T) { }, { // implicitly full ImportStateId - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, region, routerName, routerName), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, region, routerName, routerName), + ImportState: true, + ImportStateVerify: true, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s/%s", region, routerName, routerName), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s/%s", region, routerName, routerName), + ImportState: true, + ImportStateVerify: true, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s", routerName, routerName), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportStateId: fmt.Sprintf("%s/%s", routerName, routerName), + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatKeepRouter(routerName), @@ -81,10 +77,9 @@ func 
TestAccComputeRouterNat_update(t *testing.T) { Config: testAccComputeRouterNatBasicBeforeUpdate(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatUpdated(routerName), @@ -95,10 +90,9 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatUpdateToNatIPsId(routerName), @@ -109,10 +103,9 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatUpdateToNatIPsName(routerName), @@ -123,10 +116,9 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatBasicBeforeUpdate(routerName), @@ -137,10 +129,9 @@ func TestAccComputeRouterNat_update(t *testing.T) { }, }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ 
-161,10 +152,9 @@ func TestAccComputeRouterNat_withManualIpAndSubnetConfiguration(t *testing.T) { Config: testAccComputeRouterNatWithManualIpAndSubnetConfiguration(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -181,59 +171,53 @@ func TestAccComputeRouterNat_withPortAllocationMethods(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), Steps: []resource.TestStep{ - { + { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, true), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, true, false), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, false), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, true, false), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: 
"google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatWithAllocationMethod(routerName, false, true), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatWithAllocationMethodWithParameters(routerName, false, true, 256, 8192), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -260,10 +244,9 @@ func TestAccComputeRouterNat_withNatIpsAndDrainNatIps(t *testing.T) { Config: testAccComputeRouterNatWithNatIps(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, // (ERROR) - Should not allow draining IPs still in natIps { @@ -275,10 +258,9 @@ func TestAccComputeRouterNat_withNatIpsAndDrainNatIps(t *testing.T) { Config: testAccComputeRouterNatWithOneDrainOneRemovedNatIps(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, // (ERROR): Should not be able to drain previously removed natIps (#1) { @@ -289,6 +271,7 @@ func TestAccComputeRouterNat_withNatIpsAndDrainNatIps(t *testing.T) { }) } + func TestAccComputeRouterNat_withNatRules(t *testing.T) { t.Parallel() @@ -308,118 +291,105 @@ func 
TestAccComputeRouterNat_withNatRules(t *testing.T) { Config: testAccComputeRouterNatRulesBasic_omitRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 0, ruleDescription, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 65000, ruleDescription, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 100, ruleDescription, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 100, ruleDescriptionUpdate, match), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: 
true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesWithSourceActiveAndDrainIps(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesWithDrainIps(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatMultiRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic_omitAction(routerName, 100, ruleDescriptionUpdate, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic_omitDescription(routerName, 100, matchUpdate), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: 
testAccComputeRouterNatMultiRulesWithIpId(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatRulesBasic_omitRules(routerName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -444,10 +414,9 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatUpdateEndpointType(routerName, "ENDPOINT_TYPE_SWG"), @@ -456,10 +425,9 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatUpdateEndpointType(routerName, "ENDPOINT_TYPE_VM"), @@ -468,10 +436,9 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: testResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeRouterNatUpdateEndpointType(routerName, "ENDPOINT_TYPE_MANAGED_PROXY_LB"), @@ -480,10 +447,9 @@ func TestAccComputeRouterNat_withEndpointTypes(t *testing.T) { ), }, { - ResourceName: 
testResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -502,68 +468,13 @@ func TestAccComputeRouterNat_AutoNetworkTier(t *testing.T) { CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccComputeRouterNatWitAutoNetworkTier(routerName, hubName), - }, - { - // implicitly full ImportStateId - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, - }, - }, - }) -} - -func TestAccComputeRouterNat_withPrivateNatNetworkTierStandard(t *testing.T) { - t.Parallel() - - project := envvar.GetTestProjectFromEnv() - region := envvar.GetTestRegionFromEnv() - - testId := acctest.RandString(t, 10) - routerName := fmt.Sprintf("tf-test-router-nat-%s", testId) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRouterNatPrivateTypeNetworkTierStandard(routerName), - }, - { - // implicitly full ImportStateId - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, - }, - { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, region, routerName, routerName), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, + Config: testAccComputeRouterNatWitAutoNetworkTier(routerName, hubName), }, { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s/%s", region, routerName, routerName), - ImportState: true, - 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, - }, - { - ResourceName: "google_compute_router_nat.foobar", - ImportStateId: fmt.Sprintf("%s/%s", routerName, routerName), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"auto_network_tier"}, - }, - { - Config: testAccComputeRouterNatKeepRouter(routerName), - Check: testAccCheckComputeRouterNatDelete( - t, "google_compute_router_nat.foobar"), + // implicitly full ImportStateId + ResourceName: "google_compute_router_nat.foobar", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -887,38 +798,6 @@ func testAccCheckComputeRouterNatDelete(t *testing.T, n string) resource.TestChe } } -func TestAccComputeRouterNat_withNat64Configuration(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeRouterNatDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRouterNatWithNat64Configuration(context), - }, - { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeRouterNatWithNat64ConfigurationUpdate(context), - }, - { - ResourceName: "google_compute_router_nat.foobar", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - func testAccComputeRouterNatBasic(routerName string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -2169,176 +2048,3 @@ resource "google_compute_router_nat" "foobar" { } `, testAccComputeRouterNatBaseResourcesWithPrivateNatSubnetworks(routerName, hubName), routerName) } - -func testAccComputeRouterNatPrivateTypeNetworkTierStandard(routerName string) string { - return fmt.Sprintf(` -resource "google_compute_network" "foobar" { - name 
= "%s-net" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "foobar" { - name = "%s-subnet" - network = google_compute_network.foobar.self_link - ip_cidr_range = "10.0.0.0/16" - region = "us-central1" - purpose = "PRIVATE_NAT" -} - -resource "google_compute_router" "foobar" { - name = "%s" - region = google_compute_subnetwork.foobar.region - network = google_compute_network.foobar.self_link -} - -resource "google_compute_router_nat" "foobar" { - name = "%s" - router = google_compute_router.foobar.name - region = google_compute_router.foobar.region - source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS" - type = "PRIVATE" - enable_dynamic_port_allocation = false - enable_endpoint_independent_mapping = false - auto_network_tier = "STANDARD" - min_ports_per_vm = 32 - - subnetwork { - name = google_compute_subnetwork.foobar.id - source_ip_ranges_to_nat = ["ALL_IP_RANGES"] - } -} -`, routerName, routerName, routerName, routerName) -} - -func testAccComputeRouterNatWithNat64Configuration(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_dns_policy" "foobar" { - name = "tf-test-example-policy%{random_suffix}" - enable_inbound_forwarding = false - enable_logging = false - - dns64_config { - scope { - all_queries = true - } - } - networks { - network_url = google_compute_network.foobar.id - } -} - -resource "google_compute_network" "foobar" { - name = "tf-test-network%{random_suffix}" - enable_ula_internal_ipv6 = true - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "foobar" { - name = "tf-test-subnetwork-%{random_suffix}" - network = google_compute_network.foobar.self_link - ip_cidr_range = "10.0.0.0/16" - region = "us-central1" -} - -resource "google_compute_subnetwork" "foobar2" { - name = "tf-test-subnetwork-2-%{random_suffix}" - network = google_compute_network.foobar.self_link - ip_cidr_range = "10.182.0.0/20" - ipv6_access_type = "EXTERNAL" - stack_type = "IPV4_IPV6" - 
region = "us-central1" -} - -resource "google_compute_router" "foobar" { - name = "tf-test-router%{random_suffix}" - region = google_compute_subnetwork.foobar.region - network = google_compute_network.foobar.self_link - bgp { - asn = 64514 - } -} - -resource "google_compute_router_nat" "foobar" { - name = "tf-test-router-nat%{random_suffix}" - router = google_compute_router.foobar.name - region = google_compute_router.foobar.region - nat_ip_allocate_option = "AUTO_ONLY" - - source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS" - subnetwork { - name = google_compute_subnetwork.foobar.name - source_ip_ranges_to_nat = ["ALL_IP_RANGES"] - } - - source_subnetwork_ip_ranges_to_nat64 = "ALL_IPV6_SUBNETWORKS" -} -`, context) -} - -func testAccComputeRouterNatWithNat64ConfigurationUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_dns_policy" "foobar" { - name = "tf-test-example-policy%{random_suffix}" - enable_inbound_forwarding = false - enable_logging = false - - dns64_config { - scope { - all_queries = true - } - } - networks { - network_url = google_compute_network.foobar.id - } -} - -resource "google_compute_network" "foobar" { - name = "tf-test-network%{random_suffix}" - enable_ula_internal_ipv6 = true - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "foobar" { - name = "tf-test-subnetwork-%{random_suffix}" - network = google_compute_network.foobar.self_link - ip_cidr_range = "10.0.0.0/16" - region = "us-central1" -} - -resource "google_compute_subnetwork" "foobar2" { - name = "tf-test-subnetwork-2-%{random_suffix}" - network = google_compute_network.foobar.self_link - ip_cidr_range = "10.182.0.0/20" - ipv6_access_type = "EXTERNAL" - stack_type = "IPV4_IPV6" - region = "us-central1" -} - -resource "google_compute_router" "foobar" { - name = "tf-test-router%{random_suffix}" - region = google_compute_subnetwork.foobar.region - network = google_compute_network.foobar.self_link - bgp { - asn = 
64514 - } -} - -resource "google_compute_router_nat" "foobar" { - name = "tf-test-router-nat%{random_suffix}" - router = google_compute_router.foobar.name - region = google_compute_router.foobar.region - nat_ip_allocate_option = "AUTO_ONLY" - - source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS" - subnetwork { - name = google_compute_subnetwork.foobar.name - source_ip_ranges_to_nat = ["ALL_IP_RANGES"] - } - - source_subnetwork_ip_ranges_to_nat64 = "LIST_OF_IPV6_SUBNETWORKS" - nat64_subnetwork { - name = google_compute_subnetwork.foobar2.name - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl index 197639163cd3..c5755139504b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl @@ -112,14 +112,28 @@ Leave this field blank to advertise no custom groups.`, }, }, "advertised_ip_ranges": { - Type: schema.TypeSet, + Type: schema.TypeList, Optional: true, Description: `User-specified list of individual IP ranges to advertise in custom mode. This field can only be populated if advertiseMode is 'CUSTOM' and is advertised to all peers of the router. These IP ranges will be advertised in addition to any specified groups. Leave this field blank to advertise no custom IP ranges.`, - Elem: computeRouterBgpPeerAdvertisedIpRangesSchema(), + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "range": { + Type: schema.TypeString, + Required: true, + Description: `The IP range to advertise. The value must be a +CIDR-formatted string.`, + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: `User-specified description for the IP range.`, + }, + }, + }, }, "advertised_route_priority": { Type: schema.TypeInt, @@ -376,24 +390,6 @@ Must be unique within a router. 
Must be referenced by exactly one bgpPeer. Must } } -func computeRouterBgpPeerAdvertisedIpRangesSchema() *schema.Resource { - return &schema.Resource{ - Schema: map[string]*schema.Schema{ - "range": { - Type: schema.TypeString, - Required: true, - Description: `The IP range to advertise. The value must be a -CIDR-formatted string.`, - }, - "description": { - Type: schema.TypeString, - Optional: true, - Description: `User-specified description for the IP range.`, - }, - }, - } -} - func resourceComputeRouterBgpPeerCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) @@ -1154,14 +1150,14 @@ func flattenNestedComputeRouterBgpPeerAdvertisedIpRanges(v interface{}, d *schem return v } l := v.([]interface{}) - transformed := schema.NewSet(schema.HashResource(computeRouterBgpPeerAdvertisedIpRangesSchema()), []interface{}{}) + transformed := make([]interface{}, 0, len(l)) for _, raw := range l { original := raw.(map[string]interface{}) if len(original) < 1 { // Do not include empty json objects coming back from the api continue } - transformed.Add(map[string]interface{}{ + transformed = append(transformed, map[string]interface{}{ "range": flattenNestedComputeRouterBgpPeerAdvertisedIpRangesRange(original["range"], d, config), "description": flattenNestedComputeRouterBgpPeerAdvertisedIpRangesDescription(original["description"], d, config), }) @@ -1392,7 +1388,6 @@ func expandNestedComputeRouterBgpPeerAdvertisedGroups(v interface{}, d tpgresour } func expandNestedComputeRouterBgpPeerAdvertisedIpRanges(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - v = v.(*schema.Set).List() l := v.([]interface{}) req := make([]interface{}, 0, len(l)) for _, raw := range l { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go 
b/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go.tmpl index 104bab62a947..f4ac5f418e9d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_route_policy_test.go.tmpl @@ -68,4 +68,4 @@ resource "google_compute_router_route_policy" "route_policy" { } } `, routerName, routePolicyName) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl index af98e66adbae..d02cab34f229 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_test.go.tmpl @@ -91,9 +91,9 @@ func TestAccComputeRouter_advertisedIpRangesOrder(t *testing.T) { Config: testAccComputeRouterAdvertisedIpRangesOrder(routerName), }, { - ResourceName: "google_compute_router.foobar", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_router.foobar", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"bgp.0.advertised_ip_ranges.0.range", "bgp.0.advertised_ip_ranges.1.range"}, }, }, @@ -211,46 +211,6 @@ func TestAccComputeRouter_addAndUpdateIdentifierRangeBgp(t *testing.T) { }) } - -{{- if ne $.TargetVersionName "ga" }} -func TestAccComputeRouter_resourceManagerTags(t *testing.T) { - t.Parallel() - org := envvar.GetTestOrgFromEnv(t) - - suffixName := acctest.RandString(t, 10) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-routers-tagkey", "organizations/"+org, make(map[string]interface{})) - 
sharedTagkey,_ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-routers-tagvalue", sharedTagkey, org) - routerName := fmt.Sprintf("tf-test-router-resource-manager-tags-%s", suffixName) - networkName := fmt.Sprintf("tf-test-network-resource-manager-tags-%s-net", suffixName) - subnetName := fmt.Sprintf("tf-test-subnet-resource-manager-tags-%s-subnet", suffixName) - context := map[string]interface{}{ - "network_name": networkName, - "subnet_name": subnetName, - "router_name": routerName, - "tag_key_id": tagKeyResult["name"], - "tag_value_id": tagValueResult["name"], - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeRouterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeRouter_resourceManagerTags(context), - }, - { - ResourceName: "google_compute_router.foobar", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"params"}, - }, - }, - }) -} -{{- end }} - func testAccComputeRouterBasic(routerName, resourceRegion string) string { return fmt.Sprintf(` resource "google_compute_network" "foobar" { @@ -426,39 +386,4 @@ resource "google_compute_router" "foobar" { } } `, routerName, routerName) -} - - -{{- if ne $.TargetVersionName "ga" }} -func testAccComputeRouter_resourceManagerTags(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_compute_network" "foobar" { - provider = google-beta - name = "%{network_name}" - auto_create_subnetworks = false - } - - resource "google_compute_subnetwork" "foobar" { - provider = google-beta - name = "%{subnet_name}" - network = google_compute_network.foobar.self_link - ip_cidr_range = "10.0.0.0/16" - } - - resource "google_compute_router" "foobar" { - provider = google-beta - name = "%{router_name}" - region = 
google_compute_subnetwork.foobar.region - network = google_compute_network.foobar.name - bgp { - asn = 4294967294 - } - params { - resource_manager_tags = { - "%{tag_key_id}" = "%{tag_value_id}" - } - } - } - `, context) -} -{{- end }} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl index a91284724b3f..cbbe297d3c83 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl @@ -338,7 +338,7 @@ func ResourceComputeSecurityPolicy() *schema.Resource { Type: schema.TypeString, Optional: true, Description: `Determines the key to enforce the rateLimitThreshold on`, - ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "TLS_JA4_FINGERPRINT", "USER_IP", ""}, false), + ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "USER_IP", ""}, false), }, "enforce_on_key_name": { @@ -357,7 +357,7 @@ func ResourceComputeSecurityPolicy() *schema.Resource { Type: schema.TypeString, Optional: true, Description: `Determines the key to enforce the rate_limit_threshold on`, - ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "TLS_JA4_FINGERPRINT", "USER_IP"}, false), + ValidateFunc: validation.StringInSlice([]string{"ALL", "IP", "HTTP_HEADER", "XFF_IP", "HTTP_COOKIE", "HTTP_PATH", "SNI", "REGION_CODE", "TLS_JA3_FINGERPRINT", "USER_IP"}, false), }, "enforce_on_key_name": { Type: schema.TypeString, @@ -535,15 +535,6 @@ func ResourceComputeSecurityPolicy() *schema.Resource { 
Description: `An optional list of case-insensitive request header names to use for resolving the callers client IP address.`, Elem: &schema.Schema{Type: schema.TypeString}, }, - {{- if ne $.TargetVersionName "ga" }} - "request_body_inspection_size": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: validation.StringInSlice([]string{"8KB", "16KB", "32KB", "48KB", "64KB"}, false), - Description: `The maximum request size chosen by the customer with Waf enabled. Values supported are "8KB", "16KB, "32KB", "48KB" and "64KB". Values are case insensitive.`, - }, - {{- end }} }, }, }, @@ -898,11 +889,7 @@ func resourceComputeSecurityPolicyUpdate(d *schema.ResourceData, meta interface{ if d.HasChange("advanced_options_config") { securityPolicy.AdvancedOptionsConfig = expandSecurityPolicyAdvancedOptionsConfig(d.Get("advanced_options_config").([]interface{})) -{{ if eq $.TargetVersionName `ga` }} securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "AdvancedOptionsConfig", "advancedOptionsConfig.jsonParsing", "advancedOptionsConfig.jsonCustomConfig", "advancedOptionsConfig.logLevel") -{{- else }} - securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "AdvancedOptionsConfig", "advancedOptionsConfig.jsonParsing", "advancedOptionsConfig.jsonCustomConfig", "advancedOptionsConfig.logLevel", "advancedOptionsConfig.requestBodyInspectionSize") -{{- end }} securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "advanceOptionConfig.userIpRequestHeaders") if len(securityPolicy.AdvancedOptionsConfig.UserIpRequestHeaders) == 0 { // to clean this list we must send the updateMask of this field on the request. 
@@ -1365,13 +1352,10 @@ func expandSecurityPolicyAdvancedOptionsConfig(configured []interface{}) *comput data := configured[0].(map[string]interface{}) return &compute.SecurityPolicyAdvancedOptionsConfig{ - JsonParsing: data["json_parsing"].(string), - JsonCustomConfig: expandSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(data["json_custom_config"].([]interface{})), - LogLevel: data["log_level"].(string), - UserIpRequestHeaders: tpgresource.ConvertStringArr(data["user_ip_request_headers"].(*schema.Set).List()), - {{- if ne $.TargetVersionName "ga" }} - RequestBodyInspectionSize: data["request_body_inspection_size"].(string), - {{- end }} + JsonParsing: data["json_parsing"].(string), + JsonCustomConfig: expandSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(data["json_custom_config"].([]interface{})), + LogLevel: data["log_level"].(string), + UserIpRequestHeaders: tpgresource.ConvertStringArr(data["user_ip_request_headers"].(*schema.Set).List()), } } @@ -1381,13 +1365,10 @@ func flattenSecurityPolicyAdvancedOptionsConfig(conf *compute.SecurityPolicyAdva } data := map[string]interface{}{ - "json_parsing": conf.JsonParsing, - "json_custom_config": flattenSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(conf.JsonCustomConfig), - "log_level": conf.LogLevel, - "user_ip_request_headers": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(conf.UserIpRequestHeaders)), - {{- if ne $.TargetVersionName "ga" }} - "request_body_inspection_size": conf.RequestBodyInspectionSize, - {{- end }} + "json_parsing": conf.JsonParsing, + "json_custom_config": flattenSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(conf.JsonCustomConfig), + "log_level": conf.LogLevel, + "user_ip_request_headers": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(conf.UserIpRequestHeaders)), } return []map[string]interface{}{data} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go 
b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go.tmpl similarity index 94% rename from mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go.tmpl index bed35a86c641..50cd764a47c2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go.tmpl @@ -1,10 +1,10 @@ package compute_test import ( - "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "regexp" + "fmt" + "regexp" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -94,7 +94,7 @@ func TestAccComputeSecurityPolicyRule_extendedUpdate(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccComputeSecurityPolicyRule_extPosUpdateSamePriority(context), + Config: testAccComputeSecurityPolicyRule_extPosUpdateSamePriority(context), ExpectError: regexp.MustCompile("Cannot have rules with the same priorities."), }, { @@ -130,9 +130,9 @@ func TestAccComputeSecurityPolicyRule_withPreconfiguredWafConfig(t *testing.T) { Config: testAccComputeSecurityPolicyRule_withPreconfiguredWafConfig_create(context), }, { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccComputeSecurityPolicyRule_withPreconfiguredWafConfig_update(context), @@ -172,21 +172,21 @@ func TestAccComputeSecurityPolicyRule_withRateLimitOptions(t *testing.T) { { Config: testAccComputeSecurityPolicyRule_withRateLimitOptionsCreate(context), }, - { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - 
ImportStateVerify: true, - }, - { - Config: testAccComputeSecurityPolicyRule_withRateLimitOptionsUpdate(context), - }, - { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + { + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeSecurityPolicyRule_withRateLimitOptionsUpdate(context), + }, + { + ResourceName: "google_compute_security_policy_rule.policy_rule", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func TestAccComputeSecurityPolicyRule_withRateLimit_withEnforceOnKeyConfigs(t *testing.T) { @@ -237,18 +237,11 @@ func TestAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnK ImportState: true, ImportStateVerify: true, }, - { - Config: testAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName), - }, - { - ResourceName: "google_compute_security_policy_rule.policy_rule", - ImportState: true, - ImportStateVerify: true, - }, }, }) } + func TestAccComputeSecurityPolicyRule_EnforceOnKeyUpdates(t *testing.T) { t.Parallel() @@ -1030,54 +1023,6 @@ resource "google_compute_security_policy_rule" "policy_rule" { `, spName) } -func testAccComputeSecurityPolicyRule_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName string) string { - return fmt.Sprintf(` -resource "google_compute_security_policy" "policy" { - name = "%s" - description = "basic policy base" -} - -resource "google_compute_security_policy_rule" "policy_rule" { - security_policy = google_compute_security_policy.policy.name - description = "throttle rule withMultipleEnforceOnKeyConfigs3" - action = "throttle" - priority = "100" - - match { - versioned_expr = "SRC_IPS_V1" - config { - src_ip_ranges = ["*"] - } - } - - rate_limit_options { - conform_action = "allow" - exceed_action = "deny(429)" - - rate_limit_threshold { - count = 10 - interval_sec = 60 - } - - 
enforce_on_key = "" - - enforce_on_key_configs { - enforce_on_key_type = "REGION_CODE" - } - - enforce_on_key_configs { - enforce_on_key_type = "TLS_JA4_FINGERPRINT" - } - - enforce_on_key_configs { - enforce_on_key_type = "USER_IP" - } - } -} - -`, spName) -} - func testAccComputeSecurityPolicyRule_withRateLimitOptions_withoutRateLimitOptions(spName string) string { return fmt.Sprintf(` resource "google_compute_security_policy" "policy" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl index 59d7bdb90a4a..1345b1d7f4fc 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl @@ -295,17 +295,6 @@ func TestAccComputeSecurityPolicy_withAdvancedOptionsConfig(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - {{- if ne $.TargetVersionName "ga" }} - // Add request_body_inspection_size value - { - Config: testAccComputeSecurityPolicy_withAdvancedOptionsConfig_update4(spName), - }, - { - ResourceName: "google_compute_security_policy.policy", - ImportState: true, - ImportStateVerify: true, - }, - {{- end }} { Config: testAccComputeSecurityPolicy_basic(spName, "CLOUD_ARMOR"), }, @@ -534,14 +523,6 @@ func TestAccComputeSecurityPolicy_withRateLimitOption_withMultipleEnforceOnKeyCo ImportState: true, ImportStateVerify: true, }, - { - Config: testAccComputeSecurityPolicy_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName), - }, - { - ResourceName: "google_compute_security_policy.policy", - ImportState: true, - ImportStateVerify: true, - }, }, }) } @@ -1485,30 +1466,6 @@ resource "google_compute_security_policy" "policy" { `, spName) } -{{- if ne $.TargetVersionName "ga" }} -func testAccComputeSecurityPolicy_withAdvancedOptionsConfig_update4(spName string) string { - return 
fmt.Sprintf(` -resource "google_compute_security_policy" "policy" { - name = "%s" - description = "updated description changing json_parsing to STANDARD_WITH_GRAPHQL" - - advanced_options_config { - json_parsing = "STANDARD_WITH_GRAPHQL" - json_custom_config { - content_types = [ - "application/json", - "application/vnd.hyper+json" - ] - } - log_level = "NORMAL" - user_ip_request_headers = [] - request_body_inspection_size = "64KB" - } -} -`, spName) -} -{{- end }} - func testAccComputeSecurityPolicy_withoutAdaptiveProtection(spName string) string { return fmt.Sprintf(` resource "google_compute_security_policy" "policy" { @@ -1979,51 +1936,6 @@ resource "google_compute_security_policy" "policy" { `, spName) } -func testAccComputeSecurityPolicy_withRateLimitOption_withMultipleEnforceOnKeyConfigs3(spName string) string { - return fmt.Sprintf(` -resource "google_compute_security_policy" "policy" { - name = "%s" - description = "throttle rule with enforce_on_key_configs" - - rule { - action = "throttle" - priority = "2147483647" - match { - versioned_expr = "SRC_IPS_V1" - config { - src_ip_ranges = ["*"] - } - } - description = "default rule withMultipleEnforceOnKeyConfigs3" - - rate_limit_options { - conform_action = "allow" - exceed_action = "deny(429)" - - rate_limit_threshold { - count = 10 - interval_sec = 60 - } - - enforce_on_key = "" - - enforce_on_key_configs { - enforce_on_key_type = "REGION_CODE" - } - - enforce_on_key_configs { - enforce_on_key_type = "TLS_JA4_FINGERPRINT" - } - - enforce_on_key_configs { - enforce_on_key_type = "USER_IP" - } - } - } -} -`, spName) -} - func TestAccComputeSecurityPolicy_withRedirectOptionsRecaptcha(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go.tmpl similarity index 94% rename from 
mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go.tmpl index 014eec069952..ca1cbca38a37 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_test.go.tmpl @@ -1,11 +1,9 @@ package compute_test import ( - "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -31,7 +29,7 @@ func TestAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(t *test ImportStateVerifyIgnore: []string{"target_service", "region"}, }, { - Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, true, -1), + Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, true), }, { ResourceName: "google_compute_service_attachment.psc_ilb_service_attachment", @@ -40,7 +38,7 @@ func TestAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(t *test ImportStateVerifyIgnore: []string{"target_service", "region"}, }, { - Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, false, -1), + Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, false), }, { ResourceName: "google_compute_service_attachment.psc_ilb_service_attachment", @@ -48,14 +46,6 @@ func TestAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(t *test ImportStateVerify: true, ImportStateVerifyIgnore: []string{"target_service", "region"}, }, - { - Config: testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context, false, 0), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectNonEmptyPlan(), - }, - }, - }, }, }) } @@ 
-189,7 +179,7 @@ resource "google_compute_subnetwork" "psc_ilb_nat" { `, context) } -func testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context map[string]interface{}, preventDestroy bool, propagatedConnectionLimit int) string { +func testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context map[string]interface{}, preventDestroy bool) string { context["lifecycle_block"] = "" if preventDestroy { context["lifecycle_block"] = ` @@ -198,18 +188,6 @@ func testAccComputeServiceAttachment_serviceAttachmentBasicExampleUpdate(context }` } - switch { - case propagatedConnectionLimit == 0: - context["propagated_connection_limit"] = ` - propagated_connection_limit = 0 - send_propagated_connection_limit_if_zero = true - ` - case propagatedConnectionLimit > 0: - context["propagated_connection_limit"] = fmt.Sprintf("propagated_connection_limit = %d", propagatedConnectionLimit) - default: - context["propagated_connection_limit"] = "" - } - return acctest.Nprintf(` resource "google_compute_service_attachment" "psc_ilb_service_attachment" { name = "tf-test-my-psc-ilb%{random_suffix}" @@ -228,7 +206,6 @@ resource "google_compute_service_attachment" "psc_ilb_service_attachment" { } reconcile_connections = false %{lifecycle_block} - %{propagated_connection_limit} } resource "google_compute_address" "psc_ilb_consumer_address" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go index c13210c1ff96..52b836733dd1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go @@ -68,6 +68,7 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = 
"compute.googleapis.com" + disable_on_destroy = false } resource "google_project" "guest_project" { @@ -94,13 +95,12 @@ resource "google_project" "guest_project_third" { deletion_policy = "DELETE" } -resource "google_org_policy_policy" "shared_reservation_org_policy" { - name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" - parent = "projects/${google_project.owner_project.project_id}" - - spec { - rules { - allow_all = "TRUE" +resource "google_organization_policy" "shared_reservation_org_policy" { + org_id = "%{org_id}" + constraint = "constraints/compute.sharedReservationsOwnerProjects" + list_policy { + allow { + values = ["projects/${google_project.owner_project.number}"] } } } @@ -108,16 +108,19 @@ resource "google_org_policy_policy" "shared_reservation_org_policy" { resource "google_project_service" "compute_second_project" { project = google_project.guest_project.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "compute_third_project" { project = google_project.guest_project_second.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "compute_fourth_project" { project = google_project.guest_project_third.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_compute_reservation" "gce_reservation" { @@ -139,7 +142,7 @@ resource "google_compute_reservation" "gce_reservation" { project_id = google_project.guest_project.project_id } } - depends_on = [google_org_policy_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] + depends_on = [google_organization_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] } `, context) } @@ -157,6 +160,7 
@@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project" "guest_project" { @@ -183,13 +187,12 @@ resource "google_project" "guest_project_third" { deletion_policy = "DELETE" } -resource "google_org_policy_policy" "shared_reservation_org_policy" { - name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" - parent = "projects/${google_project.owner_project.project_id}" - - spec { - rules { - allow_all = "TRUE" +resource "google_organization_policy" "shared_reservation_org_policy" { + org_id = "%{org_id}" + constraint = "constraints/compute.sharedReservationsOwnerProjects" + list_policy { + allow { + values = ["projects/${google_project.owner_project.number}"] } } } @@ -197,16 +200,19 @@ resource "google_org_policy_policy" "shared_reservation_org_policy" { resource "google_project_service" "compute_second_project" { project = google_project.guest_project.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "compute_third_project" { project = google_project.guest_project_second.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "compute_fourth_project" { project = google_project.guest_project_third.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_compute_reservation" "gce_reservation" { @@ -236,7 +242,7 @@ resource "google_compute_reservation" "gce_reservation" { project_id = google_project.guest_project_third.project_id } } - depends_on = [google_org_policy_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] + depends_on = 
[google_organization_policy.shared_reservation_org_policy,google_project_service.compute,google_project_service.compute_second_project,google_project_service.compute_third_project] } `, context) } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go deleted file mode 100644 index 93c9c5ca4e15..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_snapshot_settings_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package compute_test - -import ( - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccComputeSnapshotSettings_snapshotSettings_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccComputeSnapshotSettings_snapshotSettings_basic(context), - }, - { - ResourceName: "google_compute_snapshot_settings.tf_test_snapshot_settings", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccComputeSnapshotSettings_snapshotSettings_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_snapshot_settings.tf_test_snapshot_settings", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: 
"google_compute_snapshot_settings.tf_test_snapshot_settings", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccComputeSnapshotSettings_snapshotSettings_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" -} - -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.compute] -} - - -resource "google_compute_snapshot_settings" "tf_test_snapshot_settings" { - project = google_project.project.project_id - storage_location { - policy = "SPECIFIC_LOCATIONS" - locations { - name = "us-central1" - location = "us-central1" - } - } - depends_on = [time_sleep.wait_120_seconds] -} -`, context) -} - -func testAccComputeSnapshotSettings_snapshotSettings_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project_service" "compute" { - project = google_project.project.project_id - service = "compute.googleapis.com" -} -resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_project_service.compute] -} - -resource "google_compute_snapshot_settings" "tf_test_snapshot_settings" { - project = google_project.project.project_id - storage_location { - policy = "NEAREST_MULTI_REGION" - } - depends_on = [time_sleep.wait_120_seconds] -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl index 74a8e6b6c3ae..e4461c0902bc 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_subnetwork_test.go.tmpl @@ -1,6 +1,5 @@ package compute_test - import ( "context" "fmt" @@ -9,7 +8,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-plugin-testing/plancheck" tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" @@ -498,63 +496,6 @@ func TestAccComputeSubnetwork_internal_ipv6(t *testing.T) { }) } -func TestAccComputeSubnetwork_resourceManagerTags(t *testing.T) { - t.Parallel() - - var subnetwork compute.Subnetwork - org := envvar.GetTestOrgFromEnv(t) - - suffixName := acctest.RandString(t, 10) - tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-subnetworks-tagkey", "organizations/"+org, make(map[string]interface{})) - sharedTagkey,_ := tagKeyResult["shared_tag_key"] - tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-subnetworks-tagvalue", sharedTagkey, org) - - cnName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - subnetworkName := fmt.Sprintf("tf-test-subnetwork-resource-manager-tags-%s", suffixName) - context := map[string]interface{}{ - "subnetwork_name": subnetworkName, - "network_name": cnName, - "tag_key_id": tagKeyResult["name"], - "tag_value_id": tagValueResult["name"], - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeSubnetworkDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccComputeSubnetwork_resourceManagerTags(context), - Check: resource.ComposeTestCheckFunc( - testAccCheckComputeSubnetworkExists( - t, "google_compute_subnetwork.acc_subnetwork_with_resource_manager_tags", &subnetwork), - ), - }, - }, - }) -} - -func testAccComputeSubnetwork_resourceManagerTags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "custom-test" { - name = "%{network_name}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "acc_subnetwork_with_resource_manager_tags" { - name = "%{subnetwork_name}" - ip_cidr_range = "10.0.0.0/16" - region = "us-central1" - network = google_compute_network.custom-test.self_link - params { - resource_manager_tags = { - "%{tag_key_id}" = "%{tag_value_id}" - } - } -} -`, context) -} - func testAccCheckComputeSubnetworkExists(t *testing.T, n string, subnetwork *compute.Subnetwork) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl index 612ec7399dd1..d5617b8e3e96 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl @@ -571,10 +571,10 @@ func resourceComputeTargetPoolDelete(d *schema.ResourceData, meta interface{}) e func resourceTargetPoolStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, 
d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl deleted file mode 100644 index 716a5b383d8b..000000000000 --- a/mmv1/third_party/terraform/services/compute/resource_compute_wire_group_test.go.tmpl +++ /dev/null @@ -1,117 +0,0 @@ -package compute_test -{{ if ne $.TargetVersionName `ga` -}} -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccComputeWireGroup_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckComputeWireGroupDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccComputeWireGroup_basic(context), - }, - { - ResourceName: "google_compute_wire_group.example-test-wire-group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cross_site_network"}, - }, - { - Config: testAccComputeWireGroup_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_compute_wire_group.example-test-wire-group", plancheck.ResourceActionUpdate), - }, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_compute_wire_group.example-test-wire-group", "description", "Example Wire Group Updated"+context["random_suffix"].(string)), - ), - }, - { - ResourceName: 
"google_compute_wire_group.example-test-wire-group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cross_site_network"}, - }, - }, - }) -} - -func testAccComputeWireGroup_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { -provider = google-beta -} - -resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network%{random_suffix}" - description = "Example cross site network" - provider = google-beta -} - -resource "google_compute_wire_group" "example-test-wire-group" { - name = "tf-test-test-wire-group%{random_suffix}" - description = "Example Wire Group%{random_suffix}" - cross_site_network = google_compute_cross_site_network.example-cross-site-network.name - provider = google-beta - depends_on = [ - google_compute_cross_site_network.example-cross-site-network - ] - wire_properties { - bandwidth_unmetered = 1000 - } - wire_group_properties { - type = "REDUNDANT" - } - admin_enabled = true -} -`, context) -} - -func testAccComputeWireGroup_update(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { -provider = google-beta -} - -resource "google_compute_cross_site_network" "example-cross-site-network" { - name = "tf-test-cross-site-network%{random_suffix}" - description = "Example cross site network" - provider = google-beta -} - -resource "google_compute_wire_group" "example-test-wire-group" { - name = "tf-test-test-wire-group%{random_suffix}" - description = "Example Wire Group Updated%{random_suffix}" - cross_site_network = google_compute_cross_site_network.example-cross-site-network.name - provider = google-beta - depends_on = [ - google_compute_cross_site_network.example-cross-site-network - ] - wire_properties { - bandwidth_unmetered = 1000 - } - wire_group_properties { - type = "REDUNDANT" - } - admin_enabled = true -} -`, context) -} -{{- end }} \ No newline at end 
of file diff --git a/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go b/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go deleted file mode 100644 index 08d87a18f658..000000000000 --- a/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_analysis_rule_test.go +++ /dev/null @@ -1,138 +0,0 @@ -package contactcenterinsights_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccContactCenterInsightsAnalysisRule_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project_number": envvar.GetTestProjectNumberFromEnv(), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccInsightsAnalysisRule(context), - }, - { - ResourceName: "google_contact_center_insights_analysis_rule.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccContactCenterInsightsAnalysisRule_full(context), - }, - { - ResourceName: "google_contact_center_insights_analysis_rule.basic_analysis_rule", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"analysis_rule_id", "location"}, - }, - { - Config: testAccContactCenterInsightsAnalysisRule_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_contact_center_insights_analysis_rule.basic_analysis_rule", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: 
"google_contact_center_insights_analysis_rule.basic_analysis_rule", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"analysis_rule_id", "location"}, - }, - }, - }) -} - -func testAccInsightsAnalysisRule(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_contact_center_insights_analysis_rule" "default" { - display_name = "default-analysis-rule-display-name-%{random_suffix}" - location = "us-central1" - conversation_filter = "agent_id = \"1\"" - analysis_percentage = 0.5 - annotator_selector { - run_silence_annotator = true - } - active = true - } - `, context) -} - -func testAccContactCenterInsightsAnalysisRule_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_contact_center_insights_analysis_rule" "basic_analysis_rule" { - display_name = "analysis-rule-display-name-%{random_suffix}" - location = "us-central1" - conversation_filter = "agent_id = \"1\"" - annotator_selector { - run_interruption_annotator = false - issue_models = ["projects/%{project_number}/locations/us-central1/issueModels/some_issue_model_id"] - phrase_matchers = ["projects/%{project_number}/locations/us-central1/phraseMatchers/123"] - qa_config { - scorecard_list { - qa_scorecard_revisions = ["projects/%{project_number}/locations/us-central1/qaScorecards/*/revisions/some_scorecard_revision_id"] - } - } - run_entity_annotator = false - run_intent_annotator = false - run_issue_model_annotator = false - run_phrase_matcher_annotator = false - run_qa_annotator = false - run_sentiment_annotator = false - run_silence_annotator = true - run_summarization_annotator = false - summarization_config { - summarization_model = "BASELINE_MODEL" - } - } - analysis_percentage = 0.5 - active = true -} -`, context) -} - -func testAccContactCenterInsightsAnalysisRule_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_contact_center_insights_analysis_rule" 
"basic_analysis_rule" { - display_name = "analysis-rule-display-name-%{random_suffix}-updated" - location = "us-central1" - conversation_filter = "agent_id = \"1\"" - annotator_selector { - run_interruption_annotator = true - issue_models = ["projects/%{project_number}/locations/us-central1/issueModels/alt_issue_model_id"] - phrase_matchers = ["projects/%{project_number}/locations/us-central1/phraseMatchers/123"] - qa_config { - scorecard_list { - qa_scorecard_revisions = ["projects/%{project_number}/locations/us-central1/qaScorecards/*/revisions/alt_scorecard_revision_id"] - } - } - run_entity_annotator = true - run_intent_annotator = true - run_issue_model_annotator = false - run_phrase_matcher_annotator = true - run_qa_annotator = true - run_sentiment_annotator = true - run_silence_annotator = true - run_summarization_annotator = true - summarization_config { - summarization_model = "BASELINE_MODEL_V2_0" - } - } - analysis_percentage = 0.0 - active = false -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go b/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go deleted file mode 100644 index f2ddc54cd7b8..000000000000 --- a/mmv1/third_party/terraform/services/contactcenterinsights/resource_contact_center_insights_view_test.go +++ /dev/null @@ -1,72 +0,0 @@ -package contactcenterinsights_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccContactCenterInsightsView_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_name": envvar.GetTestProjectFromEnv(), - "region": "us-central1", - "random_suffix": acctest.RandString(t, 10), - } - - 
acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccContactCenterInsightsView_full(context), - }, - { - ResourceName: "google_contact_center_insights_view.full_view", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location"}, - }, - { - Config: testAccContactCenterInsightsView_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_contact_center_insights_view.full_view", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_contact_center_insights_view.full_view", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location"}, - }, - }, - }) -} - -func testAccContactCenterInsightsView_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_contact_center_insights_view" "full_view" { - project = "%{project_name}" - location = "%{region}" - display_name = "view-display-name-%{random_suffix}" - value = "medium=\"PHONE_CALL\"" -} -`, context) -} - -func testAccContactCenterInsightsView_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_contact_center_insights_view" "full_view" { - project = "%{project_name}" - location = "%{region}" - display_name = "view-display-name-%{random_suffix}-updated" - value = "medium=\"CHAT\"" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go index 551eebd8a4e0..8f4006428741 100644 --- a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go +++ 
b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions.go @@ -58,11 +58,6 @@ func DataSourceGoogleContainerEngineVersions() *schema.Resource { Computed: true, Elem: &schema.Schema{Type: schema.TypeString}, }, - "release_channel_upgrade_target_version": { - Type: schema.TypeMap, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, }, } } @@ -131,10 +126,8 @@ func dataSourceGoogleContainerEngineVersionsRead(d *schema.ResourceData, meta in releaseChannelDefaultVersion := map[string]string{} releaseChannelLatestVersion := map[string]string{} - releaseChannelUpgradeTargetVersion := map[string]string{} for _, channelResp := range resp.Channels { releaseChannelDefaultVersion[channelResp.Channel] = channelResp.DefaultVersion - releaseChannelUpgradeTargetVersion[channelResp.Channel] = channelResp.UpgradeTargetVersion for _, v := range channelResp.ValidVersions { if strings.HasPrefix(v, d.Get("version_prefix").(string)) { releaseChannelLatestVersion[channelResp.Channel] = v @@ -149,9 +142,6 @@ func dataSourceGoogleContainerEngineVersionsRead(d *schema.ResourceData, meta in if err := d.Set("release_channel_latest_version", releaseChannelLatestVersion); err != nil { return fmt.Errorf("Error setting release_channel_latest_version: %s", err) } - if err := d.Set("release_channel_upgrade_target_version", releaseChannelUpgradeTargetVersion); err != nil { - return fmt.Errorf("Error setting release_channel_upgrade_target_version: %s", err) - } d.SetId(time.Now().UTC().String()) return nil diff --git a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go index 152ead1957cf..a8297604d920 100644 --- a/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go +++ 
b/mmv1/third_party/terraform/services/container/data_source_google_container_engine_versions_test.go @@ -122,11 +122,6 @@ func testAccCheckGoogleContainerEngineVersionsMeta(n string) resource.TestCheckF return errors.New("failed to read latest STABLE version") } - _, ok = rs.Primary.Attributes["release_channel_upgrade_target_version.STABLE"] - if !ok { - return errors.New("failed to read latest STABLE version") - } - return nil } } diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 9525e93f6c99..fcfd7832752d 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -11,7 +11,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-provider-google/google/verify" {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/container/v1" {{- else }} @@ -146,8 +145,6 @@ func schemaNodeConfig() *schema.Schema { Description: `Type of the disk attached to each node. 
Such as pd-standard, pd-balanced or pd-ssd`, }, - "boot_disk": schemaBootDiskConfig(), - "guest_accelerator": { Type: schema.TypeList, Optional: true, @@ -473,6 +470,7 @@ func schemaNodeConfig() *schema.Schema { "storage_pools": { Type: schema.TypeList, + ForceNew: true, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, Description: `The list of Storage Pools where boot disks are provisioned.`, @@ -631,177 +629,41 @@ func schemaNodeConfig() *schema.Schema { Optional: true, Description: `Controls the maximum number of processes allowed to run in a pod.`, }, - "max_parallel_image_pulls": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - Description: `Set the maximum number of image pulls in parallel.`, - }, "container_log_max_size": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the maximum size of the container log file before it is rotated.`, - }, + Type: schema.TypeString, + Optional: true, + Description: `Defines the maximum size of the container log file before it is rotated.`, + }, "container_log_max_files": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the maximum number of container log files that can be present for a container.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the maximum number of container log files that can be present for a container.`, + }, "image_gc_low_threshold_percent": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the percent of disk usage before which image garbage collection is never run. Lowest disk usage to garbage collect to.`, - }, + Type: schema.TypeInt, + Optional: true, + Description: `Defines the percent of disk usage before which image garbage collection is never run. 
Lowest disk usage to garbage collect to.`, + }, "image_gc_high_threshold_percent": { - Type: schema.TypeInt, - Optional: true, - Description: `Defines the percent of disk usage after which image garbage collection is always run.`, - }, - "image_minimum_gc_age": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the minimum age for an unused image before it is garbage collected.`, - }, - "image_maximum_gc_age": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the maximum age an image can be unused before it is garbage collected.`, - }, - "allowed_unsafe_sysctls": { - Type: schema.TypeList, - Optional: true, - Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - "single_process_oom_kill": { - Type: schema.TypeBool, - Optional: true, - Description: `Defines whether to enable single process OOM killer.`, - }, - "eviction_max_pod_grace_period_seconds": { Type: schema.TypeInt, Optional: true, - Description: `Defines the maximum allowed grace period (in seconds) to use when terminating pods in response to a soft eviction threshold being met.`, + Description: `Defines the percent of disk usage after which image garbage collection is always run.`, }, - "eviction_soft": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Description: `Defines a map of signal names to quantities or percentage that defines soft eviction thresholds.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "memory_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines quantity of soft eviction threshold for memory.available.`, - }, - "nodefs_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of soft eviction threshold for nodefs.available.`, - }, - "nodefs_inodes_free": { - Type: schema.TypeString, - Optional: true, - Description: `Defines 
percentage of soft eviction threshold for nodefs.inodesFree.`, - }, - "imagefs_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of soft eviction threshold for imagefs.available.`, - }, - "imagefs_inodes_free": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of soft eviction threshold for imagefs.inodesFree.`, - }, - "pid_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of soft eviction threshold for pid.available.`, - }, - }, - }, - }, - "eviction_soft_grace_period": { - Type: schema.TypeList, + "image_minimum_gc_age": { + Type: schema.TypeString, + Optional: true, + Description: `Defines the minimum age for an unused image before it is garbage collected.`, + }, + "image_maximum_gc_age": { + Type: schema.TypeString, Optional: true, - MaxItems: 1, - Description: `Defines a map of signal names to durations that defines grace periods for soft eviction thresholds. Each soft eviction threshold must have a corresponding grace period.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "memory_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines grace period for the memory.available soft eviction threshold.`, - }, - "nodefs_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines grace period for the nodefs.available soft eviction threshold.`, - }, - "nodefs_inodes_free": { - Type: schema.TypeString, - Optional: true, - Description: `Defines grace period for the nodefs.inodesFree soft eviction threshold.`, - }, - "imagefs_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines grace period for the imagefs.available soft eviction threshold`, - }, - "imagefs_inodes_free": { - Type: schema.TypeString, - Optional: true, - Description: `Defines grace period for the imagefs.inodesFree soft eviction threshold.`, - }, - "pid_available": { - Type: 
schema.TypeString, - Optional: true, - Description: `Defines grace period for the pid.available soft eviction threshold.`, - }, - }, - }, + Description: `Defines the maximum age an image can be unused before it is garbage collected.`, }, - "eviction_minimum_reclaim": { + "allowed_unsafe_sysctls": { Type: schema.TypeList, Optional: true, - MaxItems: 1, - Description: `Defines a map of signal names to percentage that defines minimum reclaims. It describes the minimum amount of a given resource the kubelet will reclaim when performing a pod eviction.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "memory_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of minimum reclaim for memory.available.`, - }, - "nodefs_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of minimum reclaim for nodefs.available.`, - }, - "nodefs_inodes_free": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of minimum reclaim for nodefs.inodesFree.`, - }, - "imagefs_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of minimum reclaim for imagefs.available.`, - }, - "imagefs_inodes_free": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of minimum reclaim for imagefs.inodesFree.`, - }, - "pid_available": { - Type: schema.TypeString, - Optional: true, - Description: `Defines percentage of minimum reclaim for pid.available.`, - }, - }, - }, + Description: `Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods.`, + Elem: &schema.Schema{Type: schema.TypeString}, }, }, }, @@ -810,7 +672,6 @@ func schemaNodeConfig() *schema.Schema { Type: schema.TypeList, Optional: true, MaxItems: 1, - Computed: true, Description: `Parameters that can be configured on Linux nodes.`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -828,21 +689,6 
@@ func schemaNodeConfig() *schema.Schema { Description: `cgroupMode specifies the cgroup mode to be used on the node.`, DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("CGROUP_MODE_UNSPECIFIED"), }, - "transparent_hugepage_enabled": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: validation.StringInSlice([]string{"TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS", "TRANSPARENT_HUGEPAGE_ENABLED_MADVISE", "TRANSPARENT_HUGEPAGE_ENABLED_NEVER", "TRANSPARENT_HUGEPAGE_ENABLED_UNSPECIFIED"}, false), - Description: `The Linux kernel transparent hugepage setting.`, - DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("TRANSPARENT_HUGEPAGE_ENABLED_UNSPECIFIED"), - }, - "transparent_hugepage_defrag": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice([]string{"TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS", "TRANSPARENT_HUGEPAGE_DEFRAG_DEFER", "TRANSPARENT_HUGEPAGE_DEFRAG_DEFER_WITH_MADVISE", "TRANSPARENT_HUGEPAGE_DEFRAG_MADVISE", "TRANSPARENT_HUGEPAGE_DEFRAG_NEVER", "TRANSPARENT_HUGEPAGE_DEFRAG_UNSPECIFIED"}, false), - Description: `The Linux kernel transparent hugepage defrag setting.`, - DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("TRANSPARENT_HUGEPAGE_DEFRAG_UNSPECIFIED"), - }, "hugepages_config": { Type: schema.TypeList, Optional: true, @@ -877,6 +723,7 @@ func schemaNodeConfig() *schema.Schema { "osversion": { Type: schema.TypeString, Optional: true, + ForceNew: true, Default: "OS_VERSION_UNSPECIFIED", Description: `The OS Version of the windows nodepool.Values are OS_VERSION_UNSPECIFIED,OS_VERSION_LTSC2019 and OS_VERSION_LTSC2022`, ValidateFunc: validation.StringInSlice([]string{"OS_VERSION_UNSPECIFIED", "OS_VERSION_LTSC2019", "OS_VERSION_LTSC2022"}, false), @@ -901,8 +748,8 @@ func schemaNodeConfig() *schema.Schema { Schema: map[string]*schema.Schema{ "threads_per_core": { Type: schema.TypeInt, - Required: true, - ForceNew: true, + Required: true, + ForceNew: true, Description: `The 
number of threads per physical core. To disable simultaneous multithreading (SMT) set this to 1. If unset, the maximum number of threads supported per core by the underlying processor is assumed.`, }, "enable_nested_virtualization": { @@ -911,12 +758,6 @@ func schemaNodeConfig() *schema.Schema { ForceNew: true, Description: `Whether the node should have nested virtualization enabled.`, }, - "performance_monitoring_unit": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: verify.ValidateEnum([]string{"ARCHITECTURAL", "STANDARD", "ENHANCED"}), - Description: `Level of Performance Monitoring Unit (PMU) requested. If unset, no access to the PMU is assumed.`, - }, }, }, }, @@ -958,11 +799,6 @@ func schemaNodeConfig() *schema.Schema { }, }, }, - "min_node_cpus": { - Type: schema.TypeInt, - Optional: true, - Description: `Specifies the minimum number of vCPUs that each sole tenant node must have to use CPU overcommit. If not specified, the CPU overcommit feature is disabled.`, - }, }, }, }, @@ -997,14 +833,6 @@ func schemaNodeConfig() *schema.Schema { Required: true, Description: `Whether Confidential Nodes feature is enabled for all nodes in this pool.`, }, - "confidential_instance_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - DiffSuppressFunc: suppressDiffForConfidentialNodes, - Description: `Defines the type of technology used by the confidential node.`, - ValidateFunc: validation.StringInSlice([]string{"SEV", "SEV_SNP", "TDX"}, false), - }, }, }, }, @@ -1029,10 +857,10 @@ func schemaNodeConfig() *schema.Schema { Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. 
The field is ignored (both PUT & PATCH) when empty.`, }, "enable_confidential_storage": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Description: `If enabled boot disks are configured with confidential mode.`, + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: `If enabled boot disks are configured with confidential mode.`, }, "local_ssd_encryption_mode": { Type: schema.TypeString, @@ -1058,46 +886,6 @@ func schemaNodeConfig() *schema.Schema { } } - -func schemaBootDiskConfig() *schema.Schema { - return &schema.Schema{ - Type: schema.TypeList, - Optional: true, - Computed: true, - MaxItems: 1, - Description: `Boot disk configuration for node pools nodes.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "disk_type": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Type of the disk attached to each node. Such as pd-standard, pd-balanced or pd-ssd`, - }, - "size_gb": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - ValidateFunc: validation.IntAtLeast(10), - Description: `Size of the disk attached to each node, specified in GB. The smallest allowed disk size is 10GB.`, - }, - "provisioned_iops": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - Description: `Configured IOPs provisioning. Only valid with disk type hyperdisk-balanced.`, - }, - "provisioned_throughput": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - Description: `Configured throughput provisioning. 
Only valid with disk type hyperdisk-balanced.`, - }, - }, - }, - } -} - // Separate since this currently only supports a single value -- a subset of // the overall NodeKubeletConfig func schemaNodePoolAutoConfigNodeKubeletConfig() *schema.Schema { @@ -1233,10 +1021,6 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.DiskType = v.(string) } - if v, ok := nodeConfig["boot_disk"]; ok { - nc.BootDisk = expandBootDiskConfig(v) - } - if v, ok := nodeConfig["local_ssd_count"]; ok { nc.LocalSsdCount = int64(v.(int)) } @@ -1473,7 +1257,6 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.AdvancedMachineFeatures = &container.AdvancedMachineFeatures{ ThreadsPerCore: int64(advanced_machine_features["threads_per_core"].(int)), EnableNestedVirtualization: advanced_machine_features["enable_nested_virtualization"].(bool), - PerformanceMonitoringUnit: advanced_machine_features["performance_monitoring_unit"].(string), } } @@ -1510,36 +1293,6 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { return nc } -func expandBootDiskConfig(v interface{}) *container.BootDisk { - bd := &container.BootDisk{} - if v == nil { - return nil - } - ls := v.([]interface{}) - if len(ls) == 0 { - return nil - } - cfg := ls[0].(map[string]interface{}) - - if v, ok := cfg["disk_type"]; ok { - bd.DiskType = v.(string) - } - - if v, ok := cfg["size_gb"]; ok { - bd.SizeGb = int64(v.(int)) - } - - if v, ok := cfg["provisioned_iops"]; ok { - bd.ProvisionedIops = int64(v.(int)) - } - - if v, ok := cfg["provisioned_throughput"]; ok { - bd.ProvisionedThroughput = int64(v.(int)) - } - - return bd -} - func expandResourceManagerTags(v interface{}) *container.ResourceManagerTags { if v == nil { return nil @@ -1610,108 +1363,30 @@ func expandKubeletConfig(v interface{}) *container.NodeKubeletConfig { if podPidsLimit, ok := cfg["pod_pids_limit"]; ok { kConfig.PodPidsLimit = int64(podPidsLimit.(int)) } - if maxParallelImagePulls, ok := cfg["max_parallel_image_pulls"]; ok { - 
kConfig.MaxParallelImagePulls = int64(maxParallelImagePulls.(int)) - } if containerLogMaxSize, ok := cfg["container_log_max_size"]; ok { - kConfig.ContainerLogMaxSize = containerLogMaxSize.(string) - } + kConfig.ContainerLogMaxSize = containerLogMaxSize.(string) + } if containerLogMaxFiles, ok := cfg["container_log_max_files"]; ok { - kConfig.ContainerLogMaxFiles = int64(containerLogMaxFiles.(int)) + kConfig.ContainerLogMaxFiles = int64(containerLogMaxFiles.(int)) } if imageGcLowThresholdPercent, ok := cfg["image_gc_low_threshold_percent"]; ok { - kConfig.ImageGcLowThresholdPercent = int64(imageGcLowThresholdPercent.(int)) - } + kConfig.ImageGcLowThresholdPercent = int64(imageGcLowThresholdPercent.(int)) + } if imageGcHighThresholdPercent, ok := cfg["image_gc_high_threshold_percent"]; ok { - kConfig.ImageGcHighThresholdPercent = int64(imageGcHighThresholdPercent.(int)) - } + kConfig.ImageGcHighThresholdPercent = int64(imageGcHighThresholdPercent.(int)) + } if imageMinimumGcAge, ok := cfg["image_minimum_gc_age"]; ok { - kConfig.ImageMinimumGcAge = imageMinimumGcAge.(string) - } + kConfig.ImageMinimumGcAge = imageMinimumGcAge.(string) + } if imageMaximumGcAge, ok := cfg["image_maximum_gc_age"]; ok { - kConfig.ImageMaximumGcAge = imageMaximumGcAge.(string) - } + kConfig.ImageMaximumGcAge = imageMaximumGcAge.(string) + } if allowedUnsafeSysctls, ok := cfg["allowed_unsafe_sysctls"]; ok { sysctls := allowedUnsafeSysctls.([]interface{}) kConfig.AllowedUnsafeSysctls = make([]string, len(sysctls)) for i, s := range sysctls { kConfig.AllowedUnsafeSysctls[i] = s.(string) } - } - if singleProcessOomKill, ok := cfg["single_process_oom_kill"]; ok { - kConfig.SingleProcessOomKill = singleProcessOomKill.(bool) - } - if evictionMaxPodGracePeriodSeconds, ok := cfg["eviction_max_pod_grace_period_seconds"]; ok { - kConfig.EvictionMaxPodGracePeriodSeconds = int64(evictionMaxPodGracePeriodSeconds.(int)) - } - if v, ok := cfg["eviction_soft"]; ok && len(v.([]interface{})) > 0 { - es := 
v.([]interface{})[0].(map[string]interface{}) - evictionSoft := &container.EvictionSignals{} - if val, ok := es["memory_available"]; ok { - evictionSoft.MemoryAvailable = val.(string) - } - if val, ok := es["nodefs_available"]; ok { - evictionSoft.NodefsAvailable = val.(string) - } - if val, ok := es["imagefs_available"]; ok { - evictionSoft.ImagefsAvailable = val.(string) - } - if val, ok := es["imagefs_inodes_free"]; ok { - evictionSoft.ImagefsInodesFree = val.(string) - } - if val, ok := es["nodefs_inodes_free"]; ok { - evictionSoft.NodefsInodesFree = val.(string) - } - if val, ok := es["pid_available"]; ok { - evictionSoft.PidAvailable = val.(string) - } - kConfig.EvictionSoft = evictionSoft - } - if v, ok := cfg["eviction_soft_grace_period"]; ok && len(v.([]interface{})) > 0 { - es := v.([]interface{})[0].(map[string]interface{}) - periods := &container.EvictionGracePeriod{} - if val, ok := es["memory_available"]; ok { - periods.MemoryAvailable = val.(string) - } - if val, ok := es["nodefs_available"]; ok { - periods.NodefsAvailable = val.(string) - } - if val, ok := es["imagefs_available"]; ok { - periods.ImagefsAvailable = val.(string) - } - if val, ok := es["imagefs_inodes_free"]; ok { - periods.ImagefsInodesFree = val.(string) - } - if val, ok := es["nodefs_inodes_free"]; ok { - periods.NodefsInodesFree = val.(string) - } - if val, ok := es["pid_available"]; ok { - periods.PidAvailable = val.(string) - } - kConfig.EvictionSoftGracePeriod = periods - } - if v, ok := cfg["eviction_minimum_reclaim"]; ok && len(v.([]interface{})) > 0 { - es := v.([]interface{})[0].(map[string]interface{}) - reclaim := &container.EvictionMinimumReclaim{} - if val, ok := es["memory_available"]; ok { - reclaim.MemoryAvailable = val.(string) - } - if val, ok := es["nodefs_available"]; ok { - reclaim.NodefsAvailable = val.(string) - } - if val, ok := es["imagefs_available"]; ok { - reclaim.ImagefsAvailable = val.(string) - } - if val, ok := es["imagefs_inodes_free"]; ok { - 
reclaim.ImagefsInodesFree = val.(string) - } - if val, ok := es["nodefs_inodes_free"]; ok { - reclaim.NodefsInodesFree = val.(string) - } - if val, ok := es["pid_available"]; ok { - reclaim.PidAvailable = val.(string) - } - kConfig.EvictionMinimumReclaim = reclaim } return kConfig } @@ -1739,13 +1414,6 @@ func expandLinuxNodeConfig(v interface{}) *container.LinuxNodeConfig { linuxNodeConfig.CgroupMode = cgroupMode } - if v, ok := cfg["transparent_hugepage_enabled"]; ok { - linuxNodeConfig.TransparentHugepageEnabled = v.(string) - } - if v, ok := cfg["transparent_hugepage_defrag"]; ok { - linuxNodeConfig.TransparentHugepageDefrag = v.(string) - } - if v, ok := cfg["hugepages_config"]; ok { linuxNodeConfig.Hugepages = expandHugepagesConfig(v) } @@ -1912,24 +1580,24 @@ func expandSoleTenantConfig(v interface{}) *container.SoleTenantConfig { if len(ls) == 0 { return nil } - stConfig := &container.SoleTenantConfig{} cfg := ls[0].(map[string]interface{}) - if affinitiesRaw, ok := cfg["node_affinity"]; ok { - affinities := make([]*container.NodeAffinity, 0) - for _, v := range affinitiesRaw.(*schema.Set).List() { - na := v.(map[string]interface{}) - affinities = append(affinities, &container.NodeAffinity{ - Key: na["key"].(string), - Operator: na["operator"].(string), - Values: tpgresource.ConvertStringArr(na["values"].([]interface{})), - }) - } - stConfig.NodeAffinities = affinities + affinitiesRaw, ok := cfg["node_affinity"] + if !ok { + return nil + } + affinities := make([]*container.NodeAffinity, 0) + for _, v := range affinitiesRaw.(*schema.Set).List() { + na := v.(map[string]interface{}) + + affinities = append(affinities, &container.NodeAffinity{ + Key: na["key"].(string), + Operator: na["operator"].(string), + Values: tpgresource.ConvertStringArr(na["values"].([]interface{})), + }) } - if v, ok := cfg["min_node_cpus"]; ok { - stConfig.MinNodeCpus = int64(v.(int)) + return &container.SoleTenantConfig{ + NodeAffinities: affinities, } - return stConfig } {{ if ne 
$.TargetVersionName `ga` -}} @@ -1959,7 +1627,6 @@ func expandConfidentialNodes(configured interface{}) *container.ConfidentialNode config := l[0].(map[string]interface{}) return &container.ConfidentialNodes{ Enabled: config["enabled"].(bool), - ConfidentialInstanceType: config["confidential_instance_type"].(string), } } @@ -2006,17 +1673,16 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "containerd_config": flattenContainerdConfig(c.ContainerdConfig), "disk_size_gb": c.DiskSizeGb, "disk_type": c.DiskType, - "boot_disk": flattenBootDiskConfig(c.BootDisk), "guest_accelerator": flattenContainerGuestAccelerators(c.Accelerators), "local_ssd_count": c.LocalSsdCount, "logging_variant": flattenLoggingVariant(c.LoggingConfig), {{- if ne $.TargetVersionName "ga" }} - "ephemeral_storage_config": flattenEphemeralStorageConfig(c.EphemeralStorageConfig), + "ephemeral_storage_config": flattenEphemeralStorageConfig(c.EphemeralStorageConfig), {{- end }} "local_nvme_ssd_block_config": flattenLocalNvmeSsdBlockConfig(c.LocalNvmeSsdBlockConfig), "ephemeral_storage_local_ssd_config": flattenEphemeralStorageLocalSsdConfig(c.EphemeralStorageLocalSsdConfig), "gcfs_config": flattenGcfsConfig(c.GcfsConfig), - "gvnic": flattenGvnic(c.Gvnic), + "gvnic": flattenGvnic(c.Gvnic), "reservation_affinity": flattenGKEReservationAffinity(c.ReservationAffinity), "service_account": c.ServiceAccount, "metadata": c.Metadata, @@ -2034,7 +1700,7 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "effective_taints": flattenEffectiveTaints(c.Taints), "workload_metadata_config": flattenWorkloadMetadataConfig(c.WorkloadMetadataConfig), {{- if ne $.TargetVersionName "ga" }} - "sandbox_config": flattenSandboxConfig(c.SandboxConfig), + "sandbox_config": flattenSandboxConfig(c.SandboxConfig), "host_maintenance_policy": flattenHostMaintenancePolicy(c.HostMaintenancePolicy), {{- end }} "confidential_nodes": 
flattenConfidentialNodes(c.ConfidentialNodes), @@ -2060,23 +1726,6 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte return config } -func flattenBootDiskConfig(c *container.BootDisk) []map[string]interface{} { - config := []map[string]interface{}{} - - if c == nil { - return config - } - - config = append(config, map[string]interface{}{ - "disk_type": c.DiskType, - "size_gb": c.SizeGb, - "provisioned_iops": c.ProvisionedIops, - "provisioned_throughput": c.ProvisionedThroughput, - }) - - return config -} - func flattenResourceManagerTags(c *container.ResourceManagerTags) map[string]interface{} { if c == nil { return nil @@ -2097,7 +1746,6 @@ func flattenAdvancedMachineFeaturesConfig(c *container.AdvancedMachineFeatures) result = append(result, map[string]interface{}{ "threads_per_core": c.ThreadsPerCore, "enable_nested_virtualization": c.EnableNestedVirtualization, - "performance_monitoring_unit": c.PerformanceMonitoringUnit, }) } return result @@ -2373,12 +2021,6 @@ func flattenKubeletConfig(c *container.NodeKubeletConfig) []map[string]interface "image_minimum_gc_age": c.ImageMinimumGcAge, "image_maximum_gc_age": c.ImageMaximumGcAge, "allowed_unsafe_sysctls": c.AllowedUnsafeSysctls, - "single_process_oom_kill": c.SingleProcessOomKill, - "max_parallel_image_pulls": c.MaxParallelImagePulls, - "eviction_max_pod_grace_period_seconds": c.EvictionMaxPodGracePeriodSeconds, - "eviction_soft": flattenEvictionSignals(c.EvictionSoft), - "eviction_soft_grace_period": flattenEvictionGracePeriod(c.EvictionSoftGracePeriod), - "eviction_minimum_reclaim": flattenEvictionMinimumReclaim(c.EvictionMinimumReclaim), }) } return result @@ -2394,61 +2036,13 @@ func flattenNodePoolAutoConfigNodeKubeletConfig(c *container.NodeKubeletConfig) return result } - -func flattenEvictionSignals(c *container.EvictionSignals) []map[string]interface{} { - result := []map[string]interface{}{} - if c != nil { - result = append(result, map[string]interface{}{ - 
"memory_available": c.MemoryAvailable, - "nodefs_available": c.NodefsAvailable, - "nodefs_inodes_free": c.NodefsInodesFree, - "imagefs_available": c.ImagefsAvailable, - "imagefs_inodes_free": c.ImagefsInodesFree, - "pid_available": c.PidAvailable, - }) - } - return result -} - -func flattenEvictionGracePeriod(c *container.EvictionGracePeriod) []map[string]interface{} { - result := []map[string]interface{}{} - if c != nil { - result = append(result, map[string]interface{}{ - "memory_available": c.MemoryAvailable, - "nodefs_available": c.NodefsAvailable, - "nodefs_inodes_free": c.NodefsInodesFree, - "imagefs_available": c.ImagefsAvailable, - "imagefs_inodes_free": c.ImagefsInodesFree, - "pid_available": c.PidAvailable, - }) - } - return result -} - -func flattenEvictionMinimumReclaim(c *container.EvictionMinimumReclaim) []map[string]interface{} { - result := []map[string]interface{}{} - if c != nil { - result = append(result, map[string]interface{}{ - "memory_available": c.MemoryAvailable, - "nodefs_available": c.NodefsAvailable, - "nodefs_inodes_free": c.NodefsInodesFree, - "imagefs_available": c.ImagefsAvailable, - "imagefs_inodes_free": c.ImagefsInodesFree, - "pid_available": c.PidAvailable, - }) - } - return result -} - func flattenLinuxNodeConfig(c *container.LinuxNodeConfig) []map[string]interface{} { result := []map[string]interface{}{} if c != nil { result = append(result, map[string]interface{}{ - "sysctls": c.Sysctls, - "cgroup_mode": c.CgroupMode, - "hugepages_config": flattenHugepagesConfig(c.Hugepages), - "transparent_hugepage_enabled": c.TransparentHugepageEnabled, - "transparent_hugepage_defrag": c.TransparentHugepageDefrag, + "sysctls": c.Sysctls, + "cgroup_mode": c.CgroupMode, + "hugepages_config": flattenHugepagesConfig(c.Hugepages), }) } return result @@ -2548,7 +2142,6 @@ func flattenConfidentialNodes(c *container.ConfidentialNodes) []map[string]inter if c != nil { result = append(result, map[string]interface{}{ "enabled": c.Enabled, - 
"confidential_instance_type": c.ConfidentialInstanceType, }) } return result @@ -2569,7 +2162,6 @@ func flattenSoleTenantConfig(c *container.SoleTenantConfig) []map[string]interfa } return append(result, map[string]interface{}{ "node_affinity": affinities, - "min_node_cpus": c.MinNodeCpus, }) } @@ -2723,9 +2315,7 @@ func nodePoolNodeConfigUpdate(d *schema.ResourceData, config *transport_tpg.Conf if d.HasChange("node_config.0.disk_size_gb") || d.HasChange("node_config.0.disk_type") || d.HasChange("node_config.0.machine_type") || - d.HasChange("node_config.0.storage_pools") || - d.HasChange("node_config.0.boot_disk") { - + d.HasChange("node_config.0.storage_pools") { req := &container.UpdateNodePoolRequest{ Name: name, DiskSizeGb: int64(d.Get("node_config.0.disk_size_gb").(int)), @@ -2743,34 +2333,6 @@ func nodePoolNodeConfigUpdate(d *schema.ResourceData, config *transport_tpg.Conf req.StoragePools = storagePools } - if v, ok := d.GetOk("node_config.0.boot_disk"); ok { - bd := expandBootDiskConfig(v) - req.BootDisk = bd - - // The following checks are to ensure that the migrating fields are handled properly. - // Migrating fields are disk_type -> boot_disk.disk_type and disk_size_gb -> boot_disk.size_gb - // If the legacy (top level) disk_type field is not changing, nil it out to allow the API to fill it in. - legacyDiskTypeOld, legacyDiskTypeNew := d.GetChange("node_config.0.disk_type") - if legacyDiskTypeOld == legacyDiskTypeNew { - req.DiskType = "" - } - // If the new boot disk configuration disk_filed is not changing, nil it out to allow the API to fill it in. - bootDiskTypeOld, bootDiskTypeNew := d.GetChange("node_config.0.boot_disk.0.disk_type") - if bootDiskTypeOld == bootDiskTypeNew { - req.BootDisk.DiskType = "" - } - // If the legacy (top level) disk_size_gb field is not changing, nil it out to allow the API to fill it in. 
- legacyDiskSizeGbOld, legacyDiskSizeGbNew := d.GetChange("node_config.0.disk_size_gb") - if legacyDiskSizeGbOld == legacyDiskSizeGbNew { - req.DiskSizeGb = 0 - } - // if the new boot disk configuration size_gb field is not changing, nil it out to allow the API to fill it in. - bootDiskSizeGbOld, bootDiskSizeGbNew := d.GetChange("node_config.0.boot_disk.0.size_gb") - if bootDiskSizeGbOld == bootDiskSizeGbNew { - req.BootDisk.SizeGb = 0 - } - } - updateF := func() error { clusterNodePoolsUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Update(nodePoolInfo.fullyQualifiedName(name), req) if config.UserProjectOverride { @@ -2785,14 +2347,14 @@ func nodePoolNodeConfigUpdate(d *schema.ResourceData, config *transport_tpg.Conf return ContainerOperationWait(config, op, nodePoolInfo.project, nodePoolInfo.location, - "updating GKE node pool disk_size_gb/disk_type/machine_type/storage_pools/boot_disk", userAgent, + "updating GKE node pool disk_size_gb/disk_type/machine_type/storage_pools", userAgent, timeout) } if err := retryWhileIncompatibleOperation(timeout, npLockKey, updateF); err != nil { return err } - log.Printf("[INFO] Updated disk disk_size_gb/disk_type/machine_type/storage_pools/boot_disk for Node Pool %s", d.Id()) + log.Printf("[INFO] Updated disk disk_size_gb/disk_type/machine_type/storage_pools for Node Pool %s", d.Id()) } if d.HasChange(prefix + "node_config.0.taint") { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 16456ccafaf0..ae5b636411eb 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -97,7 +97,6 @@ var ( "addons_config.0.stateful_ha_config", "addons_config.0.ray_operator_config", "addons_config.0.parallelstore_csi_driver_config", - 
"addons_config.0.lustre_csi_driver_config", {{- if ne $.TargetVersionName "ga" }} "addons_config.0.istio_config", "addons_config.0.kalm_config", @@ -144,14 +143,6 @@ var ( } return false }) - - suppressDiffForConfidentialNodes = schema.SchemaDiffSuppressFunc(func(k, oldValue, newValue string, d *schema.ResourceData) bool { - k = strings.Replace(k, "confidential_instance_type", "enabled", 1) - if v, _ := d.Get(k).(bool); v { - return oldValue == "SEV" && newValue == "" - } - return false - }) ) // Defines default nodel pool settings for the entire cluster. These settings are @@ -492,29 +483,6 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, - "lustre_csi_driver_config": { - Type: schema.TypeList, - Optional: true, - Computed: true, - AtLeastOneOf: addonsConfigKeys, - MaxItems: 1, - Description: `Configuration for the Lustre CSI driver. Defaults to disabled; set enabled = true to enable.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enabled": { - Type: schema.TypeBool, - Required: true, - Description: `Whether the Lustre CSI driver is enabled for this cluster.`, - }, - "enable_legacy_lustre_port": { - Type: schema.TypeBool, - Optional: true, - Description: `If set to true, the Lustre CSI driver will initialize LNet (the virtual network layer for Lustre kernel module) using port 6988. - This flag is required to workaround a port conflict with the gke-metadata-server on GKE nodes.`, - }, - }, - }, - }, {{- if ne $.TargetVersionName "ga" }} "istio_config": { Type: schema.TypeList, @@ -920,11 +888,6 @@ func ResourceContainerCluster() *schema.Resource { ValidateFunc: validation.StringInSlice([]string{"BALANCED", "OPTIMIZE_UTILIZATION"}, false), Description: `Configuration options for the Autoscaling profile feature, which lets you choose whether the cluster autoscaler should optimize for resource utilization or resource availability when deciding to remove nodes from a cluster. Can be BALANCED or OPTIMIZE_UTILIZATION. 
Defaults to BALANCED.`, }, - "default_compute_class_enabled": { - Type: schema.TypeBool, - Optional: true, - Description: `Specifies whether default compute class behaviour is enabled. If enabled, cluster autoscaler will use Compute Class with name default for all the workloads, if not overriden.`, - }, }, }, }, @@ -1447,14 +1410,6 @@ func ResourceContainerCluster() *schema.Resource { ForceNew: true, Description: `Whether Confidential Nodes feature is enabled for all nodes in this cluster.`, }, - "confidential_instance_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - DiffSuppressFunc: suppressDiffForConfidentialNodes, - Description: `Defines the type of technology used by the confidential node.`, - ValidateFunc: validation.StringInSlice([]string{"SEV", "SEV_SNP", "TDX"}, false), - }, }, }, }, @@ -1642,7 +1597,7 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, -{{- end }} + "pod_autoscaling": { Type: schema.TypeList, Optional: true, @@ -1665,6 +1620,7 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, +{{- end }} "secret_manager_config": { Type: schema.TypeList, Optional: true, @@ -1678,30 +1634,6 @@ func ResourceContainerCluster() *schema.Resource { Required: true, Description: `Enable the Secret manager csi component.`, }, - {{- if ne $.TargetVersionName "ga" }} - "rotation_config" : { - Type: schema.TypeList, - Optional: true, - Computed: true, - MaxItems: 1, - Description: `Configuration for Secret Manager auto rotation.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enabled": { - Type: schema.TypeBool, - Required: true, - Description: `Enable the Secret manager auto rotation.`, - }, - "rotation_interval": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The interval between two consecutive rotations. 
Default rotation interval is 2 minutes`, - }, - }, - }, - }, - {{- end }} }, }, }, @@ -1800,6 +1732,7 @@ func ResourceContainerCluster() *schema.Resource { "stack_type": { Type: schema.TypeString, Optional: true, + ForceNew: true, Default: "IPV4", ValidateFunc: validation.StringInSlice([]string{"IPV4", "IPV4_IPV6"}, false), Description: `The IP Stack type of the cluster. Choose between IPV4 and IPV4_IPV6. Default type is IPV4 Only if not set`, @@ -1837,29 +1770,6 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, - "additional_ip_ranges_config": { - Type: schema.TypeList, - Optional: true, - Description: `AdditionalIPRangesConfig is the configuration for individual additional subnetworks attached to the cluster`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "subnetwork": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `Name of the subnetwork. This can be the full path of the subnetwork or just the name.`, - }, - "pod_ipv4_range_names": { - Type: schema.TypeList, - Optional: true, - Description: `List of secondary ranges names within this subnetwork that can be used for pod IPs.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - - }, - - }, }, }, }, @@ -2183,24 +2093,6 @@ func ResourceContainerCluster() *schema.Resource { }, }, - "gke_auto_upgrade_config": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `Configuration options for the auto-upgrade patch type feature, which provide more control over the speed of automatic upgrades of your GKE clusters.`, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "patch_mode": { - Type: schema.TypeString, - Required: true, - Description: `The selected auto-upgrade patch type. 
Accepted values are: -* ACCELERATED: Upgrades to the latest available patch version in a given minor and release channel.`, - }, - }, - }, - }, - "tpu_ipv4_cidr_block": { Computed: true, Type: schema.TypeString, @@ -2281,6 +2173,7 @@ func ResourceContainerCluster() *schema.Resource { "enable_multi_networking": { Type: schema.TypeBool, Optional: true, + ForceNew: true, Description: `Whether multi-networking is enabled for this cluster.`, Default: false, }, @@ -2442,6 +2335,7 @@ func ResourceContainerCluster() *schema.Resource { "user_managed_keys_config": { Type: schema.TypeList, Optional: true, + ForceNew: true, MaxItems: 1, Description: `The custom keys configuration of the cluster.`, Elem: &schema.Resource{ @@ -2449,25 +2343,21 @@ func ResourceContainerCluster() *schema.Resource { "cluster_ca": { Type: schema.TypeString, Optional: true, - ForceNew: true, Description: `The Certificate Authority Service caPool to use for the cluster CA in this cluster.`, }, "etcd_api_ca": { Type: schema.TypeString, Optional: true, - ForceNew: true, Description: `The Certificate Authority Service caPool to use for the etcd API CA in this cluster.`, }, "etcd_peer_ca": { Type: schema.TypeString, Optional: true, - ForceNew: true, Description: `The Certificate Authority Service caPool to use for the etcd peer CA in this cluster.`, }, "aggregation_ca": { Type: schema.TypeString, Optional: true, - ForceNew: true, Description: `The Certificate Authority Service caPool to use for the aggreation CA in this cluster.`, }, "service_account_signing_keys": { @@ -2523,14 +2413,12 @@ func ResourceContainerCluster() *schema.Resource { MaxItems: 1, Computed: true, Description: `Defines the config needed to enable/disable GKE Enterprise`, - Deprecated: `GKE Enterprise features are now available without an Enterprise tier. 
This field is deprecated and will be removed in a future major release`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "cluster_tier": { Type: schema.TypeString, Computed: true, Description: `Indicates the effective cluster tier. Available options include STANDARD and ENTERPRISE.`, - Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, }, "desired_tier": { Type: schema.TypeString, @@ -2538,74 +2426,11 @@ func ResourceContainerCluster() *schema.Resource { Computed: true, ValidateFunc: validation.StringInSlice([]string{"STANDARD", "ENTERPRISE"}, false), Description: `Indicates the desired cluster tier. Available options include STANDARD and ENTERPRISE.`, - Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("CLUSTER_TIER_UNSPECIFIED"), }, }, }, }, - "in_transit_encryption_config": { - Type: schema.TypeString, - Optional: true, - Description: `Defines the config of in-transit encryption`, - ValidateFunc: validation.StringInSlice([]string{"IN_TRANSIT_ENCRYPTION_CONFIG_UNSPECIFIED", "IN_TRANSIT_ENCRYPTION_DISABLED", "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"}, false), - }, - "network_performance_config": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Description: `Network bandwidth tier configuration.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "total_egress_bandwidth_tier": { - Type: schema.TypeString, - Required: true, - Description: `Specifies the total network bandwidth tier for NodePools in the cluster.`, - }, - }, - }, - }, - "anonymous_authentication_config": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Computed: true, - Description: `AnonymousAuthenticationConfig allows users to restrict or enable anonymous access to the 
cluster.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "mode": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{"ENABLED", "LIMITED"}, false), - Description: `Setting this to LIMITED will restrict authentication of anonymous users to health check endpoints only. - Accepted values are: -* ENABLED: Authentication of anonymous users is enabled for all endpoints. -* LIMITED: Anonymous access is only allowed for health check endpoints.`, - }, - }, - }, - }, - "rbac_binding_config": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Computed: true, - Description: `RBACBindingConfig allows user to restrict ClusterRoleBindings an RoleBindings that can be created.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enable_insecure_binding_system_unauthenticated": { - Type: schema.TypeBool, - Optional: true, - Description: `Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:anonymous or system:unauthenticated.`, - }, - "enable_insecure_binding_system_authenticated": { - Type: schema.TypeBool, - Optional: true, - Description: `Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:authenticated.`, - }, - }, - }, - }, }, } } @@ -2711,7 +2536,7 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er } } - ipAllocationBlock, aircs, err := expandIPAllocationPolicy(d.Get("ip_allocation_policy"), d, d.Get("networking_mode").(string), d.Get("enable_autopilot").(bool), config) + ipAllocationBlock, err := expandIPAllocationPolicy(d.Get("ip_allocation_policy"), d.Get("networking_mode").(string), d.Get("enable_autopilot").(bool)) if err != nil { return err } @@ -2743,8 +2568,8 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er IpAllocationPolicy: ipAllocationBlock, {{- if ne $.TargetVersionName "ga" }} PodSecurityPolicyConfig: 
expandPodSecurityPolicyConfig(d.Get("pod_security_policy_config")), -{{- end }} PodAutoscaling: expandPodAutoscaling(d.Get("pod_autoscaling")), +{{- end }} SecretManagerConfig: expandSecretManagerConfig(d.Get("secret_manager_config")), Autoscaling: expandClusterAutoscaling(d.Get("cluster_autoscaling"), d), BinaryAuthorization: expandBinaryAuthorization(d.Get("binary_authorization")), @@ -2754,7 +2579,6 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er ForceSendFields: []string{"Enabled"}, }, ReleaseChannel: expandReleaseChannel(d.Get("release_channel")), - GkeAutoUpgradeConfig: expandGkeAutoUpgradeConfig(d.Get("gke_auto_upgrade_config")), {{- if ne $.TargetVersionName "ga" }} ClusterTelemetry: expandClusterTelemetry(d.Get("cluster_telemetry")), {{- end }} @@ -2765,7 +2589,6 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er DatapathProvider: d.Get("datapath_provider").(string), EnableCiliumClusterwideNetworkPolicy: d.Get("enable_cilium_clusterwide_network_policy").(bool), PrivateIpv6GoogleAccess: d.Get("private_ipv6_google_access").(string), - InTransitEncryptionConfig: d.Get("in_transit_encryption_config").(string), EnableL4ilbSubsetting: d.Get("enable_l4_ilb_subsetting").(bool), DisableL4LbFirewallReconciliation: d.Get("disable_l4_lb_firewall_reconciliation").(bool), DnsConfig: expandDnsConfig(d.Get("dns_config")), @@ -2773,7 +2596,6 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er EnableMultiNetworking: d.Get("enable_multi_networking").(bool), DefaultEnablePrivateNodes: expandDefaultEnablePrivateNodes(d), EnableFqdnNetworkPolicy: d.Get("enable_fqdn_network_policy").(bool), - NetworkPerformanceConfig: expandNetworkPerformanceConfig(d.Get("network_performance_config")), }, MasterAuth: expandMasterAuth(d.Get("master_auth")), NotificationConfig: expandNotificationConfig(d.Get("notification_config")), @@ -2935,20 +2757,8 @@ func resourceContainerClusterCreate(d 
*schema.ResourceData, meta interface{}) er cluster.EnterpriseConfig = expandEnterpriseConfig(v) } - if v, ok := d.GetOk("anonymous_authentication_config"); ok { - cluster.AnonymousAuthenticationConfig = expandAnonymousAuthenticationConfig(v) - } - - if v, ok := d.GetOk("rbac_binding_config"); ok { - cluster.RbacBindingConfig = expandRBACBindingConfig(v) - } - needUpdateAfterCreate := false - if len(aircs) > 0 { - needUpdateAfterCreate = true - } - // For now PSC based cluster don't support `enable_private_endpoint` on `create`, but only on `update` API call. // If cluster is PSC based and enable_private_endpoint is set to true we will ignore it on `create` call and update cluster right after creation. enablePrivateEndpointPSCCluster := isEnablePrivateEndpointPSCCluster(cluster) @@ -3076,13 +2886,6 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er } update.ForceSendFields = append(update.ForceSendFields, "DesiredAddonsConfig.GcePersistentDiskCsiDriverConfig.Enabled"); } - - if len(aircs) > 0 { - update.DesiredAdditionalIpRangesConfig = &container.DesiredAdditionalIPRangesConfig{ - AdditionalIpRangesConfigs: aircs, - } - } - req := &container.UpdateClusterRequest{Update: update} err = transport_tpg.Retry(transport_tpg.RetryOptions{ @@ -3334,9 +3137,6 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("release_channel", flattenReleaseChannel(cluster.ReleaseChannel)); err != nil { return err } - if err := d.Set("gke_auto_upgrade_config", flattenGkeAutoUpgradeConfig(cluster.GkeAutoUpgradeConfig)); err != nil { - return err - } if err := d.Set("notification_config", flattenNotificationConfig(cluster.NotificationConfig)); err != nil { return err } @@ -3379,9 +3179,6 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("private_ipv6_google_access", cluster.NetworkConfig.PrivateIpv6GoogleAccess); err != nil { return fmt.Errorf("Error setting 
private_ipv6_google_access: %s", err) } - if err := d.Set("in_transit_encryption_config", cluster.NetworkConfig.InTransitEncryptionConfig); err != nil { - return fmt.Errorf("Error setting in_transit_encryption_config: %s", err) - } if err := d.Set("authenticator_groups_config", flattenAuthenticatorGroupsConfig(cluster.AuthenticatorGroupsConfig)); err != nil { return err } @@ -3455,10 +3252,11 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("cluster_telemetry", flattenClusterTelemetry(cluster.ClusterTelemetry)); err != nil { return err } -{{- end }} + if err := d.Set("pod_autoscaling", flattenPodAutoscaling(cluster.PodAutoscaling)); err != nil { return err } +{{- end }} if err := d.Set("secret_manager_config", flattenSecretManagerConfig(cluster.SecretManagerConfig)); err != nil { return err @@ -3486,9 +3284,6 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("gateway_api_config", flattenGatewayApiConfig(cluster.NetworkConfig.GatewayApiConfig)); err != nil { return err } - if err := d.Set("network_performance_config", flattenNetworkPerformanceConfig(cluster.NetworkConfig.NetworkPerformanceConfig)); err != nil { - return err - } if err := d.Set("fleet", flattenFleet(cluster.Fleet)); err != nil { return err } @@ -3534,14 +3329,6 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro return err } - if err := d.Set("anonymous_authentication_config", flattenAnonymousAuthenticationConfig(cluster.AnonymousAuthenticationConfig)); err != nil { - return err - } - - if err := d.Set("rbac_binding_config", flattenRBACBindingConfig(cluster.RbacBindingConfig)); err != nil { - return err - } - return nil } @@ -3626,24 +3413,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s's default enable private nodes has been updated to %v", d.Id(), enabled) } - if 
d.HasChange("ip_allocation_policy.0.stack_type") { - if stackType, ok := d.GetOk("ip_allocation_policy.0.stack_type"); ok { - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredStackType: stackType.(string), - }, - } - - updateF := updateFunc(req, "updating GKE cluster stack type") - // Call update serially. - if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s stack type has been updated", d.Id()) - } - } - if d.HasChange("addons_config") { if ac, ok := d.GetOk("addons_config"); ok { req := &container.UpdateClusterRequest{ @@ -3780,38 +3549,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s Release Channel has been updated to %#v", d.Id(), req.Update.DesiredReleaseChannel) } - if d.HasChange("gke_auto_upgrade_config") { - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - GkeAutoUpgradeConfig: expandGkeAutoUpgradeConfig(d.Get("gke_auto_upgrade_config")), - }, - } - updateF := func() error { - log.Println("[DEBUG] updating gke_auto_upgrade_config") - name := containerClusterFullName(project, location, clusterName) - clusterUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.Update(name, req) - if config.UserProjectOverride { - clusterUpdateCall.Header().Add("X-Goog-User-Project", project) - } - op, err := clusterUpdateCall.Do() - if err != nil { - return err - } - - // Wait until it's updated - err = ContainerOperationWait(config, op, project, location, "updating GKE Auto Upgrade Config", userAgent, d.Timeout(schema.TimeoutUpdate)) - log.Println("[DEBUG] done updating gke_auto_upgrade_config") - return err - } - - // Call update serially. 
- if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s GKE Auto Upgrade Config has been updated to %#v", d.Id(), req.Update.GkeAutoUpgradeConfig) - } - if d.HasChange("enable_intranode_visibility") { enabled := d.Get("enable_intranode_visibility").(bool) req := &container.UpdateClusterRequest{ @@ -3951,57 +3688,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s Disable L4 LB Firewall Reconciliation has been updated to %v", d.Id(), enabled) } - if d.HasChange("in_transit_encryption_config") { - inTransitConfig := d.Get("in_transit_encryption_config").(string) - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredInTransitEncryptionConfig: inTransitConfig, - ForceSendFields: []string{"DesiredInTransitEncryptionConfig"}, - }, - } - updateF := func() error { - log.Println("[DEBUG] updating in_transit_encryption_config") - name := containerClusterFullName(project, location, clusterName) - clusterUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.Update(name, req) - if config.UserProjectOverride { - clusterUpdateCall.Header().Add("X-Goog-User-Project", project) - } - op, err := clusterUpdateCall.Do() - if err != nil { - return err - } - - // Wait until it's updated - err = ContainerOperationWait(config, op, project, location, "updating In-Transit Encryption Config", userAgent, d.Timeout(schema.TimeoutUpdate)) - log.Println("[DEBUG] done updating in_transit_encryption_config") - return err - } - - // Call update serially. 
- if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s In-Transit Encryption Config has been updated to %v", d.Id(), inTransitConfig) - } - if d.HasChange("enable_multi_networking") { - enabled := d.Get("enable_multi_networking").(bool) - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredEnableMultiNetworking: enabled, - ForceSendFields: []string{"DesiredEnableMultiNetworking"}, - - }, - } - updateF := updateFunc(req, "updating multi networking") - // Call update serially. - if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s Multi Networking has been updated to %v", d.Id(), enabled) - } - if d.HasChange("enable_fqdn_network_policy") { enabled := d.Get("enable_fqdn_network_policy").(bool) req := &container.UpdateClusterRequest{ @@ -4290,30 +3976,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s's AdditionalPodRangesConfig has been updated", d.Id()) } - if d.HasChange("ip_allocation_policy.0.additional_ip_ranges_config") { - c := d.Get("ip_allocation_policy.0.additional_ip_ranges_config") - aircs, err := expandAdditionalIpRangesConfigs(c, d, config) - if err != nil { - return err - } - - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredAdditionalIpRangesConfig: &container.DesiredAdditionalIPRangesConfig{ - AdditionalIpRangesConfigs: aircs, - }, - }, - } - - updateF := updateFunc(req, "updating AdditionalIpRangesConfig") - // Call update serially. 
- if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s's AdditionalIpRangesConfig has been updated", d.Id()) - } - if n, ok := d.GetOk("node_pool.#"); ok { for i := 0; i < n.(int); i++ { nodePoolInfo, err := extractNodePoolInformationFromCluster(d, config, clusterName) @@ -4429,9 +4091,7 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er return err } - if err = nodePoolNodeConfigUpdate(d, config, nodePoolInfo, "", defaultPool, d.Timeout(schema.TimeoutUpdate)); err != nil { - return err - } + nodePoolNodeConfigUpdate(d, config, nodePoolInfo, "", defaultPool, d.Timeout(schema.TimeoutUpdate)) } if d.HasChange("notification_config") { @@ -4592,7 +4252,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er } log.Printf("[INFO] GKE cluster %s pod security policy config has been updated", d.Id()) } -{{- end }} if d.HasChange("pod_autoscaling") { c := d.Get("pod_autoscaling") @@ -4621,6 +4280,8 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s horizontal pod autoscaling profile has been updated", d.Id()) } +{{- end }} + if d.HasChange("secret_manager_config") { c := d.Get("secret_manager_config") req := &container.UpdateClusterRequest{ @@ -4803,24 +4464,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s resource usage export config has been updated", d.Id()) } - if d.HasChange("network_performance_config") { - if npc, ok := d.GetOk("network_performance_config"); ok { - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredNetworkPerformanceConfig: expandNetworkPerformanceConfig(npc), - }, - } - - updateF := updateFunc(req, "updating GKE Network Performance Config") - // Call update serially. 
- if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s Network Performance Config has been updated", d.Id()) - } - } - if d.HasChange("gateway_api_config") { if gac, ok := d.GetOk("gateway_api_config"); ok { req := &container.UpdateClusterRequest{ @@ -5088,37 +4731,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s Enterprise Config has been updated to %#v", d.Id(), req.Update.DesiredSecurityPostureConfig) } - if d.HasChange("anonymous_authentication_config") { - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredAnonymousAuthenticationConfig: expandAnonymousAuthenticationConfig( - d.Get("anonymous_authentication_config"), - ), - }, - } - updateF := updateFunc(req, "updating anonymous authentication config") - // Call update serially. - if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - } - - if d.HasChange("rbac_binding_config") { - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredRbacBindingConfig: expandRBACBindingConfig(d.Get("rbac_binding_config")), - ForceSendFields: []string{"DesiredRbacBindingConfig"}, - }} - - updateF := updateFunc(req, "updating GKE cluster RBAC binding config") - // Call update serially. 
- if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s's RBAC binding config has been updated", d.Id()) - } - d.Partial(false) {{ if ne $.TargetVersionName `ga` -}} @@ -5190,22 +4802,6 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er log.Printf("[INFO] GKE cluster %s's WorkloadALTSConfig has been updated", d.Id()) } {{- end }} - - if d.HasChange("user_managed_keys_config") { - req := &container.UpdateClusterRequest{ - Update: &container.ClusterUpdate{ - DesiredUserManagedKeysConfig: expandUserManagedKeysConfig(d.Get("user_managed_keys_config")), - }, - } - - updateF := updateFunc(req, "updating user managed keys config") - if err := transport_tpg.LockedCall(lockKey, updateF); err != nil { - return err - } - - log.Printf("[INFO] GKE cluster %s user managed keys config has been updated to %#v", d.Id(), req.Update.DesiredUserManagedKeysConfig) - } - return resourceContainerClusterRead(d, meta) } @@ -5446,20 +5042,6 @@ func expandClusterAddonsConfig(configured interface{}) *container.AddonsConfig { } } - if v, ok := config["lustre_csi_driver_config"]; ok && len(v.([]interface{})) > 0 { - lustreConfig := v.([]interface{})[0].(map[string]interface{}) - ac.LustreCsiDriverConfig = &container.LustreCsiDriverConfig{ - Enabled: lustreConfig["enabled"].(bool), - ForceSendFields: []string{"Enabled"}, - } - - // Check for enable_legacy_lustre_port - if val, ok := lustreConfig["enable_legacy_lustre_port"]; ok { - ac.LustreCsiDriverConfig.EnableLegacyLustrePort = val.(bool) - ac.LustreCsiDriverConfig.ForceSendFields = append(ac.LustreCsiDriverConfig.ForceSendFields, "EnableLegacyLustrePort") - } - } - {{ if ne $.TargetVersionName `ga` -}} if v, ok := config["istio_config"]; ok && len(v.([]interface{})) > 0 { addon := v.([]interface{})[0].(map[string]interface{}) @@ -5494,66 +5076,23 @@ func expandPodCidrOverprovisionConfig(configured interface{}) *container.PodCIDR } } -func 
expandPodIpv4RangeNames(configured interface{}) []string { - l := configured.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil - } - var ranges []string - for _, rawRange := range l { - ranges = append(ranges, rawRange.(string)) - } - return ranges -} - -func expandAdditionalIpRangesConfigs(configured interface{}, d *schema.ResourceData, c *transport_tpg.Config) ([]*container.AdditionalIPRangesConfig, error) { - l := configured.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil, nil - } - var additionalIpRangesConfig []*container.AdditionalIPRangesConfig - for _, rawConfig := range l { - config := rawConfig.(map[string]interface{}) - subnetwork, err := tpgresource.ParseSubnetworkFieldValue(config["subnetwork"].(string), d, c) - if err != nil { - return nil, err - } - additionalIpRangesConfig = append(additionalIpRangesConfig, &container.AdditionalIPRangesConfig { - Subnetwork: subnetwork.RelativeLink(), - PodIpv4RangeNames: expandPodIpv4RangeNames(config["pod_ipv4_range_names"]), - }) - } - - return additionalIpRangesConfig, nil -} - -func expandIPAllocationPolicy(configured interface{}, d *schema.ResourceData, networkingMode string, autopilot bool, c *transport_tpg.Config) (*container.IPAllocationPolicy, []*container.AdditionalIPRangesConfig, error) { +func expandIPAllocationPolicy(configured interface{}, networkingMode string, autopilot bool) (*container.IPAllocationPolicy, error) { l := configured.([]interface{}) if len(l) == 0 || l[0] == nil { if networkingMode == "VPC_NATIVE" { - return nil, nil, nil + return nil, nil } return &container.IPAllocationPolicy{ UseIpAliases: false, UseRoutes: true, StackType: "IPV4", ForceSendFields: []string{"UseIpAliases"}, - }, nil, nil + }, nil } config := l[0].(map[string]interface{}) stackType := config["stack_type"].(string) - // We expand and return additional_ip_ranges_config separately because - // this field is OUTPUT_ONLY for ClusterCreate RPCs. 
Instead, during the - // Terraform Create flow, we follow the CreateCluster (without - // additional_ip_ranges_config populated) with an UpdateCluster (_with_ - // additional_ip_ranges_config populated). - additionalIpRangesConfigs, err := expandAdditionalIpRangesConfigs(config["additional_ip_ranges_config"], d, c) - if err != nil { - return nil, nil, err - } - return &container.IPAllocationPolicy{ UseIpAliases: networkingMode == "VPC_NATIVE" || networkingMode == "", ClusterIpv4CidrBlock: config["cluster_ipv4_cidr_block"].(string), @@ -5564,7 +5103,7 @@ func expandIPAllocationPolicy(configured interface{}, d *schema.ResourceData, ne UseRoutes: networkingMode == "ROUTES", StackType: stackType, PodCidrOverprovisionConfig: expandPodCidrOverprovisionConfig(config["pod_cidr_overprovision_config"]), - }, additionalIpRangesConfigs, nil + }, nil } func expandMaintenancePolicy(d *schema.ResourceData, meta interface{}) *container.MaintenancePolicy { @@ -5707,16 +5246,9 @@ func expandClusterAutoscaling(configured interface{}, d *schema.ResourceData) *c } } } - var defaultCCConfig *container.DefaultComputeClassConfig - if defaultCCEnabled, ok := config["default_compute_class_enabled"]; ok { - defaultCCConfig = &container.DefaultComputeClassConfig{ - Enabled: defaultCCEnabled.(bool), - } - } return &container.ClusterAutoscaling{ EnableNodeAutoprovisioning: config["enabled"].(bool), ResourceLimits: resourceLimits, - DefaultComputeClassConfig: defaultCCConfig, AutoscalingProfile: config["autoscaling_profile"].(string), AutoprovisioningNodePoolDefaults: expandAutoProvisioningDefaults(config["auto_provisioning_defaults"], d), AutoprovisioningLocations: tpgresource.ConvertStringArr(config["auto_provisioning_locations"].([]interface{})), @@ -5964,15 +5496,6 @@ func flattenEnterpriseConfig(ec *container.EnterpriseConfig) []map[string]interf return []map[string]interface{}{result} } -func flattenAnonymousAuthenticationConfig(aac *container.AnonymousAuthenticationConfig) 
[]map[string]interface{} { - if aac == nil { - return nil - } - result := make(map[string]interface{}) - result["mode"] = aac.Mode - return []map[string]interface{}{result} -} - func flattenAdditionalPodRangesConfig(ipAllocationPolicy *container.IPAllocationPolicy) []map[string]interface{} { if ipAllocationPolicy == nil { return nil @@ -6100,23 +5623,6 @@ func expandMasterAuthorizedNetworksConfig(d *schema.ResourceData) *container.Mas return result } -func expandAnonymousAuthenticationConfig(configured interface{}) *container.AnonymousAuthenticationConfig { - l, ok := configured.([]interface{}) - if len(l) == 0 || l[0] == nil || !ok { - return nil - } - - anonAuthConfig := l[0].(map[string]interface{}) - result := container.AnonymousAuthenticationConfig{} - - if v, ok := anonAuthConfig["mode"]; ok { - if mode, ok := v.(string); ok && mode != "" { - result.Mode = mode - } - } - return &result -} - func expandManCidrBlocks(configured interface{}) []*container.CidrBlock { config, ok := configured.(*schema.Set) if !ok { @@ -6297,17 +5803,6 @@ func expandReleaseChannel(configured interface{}) *container.ReleaseChannel { } } -func expandGkeAutoUpgradeConfig(configured interface{}) *container.GkeAutoUpgradeConfig { - l := configured.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil - } - config := l[0].(map[string]interface{}) - return &container.GkeAutoUpgradeConfig{ - PatchMode: config["patch_mode"].(string), - } -} - {{ if ne $.TargetVersionName `ga` -}} func expandClusterTelemetry(configured interface{}) *container.ClusterTelemetry { l := configured.([]interface{}) @@ -6374,7 +5869,7 @@ func expandPodSecurityPolicyConfig(configured interface{}) *container.PodSecurit ForceSendFields: []string{"Enabled"}, } } -{{- end }} + func expandPodAutoscaling(configured interface{}) *container.PodAutoscaling { if configured == nil { return nil @@ -6396,6 +5891,7 @@ func expandPodAutoscaling(configured interface{}) *container.PodAutoscaling { return podAutoscaling } 
+{{- end }} func expandSecretManagerConfig(configured interface{}) *container.SecretManagerConfig { l := configured.([]interface{}) @@ -6404,30 +5900,10 @@ func expandSecretManagerConfig(configured interface{}) *container.SecretManagerC } config := l[0].(map[string]interface{}) - sc := &container.SecretManagerConfig{ + return &container.SecretManagerConfig{ Enabled: config["enabled"].(bool), ForceSendFields: []string{"Enabled"}, } - {{- if ne $.TargetVersionName "ga" }} - if autoRotation, ok := config["rotation_config"]; ok { - if autoRotationList, ok := autoRotation.([]interface{}); ok { - if len(autoRotationList) > 0 { - autoRotationConfig := autoRotationList[0].(map[string]interface{}) - if rotationInterval, ok := autoRotationConfig["rotation_interval"].(string); ok && rotationInterval != "" { - sc.RotationConfig = &container.RotationConfig{ - Enabled: autoRotationConfig["enabled"].(bool), - RotationInterval: rotationInterval, - } - } else { - sc.RotationConfig = &container.RotationConfig{ - Enabled: autoRotationConfig["enabled"].(bool), - } - } - } - } - } - {{- end }} - return sc } func expandDefaultMaxPodsConstraint(v interface{}) *container.MaxPodsConstraint { @@ -6498,18 +5974,6 @@ func expandDnsConfig(configured interface{}) *container.DNSConfig { } } -func expandNetworkPerformanceConfig(configured interface{}) *container.ClusterNetworkPerformanceConfig { - l := configured.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil - } - - config := l[0].(map[string]interface{}) - return &container.ClusterNetworkPerformanceConfig{ - TotalEgressBandwidthTier: config["total_egress_bandwidth_tier"].(string), - } -} - func expandGatewayApiConfig(configured interface{}) *container.GatewayAPIConfig { l := configured.([]interface{}) if len(l) == 0 || l[0] == nil { @@ -6759,20 +6223,6 @@ func expandWorkloadAltsConfig(configured interface{}) *container.WorkloadALTSCon } {{- end }} -func expandRBACBindingConfig(configured interface{}) *container.RBACBindingConfig 
{ - l := configured.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil - } - - config := l[0].(map[string]interface{}) - return &container.RBACBindingConfig{ - EnableInsecureBindingSystemUnauthenticated: config["enable_insecure_binding_system_unauthenticated"].(bool), - EnableInsecureBindingSystemAuthenticated: config["enable_insecure_binding_system_authenticated"].(bool), - ForceSendFields: []string{"EnableInsecureBindingSystemUnauthenticated", "EnableInsecureBindingSystemAuthenticated"}, - } -} - func flattenNotificationConfig(c *container.NotificationConfig) []map[string]interface{} { if c == nil { return nil @@ -6953,15 +6403,6 @@ func flattenClusterAddonsConfig(c *container.AddonsConfig) []map[string]interfac }, } } - if c.LustreCsiDriverConfig != nil { - lustreConfig := c.LustreCsiDriverConfig - result["lustre_csi_driver_config"] = []map[string]interface{}{ - { - "enabled": lustreConfig.Enabled, - "enable_legacy_lustre_port":lustreConfig.EnableLegacyLustrePort, - }, - } - } {{ if ne $.TargetVersionName `ga` -}} if c.IstioConfig != nil { @@ -7102,21 +6543,6 @@ func flattenReleaseChannel(c *container.ReleaseChannel) []map[string]interface{} return result } -func flattenGkeAutoUpgradeConfig(c *container.GkeAutoUpgradeConfig) []map[string]interface{} { - if c == nil { - return nil - } - - result := []map[string]interface{}{} - if c.PatchMode != "" { - result = append(result, map[string]interface{}{ - "patch_mode": c.PatchMode, - }) - } - - return result -} - {{ if ne $.TargetVersionName `ga` -}} func flattenClusterTelemetry(c *container.ClusterTelemetry) []map[string]interface{} { @@ -7177,23 +6603,6 @@ func flattenPodCidrOverprovisionConfig(c *container.PodCIDROverprovisionConfig) } } -func flattenAdditionalIpRangesConfigs(c []*container.AdditionalIPRangesConfig) []map[string]interface{} { - if len(c) == 0 { - return nil - } - - var outRanges []map[string]interface{} - for _, rangeConfig := range c { - outRangeConfig := map[string]interface{} { - 
"subnetwork": rangeConfig.Subnetwork, - "pod_ipv4_range_names": rangeConfig.PodIpv4RangeNames, - } - outRanges = append(outRanges, outRangeConfig) - } - - return outRanges -} - func flattenIPAllocationPolicy(c *container.Cluster, d *schema.ResourceData, config *transport_tpg.Config) ([]map[string]interface{}, error) { // If IP aliasing isn't enabled, none of the values in this block can be set. if c == nil || c.IpAllocationPolicy == nil || !c.IpAllocationPolicy.UseIpAliases { @@ -7224,7 +6633,6 @@ func flattenIPAllocationPolicy(c *container.Cluster, d *schema.ResourceData, con "stack_type": p.StackType, "pod_cidr_overprovision_config": flattenPodCidrOverprovisionConfig(p.PodCidrOverprovisionConfig), "additional_pod_ranges_config": flattenAdditionalPodRangesConfig(c.IpAllocationPolicy), - "additional_ip_ranges_config": flattenAdditionalIpRangesConfigs(p.AdditionalIpRangesConfigs), }, }, nil } @@ -7340,9 +6748,6 @@ func flattenClusterAutoscaling(a *container.ClusterAutoscaling) []map[string]int r["enabled"] = false } r["autoscaling_profile"] = a.AutoscalingProfile - if a.DefaultComputeClassConfig != nil { - r["default_compute_class_enabled"] = a.DefaultComputeClassConfig.Enabled - } return []map[string]interface{}{r} } @@ -7458,7 +6863,7 @@ func flattenPodSecurityPolicyConfig(c *container.PodSecurityPolicyConfig) []map[ }, } } -{{ end }} + func flattenPodAutoscaling(c *container.PodAutoscaling) []map[string]interface{} { config := make([]map[string]interface{}, 0, 1) @@ -7472,6 +6877,8 @@ func flattenPodAutoscaling(c *container.PodAutoscaling) []map[string]interface{} return config } +{{ end }} + func flattenSecretManagerConfig(c *container.SecretManagerConfig) []map[string]interface{} { if c == nil { return []map[string]interface{}{ @@ -7480,25 +6887,11 @@ func flattenSecretManagerConfig(c *container.SecretManagerConfig) []map[string]i }, } } - - result := make(map[string]interface{}) - - result["enabled"] = c.Enabled - - {{- if ne $.TargetVersionName "ga" }} - 
rotationList := []map[string]interface{}{} - if c.RotationConfig != nil { - rotationConfigMap := map[string]interface{}{ - "enabled": c.RotationConfig.Enabled, - } - if c.RotationConfig.RotationInterval != "" { - rotationConfigMap["rotation_interval"] = c.RotationConfig.RotationInterval - } - rotationList = append(rotationList, rotationConfigMap) + return []map[string]interface{}{ + { + "enabled": c.Enabled, + }, } - result["rotation_config"] = rotationList - {{- end }} - return []map[string]interface{}{result} } @@ -7582,17 +6975,6 @@ func flattenDnsConfig(c *container.DNSConfig) []map[string]interface{} { } } -func flattenNetworkPerformanceConfig(c *container.ClusterNetworkPerformanceConfig) []map[string]interface{} { - if c == nil { - return nil - } - return []map[string]interface{}{ - { - "total_egress_bandwidth_tier": c.TotalEgressBandwidthTier, - }, - } -} - func flattenGatewayApiConfig(c *container.GatewayAPIConfig) []map[string]interface{} { if c == nil { return nil @@ -7785,18 +7167,6 @@ func flattenWorkloadAltsConfig(c *container.WorkloadALTSConfig) []map[string]int } {{- end }} -func flattenRBACBindingConfig(c *container.RBACBindingConfig) []map[string]interface{} { - if c == nil { - return nil - } - return []map[string]interface{}{ - { - "enable_insecure_binding_system_authenticated": c.EnableInsecureBindingSystemAuthenticated, - "enable_insecure_binding_system_unauthenticated": c.EnableInsecureBindingSystemUnauthenticated, - }, - } -} - func resourceContainerClusterStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl index c476c2bd18f3..22079db2965f 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl +++ 
b/mmv1/third_party/terraform/services/container/resource_container_cluster_meta.yaml.tmpl @@ -99,7 +99,6 @@ fields: - field: 'cluster_telemetry.type' {{- end }} - field: 'confidential_nodes.enabled' - - field: 'confidential_nodes.confidential_instance_type' - field: 'control_plane_endpoints_config.dns_endpoint_config.allow_external_traffic' - field: 'control_plane_endpoints_config.dns_endpoint_config.endpoint' - field: 'cost_management_config.enabled' @@ -160,8 +159,6 @@ fields: - field: 'identity_service_config.enabled' - field: 'initial_node_count' - field: 'ip_allocation_policy.additional_pod_ranges_config.pod_range_names' - - field: 'ip_allocation_policy.additional_ip_ranges_config.subnetwork' - - field: 'ip_allocation_policy.additional_ip_ranges_config.pod_ipv4_range_names' - field: 'ip_allocation_policy.cluster_ipv4_cidr_block' - field: 'ip_allocation_policy.cluster_secondary_range_name' - field: 'ip_allocation_policy.pod_cidr_overprovision_config.disabled' @@ -222,7 +219,6 @@ fields: - field: 'node_config.advanced_machine_features.threads_per_core' - field: 'node_config.boot_disk_kms_key' - field: 'node_config.confidential_nodes.enabled' - - field: 'node_config.confidential_nodes.confidential_instance_type' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.gcp_secret_manager_certificate_config.secret_uri' - field: 'node_config.containerd_config.private_registry_access_config.enabled' @@ -368,8 +364,6 @@ fields: api_field: 'node_pools.config.boot_disk_kms_key' - field: 'node_pool.node_config.confidential_nodes.enabled' api_field: 'node_pools.config.confidential_nodes.enabled' - - field: 'node_pool.node_config.confidential_nodes.confidential_instance_type' - api_field: 'node_pools.config.confidential_nodes.confidential_instance_type' - field: 
'node_pool.node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' api_field: 'node_pools.config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' - field: 'node_pool.node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.gcp_secret_manager_certificate_config.secret_uri' diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl index ad5b0a014dd1..a1af95014872 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_migratev1.go.tmpl @@ -1086,14 +1086,6 @@ func resourceContainerClusterResourceV1() *schema.Resource { ForceNew: true, Description: `Whether Confidential Nodes feature is enabled for all nodes in this cluster.`, }, - "confidential_instance_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - DiffSuppressFunc: suppressDiffForConfidentialNodes, - Description: `Defines the type of technology used by the confidential node.`, - ValidateFunc: validation.StringInSlice([]string{"SEV", "SEV_SNP", "TDX"}, false), - }, }, }, }, diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 9eef6abaf583..cf1e7ece8b3e 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -242,6 +242,15 @@ func TestAccContainerCluster_withAddons(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, }, + { + Config: 
testAccContainerCluster_withInternalLoadBalancer(pid, clusterName, networkName, subnetworkName), + }, + { + ResourceName: "google_container_cluster.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + }, }, }) } @@ -394,7 +403,7 @@ func TestAccContainerCluster_withConfidentialNodes(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "", "n2d-standard-2"), + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName), }, { ResourceName: "google_container_cluster.confidential_nodes", @@ -403,7 +412,7 @@ func TestAccContainerCluster_withConfidentialNodes(t *testing.T) { ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, true, "", "n2d-standard-2"), + Config: testAccContainerCluster_disableConfidentialNodes(clusterName, npName, networkName, subnetworkName), }, { ResourceName: "google_container_cluster.confidential_nodes", @@ -412,32 +421,14 @@ func TestAccContainerCluster_withConfidentialNodes(t *testing.T) { ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "SEV", "n2d-standard-2"), - }, - { - ResourceName: "google_container_cluster.confidential_nodes", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "SEV_SNP", "n2d-standard-2"), - }, - { - ResourceName: "google_container_cluster.confidential_nodes", - ImportState: true, - ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, + Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName), + }, { - Config: testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName, false, "TDX", "c3-standard-4"), - }, - { - ResourceName: "google_container_cluster.confidential_nodes", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_container_cluster.confidential_nodes", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, + }, }, }) } @@ -512,6 +503,7 @@ func TestAccContainerCluster_withMaxRunDuration(t *testing.T) { }) } + func TestAccContainerCluster_withFlexStart(t *testing.T) { t.Parallel() @@ -655,84 +647,6 @@ func TestAccContainerCluster_withMultiNetworking(t *testing.T) { }) } -func TestAccContainerCluster_inTransitEncryptionConfig(t *testing.T) { - t.Parallel() - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_inTransitEncryptionConfig(clusterName, networkName, subnetworkName, "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "in_transit_encryption_config", "IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: 
testAccContainerCluster_inTransitEncryptionConfig(clusterName, networkName, subnetworkName, "IN_TRANSIT_ENCRYPTION_DISABLED"), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "in_transit_encryption_config", "IN_TRANSIT_ENCRYPTION_DISABLED"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccContainerCluster_networkPerformanceConfig(t *testing.T) { - t.Parallel() - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_networkPerformanceConfig(clusterName, networkName, subnetworkName, "TIER_1"), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "network_performance_config.0.total_egress_bandwidth_tier", "TIER_1"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_networkPerformanceConfig(clusterName, networkName, subnetworkName, "TIER_UNSPECIFIED"), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "network_performance_config.0.total_egress_bandwidth_tier", "TIER_UNSPECIFIED"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - func TestAccContainerCluster_withFQDNNetworkPolicy(t *testing.T) { t.Parallel() @@ -915,6 +829,7 @@ func TestUnitContainerCluster_Rfc3339TimeDiffSuppress(t *testing.T) { } } +{{ if ne $.TargetVersionName `ga` -}} func TestAccContainerCluster_withPodAutoscaling(t *testing.T) { t.Parallel() @@ -972,6 +887,8 @@ resource "google_container_cluster" "pod_autoscaling_config" { } `, clusterName, networkName, subnetworkName, hpaProfile) } +{{- end }} + func testAccContainerCluster_enableMultiNetworking(clusterName string) string { return fmt.Sprintf(` @@ -1307,31 +1224,6 @@ func TestAccContainerCluster_withInvalidReleaseChannel(t *testing.T) { }) } -func TestAccContainerCluster_withAcceleratedGkeAutoUpgradeConfig(t *testing.T) { - t.Parallel() - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_withGkeAutoUpgradeConfig(clusterName, "ACCELERATED", networkName, subnetworkName), - }, - { - ResourceName: "google_container_cluster.with_gke_auto_upgrade_config", - ImportStateIdPrefix: "us-central1-a/", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, - }, - }, - }) -} - {{ if ne $.TargetVersionName `ga` -}} func TestAccContainerCluster_withTelemetryEnabled(t *testing.T) { t.Parallel() @@ -1940,7 +1832,7 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { Steps: []resource.TestStep{ // First test with empty `node_config.linux_node_config` (should 
result in "CGROUP_MODE_UNSPECIFIED") { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "", false), + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, ""), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -1951,11 +1843,11 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { ResourceName: "google_container_cluster.with_linux_node_config", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, // Then add a config and make sure it updates. { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V2", false), + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V2"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_container_cluster.with_linux_node_config", @@ -1972,11 +1864,11 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { ResourceName: "google_container_cluster.with_linux_node_config", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, // Lastly, update the setting in-place. 
V1 since UNSPECIFIED is default { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V1", false), + Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "CGROUP_MODE_V1"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_container_cluster.with_linux_node_config", @@ -1993,33 +1885,8 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { ResourceName: "google_container_cluster.with_linux_node_config", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, - // Update linux config transparent hugepage - { - Config: testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, "", true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_container_cluster.with_linux_node_config", - "node_config.0.linux_node_config.0.transparent_hugepage_enabled", "TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS", - ), - resource.TestCheckResourceAttr( - "google_container_cluster.with_linux_node_config", - "node_config.0.linux_node_config.0.transparent_hugepage_defrag", "TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS", - ), - ), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - acctest.ExpectNoDelete(), - }, - }, - }, - { - ResourceName: "google_container_cluster.with_linux_node_config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, - }, }, }) } @@ -2178,31 +2045,6 @@ func TestAccContainerCluster_withNodeConfigKubeletConfigSettingsUpdates(t *testi }) } -func TestAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool(t *testing.T) { - t.Parallel() - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 
10)) - nodePoolName := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool(clusterName, nodePoolName, networkName, subnetworkName, "TRANSPARENT_HUGEPAGE_DEFRAG_NEVER", "TRANSPARENT_HUGEPAGE_ENABLED_MADVISE"), - }, - { - ResourceName: "google_container_cluster.with_node_config_kubelet_config_settings_in_node_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"min_master_version", "deletion_protection"}, - }, - }, - }) -} - func TestAccContainerCluster_withInsecureKubeletReadonlyPortEnabledInNodePool(t *testing.T) { t.Parallel() clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) @@ -3527,7 +3369,7 @@ func TestAccContainerCluster_stackType_withDualStack(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName, "IPV4_IPV6"), + Config: testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "ip_allocation_policy.0.stack_type", "IPV4_IPV6"), ), @@ -3538,18 +3380,6 @@ func TestAccContainerCluster_stackType_withDualStack(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, }, - { - Config: testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName, "IPV4"), - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttr(resourceName, "ip_allocation_policy.0.stack_type", "IPV4"), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, }, }) } @@ -3772,76 +3602,6 @@ func TestAccContainerCluster_nodeAutoprovisioningNetworkTags(t *testing.T) { }) } -func TestAccContainerCluster_withDefaultComputeClassEnabled(t *testing.T) { - t.Parallel() - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_withDefaultComputeClassEnabled(clusterName, networkName, subnetworkName, true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "cluster_autoscaling.0.default_compute_class_enabled", "true"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_withDefaultComputeClassEnabled(clusterName, networkName, subnetworkName, false), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "cluster_autoscaling.0.default_compute_class_enabled", "false"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerCluster_withDefaultComputeClassEnabled(clusterName, networkName, subnetworkName string, 
enabled bool) string { - return fmt.Sprintf(` -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - network = "%s" - subnetwork = "%s" - deletion_protection = false - - cluster_autoscaling { - enabled = true - default_compute_class_enabled = %t - resource_limits { - resource_type = "cpu" - minimum = 1 - maximum = 10 - } - resource_limits { - resource_type = "memory" - minimum = 10 - maximum = 100 - } - } -} -`, clusterName, networkName, subnetworkName, enabled) -} - - - func TestAccContainerCluster_withShieldedNodes(t *testing.T) { t.Parallel() @@ -4301,24 +4061,6 @@ func TestAccContainerCluster_withSecretManagerConfig(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, }, - { - Config: testAccContainerCluster_withSecretManagerRotationPeriodUpdated(pid, clusterName, networkName, subnetworkName), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_withSecretManagerConfigRotationDisabled(pid, clusterName, networkName, subnetworkName), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, { Config: testAccContainerCluster_withSecretManagerConfigUpdated(pid, clusterName, networkName, subnetworkName), }, @@ -6110,6 +5852,7 @@ func TestAccContainerCluster_WithCPAFeatures(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { + // We are only supporting CPA features on create for now. 
Config: testAccContainerCluster_EnableCPAFeatures(context), }, { @@ -6987,6 +6730,9 @@ resource "google_container_cluster" "primary" { gcp_filestore_csi_driver_config { enabled = false } + cloudrun_config { + disabled = true + } dns_cache_config { enabled = false } @@ -7011,9 +6757,6 @@ resource "google_container_cluster" "primary" { parallelstore_csi_driver_config { enabled = false } - lustre_csi_driver_config { - enabled = false - } {{- if ne $.TargetVersionName "ga" }} istio_config { disabled = true @@ -7062,6 +6805,9 @@ resource "google_container_cluster" "primary" { gcp_filestore_csi_driver_config { enabled = true } + cloudrun_config { + disabled = false + } dns_cache_config { enabled = true } @@ -7089,12 +6835,8 @@ resource "google_container_cluster" "primary" { enabled = true } } - parallelstore_csi_driver_config { - enabled = true - } - lustre_csi_driver_config { + parallelstore_csi_driver_config { enabled = true - enable_legacy_lustre_port=true } {{- if ne $.TargetVersionName "ga" }} istio_config { @@ -7114,34 +6856,74 @@ resource "google_container_cluster" "primary" { `, projectID, clusterName, networkName, subnetworkName) } -func testAccContainerCluster_withNotificationConfig(clusterName, topic, networkName, subnetworkName string) string { +func testAccContainerCluster_withInternalLoadBalancer(projectID string, clusterName, networkName, subnetworkName string) string { return fmt.Sprintf(` - -resource "google_pubsub_topic" "%s" { - name = "%s" +data "google_project" "project" { + project_id = "%s" } -resource "google_container_cluster" "notification_config" { +resource "google_container_cluster" "primary" { name = "%s" location = "us-central1-a" - initial_node_count = 3 - notification_config { - pubsub { - enabled = true - topic = google_pubsub_topic.%s.id - } - } - network = "%s" - subnetwork = "%s" + initial_node_count = 1 - deletion_protection = false -} -`, topic, topic, clusterName, topic, networkName, subnetworkName) -} + min_master_version = 
"latest" -func testAccContainerCluster_disableNotificationConfig(clusterName, networkName, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "notification_config" { + workload_identity_config { + workload_pool = "${data.google_project.project.project_id}.svc.id.goog" + } + + addons_config { + http_load_balancing { + disabled = false + } + horizontal_pod_autoscaling { + disabled = false + } + network_policy_config { + disabled = false + } + cloudrun_config { + disabled = false + load_balancer_type = "LOAD_BALANCER_TYPE_INTERNAL" + } + } + network = "%s" + subnetwork = "%s" + + deletion_protection = false +} +`, projectID, clusterName, networkName, subnetworkName) +} + +func testAccContainerCluster_withNotificationConfig(clusterName, topic, networkName, subnetworkName string) string { + return fmt.Sprintf(` + +resource "google_pubsub_topic" "%s" { + name = "%s" +} + +resource "google_container_cluster" "notification_config" { + name = "%s" + location = "us-central1-a" + initial_node_count = 3 + notification_config { + pubsub { + enabled = true + topic = google_pubsub_topic.%s.id + } + } + network = "%s" + subnetwork = "%s" + + deletion_protection = false +} +`, topic, topic, clusterName, topic, networkName, subnetworkName) +} + +func testAccContainerCluster_disableNotificationConfig(clusterName, networkName, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "notification_config" { name = "%s" location = "us-central1-a" initial_node_count = 3 @@ -7241,36 +7023,60 @@ resource "google_container_cluster" "filtered_notification_config" { `, topic, topic, clusterName, topic, networkName, subnetworkName) } -func testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName string, enable bool, confidentialInstanceType, machineType string) string { - confInsTypeString := "" - if confidentialInstanceType != "" { - confInsTypeString = 
fmt.Sprintf(`confidential_instance_type = "%s"`, confidentialInstanceType) - } - +func testAccContainerCluster_withConfidentialNodes(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` resource "google_container_cluster" "confidential_nodes" { name = "%s" location = "us-central1-a" + release_channel { + channel = "RAPID" + } node_pool { name = "%s" initial_node_count = 1 node_config { - machine_type = "%s" + machine_type = "n2d-standard-2" // can't be e2 because Confidential Nodes require AMD CPUs } } confidential_nodes { - enabled = %t - %s + enabled = true + } + network = "%s" + subnetwork = "%s" + + deletion_protection = false +} +`, clusterName, npName, networkName, subnetworkName) +} + +func testAccContainerCluster_disableConfidentialNodes(clusterName, npName, networkName, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "confidential_nodes" { + name = "%s" + location = "us-central1-a" + release_channel { + channel = "RAPID" + } + + node_pool { + name = "%s" + initial_node_count = 1 + node_config { + machine_type = "n2d-standard-2" + } } + confidential_nodes { + enabled = false + } network = "%s" subnetwork = "%s" deletion_protection = false } -`, clusterName, npName, machineType, enable, confInsTypeString, networkName, subnetworkName) +`, clusterName, npName, networkName, subnetworkName) } func testAccContainerCluster_withLocalSsdEncryptionMode(clusterName, npName, networkName, subnetworkName, mode string) string { @@ -7350,12 +7156,8 @@ resource "google_container_cluster" "max_run_duration" { func testAccContainerCluster_withFlexStart(clusterName, npName, networkName, subnetworkName string) string { return fmt.Sprintf(` -data "google_container_engine_versions" "uscentral1a" { - location = "us-central1-a" -} - resource "google_container_cluster" "flex_start" { - min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["RAPID"] + 
min_master_version = "1.32.3-gke.1717000" name = "%s" location = "us-central1-a" @@ -7585,24 +7387,6 @@ resource "google_container_cluster" "with_release_channel" { `, clusterName, channel, networkName, subnetworkName) } -func testAccContainerCluster_withGkeAutoUpgradeConfig(clusterName, patchMode, networkName, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "with_gke_auto_upgrade_config" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - - gke_auto_upgrade_config { - patch_mode = "%s" - } - network = "%s" - subnetwork = "%s" - - deletion_protection = false -} -`, clusterName, patchMode, networkName, subnetworkName) -} - {{ if ne $.TargetVersionName `ga` -}} func testAccContainerCluster_withTelemetryEnabled(clusterName, telemetryType, networkName, subnetworkName string) string { return fmt.Sprintf(` @@ -7948,7 +7732,7 @@ func TestAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(t *te CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName), + Config: testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName, "us-central1-a"), }, { ResourceName: "google_container_cluster.with_private_flexible_cluster", @@ -7960,12 +7744,8 @@ func TestAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(t *te }) } -func testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName string) string { +func testAccContainerCluster_withCidrBlockWithoutPrivateEndpointSubnetwork(containerNetName, clusterName, location string) string { return fmt.Sprintf(` -data "google_container_engine_versions" "uscentral1a" { - location = "us-central1-a" -} - resource "google_compute_network" "container_network" { name = "%s" auto_create_subnetworks = false @@ -7979,8 +7759,8 @@ resource 
"google_compute_subnetwork" "container_subnetwork" { resource "google_container_cluster" "with_private_flexible_cluster" { name = "%s" - location = "us-central1-a" - min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] + location = "%s" + min_master_version = "1.29" initial_node_count = 1 networking_mode = "VPC_NATIVE" @@ -7993,7 +7773,7 @@ resource "google_container_cluster" "with_private_flexible_cluster" { } deletion_protection = false } -`, containerNetName, clusterName) +`, containerNetName, clusterName, location) } func TestAccContainerCluster_withEnablePrivateEndpointToggle(t *testing.T) { @@ -8388,35 +8168,7 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings" { node_config { kubelet_config { - pod_pids_limit = 1024 - container_log_max_files = 4 - single_process_oom_kill = true - max_parallel_image_pulls = 5 - eviction_max_pod_grace_period_seconds = 200 - eviction_soft { - memory_available = "200Mi" - nodefs_available = "10%%" - nodefs_inodes_free = "20%%" - imagefs_available = "30%%" - imagefs_inodes_free = "40%%" - pid_available = "50%%" - } - eviction_soft_grace_period { - memory_available = "4m" - nodefs_available = "3m30s" - nodefs_inodes_free = "3m" - imagefs_available = "5m" - imagefs_inodes_free = "2.5m" - pid_available = "10s" - } - eviction_minimum_reclaim { - memory_available = "5%%" - nodefs_available = "6%%" - nodefs_inodes_free = "4%%" - imagefs_available = "2.5%%" - imagefs_inodes_free = "9.0%%" - pid_available = "1.5%%" - } + pod_pids_limit = 1024 } } network = "%s" @@ -8441,33 +8193,6 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings" { cpu_cfs_quota_period = "%s" insecure_kubelet_readonly_port_enabled = "%s" pod_pids_limit = %v - single_process_oom_kill = true - max_parallel_image_pulls = 5 - eviction_max_pod_grace_period_seconds = 200 - eviction_soft { - memory_available = "100Mi" - nodefs_available = "50%%" - 
nodefs_inodes_free = "40%%" - imagefs_available = "30%%" - imagefs_inodes_free = "20%%" - pid_available = "10%%" - } - eviction_soft_grace_period { - memory_available = "5m" - nodefs_available = "4m30s" - nodefs_inodes_free = "3.6m" - imagefs_available = "100s" - imagefs_inodes_free = "2m" - pid_available = "3m2.6s" - } - eviction_minimum_reclaim { - memory_available = "10%%" - nodefs_available = "8.5%%" - nodefs_inodes_free = "5.0%%" - imagefs_available = "3%%" - imagefs_inodes_free = "9%%" - pid_available = "5%%" - } } } network = "%s" @@ -8478,87 +8203,6 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings" { `, clusterName, cpuManagerPolicy, cpuCfsQuota, cpuCfsQuotaPeriod, insecureKubeletReadonlyPortEnabled, podPidsLimit, networkName, subnetworkName) } -func testAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool(clusterName, nodePoolName, networkName, subnetworkName, thpDefrag, thpEnabled string) string { - return fmt.Sprintf(` -data "google_container_engine_versions" "central1f" { - location = "us-central1-f" -} -resource "google_compute_node_template" "soletenant-tmpl" { - name = "%s" - region = "us-central1" - node_type = "n1-node-96-624" - cpu_overcommit_type = "ENABLED" -} -resource "google_compute_node_group" "group" { - name = "%s" - zone = "us-central1-f" - description = "example google_compute_node_group for Terraform Google Provider" - initial_size = 1 - node_template = google_compute_node_template.soletenant-tmpl.id -} -resource "google_container_cluster" "with_node_config_kubelet_config_settings_in_node_pool" { - name = "%s" - location = "us-central1-f" - min_master_version = data.google_container_engine_versions.central1f.latest_master_version - - node_pool { - name = "%s" - initial_node_count = 1 - machine_type = "n1-standard-1" - node_config { - kubelet_config { - max_parallel_image_pulls = 5 - eviction_max_pod_grace_period_seconds = 200 - eviction_soft { - memory_available = "200Mi" - nodefs_available = 
"10%%" - nodefs_inodes_free = "20%%" - imagefs_available = "30%%" - imagefs_inodes_free = "40%%" - pid_available = "50%%" - } - eviction_soft_grace_period { - memory_available = "1m" - nodefs_available = "2s" - nodefs_inodes_free = "3m" - imagefs_available = "100s" - imagefs_inodes_free = "2m" - pid_available = "3m2.6s" - } - eviction_minimum_reclaim { - memory_available = "10%%" - nodefs_available = "8.5%%" - nodefs_inodes_free = "5.0%%" - imagefs_available = "3%%" - imagefs_inodes_free = "9%%" - pid_available = "5%%" - } - } - disk_size_gb = 15 - disk_type = "pd-ssd" - node_group = google_compute_node_group.group.name - sole_tenant_config { - node_affinity { - key = "compute.googleapis.com/node-group-name" - operator = "IN" - values = [google_compute_node_group.group.name] - } - min_node_cpus = 1 - } - linux_node_config { - transparent_hugepage_defrag = %s - transparent_hugepage_enabled = %s - } - } - } - network = "%s" - subnetwork = "%s" - - deletion_protection = false -} -`, clusterName, clusterName, clusterName, nodePoolName, thpDefrag, thpEnabled, networkName, subnetworkName) -} - func testAccContainerCluster_withInsecureKubeletReadonlyPortEnabledInNodePool(clusterName, nodePoolName, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled string) string { return fmt.Sprintf(` resource "google_container_cluster" "with_insecure_kubelet_readonly_port_enabled_in_node_pool" { @@ -8780,7 +8424,7 @@ resource "google_container_cluster" "with_node_config" { `, clusterName, networkName, subnetworkName) } -func testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, cgroupMode string, thpEnabled bool) string { +func testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkName, subnetworkName, cgroupMode string) string { // Empty block inside node_config if cgroupMode is empty linuxNodeConfig := "" @@ -8792,23 +8436,11 @@ func testAccContainerCluster_withNodeConfigLinuxNodeConfig(clusterName, networkN `, 
cgroupMode) } - if cgroupMode== "" && thpEnabled { - linuxNodeConfig = ` - linux_node_config { - transparent_hugepage_defrag = "TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS" - transparent_hugepage_enabled = "TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS" - }` - } - return fmt.Sprintf(` -data "google_container_engine_versions" "central1a" { - location = "us-central1-a" -} resource "google_container_cluster" "with_linux_node_config" { name = "%s" location = "us-central1-f" initial_node_count = 1 - min_master_version = data.google_container_engine_versions.central1a.latest_master_version node_config { disk_size_gb = 15 @@ -8936,10 +8568,12 @@ func testAccContainerCluster_withNodeConfigReservationAffinitySpecific(reservati resource "google_project_service" "compute" { service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "container" { service = "container.googleapis.com" + disable_on_destroy = false depends_on = [google_project_service.compute] } @@ -10427,7 +10061,7 @@ resource "google_container_cluster" "with_ip_allocation_policy" { `, containerNetName, clusterName) } -func testAccContainerCluster_stackType_withDualStack(containerNetName, clusterName, stack string) string { +func testAccContainerCluster_stackType_withDualStack(containerNetName string, clusterName string) string { return fmt.Sprintf(` resource "google_compute_network" "container_network" { name = "%s" @@ -10457,11 +10091,11 @@ resource "google_container_cluster" "with_stack_type" { ip_allocation_policy { cluster_ipv4_cidr_block = "10.0.0.0/16" services_ipv4_cidr_block = "10.1.0.0/16" - stack_type = "%s" + stack_type = "IPV4_IPV6" } deletion_protection = false } -`, containerNetName, clusterName, stack) +`, containerNetName, clusterName) } func testAccContainerCluster_stackType_withSingleStack(containerNetName string, clusterName string) string { @@ -11509,24 +11143,28 @@ resource "google_container_cluster" "primary" { min_master_version = 
data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] initial_node_count = 1 - # Some existing Beta APIs will be deprecated as the feature will be GAed, - # and the Beta API will be eventually removed. In the case of the ResourceClaims - # and its depended APIs, they are GAed in Kubernetes as of 1.34. And, the Beta APIs - # will be removed after at least 3 minor version bumps, so it will be removed as - # of Kubernetes 1.37 or later. - # https://pr.k8s.io/132706 + # This feature has been available since GKE 1.27, and currently the only + # supported Beta API is authentication.k8s.io/v1beta1/selfsubjectreviews. + # However, in the future, more Beta APIs will be supported, such as the + # resource.k8s.io group. At the same time, some existing Beta APIs will be + # deprecated as the feature will be GAed, and the Beta API will be eventually + # removed. In the case of the SelfSubjectReview API, it is planned to be GAed + # in Kubernetes as of 1.28. And, the Beta API of SelfSubjectReview will be removed + # after at least 3 minor version bumps, so it will be removed as of Kubernetes 1.31 + # or later. + # https://pr.k8s.io/117713 # https://kubernetes.io/docs/reference/using-api/deprecation-guide/ # + # The new Beta APIs will be available since GKE 1.28 + # - admissionregistration.k8s.io/v1beta1/validatingadmissionpolicies + # - admissionregistration.k8s.io/v1beta1/validatingadmissionpolicybindings + # https://pr.k8s.io/118644 + # # Removing the Beta API from Kubernetes will break the test. # TODO: Replace the Beta API with one available on the version of GKE # if the test is broken. 
enable_k8s_beta_apis { - enabled_apis = [ - "resource.k8s.io/v1beta1/deviceclasses", - "resource.k8s.io/v1beta1/resourceclaims", - "resource.k8s.io/v1beta1/resourceclaimtemplates", - "resource.k8s.io/v1beta1/resourceslices" - ] + enabled_apis = ["authentication.k8s.io/v1beta1/selfsubjectreviews"] } network = "%s" subnetwork = "%s" @@ -11713,68 +11351,6 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 secret_manager_config { enabled = true -{{- if ne $.TargetVersionName "ga" }} - rotation_config { - enabled = true - rotation_interval = "300s" - } -{{- end }} - } - deletion_protection = false - network = "%s" - subnetwork = "%s" - workload_identity_config { - workload_pool = "${data.google_project.project.project_id}.svc.id.goog" - } -} -`, projectID, name, networkName, subnetworkName) -} - -func testAccContainerCluster_withSecretManagerRotationPeriodUpdated(projectID, name, networkName, subnetworkName string) string { - return fmt.Sprintf(` -data "google_project" "project" { - project_id = "%s" -} -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - secret_manager_config { - enabled = true -{{- if ne $.TargetVersionName "ga" }} - rotation_config { - enabled = true - rotation_interval = "120s" - } -{{- end }} - } - deletion_protection = false - network = "%s" - subnetwork = "%s" - workload_identity_config { - workload_pool = "${data.google_project.project.project_id}.svc.id.goog" - } -} -`, projectID, name, networkName, subnetworkName) -} - -func testAccContainerCluster_withSecretManagerConfigRotationDisabled(projectID, name, networkName, subnetworkName string) string { - return fmt.Sprintf(` -data "google_project" "project" { - project_id = "%s" -} -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - secret_manager_config { - enabled = true -{{- if ne $.TargetVersionName "ga" }} - rotation_config { - enabled = 
false - rotation_interval = "120s" - } -{{- end }} } deletion_protection = false network = "%s" @@ -12122,7 +11698,6 @@ resource "google_compute_node_template" "soletenant-tmpl" { name = "%s" region = "us-central1" node_type = "n1-node-96-624" - cpu_overcommit_type = "ENABLED" } resource "google_compute_node_group" "group" { @@ -12143,14 +11718,6 @@ resource "google_container_cluster" "primary" { disk_size_gb = 15 disk_type = "pd-ssd" node_group = google_compute_node_group.group.name - sole_tenant_config { - node_affinity { - key = "compute.googleapis.com/node-group-name" - operator = "IN" - values = [google_compute_node_group.group.name] - } - min_node_cpus = 1 - } } network = "%s" subnetwork = "%s" @@ -13780,18 +13347,6 @@ func TestAccContainerCluster_storagePoolsWithNodeConfig(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, }, - { - Config: testAccContainerCluster_storagePoolsWithNodeConfigUpdate(cluster, location, networkName, subnetworkName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.storage_pools_with_node_config", "node_config.0.storage_pools.#", "0"), - ), - }, - { - ResourceName: "google_container_cluster.storage_pools_with_node_config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, }, }) } @@ -13818,27 +13373,6 @@ resource "google_container_cluster" "storage_pools_with_node_config" { `, cluster, location, storagePoolResourceName, networkName, subnetworkName) } -func testAccContainerCluster_storagePoolsWithNodeConfigUpdate(cluster, location, networkName, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "storage_pools_with_node_config" { - name = "%s" - location = "%s" - - initial_node_count = 1 - node_config { - machine_type = "c3-standard-4" - image_type = "COS_CONTAINERD" - disk_type = "hyperdisk-balanced" - } - - network = "%s" - 
subnetwork = "%s" - - deletion_protection = false -} -`, cluster, location, networkName, subnetworkName) -} - func TestAccContainerCluster_withAutopilotGcpFilestoreCsiDriver(t *testing.T) { t.Parallel() @@ -14197,573 +13731,3 @@ resource "google_container_cluster" "primary" { } `, clusterName, networkName, subnetworkName) } - -func TestAccContainerCluster_withAdvancedMachineFeaturesPMU_Standard(t *testing.T) { - t.Parallel() - - suffix := acctest.RandString(t, 10) - clusterResourceName := "google_container_cluster.primary" - clusterName := fmt.Sprintf("tf-test-cluster-%s", suffix) - networkName := fmt.Sprintf("test-network-%s", suffix) - subnetworkName := fmt.Sprintf("test-subnetwork-%s", suffix) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_withAdvancedMachineFeaturesPMU(clusterName, networkName, subnetworkName, "STANDARD"), - }, - { - ResourceName: clusterResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccContainerCluster_withAdvancedMachineFeaturesPMU_Architectural(t *testing.T) { - t.Parallel() - - suffix := acctest.RandString(t, 10) - clusterResourceName := "google_container_cluster.primary" - clusterName := fmt.Sprintf("tf-test-cluster-%s", suffix) - networkName := fmt.Sprintf("test-network-%s", suffix) - subnetworkName := fmt.Sprintf("test-subnetwork-%s", suffix) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_withAdvancedMachineFeaturesPMU(clusterName, networkName, 
subnetworkName, "ARCHITECTURAL"), - }, - { - ResourceName: clusterResourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerCluster_withAdvancedMachineFeaturesPMU(clusterName, networkName, subnetworkName, pmuLevel string) string { - return fmt.Sprintf(` -resource "google_compute_network" "default" { - name = "%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - name = "%s" - network = google_compute_network.default.name - ip_cidr_range = "10.9.0.0/16" - region = "us-central1" -} - -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - network = google_compute_network.default.name - subnetwork = google_compute_subnetwork.default.name - deletion_protection = false - node_config { - machine_type = "c4-standard-2" - advanced_machine_features { - threads_per_core = 2 - performance_monitoring_unit = "%s" - } - } -} -`, networkName, subnetworkName, clusterName, pmuLevel) -} - -func testAccContainerCluster_inTransitEncryptionConfig(name, networkName, subnetworkName, config string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - network = "%s" - subnetwork = "%s" - datapath_provider = "ADVANCED_DATAPATH" - deletion_protection = false - in_transit_encryption_config = "%s" -} -`, name, networkName, subnetworkName, config) -} - -func testAccContainerCluster_networkPerformanceConfig(name, networkName, subnetworkName, config string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - network = "%s" - subnetwork = "%s" - deletion_protection = false - - node_config { - machine_type = "n2-standard-32" - gvnic { - enabled = true - } - } - - network_performance_config { - 
total_egress_bandwidth_tier = "%s" - } -} -`, name, networkName, subnetworkName, config) -} - -type subnetRangeInfo struct { - SubnetName string - RangeNames []string -} - -func bootstrapAdditionalIpRangesNetworkConfig(t *testing.T, name string, additionalSubnetCount int, secondaryRangeCount int) (string, []subnetRangeInfo) { - sri := []subnetRangeInfo{} - - // We create our network to ensure no range collisions. - networkName := acctest.BootstrapSharedTestNetwork(t, fmt.Sprintf("%s-network", name)) - mainSubnet := acctest.BootstrapSubnetWithOverrides(t, fmt.Sprintf("%s-subnet-main", name), networkName, map[string]interface{}{ - "ipCidrRange": "10.2.0.0/24", - "secondaryIpRanges": []map[string]interface{}{ - { - "rangeName": "pods", - "ipCidrRange": "10.3.0.0/16", - }, - { - "rangeName": "services", - "ipCidrRange": "10.4.0.0/16", - }, - }, - }) - - si := subnetRangeInfo{ - SubnetName: mainSubnet, - RangeNames: []string{"pods"}, - } - sri = append(sri, si) - - cumulativeRangeIndex := 0 - for subnetIndex := 0; subnetIndex < additionalSubnetCount; subnetIndex++ { - ranges := []map[string]interface{}{} - rangeNames := []string{} - for rangeIndex := 0; rangeIndex < secondaryRangeCount; rangeIndex++ { - rangeName := fmt.Sprintf("range-%d", cumulativeRangeIndex) - r := map[string]interface{}{ - "rangeName": rangeName, - "ipCidrRange": fmt.Sprintf("10.0.%d.0/24", cumulativeRangeIndex), - } - rangeNames = append(rangeNames, rangeName) - ranges = append(ranges, r) - cumulativeRangeIndex++ - } - - subnetOverrides := map[string]interface{}{ - "ipCidrRange": fmt.Sprintf("10.1.%d.0/24", subnetIndex), - "secondaryIpRanges": ranges, - } - - subnetName := fmt.Sprintf("%s-subnet-add-%d", name, subnetIndex) - acctest.BootstrapSubnetWithOverrides(t, subnetName, networkName, subnetOverrides) - - si := subnetRangeInfo{ - SubnetName: subnetName, - RangeNames: rangeNames, - } - - sri = append(sri, si) - } - - return networkName, sri -} - -func 
TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) { - t.Parallel() - - testName := "gke-msc" - network, sri := bootstrapAdditionalIpRangesNetworkConfig(t, testName, 2, 2) - - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - Check: resource.TestCheckResourceAttrSet("google_container_cluster.primary", "node_pool.0.network_config.subnetwork"), - }, - }, - }) -} - -func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) { - t.Parallel() - - testName := "gke-msc-update" - network, sri := bootstrapAdditionalIpRangesNetworkConfig(t, testName, 2, 2) - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - Check: resource.TestCheckResourceAttrSet("google_container_cluster.primary", "node_pool.0.network_config.subnetwork"), - }, - { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:len(sri)-1]), - }, - { - ResourceName: 
"google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:1]), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:1]), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccContainerCluster_withAnonymousAuthenticationConfig(t *testing.T) { - t.Parallel() - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_withAnonymousAuthenticationConfig(clusterName, networkName, subnetworkName, "LIMITED"), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "anonymous_authentication_config.0.mode", "LIMITED"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_withAnonymousAuthenticationConfig(clusterName, networkName, subnetworkName, "ENABLED"), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "anonymous_authentication_config.0.mode", "ENABLED"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerCluster_additional_ip_ranges_config(clusterName string, networkName string, sri []subnetRangeInfo) string { - var additionalIpRangesStr string - - for _, si := range sri[1:] { - var podIpv4RangeStr string - for i, rn := range si.RangeNames { - podIpv4RangeStr += fmt.Sprintf("\"%s\"", rn) - if i != len(si.RangeNames) - 1 { - podIpv4RangeStr += ", " - } - } - additionalIpRangesStr += fmt.Sprintf(` - additional_ip_ranges_config { - subnetwork = "%s" - pod_ipv4_range_names = [%s] - } - `, si.SubnetName, podIpv4RangeStr) - } - - return fmt.Sprintf(` - resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - network = "%s" - subnetwork = "%s" - initial_node_count = 1 - - ip_allocation_policy { - cluster_secondary_range_name = "pods" - services_secondary_range_name = "services" - %s - } - - deletion_protection = false - } - `, clusterName, networkName, sri[0].SubnetName, additionalIpRangesStr) -} - -func testAccContainerCluster_withAnonymousAuthenticationConfig(name, networkName, subnetworkName string, mode string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "primary" { - name = "%s" - network = "%s" - subnetwork = "%s" - initial_node_count = 1 - deletion_protection = false - - anonymous_authentication_config { - mode = "%s" - } -} - `, name, networkName, subnetworkName, mode) -} - -func TestAccContainerCluster_WithCPAFeaturesUpdate(t *testing.T) { - t.Parallel() - - 
suffix := acctest.RandString(t, 10) - clusterName := fmt.Sprintf("tf-test-cluster-%s", suffix) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - // Bootstrap KMS keys and needed IAM role. - diskKey := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "control-plane-disk-encryption") - signingKey1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ASYMMETRIC_SIGN", "us-central1", "rs256-service-account-signing-1") - signingKey2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ASYMMETRIC_SIGN", "us-central1", "rs256-service-account-signing-2") - backupKey := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "etcd-backups") - - // Here, we are granting the container engine service agent permissions on - // *ALL* Cloud KMS keys in the project. A more realistic usage would be to - // grant the service agent the necessary roles only on the individual keys - // we have created. - acctest.BootstrapIamMembers(t, []acctest.IamMember{ - { - Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", - Role: "roles/container.cloudKmsKeyUser", - }, - { - Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", - Role: "roles/privateca.certificateManager", - }, - { - Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", - }, - { - Member: "serviceAccount:service-{project_number}@container-engine-robot.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypterViaDelegation", - }, - }) - - // Find an active cryptoKeyVersion on the signing key. 
- var signingCryptoKeyVersion1 *cloudkms.CryptoKeyVersion - for _, ckv := range signingKey1.CryptoKeyVersions { - if ckv.State == "ENABLED" && ckv.Algorithm == "RSA_SIGN_PKCS1_4096_SHA256" { - signingCryptoKeyVersion1 = ckv - } - } - if signingCryptoKeyVersion1 == nil { - t.Fatal("Didn't find an appropriate cryptoKeyVersion for signingCryptoKeyVersion1 to use as the service account signing key") - } - - var signingCryptoKeyVersion2 *cloudkms.CryptoKeyVersion - for _, ckv := range signingKey2.CryptoKeyVersions { - if ckv.State == "ENABLED" && ckv.Algorithm == "RSA_SIGN_PKCS1_4096_SHA256" { - signingCryptoKeyVersion2 = ckv - } - } - if signingCryptoKeyVersion2 == nil { - t.Fatal("Didn't find an appropriate cryptoKeyVersion for signingCryptoKeyVersion2 to use as the service account signing key") - } - - context := map[string]interface{}{ - "resource_name": clusterName, - "networkName": networkName, - "subnetworkName": subnetworkName, - "disk_key": diskKey.CryptoKey.Name, - "backup_key": backupKey.CryptoKey.Name, - "signing_cryptokeyversion": signingCryptoKeyVersion1.Name, - "random_suffix": suffix, - } - - updateContext:= map[string]interface{}{ - "resource_name": clusterName, - "networkName": networkName, - "subnetworkName": subnetworkName, - "disk_key": diskKey.CryptoKey.Name, - "backup_key": backupKey.CryptoKey.Name, - "signing_cryptokeyversion": signingCryptoKeyVersion2.Name, - "random_suffix": suffix, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_EnableCPAFeaturesWithSAkeys(context), - }, - { - ResourceName: "google_container_cluster.with_cpa_features", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: 
testAccContainerCluster_EnableCPAFeaturesWithSAkeys(updateContext), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_container_cluster.with_cpa_features", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_container_cluster.with_cpa_features", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerCluster_EnableCPAFeaturesWithSAkeys(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_container_cluster" "with_cpa_features" { - name = "%{resource_name}" - location = "us-central1-a" - initial_node_count = 1 - release_channel { - channel = "RAPID" - } - user_managed_keys_config { - service_account_signing_keys = [ - "%{signing_cryptokeyversion}", - ] - service_account_verification_keys = [ - "%{signing_cryptokeyversion}", - ] - } - deletion_protection = false - network = "%{networkName}" - subnetwork = "%{subnetworkName}" - } - `, context) -} - -func TestAccContainerCluster_RbacBindingConfig(t *testing.T) { - t.Parallel() - - clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerCluster_RbacBindingConfig(clusterName, networkName, subnetworkName, true, true), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.#", "1"), - resource.TestCheckResourceAttr("google_container_cluster.primary", 
"rbac_binding_config.0.enable_insecure_binding_system_unauthenticated", "true"), - resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_authenticated", "true"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerCluster_RbacBindingConfig(clusterName, networkName, subnetworkName, false, false), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.#", "1"), - resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_unauthenticated", "false"), - resource.TestCheckResourceAttr("google_container_cluster.primary", "rbac_binding_config.0.enable_insecure_binding_system_authenticated", "false"), - ), - }, - { - ResourceName: "google_container_cluster.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerCluster_RbacBindingConfig(clusterName, networkName, subnetworkName string, unauthenticated, authenticated bool) string { - return fmt.Sprintf(` -resource "google_container_cluster" "primary" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - - network = "%s" - subnetwork = "%s" - - rbac_binding_config { - enable_insecure_binding_system_unauthenticated = %t - enable_insecure_binding_system_authenticated = %t - } - - deletion_protection = false -} -`, clusterName, networkName, subnetworkName, unauthenticated, authenticated) -} diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index 1356a211a42e..ccda8717798e 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -5,9 +5,6 @@ import ( "log" "regexp" "strings" -{{- if ne $.TargetVersionName `ga` }} - "sync" -{{- end }} "time" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -18,6 +15,7 @@ import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/container/v1" @@ -28,78 +26,6 @@ import ( var clusterIdRegex = regexp.MustCompile("projects/(?P[^/]+)/locations/(?P[^/]+)/clusters/(?P[^/]+)") -{{ if ne $.TargetVersionName `ga` }} -type nodePoolWithUpdateTime struct { - nodePool *container.NodePool - updateTime time.Time -} - -type nodePoolCache struct { - nodePools map[string]*nodePoolWithUpdateTime - ttl time.Duration - mutex sync.RWMutex -} - -func (nodePoolCache *nodePoolCache) get(nodePool string) (*container.NodePool, error) { - nodePoolCache.mutex.RLock() - defer nodePoolCache.mutex.RUnlock() - np, ok := nodePoolCache.nodePools[nodePool] - if !ok { - return nil, fmt.Errorf("NodePool %q was not found", nodePool) - } - return np.nodePool, nil -} - -func (nodePoolCache *nodePoolCache) refreshIfNeeded(d *schema.ResourceData, config *transport_tpg.Config, userAgent string, nodePoolInfo *NodePoolInformation, name string) error { - if !nodePoolCache.needsRefresh(nodePoolInfo.fullyQualifiedName(name)) { - return nil - } - - nodePoolCache.mutex.Lock() - defer nodePoolCache.mutex.Unlock() - - parent := fmt.Sprintf("projects/%s/locations/%s/clusters/%s", nodePoolInfo.project, nodePoolInfo.location, nodePoolInfo.cluster) - clusterNodePoolsListCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.List(parent) - if config.UserProjectOverride { - 
clusterNodePoolsListCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) - } - listNodePoolsResponse, err := clusterNodePoolsListCall.Do() - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePools from cluster %q", nodePoolInfo.cluster)) - } - - updateTime := time.Now() - for _, nodePool := range listNodePoolsResponse.NodePools { - nodePoolCache.nodePools[nodePoolInfo.fullyQualifiedName(nodePool.Name)] = &nodePoolWithUpdateTime{ - nodePool: nodePool, - updateTime: updateTime, - } - } - return nil -} - -func (nodePoolCache *nodePoolCache) needsRefresh(nodePool string) bool { - nodePoolCache.mutex.RLock() - defer nodePoolCache.mutex.RUnlock() - np, ok := nodePoolCache.nodePools[nodePool] - if !ok { - return true - } - return time.Since(np.updateTime) > nodePoolCache.ttl -} - -func (nodePoolCache *nodePoolCache) remove(nodePool string) { - nodePoolCache.mutex.Lock() - defer nodePoolCache.mutex.Unlock() - delete(nodePoolCache.nodePools, nodePool) -} - -var npCache = &nodePoolCache{ - nodePools: make(map[string]*nodePoolWithUpdateTime), - ttl: 30 * time.Second, -} -{{- end }} - func ResourceContainerNodePool() *schema.Resource { return &schema.Resource{ Create: resourceContainerNodePoolCreate, @@ -275,7 +201,7 @@ var schemaNodePool = map[string]*schema.Schema{ "tpu_topology": { Type: schema.TypeString, Optional: true, - Description: `The TPU topology like "2x4" or "2x2x2". https://cloud.google.com/kubernetes-engine/docs/concepts/plan-tpus#topology`, + Description: `TPU placement topology for pod slice node pool. https://cloud.google.com/tpu/docs/types-topologies#tpu_topologies`, }, }, }, @@ -464,7 +390,7 @@ var schemaNodePool = map[string]*schema.Schema{ Optional: true, ForceNew: true, Computed: true, - DiffSuppressFunc: tpgresource.CidrOrSizeDiffSuppress, + ValidateFunc: verify.ValidateIpCidrRange, Description: `The IP address range for pod IPs in this node pool. Only applicable if create_pod_range is true. 
Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14) to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14) to pick a specific range to use.`, }, "additional_node_network_configs": { @@ -544,16 +470,11 @@ var schemaNodePool = map[string]*schema.Schema{ "total_egress_bandwidth_tier": { Type: schema.TypeString, Required: true, - Description: `Specifies the total network bandwidth tier for the NodePool. [Valid values](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools#NodePool.Tier) include: "TIER_1" and "TIER_UNSPECIFIED".`, + Description: `Specifies the total network bandwidth tier for the NodePool.`, }, }, }, }, - "subnetwork": { - Type: schema.TypeString, - Computed: true, - Description: `The subnetwork path for the node pool. Format: projects/{project}/regions/{region}/subnetworks/{subnetwork} . If the cluster is associated with multiple subnetworks, the subnetwork for the node pool is picked based on the IP utilization during node pool creation and is immutable.`, - }, }, }, }, @@ -702,7 +623,6 @@ func resourceContainerNodePoolCreate(d *schema.ResourceData, meta interface{}) e return nil }) if err != nil { - d.SetId("") return fmt.Errorf("error creating NodePool: %s", err) } timeout -= time.Since(startTime) @@ -783,7 +703,6 @@ func resourceContainerNodePoolRead(d *schema.ResourceData, meta interface{}) err name := getNodePoolName(d.Id()) -{{ if eq $.TargetVersionName `ga` }} clusterNodePoolsGetCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Get(nodePoolInfo.fullyQualifiedName(name)) if config.UserProjectOverride { clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) @@ -793,16 +712,6 @@ func resourceContainerNodePoolRead(d *schema.ResourceData, meta interface{}) err return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePool %q from cluster %q", name, 
nodePoolInfo.cluster)) } -{{- else }} - npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) - nodePool, err := npCache.get(nodePoolInfo.fullyQualifiedName(name)) - if err != nil { - log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) - d.SetId("") - return nil - } -{{- end }} - npMap, err := flattenNodePool(d, config, nodePool, "") if err != nil { return err @@ -858,10 +767,6 @@ func resourceContainerNodePoolUpdate(d *schema.ResourceData, meta interface{}) e return err } -{{ if ne $.TargetVersionName `ga` }} - npCache.remove(nodePoolInfo.fullyQualifiedName(name)) -{{- end }} - return resourceContainerNodePoolRead(d, meta) } @@ -942,10 +847,6 @@ func resourceContainerNodePoolDelete(d *schema.ResourceData, meta interface{}) e d.SetId("") -{{ if ne $.TargetVersionName `ga` }} - npCache.remove(nodePoolInfo.fullyQualifiedName(name)) -{{- end }} - return nil } @@ -962,12 +863,12 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( } name := getNodePoolName(d.Id()) -{{- if eq $.TargetVersionName `ga` }} clusterNodePoolsGetCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Get(nodePoolInfo.fullyQualifiedName(name)) if config.UserProjectOverride { clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } _, err = clusterNodePoolsGetCall.Do() + if err != nil { if err = transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Container NodePool %s", name)); err == nil { return false, nil @@ -975,15 +876,6 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( // There was some other error in reading the resource return true, err } -{{- else }} - npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) - _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) - if err != nil { - log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster 
%q", name, nodePoolInfo.cluster)) - d.SetId("") - return false, nil - } -{{- end }} return true, nil } @@ -1336,7 +1228,6 @@ func flattenNodeNetworkConfig(c *container.NodeNetworkConfig, d *schema.Resource "network_performance_config": flattenNodeNetworkPerformanceConfig(c.NetworkPerformanceConfig), "additional_node_network_configs": flattenAdditionalNodeNetworkConfig(c.AdditionalNodeNetworkConfigs), "additional_pod_network_configs": flattenAdditionalPodNetworkConfig(c.AdditionalPodNetworkConfigs), - "subnetwork": c.Subnetwork, }) } return result @@ -1516,15 +1407,13 @@ func nodePoolUpdate(d *schema.ResourceData, meta interface{}, nodePoolInfo *Node } if err := retryWhileIncompatibleOperation(timeout, npLockKey, updateF); err != nil { - return err + return err } log.Printf("[INFO] Updated autoscaling in Node Pool %s", d.Id()) } if d.HasChange(prefix + "node_config") { - if err := nodePoolNodeConfigUpdate(d, config, nodePoolInfo, prefix, name, timeout); err != nil { - return err - } + nodePoolNodeConfigUpdate(d, config, nodePoolInfo, prefix, name, timeout) } if d.HasChange(prefix + "node_count") { diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl index 7cf47290f1f7..21974f355452 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_meta.yaml.tmpl @@ -39,7 +39,6 @@ fields: api_field: 'network_config.pod_cidr_overprovision_config.disable' - field: 'network_config.pod_ipv4_cidr_block' - field: 'network_config.pod_range' - - field: 'network_config.subnetwork' - field: 'node_config.advanced_machine_features.enable_nested_virtualization' api_field: 'config.advanced_machine_features.enable_nested_virtualization' - field: 'node_config.advanced_machine_features.threads_per_core' @@ -48,8 +47,6 @@ fields: 
api_field: 'config.boot_disk_kms_key' - field: 'node_config.confidential_nodes.enabled' api_field: 'config.confidential_nodes.enabled' - - field: 'node_config.confidential_nodes.confidential_instance_type' - api_field: 'config.confidential_nodes.confidential_instance_type' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' api_field: 'config.containerd_config.private_registry_access_config.certificate_authority_domain_config.fqdns' - field: 'node_config.containerd_config.private_registry_access_config.certificate_authority_domain_config.gcp_secret_manager_certificate_config.secret_uri' diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 571f99164a7a..24f698677a6d 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -41,27 +41,11 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { t.Parallel() pid := envvar.GetTestProjectFromEnv() + randomSuffix := acctest.RandString(t, 10) + clusterName := fmt.Sprintf("tf-test-cluster-%s", randomSuffix) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - tagData := map[string]interface{}{ - "purpose": "GCE_FIREWALL", - "purpose_data": map[string]interface{}{ - "network": pid + "/" + networkName, - }, - } - tagKey1 := acctest.BootstrapSharedTestProjectTagKey(t, "resourceManagerTags1", tagData) - tagKey2 := acctest.BootstrapSharedTestProjectTagKey(t, "resourceManagerTags2", tagData) - - context := map[string]interface{}{ - "pid": pid, - "org": envvar.GetTestOrgFromEnv(t), - "network": networkName, - "subnet": acctest.BootstrapSubnet(t, "gke-cluster", networkName), - "tagKey1": tagKey1, - "tagValue1": acctest.BootstrapSharedTestProjectTagValue(t, 
"resourceManagerTags1", tagKey1), - "tagKey2": tagKey2, - "tagValue2": acctest.BootstrapSharedTestProjectTagValue(t, "resourceManagerTags2", tagKey2), - "random_suffix": acctest.RandString(t, 10), - } + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) bootstrapGkeTagManagerServiceAgents(t) @@ -74,7 +58,7 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_resourceManagerTags(context), + Config: testAccContainerNodePool_resourceManagerTags(pid, clusterName, networkName, subnetworkName, randomSuffix), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_container_node_pool.primary_nodes", "node_config.0.resource_manager_tags.%"), ), @@ -86,7 +70,7 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { ImportStateVerifyIgnore: []string{"min_master_version", "cluster"}, }, { - Config: testAccContainerNodePool_resourceManagerTagsUpdate1(context), + Config: testAccContainerNodePool_resourceManagerTagsUpdate1(pid, clusterName, networkName, subnetworkName, randomSuffix), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("google_container_node_pool.primary_nodes", "node_config.0.resource_manager_tags.%"), ), @@ -98,7 +82,7 @@ func TestAccContainerNodePool_resourceManagerTags(t *testing.T) { ImportStateVerifyIgnore: []string{"min_master_version", "cluster"}, }, { - Config: testAccContainerNodePool_resourceManagerTagsUpdate2(context), + Config: testAccContainerNodePool_resourceManagerTagsUpdate2(pid, clusterName, networkName, subnetworkName, randomSuffix), }, { ResourceName: "google_container_node_pool.primary_nodes", @@ -317,408 +301,6 @@ func TestAccContainerNodePool_withNodeConfig(t *testing.T) { }) } - -func TestAccContainerNodePool_withClusterBootDisk(t *testing.T) { - t.Parallel() - - cluster := fmt.Sprintf("tf-test-cluster-%s", 
acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerNodePool_withClusterBootDisk(cluster, networkName, subnetworkName), - }, - { - ResourceName: "google_container_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccContainerNodePool_withClusterBootDiskUpdate(cluster, networkName, subnetworkName), - }, - { - ResourceName: "google_container_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerNodePool_withClusterBootDisk(cluster, networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" - - node_config { - machine_type = "c3-standard-4" - boot_disk { - size_gb = 100 - disk_type = "hyperdisk-balanced" - provisioned_iops = 3456 - provisioned_throughput = 234 - } - } -} -`, cluster, networkName, subnetworkName) -} - -func testAccContainerNodePool_withClusterBootDiskUpdate(cluster, networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - 
location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" - - node_config { - machine_type = "c3-standard-4" - boot_disk { - size_gb = 170 - disk_type = "hyperdisk-balanced" - provisioned_iops = 4567 - provisioned_throughput = 345 - } - } -} -`, cluster, networkName, subnetworkName) -} - - - -func TestAccContainerNodePool_withNodeConfigWithBootDiskConfig(t *testing.T) { - t.Parallel() - - cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerNodePool_withBootDisk(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccContainerNodePool_withBootDiskUpdate(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccContainerNodePool_withNodeConfigWithBootDiskConfigChangeType(t *testing.T) { - t.Parallel() - - cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerNodePool_withBootDisk(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccContainerNodePool_withBootDiskTypeChangeUpdate(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccContainerNodePool_withNodeConfigWithBootDiskConfigChangeTypeLegacy(t *testing.T) { - t.Parallel() - - cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerNodePool_withBootDisk(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccContainerNodePool_withLegacyBootDiskTypeSize(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccContainerNodePool_withLegacyNodeConfigAndBootDiskUpdate(t *testing.T) { - // Ensure that the legacy configuration (top level of node_config) can be updated to the new configuration (in boot_disk). 
- t.Parallel() - - cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - nodePool := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerNodePool_withLegacyBootDiskTypeSize(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccContainerNodePool_withLegacyBootDiskTypeSizeUpdateNew(cluster, nodePool, networkName, subnetworkName), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccContainerNodePool_withBootDisk(cluster, np, networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" -} - -resource "google_container_node_pool" "np" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - cluster = google_container_cluster.cluster.name - initial_node_count = 3 - - node_config { - machine_type = "c3-standard-4" - boot_disk { - size_gb = 100 - disk_type = "hyperdisk-balanced" - provisioned_iops = 3456 - provisioned_throughput = 234 - } - } -} -`, cluster, networkName, subnetworkName, np) -} - -func testAccContainerNodePool_withBootDiskUpdate(cluster, np, 
networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" -} - -resource "google_container_node_pool" "np" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - cluster = google_container_cluster.cluster.name - initial_node_count = 3 - - node_config { - machine_type = "c3-standard-4" - boot_disk { - size_gb = 200 - disk_type = "hyperdisk-balanced" - provisioned_iops = 4567 - provisioned_throughput = 345 - } - } -} -`, cluster, networkName, subnetworkName, np) -} - -// Checks to ensure that boot disk type can be moved off of hyperdisk-balanced. -func testAccContainerNodePool_withBootDiskTypeChangeUpdate(cluster, np, networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" -} - -resource "google_container_node_pool" "np" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - cluster = google_container_cluster.cluster.name - initial_node_count = 3 - - node_config { - machine_type = "c3-standard-4" - boot_disk { - size_gb = 201 - disk_type = "pd-balanced" - } - } -} -`, cluster, networkName, subnetworkName, np) -} - -// Checks to ensure legacy boot disk type and size can be used. 
-func testAccContainerNodePool_withLegacyBootDiskTypeSize(cluster, np, networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" -} - -resource "google_container_node_pool" "np" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - cluster = google_container_cluster.cluster.name - initial_node_count = 3 - - node_config { - machine_type = "c3-standard-4" - disk_type = "pd-balanced" - disk_size_gb = 202 - } -} -`, cluster, networkName, subnetworkName, np) -} - -// Used to check that legacy -> new updates work. -func testAccContainerNodePool_withLegacyBootDiskTypeSizeUpdateNew(cluster, np, networkName, subnetworkName string) string { - return fmt.Sprintf(` -provider "google" { - alias = "user-project-override" - user_project_override = true -} -resource "google_container_cluster" "cluster" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - initial_node_count = 3 - deletion_protection = false - network = "%s" - subnetwork = "%s" -} - -resource "google_container_node_pool" "np" { - provider = google.user-project-override - name = "%s" - location = "us-central1-a" - cluster = google_container_cluster.cluster.name - initial_node_count = 3 - - node_config { - machine_type = "c3-standard-4" - boot_disk { - size_gb = 203 - disk_type = "hyperdisk-balanced" - } - } -} -`, cluster, networkName, subnetworkName, np) -} - - func TestAccContainerNodePool_withTaintsUpdate(t *testing.T) { t.Parallel() @@ -947,7 +529,7 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: 
testAccContainerNodePool_withKubeletConfig(cluster, np, "static", "100ms", networkName, subnetworkName, "TRUE", "100Mi", "1m", "10m", true, true, 2048, 10, 10, 85), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "static", "100ms", networkName, subnetworkName, "TRUE", "100Mi", "1m", "10m", true, 2048, 10, 10, 85), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -956,8 +538,6 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", "node_config.0.kubelet_config.0.cpu_cfs_quota", "true"), - resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", - "node_config.0.kubelet_config.0.single_process_oom_kill", "true"), resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", "node_config.0.kubelet_config.0.insecure_kubelet_readonly_port_enabled", "TRUE"), resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", @@ -984,7 +564,7 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "", "", networkName, subnetworkName, "FALSE", "200Mi", "30s", "", false, true, 1024, 5, 50, 80), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "", "", networkName, subnetworkName, "FALSE", "200Mi", "30s", "", false, 1024, 5, 50, 80), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -1022,7 +602,7 @@ func TestAccContainerNodePool_withInvalidKubeletCpuManagerPolicy(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "dontexist", "100us", networkName, subnetworkName,"TRUE", "", "", "", false, true, 1024, 2, 70, 75), + 
Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "dontexist", "100us", networkName, subnetworkName,"TRUE", "", "", "", false, 1024, 2, 70, 75), ExpectError: regexp.MustCompile(`.*to be one of \["?static"? "?none"? "?"?\].*`), }, }, @@ -1094,11 +674,6 @@ func TestAccContainerNodePool_withWindowsNodeConfig(t *testing.T) { // Perform an update. { Config: testAccContainerNodePool_withWindowsNodeConfig(cluster, np, "OS_VERSION_LTSC2022"), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_container_node_pool.with_windows_node_config", plancheck.ResourceActionUpdate), - }, - }, }, { ResourceName: "google_container_node_pool.with_windows_node_config", @@ -1237,21 +812,6 @@ func TestAccContainerNodePool_withMultiNicNetworkConfig(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccContainerNodePool_withMultiNicNetworkConfig(cluster, np, network), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.cluster", "enable_multi_networking", "true"), - ), - }, - { - ResourceName: "google_container_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"network_config.0.create_pod_range", "deletion_protection"}, - }, - { - Config: testAccContainerNodePool_withMultiNicNetworkConfigUpdate(cluster, np, network), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_container_cluster.cluster", "enable_multi_networking", "false"), - ), }, { ResourceName: "google_container_cluster.cluster", @@ -1965,7 +1525,6 @@ func TestAccContainerNodePool_withSoleTenantConfig(t *testing.T) { np := fmt.Sprintf("tf-test-np-%s", acctest.RandString(t, 10)) networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - minNodeCpus := 1 acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { 
acctest.AccTestPreCheck(t) }, @@ -1973,7 +1532,7 @@ func TestAccContainerNodePool_withSoleTenantConfig(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName, minNodeCpus), + Config: testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName), }, { ResourceName: "google_container_node_pool.with_sole_tenant_config", @@ -2876,67 +2435,6 @@ resource "google_container_node_pool" "np" { `, cluster, networkName, subnetworkName, enableNV, np, enableNV) } -func TestAccContainerNodePool_performanceMonitoringUnit(t *testing.T) { - t.Parallel() - - cluster := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) - np := fmt.Sprintf("tf-test-nodepool-%s", acctest.RandString(t, 10)) - networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckContainerNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccContainerNodePool_performanceMonitoringUnit(cluster, np, networkName, subnetworkName, "ARCHITECTURAL"), - }, - { - ResourceName: "google_container_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func testAccContainerNodePool_performanceMonitoringUnit(cluster, np, networkName, subnetworkName, pmuLevel string) string { - return fmt.Sprintf(` -resource "google_container_cluster" "cluster" { - name = "%s" - location = "us-central1-a" - initial_node_count = 1 - deletion_protection = false - network = "%s" - subnetwork = "%s" - - node_config { - machine_type = "c4-standard-4" - advanced_machine_features { - 
threads_per_core = 2 - performance_monitoring_unit = "%s" - } - } -} - -resource "google_container_node_pool" "np" { - name = "%s" - location = "us-central1-a" - cluster = google_container_cluster.cluster.name - initial_node_count = 2 - - node_config { - machine_type = "c4-standard-4" - advanced_machine_features { - threads_per_core = 2 - performance_monitoring_unit = "%s" - } - } -} -`, cluster, networkName, subnetworkName, pmuLevel, np, pmuLevel) -} func testAccCheckContainerNodePoolDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { @@ -3453,7 +2951,7 @@ resource "google_container_node_pool" "np_with_management" { node_config { machine_type = "g1-small" - disk_size_gb = 15 + disk_size_gb = 10 oauth_scopes = ["compute-rw", "storage-ro", "logging-write", "monitoring"] } } @@ -3478,7 +2976,7 @@ resource "google_container_node_pool" "np_with_node_config" { initial_node_count = 1 node_config { machine_type = "g1-small" - disk_size_gb = 15 + disk_size_gb = 10 oauth_scopes = [ "https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/devstorage.read_only", @@ -3535,7 +3033,7 @@ resource "google_container_node_pool" "np_with_node_config" { initial_node_count = 1 node_config { machine_type = "g1-small" - disk_size_gb = 15 + disk_size_gb = 10 oauth_scopes = [ "https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/devstorage.read_only", @@ -3856,7 +3354,7 @@ resource "google_container_node_pool" "with_sandbox_config" { } {{- end }} -func testAccContainerNodePool_withKubeletConfig(cluster, np, policy, period, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled, containerLogMaxSize, imageMinimumGcAge, imageMaximumGcAge string, quota, singleProcessOomKill bool, podPidsLimit, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent int) string { +func testAccContainerNodePool_withKubeletConfig(cluster, np, policy, period, networkName, subnetworkName, 
insecureKubeletReadonlyPortEnabled, containerLogMaxSize, imageMinimumGcAge, imageMaximumGcAge string, quota bool, podPidsLimit, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent int) string { return fmt.Sprintf(` data "google_container_engine_versions" "central1a" { location = "us-central1-a" @@ -3893,34 +3391,7 @@ resource "google_container_node_pool" "with_kubelet_config" { image_gc_high_threshold_percent = %d image_minimum_gc_age = %q image_maximum_gc_age = %q - allowed_unsafe_sysctls = ["kernel.shm*", "kernel.msg*", "kernel.sem", "fs.mqueue.*", "net.*"] - single_process_oom_kill = %v - max_parallel_image_pulls = 5 - eviction_max_pod_grace_period_seconds = 200 - eviction_soft { - memory_available = "100Mi" - nodefs_available = "50%%" - nodefs_inodes_free = "40%%" - imagefs_available = "30%%" - imagefs_inodes_free = "20%%" - pid_available = "10%%" - } - eviction_soft_grace_period { - memory_available = "5m" - nodefs_available = "4m30s" - nodefs_inodes_free = "3.6m" - imagefs_available = "100s" - imagefs_inodes_free = "2m" - pid_available = "3m2.6s" - } - eviction_minimum_reclaim { - memory_available = "10%%" - nodefs_available = "8.5%%" - nodefs_inodes_free = "5.0%%" - imagefs_available = "3%%" - imagefs_inodes_free = "9%%" - pid_available = "5%%" - } + allowed_unsafe_sysctls = ["kernel.shm*", "kernel.msg*", "kernel.sem", "fs.mqueue.*", "net.*"] } oauth_scopes = [ "https://www.googleapis.com/auth/logging.write", @@ -3929,7 +3400,7 @@ resource "google_container_node_pool" "with_kubelet_config" { logging_variant = "DEFAULT" } } -`, cluster, networkName, subnetworkName, np, policy, quota, period, insecureKubeletReadonlyPortEnabled, podPidsLimit, containerLogMaxSize, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent, imageMinimumGcAge, imageMaximumGcAge, singleProcessOomKill) +`, cluster, networkName, subnetworkName, np, policy, quota, period, insecureKubeletReadonlyPortEnabled, podPidsLimit, 
containerLogMaxSize, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent, imageMinimumGcAge, imageMaximumGcAge) } func testAccContainerNodePool_withLinuxNodeConfig(cluster, np, tcpMem, networkName, subnetworkName string) string { @@ -3953,8 +3424,6 @@ func testAccContainerNodePool_withLinuxNodeConfig(cluster, np, tcpMem, networkNa "net.ipv4.tcp_tw_reuse" = 1 "kernel.shmmni" = 8192 } - transparent_hugepage_enabled = "TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS" - transparent_hugepage_defrag = "TRANSPARENT_HUGEPAGE_DEFRAG_DEFER_WITH_MADVISE" } `, tcpMem, tcpMem) } @@ -4100,8 +3569,7 @@ resource "google_compute_subnetwork" "container_subnetwork" { resource "google_container_cluster" "cluster" { name = "%s" - # Zonal rather than regional to reduce setup time and node count per zone. - location = "us-central1-c" + location = "us-central1" initial_node_count = 1 network = google_compute_network.container_network.name @@ -4111,82 +3579,82 @@ resource "google_container_cluster" "cluster" { services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name } release_channel { - channel = "RAPID" + channel = "RAPID" } deletion_protection = false } resource "google_container_node_pool" "with_manual_pod_cidr" { - name = "%s-manual" - location = google_container_cluster.cluster.location - cluster = google_container_cluster.cluster.name + name = "%s-manual" + location = "us-central1" + cluster = google_container_cluster.cluster.name node_count = 1 network_config { create_pod_range = false - pod_range = google_compute_subnetwork.container_subnetwork.secondary_ip_range[2].range_name + pod_range = google_compute_subnetwork.container_subnetwork.secondary_ip_range[2].range_name } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_auto_pod_cidr" { - name = 
"%s-auto" - location = google_container_cluster.cluster.location - cluster = google_container_cluster.cluster.name + name = "%s-auto" + location = "us-central1" + cluster = google_container_cluster.cluster.name node_count = 1 network_config { - create_pod_range = true - pod_range = "auto-pod-range" - pod_ipv4_cidr_block = "10.2.0.0/20" + create_pod_range = true + pod_range = "auto-pod-range" + pod_ipv4_cidr_block = "10.2.0.0/20" } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_pco_disabled" { - name = "%s-pco" - location = google_container_cluster.cluster.location - cluster = google_container_cluster.cluster.name + name = "%s-pco" + location = "us-central1" + cluster = google_container_cluster.cluster.name node_count = 1 network_config { - pod_cidr_overprovision_config { - disabled = true - } + pod_cidr_overprovision_config { + disabled = true + } } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_tier1_net" { - name = "%s-tier1" - location = google_container_cluster.cluster.location - cluster = google_container_cluster.cluster.name + name = "%s-tier1" + location = "us-central1" + cluster = google_container_cluster.cluster.name node_count = 1 node_locations = [ - "us-central1-c", + "us-central1-a", ] network_config { - network_performance_config { - total_egress_bandwidth_tier = "%s" - } + network_performance_config { + total_egress_bandwidth_tier = "%s" + } } node_config { - machine_type = "n2-standard-32" - gvnic { - enabled = true - } - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + machine_type = "n2-standard-32" + gvnic { + enabled = true + } + oauth_scopes = [ + 
"https://www.googleapis.com/auth/cloud-platform", + ] } } @@ -4352,93 +3820,6 @@ resource "google_container_node_pool" "with_multi_nic" { `, network, network, network, network, network, network, cluster, np) } -func testAccContainerNodePool_withMultiNicNetworkConfigUpdate(cluster, np, network string) string { - return fmt.Sprintf(` -resource "google_compute_network" "container_network" { - name = "%s-1" - auto_create_subnetworks = false -} - -resource "google_compute_network" "addn_net_1" { - name = "%s-2" - auto_create_subnetworks = false -} - -resource "google_compute_network" "addn_net_2" { - name = "%s-3" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "container_subnetwork" { - name = "%s-subnet-1" - network = google_compute_network.container_network.name - ip_cidr_range = "10.0.36.0/24" - region = "us-central1" - private_ip_google_access = true - - secondary_ip_range { - range_name = "pod" - ip_cidr_range = "10.0.0.0/19" - } - - secondary_ip_range { - range_name = "svc" - ip_cidr_range = "10.0.32.0/22" - } - - lifecycle { - ignore_changes = [ - # The auto nodepool creates a secondary range which diffs this resource. 
- secondary_ip_range, - ] - } -} - -resource "google_compute_subnetwork" "subnet1" { - name = "%s-subnet-2" - network = google_compute_network.addn_net_1.name - ip_cidr_range = "10.0.37.0/24" - region = "us-central1" -} - -resource "google_compute_subnetwork" "subnet2" { - name = "%s-subnet-3" - network = google_compute_network.addn_net_2.name - ip_cidr_range = "10.0.38.0/24" - region = "us-central1" - - secondary_ip_range { - range_name = "pod" - ip_cidr_range = "10.0.64.0/19" - } -} - -resource "google_container_cluster" "cluster" { - name = "%s" - location = "us-central1" - initial_node_count = 1 - - network = google_compute_network.container_network.name - subnetwork = google_compute_subnetwork.container_subnetwork.name - ip_allocation_policy { - cluster_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[0].range_name - services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name - } - private_cluster_config { - enable_private_nodes = true - master_ipv4_cidr_block = "10.42.0.0/28" - } - release_channel { - channel = "RAPID" - } - enable_multi_networking = false - datapath_provider = "ADVANCED_DATAPATH" - deletion_protection = false -} - -`, network, network, network, network, network, network, cluster) -} - {{ if not (or (eq $.TargetVersionName ``) (eq $.TargetVersionName `ga`)) }} func testAccContainerNodePool_withBootDiskKmsKey(cluster, np, networkName, subnetworkName string) string { return fmt.Sprintf(` @@ -4612,7 +3993,7 @@ resource "google_container_node_pool" "np_with_node_config_scope_alias" { initial_node_count = 1 node_config { machine_type = "g1-small" - disk_size_gb = 15 + disk_size_gb = 10 oauth_scopes = ["compute-rw", "storage-ro", "logging-write", "monitoring"] } } @@ -4885,7 +4266,7 @@ resource "google_container_node_pool" "np2" { `, cluster, networkName, subnetworkName, np1, np2) } -func testAccContainerNodePool_withSoleTenantConfig(cluster, np, 
networkName, subnetworkName string, minNodeCpus int) string { +func testAccContainerNodePool_withSoleTenantConfig(cluster, np, networkName, subnetworkName string) string { return fmt.Sprintf(` data "google_container_engine_versions" "central1a" { location = "us-central1-a" @@ -4895,7 +4276,6 @@ resource "google_compute_node_template" "soletenant-tmpl" { name = "tf-test-soletenant-tmpl" region = "us-central1" node_type = "n1-node-96-624" - cpu_overcommit_type = "ENABLED" } resource "google_compute_node_group" "nodes" { @@ -4928,7 +4308,6 @@ resource "google_container_node_pool" "with_sole_tenant_config" { operator = "IN" values = [google_compute_node_group.nodes.name] } - min_node_cpus = %d } oauth_scopes = [ "https://www.googleapis.com/auth/logging.write", @@ -4936,7 +4315,7 @@ resource "google_container_node_pool" "with_sole_tenant_config" { ] } } -`, cluster, networkName, subnetworkName, np, minNodeCpus) +`, cluster, networkName, subnetworkName, np) } func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { @@ -4953,7 +4332,7 @@ func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "", "n2d-standard-2"), + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, true), }, { ResourceName: "google_container_node_pool.np", @@ -4961,7 +4340,7 @@ func TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, true, "", "n2d-standard-2"), + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false), }, { ResourceName: "google_container_node_pool.np", @@ -4969,41 +4348,29 @@ func 
TestAccContainerNodePool_withConfidentialNodes(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "SEV", "n2d-standard-2"), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - },{ - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "SEV_SNP", "n2d-standard-2"), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - },{ - Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, false, "TDX", "c3-standard-4"), - }, - { - ResourceName: "google_container_node_pool.np", - ImportState: true, - ImportStateVerify: true, - }, + Config: testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName, true), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, }, }) } -func testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName string, enable bool, confidentialInstanceType, machineType string) string { - confInsTypeString := "" - if confidentialInstanceType != "" { - confInsTypeString = fmt.Sprintf(`confidential_instance_type = "%s"`, confidentialInstanceType) - } +func testAccContainerNodePool_withConfidentialNodes(clusterName, np, networkName, subnetworkName string, confidential bool) string { return fmt.Sprintf(` resource "google_container_cluster" "cluster" { name = "%s" location = "us-central1-a" initial_node_count = 1 + node_config { + confidential_nodes { + enabled = false + } + machine_type = "n2-standard-2" + } deletion_protection = false network = "%s" subnetwork = "%s" @@ -5015,14 +4382,13 @@ resource "google_container_node_pool" "np" { cluster = google_container_cluster.cluster.name initial_node_count = 1 node_config { - machine_type 
= "%s" + machine_type = "n2d-standard-2" // can't be e2 because Confidential Nodes require AMD CPUs confidential_nodes { - enabled = %t - %s + enabled = "%t" } } } -`, clusterName, networkName, subnetworkName, np, machineType, enable, confInsTypeString) +`, clusterName, networkName, subnetworkName, np, confidential) } func TestAccContainerNodePool_withLocalSsdEncryptionMode(t *testing.T) { @@ -5213,20 +4579,13 @@ func TestAccContainerNodePool_withFlexStart(t *testing.T) { func testAccContainerNodePool_withFlexStart(clusterName, np, networkName, subnetworkName string) string { return fmt.Sprintf(` -data "google_container_engine_versions" "central1a" { - location = "us-central1-a" -} - resource "google_container_cluster" "cluster" { + min_master_version = "1.32.3-gke.1717000" + name = "%s" location = "us-central1-a" initial_node_count = 1 deletion_protection = false - - min_master_version = data.google_container_engine_versions.central1a.release_channel_latest_version["RAPID"] - release_channel { - channel = "RAPID" - } network = "%s" subnetwork = "%s" } @@ -5524,10 +4883,44 @@ resource "google_container_node_pool" "without_confidential_boot_disk" { `, cluster, networkName, subnetworkName, np) } -func testAccContainerNodePool_resourceManagerTags(context map[string]interface{}) string { - return acctest.Nprintf(` +func testAccContainerNodePool_resourceManagerTags(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { + return fmt.Sprintf(` data "google_project" "project" { - project_id = "%{pid}" + project_id = "%[1]s" +} + +resource "google_tags_tag_key" "key1" { + parent = "projects/%[1]s" + short_name = "foobarbaz1-%[2]s" + description = "For foo/bar1 resources" + purpose = "GCE_FIREWALL" + purpose_data = { + network = "%[1]s/%[4]s" + } +} + +resource "google_tags_tag_value" "value1" { + parent = google_tags_tag_key.key1.id + short_name = "foo1-%[2]s" + description = "For foo1 resources" +} + +resource "google_tags_tag_key" "key2" { + 
parent = "projects/%[1]s" + short_name = "foobarbaz2-%[2]s" + description = "For foo/bar2 resources" + purpose = "GCE_FIREWALL" + purpose_data = { + network = "%[1]s/%[4]s" + } + + depends_on = [google_tags_tag_key.key1] +} + +resource "google_tags_tag_value" "value2" { + parent = google_tags_tag_key.key2.id + short_name = "foo2-%[2]s" + description = "For foo2 resources" } data "google_container_engine_versions" "uscentral1a" { @@ -5535,7 +4928,7 @@ data "google_container_engine_versions" "uscentral1a" { } resource "google_container_cluster" "primary" { - name = "tf-test-cluster-%{random_suffix}" + name = "%[3]s" location = "us-central1-a" min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] @@ -5546,8 +4939,13 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 deletion_protection = false - network = "%{network}" - subnetwork = "%{subnet}" + network = "%[4]s" + subnetwork = "%[5]s" + + timeouts { + create = "30m" + update = "40m" + } } # Separately Managed Node Pool @@ -5560,20 +4958,55 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { + machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 resource_manager_tags = { - "%{pid}/%{tagKey1}" = "%{tagValue1}" + (google_tags_tag_key.key1.id) = google_tags_tag_value.value1.id } } } -`, context) +`, projectID, randomSuffix, clusterName, networkName, subnetworkName) } -func testAccContainerNodePool_resourceManagerTagsUpdate1(context map[string]interface{}) string { - return acctest.Nprintf(` +func testAccContainerNodePool_resourceManagerTagsUpdate1(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { + return fmt.Sprintf(` data "google_project" "project" { - project_id = "%{pid}" + project_id = "%[1]s" +} + +resource "google_tags_tag_key" "key1" { + parent = "projects/%[1]s" + short_name = "foobarbaz1-%[2]s" + description = "For foo/bar1 
resources" + purpose = "GCE_FIREWALL" + purpose_data = { + network = "%[1]s/%[4]s" + } +} + +resource "google_tags_tag_value" "value1" { + parent = google_tags_tag_key.key1.id + short_name = "foo1-%[2]s" + description = "For foo1 resources" +} + +resource "google_tags_tag_key" "key2" { + parent = "projects/%[1]s" + short_name = "foobarbaz2-%[2]s" + description = "For foo/bar2 resources" + purpose = "GCE_FIREWALL" + purpose_data = { + network = "%[1]s/%[4]s" + } + + depends_on = [google_tags_tag_key.key1] +} + +resource "google_tags_tag_value" "value2" { + parent = google_tags_tag_key.key2.id + short_name = "foo2-%[2]s" + description = "For foo2 resources" } data "google_container_engine_versions" "uscentral1a" { @@ -5581,7 +5014,7 @@ data "google_container_engine_versions" "uscentral1a" { } resource "google_container_cluster" "primary" { - name = "tf-test-cluster-%{random_suffix}" + name = "%[3]s" location = "us-central1-a" min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] @@ -5592,8 +5025,13 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 deletion_protection = false - network = "%{network}" - subnetwork = "%{subnet}" + network = "%[4]s" + subnetwork = "%[5]s" + + timeouts { + create = "30m" + update = "40m" + } } # Separately Managed Node Pool @@ -5606,21 +5044,56 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { + machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 resource_manager_tags = { - "%{pid}/%{tagKey1}" = "%{tagValue1}" - "%{pid}/%{tagKey2}" = "%{tagValue2}" + (google_tags_tag_key.key1.id) = google_tags_tag_value.value1.id + (google_tags_tag_key.key2.id) = google_tags_tag_value.value2.id } } } -`, context) +`, projectID, randomSuffix, clusterName, networkName, subnetworkName) } -func testAccContainerNodePool_resourceManagerTagsUpdate2(context map[string]interface{}) string { - return 
acctest.Nprintf(` +func testAccContainerNodePool_resourceManagerTagsUpdate2(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { + return fmt.Sprintf(` data "google_project" "project" { - project_id = "%{pid}" + project_id = "%[1]s" +} + +resource "google_tags_tag_key" "key1" { + parent = "projects/%[1]s" + short_name = "foobarbaz1-%[2]s" + description = "For foo/bar1 resources" + purpose = "GCE_FIREWALL" + purpose_data = { + network = "%[1]s/%[4]s" + } +} + +resource "google_tags_tag_value" "value1" { + parent = google_tags_tag_key.key1.id + short_name = "foo1-%[2]s" + description = "For foo1 resources" +} + +resource "google_tags_tag_key" "key2" { + parent = "projects/%[1]s" + short_name = "foobarbaz2-%[2]s" + description = "For foo/bar2 resources" + purpose = "GCE_FIREWALL" + purpose_data = { + network = "%[1]s/%[4]s" + } + + depends_on = [google_tags_tag_key.key1] +} + +resource "google_tags_tag_value" "value2" { + parent = google_tags_tag_key.key2.id + short_name = "foo2-%[2]s" + description = "For foo2 resources" } data "google_container_engine_versions" "uscentral1a" { @@ -5628,7 +5101,7 @@ data "google_container_engine_versions" "uscentral1a" { } resource "google_container_cluster" "primary" { - name = "tf-test-cluster-%{random_suffix}" + name = "%[3]s" location = "us-central1-a" min_master_version = data.google_container_engine_versions.uscentral1a.release_channel_latest_version["STABLE"] @@ -5639,8 +5112,13 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 deletion_protection = false - network = "%{network}" - subnetwork = "%{subnet}" + network = "%[4]s" + subnetwork = "%[5]s" + + timeouts { + create = "30m" + update = "40m" + } } # Separately Managed Node Pool @@ -5653,10 +5131,11 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { + machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 } } -`, context) +`, projectID, randomSuffix, 
clusterName, networkName, subnetworkName) } func TestAccContainerNodePool_privateRegistry(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl index 2312ff2334b3..3a77c6601e69 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job.go.tmpl @@ -222,15 +222,6 @@ func ResourceDataflowFlexTemplateJob() *schema.Resource { }, }, - "additional_pipeline_options": { - Type: schema.TypeSet, - Optional: true, - Description: `List of pipeline options that should be used by the job. An example value is ["numberOfWorkerHarnessThreads=20"].`, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "enable_streaming_engine": { Type: schema.TypeBool, Optional: true, @@ -322,8 +313,6 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t additionalExperiments := tpgresource.ConvertStringSet(d.Get("additional_experiments").(*schema.Set)) - additionalPipelineOptions := tpgresource.ConvertStringSet(d.Get("additional_pipeline_options").(*schema.Set)) - var autoscalingAlgorithm string autoscalingAlgorithm, updatedParameters = dataflowFlexJobTypeTransferVar("autoscaling_algorithm", "autoscalingAlgorithm", updatedParameters, d) @@ -394,23 +383,22 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t launcherMachineType, updatedParameters := dataflowFlexJobTypeTransferVar("launcher_machine_type", "launcherMachineType", updatedParameters, d) env := dataflow.FlexTemplateRuntimeEnvironment{ - AdditionalUserLabels: tpgresource.ExpandStringMap(d, "effective_labels"), - AutoscalingAlgorithm: autoscalingAlgorithm, - NumWorkers: int64(numWorkers), - MaxWorkers: int64(maxNumWorkers), - Network: network, - ServiceAccountEmail: serviceAccountEmail, - 
Subnetwork: subnetwork, - TempLocation: tempLocation, - StagingLocation: stagingLocation, - MachineType: machineType, - KmsKeyName: kmsKeyName, - IpConfiguration: ipConfiguration, - EnableStreamingEngine: enableStreamingEngine, - AdditionalExperiments: additionalExperiments, - AdditionalPipelineOptions: additionalPipelineOptions, - SdkContainerImage: sdkContainerImage, - LauncherMachineType: launcherMachineType, + AdditionalUserLabels: tpgresource.ExpandStringMap(d, "effective_labels"), + AutoscalingAlgorithm: autoscalingAlgorithm, + NumWorkers: int64(numWorkers), + MaxWorkers: int64(maxNumWorkers), + Network: network, + ServiceAccountEmail: serviceAccountEmail, + Subnetwork: subnetwork, + TempLocation: tempLocation, + StagingLocation: stagingLocation, + MachineType: machineType, + KmsKeyName: kmsKeyName, + IpConfiguration: ipConfiguration, + EnableStreamingEngine: enableStreamingEngine, + AdditionalExperiments: additionalExperiments, + SdkContainerImage: sdkContainerImage, + LauncherMachineType: launcherMachineType, } return env, updatedParameters, nil } @@ -477,24 +465,12 @@ func resourceDataflowFlexTemplateJobRead(d *schema.ResourceData, meta interface{ } optionsMap := sdkPipelineOptions["options"].(map[string]interface{}) - // sdkPipelineOptions is not always populated with these values, hence the fallback - if _, ok := d.GetOk("num_workers"); !ok && optionsMap["numWorkers"] == nil || optionsMap["numWorkers"] == 0 { - optionsMap["numWorkers"] = job.Environment.WorkerPools[0].NumWorkers - } - if _, ok := d.GetOk("max_num_workers"); !ok && optionsMap["maxNumWorkers"] == nil || optionsMap["maxNumWorkers"] == 0 { - optionsMap["maxNumWorkers"] = job.Environment.WorkerPools[0].AutoscalingSettings.MaxNumWorkers - } - if _, ok := d.GetOk("machine_type"); !ok && optionsMap["workerMachineType"] == nil || optionsMap["workerMachineType"] == "" { - optionsMap["workerMachineType"] = job.Environment.WorkerPools[0].MachineType - } - if _, ok := d.GetOk("sdk_container_image"); 
!ok && optionsMap["sdkContainerImage"] == nil || optionsMap["sdkContainerImage"] == "" { - optionsMap["sdkContainerImage"] = job.Environment.WorkerPools[0].WorkerHarnessContainerImage - } - - if err := d.Set("temp_location", optionsMap["tempLocation"]); err != nil { return fmt.Errorf("Error setting temp_gcs_location: %s", err) } + if err := d.Set("network", optionsMap["network"]); err != nil { + return fmt.Errorf("Error setting network: %s", err) + } if err := d.Set("num_workers", optionsMap["numWorkers"]); err != nil { return fmt.Errorf("Error setting num_workers: %s", err) } @@ -507,10 +483,10 @@ func resourceDataflowFlexTemplateJobRead(d *schema.ResourceData, meta interface{ if err := d.Set("sdk_container_image", optionsMap["sdkContainerImage"]); err != nil { return fmt.Errorf("Error setting sdk_container_image: %s", err) } - if err := d.Set("network", job.Environment.WorkerPools[0].Network); err != nil { + if err := d.Set("network", optionsMap["network"]); err != nil { return fmt.Errorf("Error setting network: %s", err) } - if err := d.Set("subnetwork", job.Environment.WorkerPools[0].Subnetwork); err != nil { + if err := d.Set("subnetwork", optionsMap["subnetwork"]); err != nil { return fmt.Errorf("Error setting subnetwork: %s", err) } if err := d.Set("machine_type", optionsMap["workerMachineType"]); err != nil { diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl index 05695e5a3529..dfc601232de8 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_meta.yaml.tmpl @@ -6,7 +6,6 @@ api_version: 'v1beta3' api_resource_type_kind: 'Job' fields: - field: 'additional_experiments' - - field: 'additional_pipeline_options' - field: 'autoscaling_algorithm' - field: 
'container_spec_gcs_path' - field: 'effective_labels' diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl index 8210ff4ae2ec..aec6c6971afd 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl @@ -2,7 +2,6 @@ package dataflow_test {{- if ne $.TargetVersionName "ga" }} import ( - "encoding/json" "fmt" "regexp" "strings" @@ -44,7 +43,7 @@ func TestAccDataflowFlexTemplateJob_basic(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -83,7 +82,7 @@ func TestAccDataflowFlexTemplateJob_streamUpdate(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "transform_name_mapping", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "transform_name_mapping", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -148,7 +147,7 @@ func TestAccDataflowFlexTemplateJob_FullUpdate(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: testAccDataflowFlexTemplateJob_dataflowFlexTemplateJobFullUpdate(job, bucket, topic, randStr), @@ -157,7 +156,7 @@ func TestAccDataflowFlexTemplateJob_FullUpdate(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -192,7 +191,7 @@ func TestAccDataflowFlexTemplateJob_withNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_network", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: testAccDataflowFlexTemplateJob_networkUpdate(job, network1, network2, bucket, topic), @@ -205,7 +204,7 @@ func TestAccDataflowFlexTemplateJob_withNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_network", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", 
"container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -241,7 +240,7 @@ func TestAccDataflowFlexTemplateJob_withSubNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_subnetwork", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: testAccDataflowFlexTemplateJob_subnetworkUpdate(job, network, subnetwork1, subnetwork2, bucket, topic), @@ -254,7 +253,7 @@ func TestAccDataflowFlexTemplateJob_withSubNetwork(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_subnetwork", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -288,7 +287,7 @@ func TestAccDataflowFlexTemplateJob_withIpConfig(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_ipconfig", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "ip_configuration", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", 
"parameters", "skip_wait_on_job_termination", "ip_configuration", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -333,7 +332,7 @@ func TestAccDataflowFlexTemplateJob_withKmsKey(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_kms", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -367,41 +366,7 @@ func TestAccDataflowFlexTemplateJob_withAdditionalExperiments(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_experiments", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_experiments", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, - }, - }, - }) -} - -func TestAccDataflowFlexTemplateJob_withAdditionalPipelineOptions(t *testing.T) { - // Dataflow responses include serialized java classes and bash commands - // This makes body comparison infeasible - acctest.SkipIfVcr(t) - t.Parallel() - - randStr := acctest.RandString(t, 10) - job := "tf-test-dataflow-job-" + randStr - additionalPipelineOptions := []string{"numberOfWorkerHarnessThreads=200"} - bucket := "tf-test-dataflow-bucket-" + randStr - topic := "tf-test-topic" + randStr - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataflowJobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataflowFlexTemplateJob_additionalPipelineOptions(job, bucket, topic, 
additionalPipelineOptions), - Check: resource.ComposeTestCheckFunc( - testAccDataflowFlexJobExists(t, "google_dataflow_flex_template_job.flex_job_pipeline_options", false), - testAccDataflowFlexTemplateJobHasAdditionalPipelineOptions(t, "google_dataflow_flex_template_job.flex_job_pipeline_options", additionalPipelineOptions, false), - ), - }, - { - ResourceName: "google_dataflow_flex_template_job.flex_job_pipeline_options", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_pipeline_options", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "additional_experiments", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -422,9 +387,6 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckDataflowJobDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, Steps: []resource.TestStep{ { Config: testAccDataflowFlexTemplateJob_withProviderDefaultLabels(job, bucket, topic, randStr), @@ -436,7 +398,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: 
testAccComputeAddress_resourceLabelsOverridesProviderDefaultLabels(job, bucket, topic, randStr), @@ -448,7 +410,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: testAccComputeAddress_moveResourceLabelToProviderDefaultLabels(job, bucket, topic, randStr), @@ -460,7 +422,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: testAccComputeAddress_resourceLabelsOverridesProviderDefaultLabels(job, bucket, topic, randStr), @@ -472,7 +434,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", 
"container_spec_gcs_path", "labels", "terraform_labels"}, }, { Config: testAccDataflowFlexTemplateJob_dataflowFlexTemplateJobFull(job, bucket, topic, randStr), @@ -484,7 +446,7 @@ func TestAccDataflowFlexTemplateJob_withProviderDefaultLabels(t *testing.T) { ResourceName: "google_dataflow_flex_template_job.flex_job_fullupdate", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "container_spec_gcs_path", "labels", "terraform_labels"}, }, }, }) @@ -524,7 +486,7 @@ func TestAccDataflowJob_withAttributionLabelCreationOnly(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, }, { Config: testAccDataflowJob_attributionLabelUpdate(bucket, job, add, strategy), @@ -543,7 +505,7 @@ func TestAccDataflowJob_withAttributionLabelCreationOnly(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, }, }, }) @@ -581,7 +543,7 @@ func TestAccDataflowJob_withAttributionLabelProactive(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: 
true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, }, { Config: testAccDataflowJob_attributionLabelUpdate(bucket, job, "true", strategy), @@ -600,7 +562,7 @@ func TestAccDataflowJob_withAttributionLabelProactive(t *testing.T) { ResourceName: "google_dataflow_job.big_data", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels", "machine_type", "sdk_container_image"}, + ImportStateVerifyIgnore: []string{"on_delete", "parameters", "skip_wait_on_job_termination", "state", "labels", "terraform_labels"}, }, }, }) @@ -646,50 +608,6 @@ func TestAccDataflowFlexTemplateJob_enableStreamingEngine(t *testing.T) { }) } -func TestAccDataflowFlexTemplateJob_workerPoolsFallback(t *testing.T) { - acctest.SkipIfVcr(t) - t.Parallel() - - context1 := map[string]interface{}{ - "random_id": acctest.RandString(t, 10), - "max_workers": 2, - "num_workers": 1, - "machine_type": `"n1-standard-1"`, - } - - context2 := map[string]interface{}{ - "random_id": context1["random_id"], - "max_workers": 3, - "num_workers": 2, - "machine_type": `"n1-standard-1"`, - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataflowJobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataflowFlexTemplateJob_workerPoolFallback(context1), - Check: resource.ComposeTestCheckFunc( - testAccDataflowFlexJobExists(t, "google_dataflow_flex_template_job.flex_job", false), - resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "num_workers", "1"), - 
resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "max_workers", "2"), - ), - }, - { - Config: testAccDataflowFlexTemplateJob_workerPoolFallback(context2), - Check: resource.ComposeTestCheckFunc( - testAccDataflowFlexJobExists(t, "google_dataflow_flex_template_job.flex_job", true), - resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "num_workers", "2"), - resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "max_workers", "3"), - ), - }, - }, - }) -} - - func testAccDataflowFlexTemplateJobHasNetwork(t *testing.T, res, expected string, wait bool) resource.TestCheckFunc { return func(s *terraform.State) error { instanceTmpl, err := testAccDataflowFlexTemplateGetGeneratedInstanceTemplate(t, s, res) @@ -757,55 +675,6 @@ func testAccDataflowFlexTemplateJobHasAdditionalExperiments(t *testing.T, res st } } -func testAccDataflowFlexTemplateJobHasAdditionalPipelineOptions(t *testing.T, res string, pipelineOptions []string, wait bool) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[res] - if !ok { - return fmt.Errorf("resource %q not found in state", res) - } - - if rs.Primary.ID == "" { - return fmt.Errorf("No ID is set") - } - config := acctest.GoogleProviderConfig(t) - - job, err := config.NewDataflowClient(config.UserAgent).Projects.Jobs.Get(config.Project, rs.Primary.ID).View("JOB_VIEW_ALL").Do() - if err != nil { - return fmt.Errorf("dataflow job does not exist") - } - - var sdkPipelineOptionsMap map[string]interface{} - // Unmarshal the SdkPipelineOptions - err = json.Unmarshal(job.Environment.SdkPipelineOptions, &sdkPipelineOptionsMap) - if err != nil { - return fmt.Errorf("Error unmarshaling SdkPipelineOptions: '%s'", err) - } - - //Capture the options inside SdkPipelineOptions - options, ok := sdkPipelineOptionsMap["options"] - if !ok { - return fmt.Errorf("Error: 'options' field not found within actualPipelineOptionsMap.") - } - - 
actualPipelineOptionsMap, isMap := options.(map[string]interface{}) - if !isMap { - return fmt.Errorf("Error: 'options' field is not a JSON object") - } - - // Check if each pipelineOption exists in SdkPipelineOptions.options - for _, expectedPipelineOption := range pipelineOptions { - pOption := strings.SplitN(expectedPipelineOption, "=", 2) - key := pOption[0] - _, ok := actualPipelineOptionsMap[key] - if !ok { - return fmt.Errorf("Expected pipeline option '%s' not found in SdkPipelineOptions", expectedPipelineOption) - } - } - - return nil - } -} - func testAccDataflowFlexTemplateGetGeneratedInstanceTemplate(t *testing.T, s *terraform.State, res string) (*compute.InstanceTemplate, error) { rs, ok := s.RootModule().Resources[res] if !ok { @@ -1550,61 +1419,6 @@ resource "google_dataflow_flex_template_job" "flex_job_experiments" { `, topicName, bucket, job, strings.Join(experiments, `", "`)) } -func testAccDataflowFlexTemplateJob_additionalPipelineOptions(job, bucket, topicName string, pipelineOptions []string) string { - return fmt.Sprintf(` -data "google_project" "project" {} - -resource "google_pubsub_topic" "example" { - name = "%s" -} - -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US-CENTRAL1" - force_destroy = true - uniform_bucket_level_access = true -} - -resource "google_storage_bucket_object" "schema" { - name = "schema.json" - bucket = google_storage_bucket.bucket.name - content = <') the minimum, or if equality is allowed.`, - }, - "strict_max_enabled": { - Type: schema.TypeBool, - Computed: true, - Description: ` Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed.`, - }, - }, - }, - Description: `Row-level rule which evaluates whether each column value lies between a specified range.`, - }, - "non_null_expectation": { - Type: schema.TypeList, - Computed: true, - Description: `Row-level rule which evaluates whether each column value is null.`, - Elem: &schema.Resource{ - Schema: 
map[string]*schema.Schema{}, - }, - }, - "set_expectation": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "values": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `Expected values for the column value.`, - }, - }, - }, - Description: `Row-level rule which evaluates whether each column value is contained by a specified set.`, - }, - "regex_expectation": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "regex": { - Type: schema.TypeString, - Computed: true, - Description: `A regular expression the column value is expected to match.`, - }, - }, - }, - - Description: `Row-level rule which evaluates whether each column value matches a specified regex.`, - }, - "uniqueness_expectation": { - Type: schema.TypeList, - Computed: true, - Description: `Row-level rule which evaluates whether each column value is unique.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{}, - }, - }, - "statistic_range_expectation": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "statistic": { - Type: schema.TypeString, - Computed: true, - Description: `The list of aggregate metrics a rule can be evaluated against. 
- Possible values: ["STATISTIC_UNDEFINED", "MEAN", "MIN", "MAX"]`, - }, - "min_value": { - Type: schema.TypeString, - Computed: true, - Description: `The minimum column value allowed for a row to pass this validation.`, - }, - "max_value": { - Type: schema.TypeString, - Computed: true, - Description: `The maximum column value allowed for a row to pass this validation.`, - }, - "strict_min_enabled": { - Type: schema.TypeBool, - Computed: true, - Description: `Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed.`, - }, - "strict_max_enabled": { - Type: schema.TypeBool, - Computed: true, - Description: ` Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed.`, - }, - }, - }, - Description: `Aggregate rule which evaluates whether the column aggregate statistic lies between a specified range.`, - }, - "row_condition_expectation": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "sql_expression": { - Type: schema.TypeString, - Computed: true, - Description: `The SQL expression.`, - }, - }, - }, - Description: `Row-level rule which evaluates whether each row in a table passes the specified condition.`, - }, - "table_condition_expectation": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "sql_expression": { - Type: schema.TypeString, - Computed: true, - Description: `The SQL expression.`, - }, - }, - }, - Description: `Aggregate rule which evaluates whether the provided expression is true for a table.`, - }, - "sql_assertion": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "sql_statement": { - Type: schema.TypeString, - Computed: true, - Description: `The SQL expression.`, - }, - }, - }, - Description: `Aggregate rule which evaluates the number of rows returned for the provided statement. 
If any rows are returned, this rule fails.`, - }, - }, - }, - }, - }, - } -} - -func camelToSnake(s string) string { - var result strings.Builder - for i, ch := range s { - if unicode.IsUpper(ch) { - if i > 0 { - result.WriteByte('_') - } - result.WriteRune(unicode.ToLower(ch)) - } else { - result.WriteRune(ch) - } - } - return result.String() -} - -func flattenDataSourceDataplexDataQualityRulesExpectation(expectation interface{}) []interface{} { - expectationsToSet := make(map[string]interface{}) - - if expectation == nil { - return []interface{}{expectationsToSet} - } - - originalExpectation := expectation.(map[string]interface{}) - for k, v := range originalExpectation { - snakeCaseKey := camelToSnake(k) - expectationsToSet[snakeCaseKey] = v - } - return []interface{}{expectationsToSet} -} - -func flattenDataSourceDataplexDataQualityRulesRules(rules interface{}) []interface{} { - rulesToSet := make([]interface{}, 0) - - originalRules := rules.([]interface{}) - - for _, rule := range originalRules { - - newRuleMap := make(map[string]interface{}) - ruleMap := rule.(map[string]interface{}) - - for k, v := range ruleMap { - snakeCaseKey := camelToSnake(k) - if strings.HasSuffix(k, "Expectation") { - // For expectation fields, need extra flatten - newRuleMap[snakeCaseKey] = flattenDataSourceDataplexDataQualityRulesExpectation(v) - } else { - // For other fields (column, dimension, threshold, etc.), directly assign - newRuleMap[snakeCaseKey] = v - } - } - rulesToSet = append(rulesToSet, newRuleMap) - } - - return rulesToSet -} - -func dataSourceDataplexDataQualityRulesRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - location, err := tpgresource.GetLocation(d, config) - if err != nil { - return err - } - - data_scan_id := 
d.Get("data_scan_id").(string) - - url, err := tpgresource.ReplaceVars(d, config, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}:generateDataQualityRules") - if err != nil { - return err - } - - id := fmt.Sprintf("projects/%s/locations/%s/dataScans/%s", project, location, data_scan_id) - d.SetId(id) - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.Is429QuotaError}, - }) - - if err != nil { - return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("DataQualityRules %q", d.Id()), url) - } - - if err := d.Set("rules", flattenDataSourceDataplexDataQualityRulesRules(res["rule"])); err != nil { - return fmt.Errorf("Error setting rule: %s", err) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go b/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go deleted file mode 100644 index dc67c07ac24a..000000000000 --- a/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules_test.go +++ /dev/null @@ -1,190 +0,0 @@ -package dataplex_test - -import ( - "fmt" - "testing" - "time" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func TestAccDataplexDataQualityRules(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - 
"location": envvar.GetTestRegionFromEnv(), - "data_scan_id": "tf-test-datascan-profile-id", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDataplexDataQualityRules_datascan_config(context), - }, - { - RefreshState: true, - Check: testAccDataplexDataScanJobTriggerRunAndWaitUntilComplete(t, "google_dataplex_datascan.tf_test_datascan_profile"), - }, - { - Config: testAccDataplexDataQualityRules_rules_config(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_dataplex_data_quality_rules.generated_dq_rules", "rules.#", "7"), - ), - }, - }, - }) -} - -func testAccDataplexDataQualityRules_datascan_config(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_dataplex_datascan" "tf_test_datascan_profile" { - location = "%{location}" - data_scan_id = "%{data_scan_id}-%{random_suffix}" - - data { - resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" - } - - execution_spec { - trigger { - on_demand {} - } - } - - data_profile_spec {} - - project = "%{project}" - }`, context) -} - -func testAccDataplexDataScanJobTriggerRunAndWaitUntilComplete(t *testing.T, resourceName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - - rs, ok := s.RootModule().Resources[resourceName] - - if !ok { - return fmt.Errorf("Resource not found: %s", resourceName) - } - - config := acctest.GoogleProviderConfig(t) - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}:run") - if err != nil { - return fmt.Errorf("Failed to generate URL for triggering datascan run: %s", err) - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - res, err 
:= transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, - }) - - if err != nil { - return fmt.Errorf("Request for triggering data scan run failed: %s", err) - } - - dataScanJobId := extractDataScanJobId(res["job"]) - dataScanJobState := extractDataScanJobState(res["job"]) - - for dataScanJobState != "SUCCEEDED" { - dataScanJobState, err = getDataScanJobState(t, rs, dataScanJobId) - if err != nil { - return fmt.Errorf("Getting data scan job state failed: failed to get state: %s", err) - } - - switch dataScanJobState { - case "STATE_UNSPECIFIED", "RUNNING", "PENDING": - time.Sleep(10 * time.Second) // Pause for 10 seconds to prevend making too many api calls - case "CANCELING", "CANCELLED", "FAILED": - return fmt.Errorf("Data scan job failed: Invalid state: %s", dataScanJobState) - case "SUCCEEDED": - default: - return fmt.Errorf("Getting data scan job state failed: invalid state: %s", dataScanJobState) - } - } - - return nil - } -} - -func testAccDataplexDataQualityRules_rules_config(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_dataplex_datascan" "tf_test_datascan_profile" { - location = "%{location}" - data_scan_id = "%{data_scan_id}-%{random_suffix}" - - data { - resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" - } - - execution_spec { - trigger { - on_demand {} - } - } - - data_profile_spec {} - - project = "%{project}" - } - - data "google_dataplex_data_quality_rules" "generated_dq_rules" { - project = google_dataplex_datascan.tf_test_datascan_profile.project - location = google_dataplex_datascan.tf_test_datascan_profile.location - data_scan_id = google_dataplex_datascan.tf_test_datascan_profile.data_scan_id - }`, context) -} - -func getDataScanJobState(t *testing.T, rs *terraform.ResourceState, dataScanJobId string) (string, error) { - config := 
acctest.GoogleProviderConfig(t) - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}/jobs/"+dataScanJobId) - if err != nil { - return "", fmt.Errorf("Failed to generate URL for getting data scan job state: %s", err) - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, - }) - - if err != nil { - return "", fmt.Errorf("Request for getting data scan job state failed: %s", err) - } - - return extractDataScanJobState(res), nil -} - -func extractDataScanJobState(job interface{}) string { - dataScanJob := job.(map[string]interface{}) - return dataScanJob["state"].(string) -} - -func extractDataScanJobId(job interface{}) string { - dataScanJob := job.(map[string]interface{}) - return dataScanJob["uid"].(string) -} diff --git a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go b/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go deleted file mode 100644 index d42015c291c3..000000000000 --- a/mmv1/third_party/terraform/services/dataplex/resource_dataplex_datascan_test.go +++ /dev/null @@ -1,232 +0,0 @@ -package dataplex_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDataplexDatascanDataplexDatascanFullQuality_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_name": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - 
PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataplexDatascanDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataplexDatascanDataplexDatascanFullQuality_full(context), - }, - { - ResourceName: "google_dataplex_datascan.full_quality", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"data_scan_id", "labels", "location", "terraform_labels"}, - }, - { - Config: testAccDataplexDatascanDataplexDatascanFullQuality_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_dataplex_datascan.full_quality", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_dataplex_datascan.full_quality", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"data_scan_id", "labels", "location", "terraform_labels"}, - }, - }, - }) -} - -func testAccDataplexDatascanDataplexDatascanFullQuality_full(context map[string]interface{}) string { - return acctest.Nprintf(` - -resource "google_bigquery_dataset" "tf_test_dataset" { - dataset_id = "tf_test_dataset_id_%{random_suffix}" - default_table_expiration_ms = 3600000 -} - -resource "google_bigquery_table" "tf_test_table" { - dataset_id = google_bigquery_dataset.tf_test_dataset.dataset_id - table_id = "tf_test_table_%{random_suffix}" - deletion_protection = false - schema = < 0 - - if hasError != tc.expectError { - t.Fatalf("%s: NumberOfAspectsValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) - } - - if tc.expectError && tc.errorMsg != "" { - found := false - for _, err := range errors { - if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring - found = true - break - } - } - if !found { - t.Errorf("%s: NumberOfAspectsValidation() expected 
error containing %q, but got: %v", tc.name, tc.errorMsg, errors) - } - } - }) - } -} - -func TestProjectNumberValidation(t *testing.T) { - fieldName := "some_field" - testCases := []struct { - name string - input interface{} - expectError bool - errorMsg string - }{ - {"valid input", "projects/1234567890/locations/us-central1", false, ""}, - {"valid input with only number", "projects/987/stuff", false, ""}, - {"valid input with trailing slash content", "projects/1/a/b/c", false, ""}, - {"valid input minimal", "projects/1/a", false, ""}, - {"invalid input trailing slash only", "projects/555/", true, "has an invalid format"}, - {"invalid type - int", 123, true, `to be string, but got int`}, - {"invalid type - nil", nil, true, `to be string, but got `}, - {"invalid format - missing 'projects/' prefix", "12345/locations/us", true, "has an invalid format"}, - {"invalid format - project number starts with 0", "projects/0123/data", true, "has an invalid format"}, - {"invalid format - no project number", "projects//data", true, "has an invalid format"}, - {"invalid format - letters instead of number", "projects/abc/data", true, "has an invalid format"}, - {"invalid format - missing content after number/", "projects/123", true, "has an invalid format"}, - {"invalid format - empty string", "", true, "has an invalid format"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - _, errors := dataplex.ProjectNumberValidation(tc.input, fieldName) - hasError := len(errors) > 0 - - if hasError != tc.expectError { - t.Fatalf("%s: ProjectNumberValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) - } - - if tc.expectError && tc.errorMsg != "" { - found := false - for _, err := range errors { - if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring - found = true - break - } - } - if !found { - t.Errorf("%s: ProjectNumberValidation() 
expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) - } - } - }) - } -} - -func TestAspectProjectNumberValidation(t *testing.T) { - fieldName := "some_field" - testCases := []struct { - name string - input interface{} - expectError bool - errorMsg string - }{ - {"valid input", "1234567890.compute.googleapis.com/Disk", false, ""}, - {"valid input minimal", "1.a", false, ""}, - {"invalid input trailing dot only", "987.", true, "has an invalid format"}, - {"invalid type - int", 456, true, `to be string, but got int`}, - {"invalid type - nil", nil, true, `to be string, but got `}, - {"invalid format - missing number", ".compute.googleapis.com/Disk", true, "has an invalid format"}, - {"invalid format - number starts with 0", "0123.compute.googleapis.com/Disk", true, "has an invalid format"}, - {"invalid format - missing dot", "12345compute", true, "has an invalid format"}, - {"invalid format - letters instead of number", "abc.compute.googleapis.com/Disk", true, "has an invalid format"}, - {"invalid format - missing content after dot", "12345", true, "has an invalid format"}, - {"invalid format - empty string", "", true, "has an invalid format"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - _, errors := dataplex.AspectProjectNumberValidation(tc.input, fieldName) - hasError := len(errors) > 0 - - if hasError != tc.expectError { - t.Fatalf("%s: AspectProjectNumberValidation() error expectation mismatch: got error = %v (%v), want error = %v", tc.name, hasError, errors, tc.expectError) - } - - if tc.expectError && tc.errorMsg != "" { - found := false - for _, err := range errors { - if strings.Contains(err.Error(), tc.errorMsg) { // Check if error message contains the expected substring - found = true - break - } - } - if !found { - t.Errorf("%s: AspectProjectNumberValidation() expected error containing %q, but got: %v", tc.name, tc.errorMsg, errors) - } - } - }) - } -} - -func TestFilterAspects(t *testing.T) { - 
testCases := []struct { - name string - aspectKeySet map[string]struct{} - resInput map[string]interface{} - expectedAspects map[string]interface{} - expectError bool - errorMsg string - }{ - {"aspects key is absent", map[string]struct{}{"keep": {}}, map[string]interface{}{"otherKey": "value"}, nil, false, ""}, - {"aspects value is nil", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": nil}, nil, false, ""}, - {"empty aspectKeySet", map[string]struct{}{}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{}, false, ""}, - {"keep all aspects", map[string]struct{}{"one": {}, "two": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}, false, ""}, - {"keep some aspects", map[string]struct{}{"two": {}, "three_not_present": {}}, map[string]interface{}{"aspects": map[string]interface{}{"one": map[string]interface{}{"data": 1}, "two": map[string]interface{}{"data": 2}}}, map[string]interface{}{"two": map[string]interface{}{"data": 2}}, false, ""}, - {"input aspects map is empty", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": map[string]interface{}{}}, map[string]interface{}{}, false, ""}, - {"aspects is wrong type", map[string]struct{}{"keep": {}}, map[string]interface{}{"aspects": "not a map"}, nil, true, "FilterAspects: 'aspects' field is not a map[string]interface{}, got string"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - resCopy := deepCopyMap(tc.resInput) - originalAspectsBeforeCall := deepCopyValue(resCopy["aspects"]) - - err := dataplex.FilterAspects(tc.aspectKeySet, resCopy) - - if tc.expectError { - if err == nil { - t.Fatalf("%s: Expected an error, but got nil", tc.name) - } - 
if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { - t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) - } - if !reflect.DeepEqual(resCopy["aspects"], originalAspectsBeforeCall) { - t.Errorf("%s: resCopy['aspects'] was modified during error case.\nBefore: %#v\nAfter: %#v", tc.name, originalAspectsBeforeCall, resCopy["aspects"]) - } - return - } - - if err != nil { - t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) - } - - actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] - - if tc.expectedAspects == nil { - if aspectsKeyExists && actualAspectsRaw != nil { - if tc.name == "aspects key is absent" { - if aspectsKeyExists { - t.Errorf("%s: Expected 'aspects' key to be absent, but it exists with value: %v", tc.name, actualAspectsRaw) - } - } else { - t.Errorf("%s: Expected 'aspects' value to be nil, but got: %v", tc.name, actualAspectsRaw) - } - } - return - } - - if !aspectsKeyExists { - t.Fatalf("%s: Expected 'aspects' key to exist, but it was absent. Expected value: %#v", tc.name, tc.expectedAspects) - } - - actualAspects, ok := actualAspectsRaw.(map[string]interface{}) - if !ok { - t.Fatalf("%s: Expected 'aspects' to be a map[string]interface{}, but got %T. 
Value: %#v", tc.name, actualAspectsRaw, actualAspectsRaw) - } - - if !reflect.DeepEqual(actualAspects, tc.expectedAspects) { - t.Errorf("%s: FilterAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspects, tc.expectedAspects) - } - }) - } -} - -func TestAddAspectsToSet(t *testing.T) { - testCases := []struct { - name string - initialSet map[string]struct{} - aspectsInput interface{} - expectedSet map[string]struct{} - expectError bool - errorMsg string - }{ - {"add to empty set", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false, ""}, - {"add to existing set", map[string]struct{}{"existing": {}}, []interface{}{map[string]interface{}{"aspect_key": "key1"}}, map[string]struct{}{"existing": {}, "key1": {}}, false, ""}, - {"add duplicate keys", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key1"}, map[string]interface{}{"aspect_key": "key2"}}, map[string]struct{}{"key1": {}, "key2": {}}, false, ""}, - {"input aspects is empty slice", map[string]struct{}{"existing": {}}, []interface{}{}, map[string]struct{}{"existing": {}}, false, ""}, - {"input aspects is nil", map[string]struct{}{"original": {}}, nil, map[string]struct{}{"original": {}}, false, ""}, - {"input aspects is wrong type", map[string]struct{}{}, "not a slice", map[string]struct{}{}, true, "AddAspectsToSet: input 'aspects' is not a []interface{}, got string"}, - {"item in slice is not a map", map[string]struct{}{}, []interface{}{"not a map"}, map[string]struct{}{}, true, "AddAspectsToSet: item at index 0 is not a map[string]interface{}, got string"}, - {"item map missing aspect_key", map[string]struct{}{}, []interface{}{map[string]interface{}{"wrong_key": "key1"}}, map[string]struct{}{}, true, "AddAspectsToSet: 'aspect_key' not found in aspect item at index 0"}, - {"aspect_key is not a 
string", map[string]struct{}{}, []interface{}{map[string]interface{}{"aspect_key": 123}}, map[string]struct{}{}, true, "AddAspectsToSet: 'aspect_key' in item at index 0 is not a string, got int"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - currentSet := make(map[string]struct{}) - for k, v := range tc.initialSet { - currentSet[k] = v - } - - err := dataplex.AddAspectsToSet(currentSet, tc.aspectsInput) - - if tc.expectError { - if err == nil { - t.Fatalf("%s: Expected an error, but got nil", tc.name) - } - if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { - t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) - } - } else { - if err != nil { - t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) - } - if !reflect.DeepEqual(currentSet, tc.expectedSet) { - t.Errorf("%s: AddAspectsToSet() result mismatch:\ngot: %v\nwant: %v", tc.name, currentSet, tc.expectedSet) - } - } - }) - } -} - -func TestInverseTransformAspects(t *testing.T) { - testCases := []struct { - name string - resInput map[string]interface{} - expectedAspects []interface{} - expectNilAspects bool - expectError bool - errorMsg string - }{ - {"aspects key is absent", map[string]interface{}{"otherKey": "value"}, nil, true, false, ""}, - {"aspects value is nil", map[string]interface{}{"aspects": nil}, nil, true, false, ""}, - {"aspects is empty map", map[string]interface{}{"aspects": map[string]interface{}{}}, []interface{}{}, false, false, ""}, - {"aspects with one entry", map[string]interface{}{"aspects": map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}}, false, false, ""}, - {"aspects with multiple entries", map[string]interface{}{"aspects": map[string]interface{}{"key2": map[string]interface{}{"data": "value2"}, "key1": map[string]interface{}{"data": 
"value1"}}}, []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspect": map[string]interface{}{"data": "value2"}}}, false, false, ""}, - {"aspects is wrong type (not map)", map[string]interface{}{"aspects": "not a map"}, nil, false, true, "InverseTransformAspects: 'aspects' field is not a map[string]interface{}, got string"}, - {"aspect value is not a map", map[string]interface{}{"aspects": map[string]interface{}{"key1": "not a map value"}}, nil, false, true, "InverseTransformAspects: value for key 'key1' is not a map[string]interface{}, got string"}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - resCopy := deepCopyMap(tc.resInput) - originalAspectsBeforeCall := deepCopyValue(resCopy["aspects"]) - - err := dataplex.InverseTransformAspects(resCopy) - - if tc.expectError { - if err == nil { - t.Fatalf("%s: Expected an error, but got nil", tc.name) - } - if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { - t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) - } - if !reflect.DeepEqual(resCopy["aspects"], originalAspectsBeforeCall) { - t.Errorf("%s: resCopy['aspects'] was modified during error case.\nBefore: %#v\nAfter: %#v", tc.name, originalAspectsBeforeCall, resCopy["aspects"]) - } - return - } - - if err != nil { - t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) - } - - actualAspectsRaw, aspectsKeyExists := resCopy["aspects"] - - if tc.expectNilAspects { - if aspectsKeyExists && actualAspectsRaw != nil { - t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %#v", tc.name, actualAspectsRaw) - } - return - } - - if !aspectsKeyExists { - t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing. 
Expected value: %#v", tc.name, tc.expectedAspects) - } - if actualAspectsRaw == nil && tc.expectedAspects != nil { - t.Fatalf("%s: Expected 'aspects' to be non-nil, but got nil. Expected value: %#v", tc.name, tc.expectedAspects) - } - - actualAspectsSlice, ok := actualAspectsRaw.([]interface{}) - if !ok { - if tc.expectedAspects != nil || actualAspectsRaw != nil { - t.Fatalf("%s: Expected 'aspects' to be []interface{}, but got %T. Value: %#v", tc.name, actualAspectsRaw, actualAspectsRaw) - } - } - - if actualAspectsSlice != nil { - sortAspectSlice(actualAspectsSlice) - } - if tc.expectedAspects != nil { - sortAspectSlice(tc.expectedAspects) - } - - if !reflect.DeepEqual(actualAspectsSlice, tc.expectedAspects) { - t.Errorf("%s: InverseTransformAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspectsSlice, tc.expectedAspects) - } - }) - } -} - -func TestTransformAspects(t *testing.T) { - testCases := []struct { - name string - objInput map[string]interface{} - expectedAspects map[string]interface{} - expectNilAspects bool - expectError bool - errorMsg string - }{ - {"aspects key is absent", map[string]interface{}{"otherKey": "value"}, nil, true, false, ""}, - {"aspects value is nil", map[string]interface{}{"aspects": nil}, nil, true, false, ""}, - {"aspects is empty slice", map[string]interface{}{"aspects": []interface{}{}}, map[string]interface{}{}, false, false, ""}, - {"aspects with one item", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}}, false, false, ""}, - {"aspects with one item that has no aspect", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1"}}}, map[string]interface{}{"key1": map[string]interface{}{"data": map[string]interface{}{}}}, false, false, ""}, - {"aspects with multiple items", map[string]interface{}{"aspects": 
[]interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value1"}}, map[string]interface{}{"aspectKey": "key2", "aspect": map[string]interface{}{"data": "value2"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value1"}, "key2": map[string]interface{}{"data": "value2"}}, false, false, ""}, - {"aspects with duplicate aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value_first"}}, map[string]interface{}{"aspectKey": "key2", "aspect": map[string]interface{}{"data": "value2"}}, map[string]interface{}{"aspectKey": "key1", "aspect": map[string]interface{}{"data": "value_last"}}}}, map[string]interface{}{"key1": map[string]interface{}{"data": "value_last"}, "key2": map[string]interface{}{"data": "value2"}}, false, false, ""}, - {"aspects is wrong type (not slice)", map[string]interface{}{"aspects": "not a slice"}, nil, false, true, "TransformAspects: 'aspects' field is not a []interface{}, got string"}, - {"item in slice is not a map", map[string]interface{}{"aspects": []interface{}{"not a map"}}, nil, false, true, "TransformAspects: item in 'aspects' slice at index 0 is not a map[string]interface{}, got string"}, - {"item map missing aspectKey", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"wrongKey": "k1", "aspect": map[string]interface{}{}}}}, nil, false, true, "TransformAspects: 'aspectKey' not found in aspect item at index 0"}, - {"aspectKey is not a string", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": 123, "aspect": map[string]interface{}{}}}}, nil, false, true, "TransformAspects: 'aspectKey' in item at index 0 is not a string, got int"}, - {"aspect is present but wrong type", map[string]interface{}{"aspects": []interface{}{map[string]interface{}{"aspectKey": "key1", "aspect": "not a map"}}}, map[string]interface{}{"key1": 
map[string]interface{}{"data": map[string]interface{}{}}}, false, false, ""}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - objCopy := deepCopyMap(tc.objInput) - originalAspectsBeforeCall := deepCopyValue(objCopy["aspects"]) - - err := dataplex.TransformAspects(objCopy) - - if tc.expectError { - if err == nil { - t.Fatalf("%s: Expected an error, but got nil", tc.name) - } - if tc.errorMsg != "" && !strings.Contains(err.Error(), tc.errorMsg) { - t.Errorf("%s: Expected error message containing %q, got %q", tc.name, tc.errorMsg, err.Error()) - } - if !reflect.DeepEqual(objCopy["aspects"], originalAspectsBeforeCall) { - t.Errorf("%s: objCopy['aspects'] was modified during error case.\nBefore: %#v\nAfter: %#v", tc.name, originalAspectsBeforeCall, objCopy["aspects"]) - } - return - } - - if err != nil { - t.Fatalf("%s: Did not expect an error, but got: %v", tc.name, err) - } - - actualAspectsRaw, aspectsKeyExists := objCopy["aspects"] - - if tc.expectNilAspects { - if aspectsKeyExists && actualAspectsRaw != nil { - t.Errorf("%s: Expected 'aspects' to be nil or absent, but got: %#v", tc.name, actualAspectsRaw) - } - return - } - - if !aspectsKeyExists { - t.Fatalf("%s: Expected 'aspects' key in result map, but it was missing. Expected value: %#v", tc.name, tc.expectedAspects) - } - if actualAspectsRaw == nil && tc.expectedAspects != nil { - t.Fatalf("%s: Expected 'aspects' to be non-nil, but got nil. Expected value: %#v", tc.name, tc.expectedAspects) - } - - actualAspectsMap, ok := actualAspectsRaw.(map[string]interface{}) - if !ok { - if tc.expectedAspects != nil || actualAspectsRaw != nil { - t.Fatalf("%s: Expected 'aspects' to be map[string]interface{}, but got %T. 
Value: %#v", tc.name, actualAspectsRaw, actualAspectsRaw) - } - } - - if !reflect.DeepEqual(actualAspectsMap, tc.expectedAspects) { - t.Errorf("%s: TransformAspects() result mismatch:\ngot: %#v\nwant: %#v", tc.name, actualAspectsMap, tc.expectedAspects) - } - }) - } -} - -func deepCopyMap(original map[string]interface{}) map[string]interface{} { - if original == nil { - return nil - } - copyMap := make(map[string]interface{}, len(original)) - for key, value := range original { - copyMap[key] = deepCopyValue(value) - } - return copyMap -} - -func deepCopySlice(original []interface{}) []interface{} { - if original == nil { - return nil - } - copySlice := make([]interface{}, len(original)) - for i, value := range original { - copySlice[i] = deepCopyValue(value) - } - return copySlice -} - -func deepCopyValue(value interface{}) interface{} { - if value == nil { - return nil - } - switch v := value.(type) { - case map[string]interface{}: - return deepCopyMap(v) - case []interface{}: - return deepCopySlice(v) - default: - return v - } -} - -func sortAspectSlice(slice []interface{}) { - if slice == nil { - return - } - sort.SliceStable(slice, func(i, j int) bool { - mapI, okI := slice[i].(map[string]interface{}) - mapJ, okJ := slice[j].(map[string]interface{}) - if !okI || !okJ { - return false - } - - keyIRaw, keyIExists := mapI["aspectKey"] - keyJRaw, keyJExists := mapJ["aspectKey"] - - if !keyIExists || !keyJExists { - return false - } - - keyI, okI := keyIRaw.(string) - keyJ, okJ := keyJRaw.(string) - if !okI || !okJ { - return false - } - return keyI < keyJ - }) -} - -func TestAccDataplexEntry_dataplexEntryUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_number": envvar.GetTestProjectNumberFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: 
testAccCheckDataplexEntryDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataplexEntry_dataplexEntryFullUpdatePrepare(context), - }, - { - ResourceName: "google_dataplex_entry.test_entry_full", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, - }, - { - Config: testAccDataplexEntry_dataplexEntryUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_dataplex_entry.test_entry_full", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_dataplex_entry.test_entry_full", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"aspects", "entry_group_id", "entry_id", "location"}, - }, - }, - }) -} - -func testAccDataplexEntry_dataplexEntryFullUpdatePrepare(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_dataplex_aspect_type" "tf-test-aspect-type-full-one" { - aspect_type_id = "tf-test-aspect-type-full%{random_suffix}-one" - location = "us-central1" - project = "%{project_number}" - - metadata_template = < 0 { - conf.KerberosConfig = expandKerberosConfig(k[0].(map[string]interface{})) - } - } - if ifg, ok := cfg["identity_config"]; ok { - i := ifg.([]interface{}) - if len(i) > 0 { - conf.IdentityConfig = expandIdentityConfig(i[0].(map[string]interface{})) - } - } - return conf -} - -func expandIdentityConfig(cfg map[string]interface{}) *dataproc.IdentityConfig { - conf := &dataproc.IdentityConfig{} - if v, ok := cfg["user_service_account_mapping"]; ok { - m := make(map[string]string) - for k, val := range v.(map[string]interface{}) { - m[k] = val.(string) - } - conf.UserServiceAccountMapping = m + conf.KerberosConfig = expandKerberosConfig(kfg.([]interface{})[0].(map[string]interface{})) } return conf } @@ -2982,8 +2922,8 @@ func flattenClusterConfig(d *schema.ResourceData, cfg 
*dataproc.ClusterConfig) ( } data := map[string]interface{}{ - "staging_bucket": d.Get("cluster_config.0.staging_bucket").(string), - "cluster_tier": d.Get("cluster_config.0.cluster_tier").(string), + "staging_bucket": d.Get("cluster_config.0.staging_bucket").(string), + "bucket": cfg.ConfigBucket, "temp_bucket": cfg.TempBucket, "gce_cluster_config": flattenGceClusterConfig(d, cfg.GceClusterConfig), @@ -3025,7 +2965,6 @@ func flattenSecurityConfig(d *schema.ResourceData, sc *dataproc.SecurityConfig) } data := map[string]interface{}{ "kerberos_config": flattenKerberosConfig(d, sc.KerberosConfig), - "identity_config": flattenIdentityConfig(d, sc.IdentityConfig), } return []map[string]interface{}{data} @@ -3056,17 +2995,6 @@ func flattenKerberosConfig(d *schema.ResourceData, kfg *dataproc.KerberosConfig) return []map[string]interface{}{data} } -func flattenIdentityConfig(d *schema.ResourceData, ifg *dataproc.IdentityConfig) []map[string]interface{} { - if ifg == nil { - return nil - } - data := map[string]interface{}{ - "user_service_account_mapping": d.Get("cluster_config.0.security_config.0.identity_config.0.user_service_account_mapping").(map[string]interface{}), - } - - return []map[string]interface{}{data} -} - func flattenSoftwareConfig(d *schema.ResourceData, sc *dataproc.SoftwareConfig) []map[string]interface{} { data := map[string]interface{}{ "image_version": sc.ImageVersion, @@ -3514,10 +3442,8 @@ func dataprocImageVersionDiffSuppress(_, old, new string, _ *schema.ResourceData if newV.minor != oldV.minor { return false } - - ignoreSubminor := []string{"", "prodcurrent", "prodprevious"} - // Only compare subminor version if set to a numeric value in config version. - if !slices.Contains(ignoreSubminor, newV.subminor) && newV.subminor != oldV.subminor { + // Only compare subminor version if set in config version. + if newV.subminor != "" && newV.subminor != oldV.subminor { return false } // Only compare os if it is set in config version. 
diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go index fb5aa82c4808..4b3458e89571 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_internal_test.go @@ -93,8 +93,6 @@ func TestDataprocDiffSuppress(t *testing.T) { {"1.3.10-debian9", "1.3-debian9"}, {"1.3.10", "1.3"}, {"1.3-debian9", "1.3"}, - {"1.3.10-debian9", "1.3.prodprevious-debian9"}, - {"1.3.10-debian9", "1.3.prodcurrent-debian9"}, } noSuppress := [][]string{ @@ -108,9 +106,6 @@ func TestDataprocDiffSuppress(t *testing.T) { {"1.3", "1.3.10"}, {"1.3", "1.3.10-debian9"}, {"1.3", "1.3-debian9"}, - {"1.3.prodprevious-debian9", "1.3.10-debian9"}, - {"1.3.prodcurrent-debian9", "1.3.10-debian9"}, - {"1.3.10-debian9", "1.3.randomstring-debian9"}, } for _, tup := range doSuppress { diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml index 41cab5b3a61a..464f5759cd76 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_meta.yaml @@ -85,14 +85,12 @@ fields: - field: 'cluster_config.security_config.kerberos_config.tgt_lifetime_hours' - field: 'cluster_config.security_config.kerberos_config.truststore_password_uri' - field: 'cluster_config.security_config.kerberos_config.truststore_uri' - - field: 'cluster_config.security_config.identity_config.user_service_account_mapping' - field: 'cluster_config.software_config.image_version' - field: 'cluster_config.software_config.optional_components' - field: 'cluster_config.software_config.override_properties' - field: 'cluster_config.software_config.properties' - field: 
'cluster_config.staging_bucket' - field: 'cluster_config.temp_bucket' - - field: 'cluster_config.cluster_tier' - field: 'cluster_config.worker_config.accelerators.accelerator_count' - field: 'cluster_config.worker_config.accelerators.accelerator_type' - field: 'cluster_config.worker_config.disk_config.boot_disk_size_gb' diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl similarity index 92% rename from mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go rename to mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl index d47fb21248af..6f2fdee1edf6 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl @@ -280,48 +280,48 @@ func TestAccDataprocCluster_withShieldedConfig(t *testing.T) { } func TestAccDataprocCluster_withConfidentialCompute(t *testing.T) { - t.Parallel() + t.Parallel() - var cluster dataproc.Cluster - rnd := acctest.RandString(t, 10) + var cluster dataproc.Cluster + rnd := acctest.RandString(t, 10) networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) imageUri := "https://www.googleapis.com/compute/v1/projects/cloud-dataproc/global/images/dataproc-2-1-ubu20-20241026-165100-rc01" - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocClusterDestroy(t), - Steps: []resource.TestStep{ - { - Config: testAccDataprocCluster_withConfidentialCompute(rnd, subnetworkName, imageUri), - Check: resource.ComposeTestCheckFunc( - 
testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.confidential", &cluster), - - // Check confidential compute - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.gce_cluster_config.0.confidential_instance_config.0.enable_confidential_compute", "true"), - - // Check master - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.master_config.0.machine_type", "n2d-standard-2"), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.master_config.0.image_uri", imageUri), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.master_config.0.min_cpu_platform", "AMD Rome"), - - // Check worker - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.worker_config.0.machine_type", "n2d-standard-2"), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.worker_config.0.image_uri", imageUri), - resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", - "cluster_config.0.worker_config.0.min_cpu_platform", "AMD Rome"), - ), - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocClusterDestroy(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withConfidentialCompute(rnd, subnetworkName, imageUri), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.confidential", &cluster), + + // Check confidential compute + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.gce_cluster_config.0.confidential_instance_config.0.enable_confidential_compute", "true"), + + // Check master + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + 
"cluster_config.0.master_config.0.machine_type", "n2d-standard-2"), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.master_config.0.image_uri", imageUri), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.master_config.0.min_cpu_platform", "AMD Rome"), + + // Check worker + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.worker_config.0.machine_type", "n2d-standard-2"), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.worker_config.0.image_uri", imageUri), + resource.TestCheckResourceAttr("google_dataproc_cluster.confidential", + "cluster_config.0.worker_config.0.min_cpu_platform", "AMD Rome"), + ), + }, + }, + }) } func TestAccDataprocCluster_withMetadataAndTags(t *testing.T) { @@ -1055,7 +1055,7 @@ func TestAccDataprocCluster_KMS(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@compute-system.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) @@ -1100,64 +1100,6 @@ func TestAccDataprocCluster_withKerberos(t *testing.T) { }) } -func TestAccDataprocCluster_withIdentityConfig(t *testing.T) { - t.Parallel() - - rnd := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) - acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) - - var cluster dataproc.Cluster - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocClusterDestroy(t), - Steps: []resource.TestStep{ - { - Config: testAccDataprocCluster_withIdentityConfig(rnd, subnetworkName), - Check: 
resource.ComposeTestCheckFunc( - testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.identity_config", &cluster), - ), - }, - }, - }) -} - -// Test updating identity_config.user_service_account_mapping field -func TestAccDataprocCluster_updateIdentityConfigUserMapping(t *testing.T) { - t.Parallel() - - rnd := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) - acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) - - var cluster dataproc.Cluster - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocClusterDestroy(t), - Steps: []resource.TestStep{ - { - Config: testAccDataprocCluster_updateIdentityConfig(rnd, subnetworkName, "bob@company.com", "bob-sa@iam.gserviceaccount.com"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.identity_config_user_mapping", &cluster), - resource.TestCheckResourceAttr("google_dataproc_cluster.identity_config_user_mapping", "cluster_config.0.security_config.0.identity_config.0.user_service_account_mapping.bob@company.com", "bob-sa@iam.gserviceaccount.com"), - ), - }, - { - Config: testAccDataprocCluster_updateIdentityConfig(rnd, subnetworkName, "alice@company.com", "alice-sa@iam.gserviceaccount.com"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.identity_config_user_mapping", &cluster), - resource.TestCheckResourceAttr("google_dataproc_cluster.identity_config_user_mapping", "cluster_config.0.security_config.0.identity_config.0.user_service_account_mapping.alice@company.com", "alice-sa@iam.gserviceaccount.com"), - ), - }, - }, - }) -} - func TestAccDataprocCluster_withAutoscalingPolicy(t *testing.T) { t.Parallel() @@ 
-1224,76 +1166,6 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) { }) } -func TestAccDataprocCluster_withClusterTier(t *testing.T) { - t.Parallel() - - var cluster dataproc.Cluster - rnd := acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") - subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) - acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocClusterDestroy(t), - Steps: []resource.TestStep{ - { - // Set tier to CLUSTER_TIER_STANDARD - Config: testAccDataprocCluster_withClusterTier(rnd, subnetworkName, "CLUSTER_TIER_STANDARD"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.tier_cluster", &cluster), - resource.TestCheckResourceAttr("google_dataproc_cluster.tier_cluster", "cluster_config.0.cluster_tier", "CLUSTER_TIER_STANDARD"), - ), - }, - { - // Set tier to CLUSTER_TIER_PREMIUM - Config: testAccDataprocCluster_withClusterTier(rnd, subnetworkName, "CLUSTER_TIER_PREMIUM"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.tier_cluster", &cluster), - resource.TestCheckResourceAttr("google_dataproc_cluster.tier_cluster", "cluster_config.0.cluster_tier", "CLUSTER_TIER_PREMIUM"), - ), - }, - }, - }) -} - -func testAccDataprocCluster_withClusterTier(rnd, subnetworkName, tier string) string { - tierConfig := "" - if tier != "" { - tierConfig = fmt.Sprintf(`cluster_tier = "%s"`, tier) - } - clusterName := fmt.Sprintf("tf-test-dproc-tier-%s", rnd) - bucketName := clusterName + "-temp-bucket" - - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" - force_destroy = "true" -} - -resource 
"google_dataproc_cluster" "tier_cluster" { - name = "%s" - region = "us-central1" - - cluster_config { - %s - staging_bucket = google_storage_bucket.bucket.name - temp_bucket = google_storage_bucket.bucket.name - - software_config { - image_version = "2.3.4-debian12" - } - - gce_cluster_config { - subnetwork = "%s" - } - } -} -`, bucketName, clusterName, tierConfig, subnetworkName) -} - func testAccCheckDataprocClusterDestroy(t *testing.T) resource.TestCheckFunc { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -1757,7 +1629,7 @@ resource "google_dataproc_cluster" "basic" { } func testAccDataprocCluster_withConfidentialCompute(rnd, subnetworkName string, imageUri string) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_dataproc_cluster" "confidential" { name = "tf-test-dproc-%s" region = "us-central1" @@ -2769,57 +2641,6 @@ resource "google_dataproc_cluster" "kerb" { `, rnd, rnd, rnd, subnetworkName, kmsKey) } -func testAccDataprocCluster_withIdentityConfig(rnd, subnetworkName string) string { - return fmt.Sprintf(` -resource "google_dataproc_cluster" "identity_config" { - name = "tf-test-dataproc-identity-%s" - region = "us-central1" - cluster_config { - gce_cluster_config { - subnetwork = "%s" - } - security_config { - identity_config { - user_service_account_mapping = { - "bob@company.com" = "bob-sa@iam.gserviceaccouts.com" - } - } - } - } -} -`, rnd, subnetworkName) -} - -func testAccDataprocCluster_updateIdentityConfig(rnd, subnetworkName, user, sa string) string { - return fmt.Sprintf(` -resource "google_dataproc_cluster" "identity_config_user_mapping" { - name = "tf-test-dataproc-update-identity-%s" - region = "us-central1" - - cluster_config { - gce_cluster_config { - subnetwork = "%s" - } - security_config { - identity_config { - user_service_account_mapping = { - "%s" = "%s" - } - } - } - master_config { - num_instances = 1 - machine_type = "n1-standard-2" - } - worker_config { - num_instances = 
2 - machine_type = "n1-standard-2" - } - } -} -`, rnd, subnetworkName, user, sa) -} - func testAccDataprocCluster_withAutoscalingPolicy(rnd, subnetworkName string) string { return fmt.Sprintf(` resource "google_dataproc_cluster" "basic" { diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go rename to mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl index e08344fc40b4..4fc7c56c3528 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job.go.tmpl @@ -148,13 +148,13 @@ func ResourceDataprocJob() *schema.Resource { }, "labels": { - Type: schema.TypeMap, + Type: schema.TypeMap, Description: `Optional. The labels to associate with this job. **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. 
Please refer to the field 'effective_labels' for all of the labels present on the resource.`, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, }, "terraform_labels": { @@ -219,7 +219,7 @@ func resourceDataprocJobUpdate(d *schema.ResourceData, meta interface{}) error { func resourceDataprocJobCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -311,7 +311,7 @@ func resourceDataprocJobCreate(d *schema.ResourceData, meta interface{}) error { func resourceDataprocJobRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -406,7 +406,7 @@ func resourceDataprocJobRead(d *schema.ResourceData, meta interface{}) error { func resourceDataprocJobDelete(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl index 776dfac39de1..9b6faf85a2d6 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl @@ -26,7 +26,7 @@ type jobTestField struct { gcp_attr interface{} } -// TODO: Test `ExactlyOneOf` here +// TODO (mbang): Test `ExactlyOneOf` here // 
func TestAccDataprocJob_failForMissingJobConfig(t *testing.T) { // t.Parallel() @@ -48,7 +48,7 @@ func TestAccDataprocJob_updatable(t *testing.T) { var job dataproc.Job rnd := acctest.RandString(t, 10) - jobId := fmt.Sprintf("tf-test-dproc-update-job-id-%s", rnd) + jobId := fmt.Sprintf("dproc-update-job-id-%s", rnd) networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) @@ -81,7 +81,7 @@ func TestAccDataprocJob_PySpark(t *testing.T) { var job dataproc.Job rnd := acctest.RandString(t, 10) - jobId := fmt.Sprintf("tf-test-dproc-custom-job-id-%s", rnd) + jobId := fmt.Sprintf("dproc-custom-job-id-%s", rnd) networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster") subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName) acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName) @@ -645,7 +645,7 @@ func matchError(attr, tf interface{}, gcp interface{}) string { return fmt.Sprintf("Cluster has mismatched %s.\nTF State: %+v\nGCP State: %+v", attr, tf, gcp) } -// TODO: Test `ExactlyOneOf` here +// TODO (mbang): Test `ExactlyOneOf` here // func testAccDataprocJob_missingJobConf() string { // return ` // resource "google_dataproc_job" "missing_config" { @@ -659,7 +659,7 @@ func matchError(attr, tf interface{}, gcp interface{}) string { var singleNodeClusterConfig = ` resource "google_dataproc_cluster" "basic" { - name = "tf-test-dproc-job-%s" + name = "dproc-job-test-%s" region = "us-central1" cluster_config { @@ -714,7 +714,7 @@ resource "google_dataproc_job" "pyspark" { cluster_name = google_dataproc_cluster.basic.name } reference { - job_id = "tf-test-dproc-custom-job-id-%s" + job_id = "dproc-custom-job-id-%s" } region = google_dataproc_cluster.basic.region @@ -883,7 +883,7 @@ resource "google_dataproc_job" "sparksql" { func 
testAccDataprocJob_presto(rnd, subnetworkName string) string { return fmt.Sprintf(` resource "google_dataproc_cluster" "basic" { - name = "tf-test-dproc-job-%s" + name = "dproc-job-test-%s" region = "us-central1" cluster_config { diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go deleted file mode 100644 index c7f58783b3ca..000000000000 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_session_template_test.go +++ /dev/null @@ -1,198 +0,0 @@ -package dataproc_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDataprocSessionTemplate_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_name": envvar.GetTestProjectFromEnv(), - "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-dataproc-session-template-key1").CryptoKey.Name, - "prevent_destroy": false, - "subnetwork_name": acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "jupyer-session-test-network", "jupyter-session-test-subnetwork"), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDataprocSessionTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDataprocSessionTemplate_preupdate(context), - }, - { - ResourceName: "google_dataproc_session_template.dataproc_session_templates_jupyter_update", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", 
"location", "runtime_config.0.properties", "terraform_labels"}, - }, - { - Config: testAccDataprocSessionTemplate_updated(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_dataproc_session_template.dataproc_session_templates_jupyter_update", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_dataproc_session_template.dataproc_session_templates_jupyter_update", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "runtime_config.0.properties", "terraform_labels"}, - }, - }, - }) -} - -func testAccDataprocSessionTemplate_preupdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_dataproc_session_template" "dataproc_session_templates_jupyter_update" { - name = "projects/%{project_name}/locations/us-central1/sessionTemplates/tf-test-jupyter-session-template%{random_suffix}" - location = "us-central1" - labels = {"session_template_test": "terraform"} - - runtime_config { - properties = { "spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } - } - - environment_config { - execution_config { - subnetwork_uri = "%{subnetwork_name}" - ttl = "3600s" - network_tags = ["tag1"] - } - } - - jupyter_session { - kernel = "PYTHON" - display_name = "tf python kernel" - } -} -`, context) -} - -func testAccDataprocSessionTemplate_updated(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { -} - -data "google_storage_project_service_account" "gcs_account" { -} - -resource "google_dataproc_session_template" "dataproc_session_templates_jupyter_update" { - name = "projects/%{project_name}/locations/us-central1/sessionTemplates/tf-test-jupyter-session-template%{random_suffix}" - location = "us-central1" - labels = {"session_template_test": "terraform"} - - runtime_config { - properties = { 
"spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } - version = "2.2" - } - - environment_config { - execution_config { - ttl = "4800s" - network_tags = ["tag2"] - kms_key = "%{kms_key_name}" - subnetwork_uri = "%{subnetwork_name}" - service_account = "${data.google_project.project.number}-compute@developer.gserviceaccount.com" - staging_bucket = google_storage_bucket.bucket.name - } - peripherals_config { - metastore_service = google_dataproc_metastore_service.ms.name - spark_history_server_config { - dataproc_cluster = google_dataproc_cluster.basic.id - } - } - } - - jupyter_session { - kernel = "SCALA" - display_name = "tf scala kernel" - } - - depends_on = [ - google_kms_crypto_key_iam_member.crypto_key_member_1, - ] -} - -resource "google_storage_bucket" "bucket" { - uniform_bucket_level_access = true - name = "tf-test-dataproc-bucket%{random_suffix}" - location = "US" - force_destroy = true -} - -resource "google_kms_crypto_key_iam_member" "crypto_key_member_1" { - crypto_key_id = "%{kms_key_name}" - role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" - member = "serviceAccount:service-${data.google_project.project.number}@dataproc-accounts.iam.gserviceaccount.com" -} - -resource "google_dataproc_cluster" "basic" { - name = "tf-test-jupyter-session-template%{random_suffix}" - region = "us-central1" - - cluster_config { - # Keep the costs down with smallest config we can get away with - software_config { - override_properties = { - "dataproc:dataproc.allow.zero.workers" = "true" - "spark:spark.history.fs.logDirectory" = "gs://${google_storage_bucket.bucket.name}/*/spark-job-history" - } - } - - gce_cluster_config { - subnetwork = "%{subnetwork_name}" - } - - endpoint_config { - enable_http_port_access = true - } - - master_config { - num_instances = 1 - machine_type = "e2-standard-2" - disk_config { - boot_disk_size_gb = 35 - } - } - - metastore_config { - dataproc_metastore_service = google_dataproc_metastore_service.ms.name - } - } 
-} - -resource "google_dataproc_metastore_service" "ms" { - service_id = "tf-test-jupyter-session-template%{random_suffix}" - location = "us-central1" - port = 9080 - tier = "DEVELOPER" - - maintenance_window { - hour_of_day = 2 - day_of_week = "SUNDAY" - } - - hive_metastore_config { - version = "3.1.2" - } - - network_config { - consumers { - subnetwork = "projects/%{project_name}/regions/us-central1/subnetworks/%{subnetwork_name}" - } - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go b/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go index 0b0f43830992..7531bdcd9ef6 100644 --- a/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go +++ b/mmv1/third_party/terraform/services/dataprocgdc/resource_dataproc_gdc_application_environment_test.go @@ -9,7 +9,6 @@ import ( ) func TestAccDataprocGdcApplicationEnvironment_update(t *testing.T) { - t.Skip("https://github.com/hashicorp/terraform-provider-google/issues/20419") t.Parallel() context := map[string]interface{}{ diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go b/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go rename to mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go.tmpl index 04109d7f2c73..ec98020e5c97 100644 --- a/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go +++ b/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service_test.go.tmpl @@ -2,8 +2,8 @@ package dataprocmetastore_test import ( "fmt" - 
"github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go b/mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go rename to mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.tmpl diff --git a/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go b/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl similarity index 74% rename from mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go rename to mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl index 4979c2114595..dfac75470d1a 100644 --- a/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go +++ b/mmv1/third_party/terraform/services/dataprocmetastore/resource_dataproc_metastore_service_test.go.tmpl @@ -3,6 +3,7 @@ package dataprocmetastore_test import ( "fmt" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -167,3 +168,58 @@ resource "google_storage_bucket" "bucket" { } `, context) } + +func TestAccMetastoreService_tags(t *testing.T) { + t.Parallel() + tagKey := acctest.BootstrapSharedTestTagKey(t, "metastore-service-tagkey") + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org": envvar.GetTestOrgFromEnv(t), + "tagKey": 
tagKey, + "tagValue": acctest.BootstrapSharedTestTagValue(t, "metastore-service-tagvalue", tagKey), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocMetastoreServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccMetastoreServiceTags(context), + }, + { + ResourceName: "google_dataproc_metastore_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_id", "location", "labels", "terraform_labels", "tags"}, + }, + }, + }) +} + +func testAccMetastoreServiceTags(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_metastore_service" "default" { + service_id = "tf-test-my-service-%{random_suffix}" + location = "us-central1" + port = 9080 + tier = "DEVELOPER" + + maintenance_window { + hour_of_day = 2 + day_of_week = "SUNDAY" + } + + hive_metastore_config { + version = "2.3.6" + } + + labels = { + env = "test" + } + tags = { + "%{org}/%{tagKey}" = "%{tagValue}" + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go b/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go index ed33c1f14748..d29028d83f53 100644 --- a/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go +++ b/mmv1/third_party/terraform/services/datastream/resource_datastream_connection_profile_test.go @@ -36,7 +36,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location"}, + ImportStateVerifyIgnore: []string{"connection_profile_id", "location"}, }, { Config: 
testAccDatastreamConnectionProfile_update2(context, true), @@ -45,7 +45,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location", "postgresql_profile.0.password"}, + ImportStateVerifyIgnore: []string{"connection_profile_id", "location", "postgresql_profile.0.password"}, }, { // Disable prevent_destroy @@ -58,7 +58,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.mysql_con_profile", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location", "mysql_profile.0.password"}, + ImportStateVerifyIgnore: []string{"connection_profile_id", "location", "mysql_profile.0.password"}, }, { // run once more to update the password. it should update it in-place @@ -68,7 +68,7 @@ func TestAccDatastreamConnectionProfile_update(t *testing.T) { ResourceName: "google_datastream_connection_profile.mysql_con_profile", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"create_without_validation", "connection_profile_id", "location", "mysql_profile.0.password"}, + ImportStateVerifyIgnore: []string{"connection_profile_id", "location", "mysql_profile.0.password"}, }, { // Disable prevent_destroy diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go deleted file mode 100644 index a6f19404144d..000000000000 --- a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go +++ /dev/null @@ -1,524 +0,0 @@ -package developerconnect_test - -import ( - "testing" - - 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGithubUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Github(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_GithubUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_Github(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "GITHUB" - scopes = ["repo"] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_GithubUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource 
"google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - annotations = { - "foo": "bar" - } - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "GITHUB" - scopes = ["repo", "public_repo"] - } -} -`, context) -} - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGitlabUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Gitlab(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_GitlabUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_Gitlab(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "GITLAB" - scopes = ["api"] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_GitlabUpdate(context 
map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "GITLAB" - scopes = ["api", "read_api"] - } -} -`, context) -} - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGoogleUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Google(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_GoogleUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_Google(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = 
"GOOGLE" - scopes = ["https://www.googleapis.com/auth/drive.readonly"] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_GoogleUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "GOOGLE" - scopes = ["https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/documents.readonly"] - } -} -`, context) -} - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorSentryUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Sentry(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_SentryUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_Sentry(context map[string]interface{}) string 
{ - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "SENTRY" - scopes = ["org:read"] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_SentryUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "SENTRY" - scopes = ["org:read", "org:write"] - } -} -`, context) -} - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorRovoUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Rovo(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_RovoUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", 
"labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_Rovo(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "ROVO" - scopes = ["rovo"] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_RovoUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "ROVO" - scopes = ["rovo"] - } -} -`, context) -} - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorNewRelicUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_NewRelic(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_NewRelicUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_NewRelic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "NEW_RELIC" - scopes = [] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_NewRelicUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "NEW_RELIC" - scopes = [] - } -} -`, context) -} - -func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorDatastaxUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectAccountConnector_Datastax(context), - }, - { - ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - { - Config: testAccDeveloperConnectAccountConnector_DatastaxUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), - }, - }, - }, - { 
- ResourceName: "google_developer_connect_account_connector.my-account-connector", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, - }, - }, - }) -} - -func testAccDeveloperConnectAccountConnector_Datastax(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - provider_oauth_config { - system_provider_id = "DATASTAX" - scopes = [] - } -} -`, context) -} - -func testAccDeveloperConnectAccountConnector_DatastaxUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_developer_connect_account_connector" "my-account-connector" { - location = "us-central1" - account_connector_id = "tf-test-ac%{random_suffix}" - - annotations = { - "foo": "bar" - } - - labels = { - "bar": "foo" - } - - provider_oauth_config { - system_provider_id = "DATASTAX" - scopes = [] - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl new file mode 100644 index 000000000000..4fd87a5ed964 --- /dev/null +++ b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_account_connector_test.go.tmpl @@ -0,0 +1,545 @@ +package developerconnect_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + +) + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGithubUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + 
acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Github(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_GithubUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_Github(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "GITHUB" + scopes = ["repo"] + } +} +`, context) +} + + +func testAccDeveloperConnectAccountConnector_GithubUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + annotations = { + "foo": "bar" + } + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "GITHUB" + scopes = ["repo", "public_repo"] + } +} +`, context) +} + + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGitlabUpdate(t 
*testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Gitlab(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_GitlabUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_Gitlab(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "GITLAB" + scopes = ["api"] + } +} +`, context) +} + + +func testAccDeveloperConnectAccountConnector_GitlabUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "GITLAB" + scopes = ["api", "read_api"] + } +} +`, context) +} + + +func 
TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorGoogleUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Google(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_GoogleUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_Google(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "GOOGLE" + scopes = ["https://www.googleapis.com/auth/drive.readonly"] + } +} +`, context) +} + + +func testAccDeveloperConnectAccountConnector_GoogleUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" 
+ } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "GOOGLE" + scopes = ["https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/documents.readonly"] + } +} +`, context) +} + + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorSentryUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Sentry(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_SentryUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_Sentry(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "SENTRY" + scopes = ["org:read"] + } +} +`, context) +} + + +func testAccDeveloperConnectAccountConnector_SentryUpdate(context map[string]interface{}) string { + return 
acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "SENTRY" + scopes = ["org:read", "org:write"] + } +} +`, context) +} + + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorRovoUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Rovo(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_RovoUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_Rovo(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "ROVO" + scopes = ["rovo"] + } +} +`, context) 
+} + + +func testAccDeveloperConnectAccountConnector_RovoUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "ROVO" + scopes = ["rovo"] + } +} +`, context) +} + + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorNewRelicUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_NewRelic(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_NewRelicUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_NewRelic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = 
"tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "NEW_RELIC" + scopes = [] + } +} +`, context) +} + + +func testAccDeveloperConnectAccountConnector_NewRelicUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "NEW_RELIC" + scopes = [] + } +} +`, context) +} + + +func TestAccDeveloperConnectAccountConnector_developerConnectAccountConnectorDatastaxUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectAccountConnector_Datastax(context), + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + { + Config: testAccDeveloperConnectAccountConnector_DatastaxUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_account_connector.my-account-connector", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_account_connector.my-account-connector", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"account_connector_id", "annotations", "labels"}, + }, + }, + }) +} + + +func testAccDeveloperConnectAccountConnector_Datastax(context map[string]interface{}) string { + return acctest.Nprintf(` +resource 
"google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + provider_oauth_config { + system_provider_id = "DATASTAX" + scopes = [] + } +} +`, context) +} + + +func testAccDeveloperConnectAccountConnector_DatastaxUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_developer_connect_account_connector" "my-account-connector" { + location = "us-central1" + account_connector_id = "tf-test-ac%{random_suffix}" + + annotations = { + "foo": "bar" + } + + labels = { + "bar": "foo" + } + + provider_oauth_config { + system_provider_id = "DATASTAX" + scopes = [] + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go.tmpl similarity index 83% rename from mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go rename to mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go.tmpl index 0bc3bf5ae643..29269b5c44a2 100644 --- a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go +++ b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_connection_test.go.tmpl @@ -1,11 +1,13 @@ + package developerconnect_test import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + ) func TestAccDeveloperConnectConnection_developerConnectConnectionGithubUpdate(t *testing.T) { @@ -41,8 +43,9 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGithubUpdate(t }) } + func 
testAccDeveloperConnectConnection_Github(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -58,6 +61,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func testAccDeveloperConnectConnection_GithubUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -82,6 +86,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func TestAccDeveloperConnectConnection_developerConnectConnectionGithubEnterpriseUpdate(t *testing.T) { t.Parallel() @@ -115,8 +120,9 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGithubEnterpris }) } + func testAccDeveloperConnectConnection_GithubEnterprise(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -131,6 +137,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func testAccDeveloperConnectConnection_GithubEnterpriseUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -154,6 +161,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func TestAccDeveloperConnectConnection_GhePrivConnection(t *testing.T) { t.Parallel() @@ -178,8 +186,9 @@ func TestAccDeveloperConnectConnection_GhePrivConnection(t *testing.T) { }) } + func testAccDeveloperConnectConnection_GhePrivConnection(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { 
location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -204,6 +213,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabUpdate(t *testing.T) { t.Parallel() @@ -237,8 +247,9 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabUpdate(t }) } + func testAccDeveloperConnectConnection_Gitlab(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -258,6 +269,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func testAccDeveloperConnectConnection_GitlabUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -309,8 +321,9 @@ func TestAccDeveloperConnectConnection_GlePrivConnection(t *testing.T) { }) } + func testAccDeveloperConnectConnection_GlePrivConnection(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -342,6 +355,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabEnterpriseUpdate(t *testing.T) { t.Parallel() @@ -375,8 +389,9 @@ func TestAccDeveloperConnectConnection_developerConnectConnectionGitlabEnterpris }) } + func testAccDeveloperConnectConnection_GitlabEnterprise(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = 
"tf-test-tf-test-connection%{random_suffix}" @@ -398,6 +413,7 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func testAccDeveloperConnectConnection_GitlabEnterpriseUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { @@ -427,46 +443,48 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } -func TestAccDeveloperConnectConnection_developerConnectConnectionBitbucketCloudUpdate(t *testing.T) { - t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectConnection_BitbucketCloud(context), - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - { - Config: testAccDeveloperConnectConnection_BitbucketCloudUpdate(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - }, - }) +func TestAccDeveloperConnectConnection_developerConnectConnectionBitbucketCloudUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectConnection_BitbucketCloud(context), + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + { + Config: testAccDeveloperConnectConnection_BitbucketCloudUpdate(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + }, + }) } + func testAccDeveloperConnectConnection_BitbucketCloud(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -488,8 +506,9 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func testAccDeveloperConnectConnection_BitbucketCloudUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -517,46 +536,48 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } -func TestAccDeveloperConnectConnection_developerConnectConnectionBitbucketDataCenterUpdate(t *testing.T) { - t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectConnection_BitbucketDataCenter(context), - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - { - Config: testAccDeveloperConnectConnection_BitbucketDataCenterUpdate(context), +func TestAccDeveloperConnectConnection_developerConnectConnectionBitbucketDataCenterUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectConnection_BitbucketDataCenter(context), + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + { + Config: testAccDeveloperConnectConnection_BitbucketDataCenterUpdate(context), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("google_developer_connect_connection.my-connection", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - }, - }) + }, + }, + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + }, + }) } + func testAccDeveloperConnectConnection_BitbucketDataCenter(context 
map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -578,8 +599,9 @@ resource "google_developer_connect_connection" "my-connection" { `, context) } + func testAccDeveloperConnectConnection_BitbucketDataCenterUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" @@ -608,31 +630,32 @@ resource "google_developer_connect_connection" "my-connection" { } func TestAccDeveloperConnectConnection_BbdcPrivConnection(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectConnection_BbdcPrivConnection(context), - }, - { - ResourceName: "google_developer_connect_connection.my-connection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, - }, - }, - }) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDeveloperConnectConnection_BbdcPrivConnection(context), + }, + { + ResourceName: "google_developer_connect_connection.my-connection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"connection_id", "location", "terraform_labels"}, + }, + }, 
+ }) } + func testAccDeveloperConnectConnection_BbdcPrivConnection(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_developer_connect_connection" "my-connection" { location = "us-central1" connection_id = "tf-test-tf-test-connection%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go b/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go deleted file mode 100644 index 44237e1f2c17..000000000000 --- a/mmv1/third_party/terraform/services/developerconnect/resource_developer_connect_insights_config_test.go +++ /dev/null @@ -1,341 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: MMv1 *** -// -// ---------------------------------------------------------------------------- -// -// This file is automatically generated by Magic Modules and manual -// changes will be clobbered when the file is regenerated. -// -// Please read more about how to change this file in -// .github/CONTRIBUTING.md. 
-// -// ---------------------------------------------------------------------------- - -package developerconnect_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDeveloperConnectInsightsConfig_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - } - - acctest.SkipIfVcr(t) // See: https://github.com/GoogleCloudPlatform/magic-modules/pull/14412 - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccDeveloperConnectInsightsConfig_basic(context), - }, - { - ResourceName: "google_developer_connect_insights_config.insights_config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"insights_config_id", "labels", "location", "terraform_labels", "workload"}, - }, - { - Config: testAccDeveloperConnectInsightsConfig_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_developer_connect_insights_config.insights_config", plancheck.ResourceActionDestroyBeforeCreate), - }, - }, - }, - { - ResourceName: "google_developer_connect_insights_config.insights_config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"insights_config_id", "location", "labels", "terraform_labels", "workload"}, - }, - }, - }) -} - -func testAccDeveloperConnectInsightsConfig_basic(context 
map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_project" "project" { - project_id = "dci-tf-%{random_suffix}" - name = "Service Project" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" - } - - # Grant Permissions - resource "google_project_iam_member" "apphub_permissions" { - project = google_project.project.project_id - role = "roles/apphub.admin" - member = "serviceAccount:hashicorp-test-runner@ci-test-project-188019.iam.gserviceaccount.com" - } - - resource "google_project_iam_member" "insights_agent" { - project = google_project.project.project_id - role = "roles/developerconnect.insightsAgent" - member = "serviceAccount:66214305248-compute@developer.gserviceaccount.com" - } - - # Enable APIs - resource "google_project_service" "apphub_api_service" { - project = google_project.project.project_id - service = "apphub.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "containeranalysis_api" { - project = google_project.project.project_id - service = "containeranalysis.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "containerscanning_api" { - project = google_project.project.project_id - service = "containerscanning.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "container_api" { - project = google_project.project.project_id - service = "container.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "artifactregistry_api" { - project = google_project.project.project_id - service = "artifactregistry.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "cloudbuild_api" { - project = 
google_project.project.project_id - service = "cloudbuild.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "cloudasset_api" { - project = google_project.project.project_id - service = "cloudasset.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "compute_api" { - project = google_project.project.project_id - service = "compute.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "devconnect_api" { - project = google_project.project.project_id - service = "developerconnect.googleapis.com" - depends_on = [google_project.project] - } - - # Wait delay after enabling APIs and granting permissions - resource "time_sleep" "wait_for_propagation" { - depends_on = [ - google_project_iam_member.apphub_permissions, - google_project_iam_member.insights_agent, - google_project_service.apphub_api_service, - google_project_service.containeranalysis_api, - google_project_service.containerscanning_api, - google_project_service.container_api, - google_project_service.artifactregistry_api, - google_project_service.artifactregistry_api, - google_project_service.cloudbuild_api, - google_project_service.cloudasset_api, - google_project_service.compute_api, - google_project_service.devconnect_api, - ] - create_duration = "120s" - } - - resource "google_apphub_application" "my_apphub_application" { - location = "us-central1" - application_id = "tf-test-example-application%{random_suffix}" - scope { - type = "REGIONAL" - } - project = google_project.project.project_id - depends_on = [time_sleep.wait_for_propagation] - } - - resource "google_developer_connect_insights_config" "insights_config" { - location = "us-central1" - insights_config_id = "tf-test-ic%{random_suffix}" - project = google_project.project.project_id - annotations = {} - labels = {} - 
app_hub_application = format("//apphub.googleapis.com/projects/%s/locations/%s/applications/%s", - google_project.project.number, - google_apphub_application.my_apphub_application.location, - google_apphub_application.my_apphub_application.application_id) - - depends_on = [time_sleep.wait_for_propagation] - } - `, context) -} - -func testAccDeveloperConnectInsightsConfig_update(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_project" "project" { - project_id = "dci-tf-%{random_suffix}" - name = "Service Project" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" - } - - # Grant Permissions - resource "google_project_iam_member" "apphub_permissions" { - project = google_project.project.project_id - role = "roles/apphub.admin" - member = "serviceAccount:hashicorp-test-runner@ci-test-project-188019.iam.gserviceaccount.com" - } - - resource "google_project_iam_member" "insights_agent" { - project = google_project.project.project_id - role = "roles/developerconnect.insightsAgent" - member = "serviceAccount:66214305248-compute@developer.gserviceaccount.com" - } - - # Enable APIs - resource "google_project_service" "apphub_api_service" { - project = google_project.project.project_id - service = "apphub.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "containeranalysis_api" { - project = google_project.project.project_id - service = "containeranalysis.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "containerscanning_api" { - project = google_project.project.project_id - service = "containerscanning.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "container_api" { - project = google_project.project.project_id - service = 
"container.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "artifactregistry_api" { - project = google_project.project.project_id - service = "artifactregistry.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "cloudbuild_api" { - project = google_project.project.project_id - service = "cloudbuild.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "cloudasset_api" { - project = google_project.project.project_id - service = "cloudasset.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "compute_api" { - project = google_project.project.project_id - service = "compute.googleapis.com" - disable_dependent_services=true - depends_on = [google_project.project] - } - - resource "google_project_service" "devconnect_api" { - project = google_project.project.project_id - service = "developerconnect.googleapis.com" - depends_on = [google_project.project] - } - - # Wait delay after enabling APIs and granting permissions - resource "time_sleep" "wait_for_propagation" { - depends_on = [ - google_project_iam_member.apphub_permissions, - google_project_iam_member.insights_agent, - google_project_service.apphub_api_service, - google_project_service.containeranalysis_api, - google_project_service.containerscanning_api, - google_project_service.container_api, - google_project_service.artifactregistry_api, - google_project_service.artifactregistry_api, - google_project_service.cloudbuild_api, - google_project_service.cloudasset_api, - google_project_service.compute_api, - google_project_service.devconnect_api, - ] - create_duration = "120s" - } - - resource "google_apphub_application" "my_apphub_application" { - location = "us-central1" - application_id = 
"tf-test-example-application%{random_suffix}" - scope { - type = "REGIONAL" - } - project = google_project.project.project_id - depends_on = [time_sleep.wait_for_propagation] - } - resource "google_developer_connect_insights_config" "insights_config" { - location = "us-central1" - insights_config_id = "tf-test-ic%{random_suffix}" - project = google_project.project.project_id - annotations = {} - labels = {} - app_hub_application = format("//apphub.googleapis.com/projects/%s/locations/%s/applications/%s", - google_project.project.number, - google_apphub_application.my_apphub_application.location, - google_apphub_application.my_apphub_application.application_id) - artifact_configs { - google_artifact_analysis { - project_id = google_project.project.project_id - } - google_artifact_registry { - artifact_registry_package = "my-package" - project_id = google_project.project.project_id - } - uri = "us-docker.pkg.dev/my-project/my-repo/my-image" - } - depends_on = [time_sleep.wait_for_propagation] - } - `, context) -} diff --git a/mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go b/mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go deleted file mode 100644 index 51c9efba9c6e..000000000000 --- a/mmv1/third_party/terraform/services/dialogflow/dialogflow_operation.go +++ /dev/null @@ -1,87 +0,0 @@ -package dialogflow - -import ( - "encoding/json" - "errors" - "fmt" - "regexp" - "time" - - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -type DialogflowOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - tpgresource.CommonOperationWaiter -} - -func (w *DialogflowOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - // Returns the proper get. 
- location := "" - if parts := regexp.MustCompile(`locations\/([^\/]*)\/`).FindStringSubmatch(w.CommonOperationWaiter.Op.Name); parts != nil { - location = parts[1] - } else { - return nil, fmt.Errorf( - "Saw %s when the op name is expected to contains location %s", - w.CommonOperationWaiter.Op.Name, - "projects/{{project}}/locations/{{location}}/...", - ) - } - - url := fmt.Sprintf("https://%s-dialogflow.googleapis.com/v2/%s", location, w.CommonOperationWaiter.Op.Name) - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func createDialogflowWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*DialogflowOperationWaiter, error) { - w := &DialogflowOperationWaiter{ - Config: config, - UserAgent: userAgent, - Project: project, - } - if err := w.CommonOperationWaiter.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - -// nolint: deadcode,unused -func DialogflowOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - w, err := createDialogflowWaiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - rawResponse := []byte(w.CommonOperationWaiter.Op.Response) - if len(rawResponse) == 0 { - return errors.New("`resource` not set in operation response") - } - return json.Unmarshal(rawResponse, response) -} - -func DialogflowOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. 
- return nil - } - w, err := createDialogflowWaiter(config, op, project, activity, userAgent) - if err != nil { - // If w is nil, the op was synchronous. - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) -} diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go.tmpl index 640d82c9e1e5..8e9c3aed7b54 100644 --- a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go +++ b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_agent_test.go.tmpl @@ -3,9 +3,9 @@ package dialogflow_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDialogflowAgent_update(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go deleted file mode 100644 index f337597e4c24..000000000000 --- a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go +++ /dev/null @@ -1,411 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -// ---------------------------------------------------------------------------- -// -// *** AUTO GENERATED CODE *** Type: MMv1 *** -// -// ---------------------------------------------------------------------------- -// -// This file is automatically generated by Magic Modules and manual -// changes will be clobbered when the file is regenerated. -// -// Please read more about how to change this file in -// .github/CONTRIBUTING.md. -// -// ---------------------------------------------------------------------------- - -package dialogflow_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDialogflowConversationProfile_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccDialogflowConversationProfile_dialogflowAgentFull1(context), - }, - { - ResourceName: "google_dialogflow_conversation_profile.profile", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "logging_config", "logging_config.0", "logging_config.0.enable_stackdriver_logging"}, - }, - { - Config: testAccDialogflowConversationProfile_dialogflowAgentFull2(context), - }, - { - ResourceName: "google_dialogflow_conversation_profile.profile", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "logging_config", "logging_config.0", 
"logging_config.0.enable_stackdriver_logging"}, - }, - }, - }) -} - -func testAccDialogflowConversationProfile_dialogflowAgentFull1(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_project" "agent_project" { - name = "tf-test-dialogflow-%{random_suffix}" - project_id = "tf-test-dialogflow-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" - } - - resource "google_project_service" "agent_project" { - service = "dialogflow.googleapis.com" - disable_dependent_services = false - project = "${google_project.agent_project.id}" - } - - resource "google_service_account" "dialogflow_service_account" { - account_id = "tf-test-dialogflow-%{random_suffix}" - } - - resource "google_project_iam_member" "agent_create" { - role = "roles/dialogflow.admin" - member = "serviceAccount:${google_service_account.dialogflow_service_account.email}" - project = "${google_project.agent_project.id}" - } - - resource "google_dialogflow_agent" "agent" { - display_name = "tf-test-agent-%{random_suffix}" - default_language_code = "en-us" - time_zone = "America/New_York" - project = google_project.agent_project.name - } - - resource "google_pubsub_topic" "topic" { - name = "tf-test-topic-%{random_suffix}" - project = google_project.agent_project.project_id - depends_on = [google_project.agent_project, time_sleep.wait_120_seconds] - message_retention_duration = "8000s" - } - resource "google_dialogflow_cx_security_settings" "security_setting" { - display_name = "tf-test-setting-%{random_suffix}" - location = "global" - purge_data_types = [] - retention_window_days = 7 - project = google_project.agent_project.project_id - depends_on = [time_sleep.wait_120_seconds] - } - resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_dialogflow_agent.agent] - } - resource "google_dialogflow_conversation_profile" "profile" { - depends_on = [google_dialogflow_agent.agent, 
google_dialogflow_cx_security_settings.security_setting,time_sleep.wait_120_seconds] - project = google_project.agent_project.name - display_name = "tf-test-conversation-profile-%{random_suffix}" - location = "global" - language_code = "en-US" - automated_agent_config { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - session_ttl = "30s" - } - human_agent_assistant_config { - end_user_suggestion_config { - disable_high_latency_features_sync_delivery = true - feature_configs { - conversation_process_config { - recent_sentences_count = 1 - } - disable_agent_query_logging = false - enable_conversation_augmented_query = false - enable_event_based_suggestion = false - enable_query_suggestion_when_no_answer = false - enable_query_suggestion_only = false - query_config { - confidence_threshold = "1.0" - context_filter_settings { - drop_handoff_messages = true - drop_ivr_messages = true - drop_virtual_agent_messages = true - } - dialogflow_query_source { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - human_agent_side_config { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - } - } - max_results = 1 - sections { - section_types = ["SECTION_TYPE_UNSPECIFIED"] - } - } - suggestion_feature { - type = "CONVERSATION_SUMMARIZATION" - } - suggestion_trigger_settings { - no_small_talk = false - only_end_user = true - } - } - group_suggestion_responses = true - } - human_agent_suggestion_config { - disable_high_latency_features_sync_delivery = true - feature_configs { - conversation_process_config { - recent_sentences_count = 1 - } - disable_agent_query_logging = false - enable_conversation_augmented_query = false - enable_event_based_suggestion = false - enable_query_suggestion_when_no_answer = false - enable_query_suggestion_only = false - query_config { - confidence_threshold = 0.1 - context_filter_settings { - 
drop_handoff_messages = true - drop_ivr_messages = true - drop_virtual_agent_messages = true - } - dialogflow_query_source { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - human_agent_side_config { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - } - } - max_results = 1 - sections { - section_types = ["SECTION_TYPE_UNSPECIFIED"] - } - } - suggestion_feature { - type = "CONVERSATION_SUMMARIZATION" - } - suggestion_trigger_settings { - no_small_talk = false - only_end_user = true - } - } - group_suggestion_responses = true - } - notification_config { - message_format = "JSON" - topic = google_pubsub_topic.topic.id - } - } - human_agent_handoff_config { - live_person_config { - account_number = "00" - } - } - logging_config { - enable_stackdriver_logging = true - } - new_message_event_notification_config { - message_format = "JSON" - topic = google_pubsub_topic.topic.id - } - notification_config { - message_format = "JSON" - topic = google_pubsub_topic.topic.id - } - security_settings = google_dialogflow_cx_security_settings.security_setting.id - stt_config { - enable_word_info = true - language_code = "en-US" - model = "phone_call" - sample_rate_hertz = 1000 - speech_model_variant = "USE_ENHANCED" - use_timeout_based_endpointing = true - } - tts_config { - effects_profile_id = ["id"] - pitch = 1 - speaking_rate = 1 - voice { - name = "john" - ssml_gender = "SSML_VOICE_GENDER_MALE" - } - volume_gain_db = 5 - } - } -`, context) -} -func testAccDialogflowConversationProfile_dialogflowAgentFull2(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_project" "agent_project" { - name = "tf-test-dialogflow-%{random_suffix}" - project_id = "tf-test-dialogflow-%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" - } - resource "google_project_service" "agent_project" { - project = 
"${google_project.agent_project.id}" - service = "dialogflow.googleapis.com" - disable_dependent_services = false - } - - resource "google_service_account" "dialogflow_service_account" { - account_id = "tf-test-dialogflow-%{random_suffix}" - } - - resource "google_project_iam_member" "agent_create" { - project = "${google_project.agent_project.id}" - role = "roles/dialogflow.admin" - member = "serviceAccount:${google_service_account.dialogflow_service_account.email}" - } - - resource "google_dialogflow_agent" "agent" { - display_name = "tf-test-agent-%{random_suffix}" - default_language_code = "en-us" - time_zone = "America/New_York" - project = google_project.agent_project.name - } - resource "google_pubsub_topic" "topic_diff" { - name = "tf-test-topic-%{random_suffix}-diff" - project = google_project.agent_project.project_id - depends_on = [google_project.agent_project, time_sleep.wait_120_seconds] - message_retention_duration = "8000s" - } - resource "google_dialogflow_cx_security_settings" "security_setting_diff" { - display_name = "tf-test-setting-%{random_suffix}-diff" - location = "global" - purge_data_types = [] - retention_window_days = 7 - project = google_project.agent_project.project_id - depends_on = [time_sleep.wait_120_seconds] - } - resource "time_sleep" "wait_120_seconds" { - create_duration = "120s" - depends_on = [google_dialogflow_agent.agent] - } - resource "google_dialogflow_conversation_profile" "profile" { - depends_on = [google_dialogflow_agent.agent, google_dialogflow_cx_security_settings.security_setting_diff, time_sleep.wait_120_seconds] - project = "${google_project.agent_project.name}" - display_name = "tf-test-conversation-profile-%{random_suffix}-new" - location = "global" - language_code = "fr" - automated_agent_config { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - session_ttl = "31s" - } - human_agent_assistant_config { - end_user_suggestion_config { - 
disable_high_latency_features_sync_delivery = false - feature_configs { - conversation_process_config { - recent_sentences_count = 2 - } - disable_agent_query_logging = false - enable_conversation_augmented_query = false - enable_event_based_suggestion = false - enable_query_suggestion_when_no_answer = false - enable_query_suggestion_only = false - query_config { - confidence_threshold = "0.9" - context_filter_settings { - drop_handoff_messages = false - drop_ivr_messages = false - drop_virtual_agent_messages = false - } - dialogflow_query_source { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - human_agent_side_config { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - } - } - max_results = 2 - sections { - section_types = ["SITUATION"] - } - } - suggestion_feature { - type = "CONVERSATION_SUMMARIZATION" - } - suggestion_trigger_settings { - no_small_talk = false - only_end_user = false - } - } - group_suggestion_responses = false - } - human_agent_suggestion_config { - disable_high_latency_features_sync_delivery = false - feature_configs { - conversation_process_config { - recent_sentences_count = 2 - } - disable_agent_query_logging = false - enable_conversation_augmented_query = false - enable_event_based_suggestion = false - enable_query_suggestion_when_no_answer = false - enable_query_suggestion_only = false - query_config { - confidence_threshold = 0.2 - context_filter_settings { - drop_handoff_messages = false - drop_ivr_messages = false - drop_virtual_agent_messages = false - } - dialogflow_query_source { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - human_agent_side_config { - agent = "projects/tf-test-dialogflow-%{random_suffix}/locations/global/agent/environments/draft" - } - } - max_results = 2 - sections { - section_types = ["SITUATION"] - } - } - suggestion_feature { - type = 
"CONVERSATION_SUMMARIZATION" - } - suggestion_trigger_settings { - no_small_talk = false - only_end_user = false - } - } - group_suggestion_responses = false - } - notification_config { - message_format = "PROTO" - topic = google_pubsub_topic.topic_diff.id - } - } - human_agent_handoff_config { - live_person_config { - account_number = "01" - } - } - logging_config { - enable_stackdriver_logging = false - } - new_message_event_notification_config { - message_format = "PROTO" - topic = google_pubsub_topic.topic_diff.id - } - notification_config { - message_format = "PROTO" - topic = google_pubsub_topic.topic_diff.id - } - security_settings = google_dialogflow_cx_security_settings.security_setting_diff.id - } -`, context) -} diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_entity_type_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_fulfillment_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go rename to mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_intent_test.go.tmpl diff --git 
a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go deleted file mode 100644 index 828b5602d19d..000000000000 --- a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_generator_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package dialogflowcx_test - -import ( - "testing" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" -) - -func TestAccDialogflowCXGenerator_dialogflowcxGeneratorUpdate(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckDialogflowCXGeneratorDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDialogflowCXGenerator_dialogflowcxGeneratorBasicExample(context), - }, - { - ResourceName: "google_dialogflow_cx_generator.generator", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"language_code", "parent", "prompt_text"}, - }, - { - Config: testAccDialogflowCXGenerator_dialogflowcxGeneratorUpdate(context), - }, - { - ResourceName: "google_dialogflow_cx_generator.generator", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"language_code", "parent", "prompt_text"}, - }, - { - Config: testAccDialogflowCXGenerator_dialogflowcxGeneratorBasicExample(context), - }, - { - ResourceName: "google_dialogflow_cx_generator.generator", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"language_code", "parent", "prompt_text"}, - }, - }, - }) -} - -func testAccDialogflowCXGenerator_dialogflowcxGeneratorUpdate(context map[string]interface{}) string { - return 
acctest.Nprintf(` -resource "google_dialogflow_cx_agent" "agent" { - display_name = "tf-test-dialogflowcx-agent-fucntion%{random_suffix}" - location = "global" - default_language_code = "en" - supported_language_codes = ["fr","de","es"] - time_zone = "America/New_York" - description = "Example description." -} - -resource "google_dialogflow_cx_generator" "generator" { - parent = google_dialogflow_cx_agent.agent.id - language_code = "es" - display_name = "TF Prompt generator different" - llm_model_settings { - model = "gemini-2.0-flash-001" - prompt_text = "Other results" - } - prompt_text { - text = "Send me great results in Spanish for $placeholder" - } - model_parameter { - temperature = 0.58 - max_decode_steps = 10 - top_p = 0.1 - top_k = 2000 - } - placeholders { - id = "my-id" - name = "placeholder" - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go deleted file mode 100644 index 03b640ea1591..000000000000 --- a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflow_cx_tool_test.go +++ /dev/null @@ -1,310 +0,0 @@ -package dialogflowcx_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDialogflowCXTool_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccDialogflowCXTool_basic(context), - }, - { - ResourceName: 
"google_dialogflow_cx_tool.my_tool", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccDialogflowCXTool_full_api_key(context), - }, - { - ResourceName: "google_dialogflow_cx_tool.my_tool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"open_api_spec.0.authentication.0.api_key_config.0.api_key"}, - }, - { - Config: testAccDialogflowCXTool_full_service_agent_auth(context), - }, - { - ResourceName: "google_dialogflow_cx_tool.my_tool", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccDialogflowCXTool_full_bearer_token(context), - }, - { - ResourceName: "google_dialogflow_cx_tool.my_tool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"open_api_spec.0.authentication.0.bearer_token_config.0.token"}, - }, - }, - }) -} - -func testAccDialogflowCXTool_basic(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_dialogflow_cx_agent" "agent_tool" { - display_name = "tf-test-%{random_suffix}" - location = "global" - default_language_code = "en" - time_zone = "America/New_York" - description = "ageng for tool test" - } - - resource "google_dialogflow_cx_tool" "my_tool" { - parent = google_dialogflow_cx_agent.agent_tool.id - display_name = "Example" - description = "Example Description" - } - `, context) -} - -func testAccDialogflowCXTool_full_api_key(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_dialogflow_cx_agent" "agent_tool" { - display_name = "tf-test-%{random_suffix}" - location = "global" - default_language_code = "en" - time_zone = "America/New_York" - description = "ageng for tool test" - } - - resource "google_dialogflow_cx_tool" "my_tool" { - parent = google_dialogflow_cx_agent.agent_tool.id - display_name = "Example Open API Tool with api_key_config" - description = "Example Description" - open_api_spec { - authentication { - api_key_config { - key_name = "example key 
name" - api_key = "example key" - secret_version_for_api_key = "projects/-/secrets/-/versions/-" - request_location = "HEADER" - } - } - tls_config { - ca_certs { - display_name = "example ca cert name" - cert = base64encode("example cert") - } - } - service_directory_config { - service = "projects/-/locations/-/namespaces/-/services/-" - } - text_schema = <[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go b/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go index 1a3a5568b955..e2521f2b5b33 100644 --- a/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go +++ b/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go @@ -132,6 +132,7 @@ resource "google_project" "project" { resource "google_project_service" "contentwarehouse" { project = google_project.project.project_id service = "contentwarehouse.googleapis.com" + disable_on_destroy = false } resource "time_sleep" "wait_120s" { diff --git a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go index 4697db07ef60..7038f03b33cf 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_message_bus_test.go @@ -413,8 +413,10 @@ resource "google_eventarc_message_bus" "message_bus" { // concerned with testing the Pipeline 
resource, which depends on a singleton MessageBus. func testAccEventarcMessageBus_pipeline(t *testing.T) { context := map[string]interface{}{ - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), + "project_id": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-messagebus-network"))), } acctest.VcrTest(t, resource.TestCase{ @@ -442,6 +444,9 @@ resource "google_eventarc_pipeline" "primary" { pipeline_id = "tf-test-some-pipeline%{random_suffix}" destinations { message_bus = google_eventarc_message_bus.primary.id + network_config { + network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" + } } } @@ -456,8 +461,10 @@ resource "google_eventarc_message_bus" "primary" { // concerned with testing the Enrollment resource, which depends on a singleton MessageBus. 
func testAccEventarcMessageBus_enrollment(t *testing.T) { context := map[string]interface{}{ - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), + "project_id": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-messagebus-network"))), } acctest.VcrTest(t, resource.TestCase{ @@ -504,6 +511,9 @@ resource "google_eventarc_pipeline" "pipeline" { pipeline_id = "tf-test-pipeline%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_topic.id + network_config { + network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" + } } } @@ -518,8 +528,10 @@ resource "google_eventarc_message_bus" "message_bus" { // concerned with testing the Enrollment resource, which depends on a singleton MessageBus. 
func testAccEventarcMessageBus_updateEnrollment(t *testing.T) { context := map[string]interface{}{ - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), + "project_id": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-messagebus-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-messagebus-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-messagebus-network"))), } acctest.VcrTest(t, resource.TestCase{ @@ -583,7 +595,7 @@ resource "google_eventarc_enrollment" "primary" { annotations = { updated_test_annotation = "updated-test-eventarc-annotation" } - # TODO As of time of writing, enrollments can't be updated + # TODO(tommyreddad) As of time of writing, enrollments can't be updated # if their pipeline has been deleted. So use this workaround until the # underlying issue in the Eventarc API is fixed. 
depends_on = [google_eventarc_pipeline.pipeline] @@ -598,6 +610,9 @@ resource "google_eventarc_pipeline" "pipeline_update" { pipeline_id = "tf-test-pipeline2%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_update_topic.id + network_config { + network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" + } } } @@ -610,6 +625,9 @@ resource "google_eventarc_pipeline" "pipeline" { pipeline_id = "tf-test-pipeline%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_topic.id + network_config { + network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" + } } } @@ -639,6 +657,9 @@ resource "google_eventarc_pipeline" "pipeline_update" { pipeline_id = "tf-test-pipeline2%{random_suffix}" destinations { topic = google_pubsub_topic.pipeline_update_topic.id + network_config { + network_attachment = "projects/%{project_id}/regions/%{region}/networkAttachments/%{network_attachment_name}" + } } } diff --git a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go index 31031d833559..b7a188398ca2 100644 --- a/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go +++ b/mmv1/third_party/terraform/services/eventarc/resource_eventarc_pipeline_test.go @@ -14,10 +14,12 @@ func TestAccEventarcPipeline_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "service_account": envvar.GetTestServiceAccountFromEnv(t), - "key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name, - "key2_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key2").CryptoKey.Name, - "random_suffix": acctest.RandString(t, 10), + "project_id": 
envvar.GetTestProjectFromEnv(), + "service_account": envvar.GetTestServiceAccountFromEnv(t), + "key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key").CryptoKey.Name, + "key2_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-bootstrap-eventarc-pipeline-key2").CryptoKey.Name, + "network_attachment_name": acctest.BootstrapNetworkAttachment(t, "tf-test-eventarc-pipeline-na", acctest.BootstrapSubnet(t, "tf-test-eventarc-pipeline-subnet", acctest.BootstrapSharedTestNetwork(t, "tf-test-eventarc-pipeline-network"))), + "random_suffix": acctest.RandString(t, 10), } acctest.BootstrapIamMembers(t, []acctest.IamMember{ { @@ -88,6 +90,9 @@ resource "google_eventarc_pipeline" "primary" { } destinations { topic = google_pubsub_topic.topic_update.id + network_config { + network_attachment = "projects/%{project_id}/regions/us-central1/networkAttachments/%{network_attachment_name}" + } authentication_config { google_oidc { service_account = "%{service_account}" @@ -148,6 +153,9 @@ resource "google_eventarc_pipeline" "primary" { pipeline_id = "tf-test-some-pipeline%{random_suffix}" destinations { topic = google_pubsub_topic.topic_update.id + network_config { + network_attachment = "projects/%{project_id}/regions/us-central1/networkAttachments/%{network_attachment_name}" + } } } `, context) diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go b/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go index b01cd9705ecc..abe465107b2c 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_backup_test.go @@ -118,11 +118,11 @@ resource "google_filestore_backup" "backup" { func TestAccFilestoreBackup_tags(t *testing.T) { t.Parallel() - tagKey := 
acctest.BootstrapSharedTestOrganizationTagKey(t, "filestore-backups-tagkey", nil) + tagKey := acctest.BootstrapSharedTestTagKey(t, "filestore-backups-tagkey") context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "filestore-backups-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestTagValue(t, "filestore-backups-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go similarity index 63% rename from mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl rename to mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go index c776dba31293..1e05c21ecdaf 100644 --- a/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/filestore/resource_filestore_instance_test.go @@ -409,11 +409,11 @@ resource "google_filestore_instance" "instance" { func TestAccFilestoreInstance_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "filestore-instances-tagkey", nil) + tagKey := acctest.BootstrapSharedTestTagKey(t, "filestore-instances-tagkey") context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "filestore-instances-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestTagValue(t, "filestore-instances-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } @@ -465,7 +465,6 @@ func TestAccFilestoreInstance_replication(t *testing.T) { "location_1": "us-east1", "location_2": "us-west1", "tier": "ENTERPRISE", - "project": envvar.GetTestProjectFromEnv(), } acctest.VcrTest(t, resource.TestCase{ PreCheck: 
func() { acctest.AccTestPreCheck(t) }, @@ -474,21 +473,9 @@ func TestAccFilestoreInstance_replication(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccFilestoreInstance_replication(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_filestore_instance.replica_instance", - "effective_replication.0.replicas.0.peer_instance", - "projects/" + context["project"].(string) + "/locations/us-east1/instances/tf-test-source-instance-" + context["random_suffix"].(string), - ), - resource.TestCheckResourceAttr( - "google_filestore_instance.replica_instance", - "effective_replication.0.role", - "STANDBY", - ), - ), }, { - ResourceName: "google_filestore_instance.replica_instance", + ResourceName: "google_filestore_instance.replica-instance", ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: []string{"zone", "initial_replication"}, @@ -499,11 +486,11 @@ func TestAccFilestoreInstance_replication(t *testing.T) { func testAccFilestoreInstance_replication(context map[string]interface{}) string { return acctest.Nprintf(` -resource "google_filestore_instance" "source_instance" { - name = "tf-test-source-instance-%{random_suffix}" +resource "google_filestore_instance" "instance" { + name = "tf-test-instance-%{random_suffix}" location = "%{location_1}" tier = "%{tier}" - description = "An source instance created during testing." + description = "An instance created during testing." file_shares { capacity_gb = 1024 @@ -516,8 +503,8 @@ resource "google_filestore_instance" "source_instance" { } } -resource "google_filestore_instance" "replica_instance" { - name = "tf-test-replica-instance-%{random_suffix}" +resource "google_filestore_instance" "replica-instance" { + name = "tf-test-instance-%{random_suffix}" location = "%{location_2}" tier = "%{tier}" description = "An replica instance created during testing." 
@@ -534,237 +521,9 @@ resource "google_filestore_instance" "replica_instance" { initial_replication { replicas { - peer_instance = google_filestore_instance.source_instance.id - } - } -} -`, context) -} - -{{- if ne $.TargetVersionName "ga" }} - -func TestAccFilestoreInstance_directoryServices(t *testing.T) { - t.Parallel() - - name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - location := "us-central1" - tier := "REGIONAL" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckFilestoreInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccFilestoreInstance_ldap(name, location, tier), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.domain", "my-domain.com"), - resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.servers.0", "ldap.example1.com"), - resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.users_ou", "users"), - resource.TestCheckResourceAttr("google_filestore_instance.instance", "directory_services.0.ldap.0.groups_ou", "groups"), - ), - }, - { - ResourceName: "google_filestore_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"zone"}, - }, - }, - }) -} - -func testAccFilestoreInstance_ldap(name, location, tier string) string { - return fmt.Sprintf(` -resource "google_filestore_instance" "instance" { - provider = google-beta - name = "%s" - location = "%s" - tier = "%s" - description = "An instance created during testing." 
- protocol = "NFS_V4_1" - - file_shares { - capacity_gb = 1024 - name = "share" - } - - networks { - network = "default" - modes = ["MODE_IPV4"] - } - - directory_services { - ldap { - domain = "my-domain.com" - servers = ["ldap.example1.com"] - users_ou = "users" - groups_ou = "groups" - } - } -} -`, name, location, tier) -} - -{{- end }} -{{- if ne $.TargetVersionName "ga" }} - -func TestAccFilestoreInstance_psc(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "name": fmt.Sprintf("tf-test-%d", acctest.RandInt(t)), - "location": "us-central1", - "tier": "REGIONAL", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckFilestoreInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccFilestoreInstance_psc(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_filestore_instance.instance", "networks.0.connect_mode", "PRIVATE_SERVICE_CONNECT"), - ), - }, - { - ResourceName: "google_filestore_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"zone"}, - }, - }, - }) -} - -func testAccFilestoreInstance_psc(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_client_config" "current" { - provider = google-beta -} - -resource "google_compute_network" "psc_network" { - provider = google-beta - name = "%{name}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "psc_subnet" { - provider = google-beta - name = "%{name}" - ip_cidr_range = "10.2.0.0/16" - region = "%{location}" - network = google_compute_network.psc_network.id -} - -resource "google_network_connectivity_service_connection_policy" "default" { - provider = google-beta - name = "%{name}" - location = "%{location}" - service_class = "google-cloud-filestore" - network = 
google_compute_network.psc_network.id - psc_config { - subnetworks = [google_compute_subnetwork.psc_subnet.id] - } -} - -resource "google_filestore_instance" "instance" { - provider = google-beta - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] - name = "%{name}" - location = "%{location}" - tier = "%{tier}" - description = "An instance created during testing." - protocol = "NFS_V4_1" - - file_shares { - capacity_gb = 1024 - name = "share" - - nfs_export_options { - ip_ranges = ["70.0.0.1/24"] - network = google_compute_network.psc_network.name - } - } - - networks { - network = google_compute_network.psc_network.name - modes = ["MODE_IPV4"] - connect_mode = "PRIVATE_SERVICE_CONNECT" - psc_config { - endpoint_project = data.google_client_config.current.project + peer_instance = google_filestore_instance.instance.id } } } `, context) } - -func TestAccFilestoreInstance_nfsExportOptionsNetwork_update(t *testing.T) { - t.Parallel() - - name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - location := "us-central1-a" - tier := "ZONAL" - - // Currently, we can only alternate between an empty network and the instance network of non-PSC instances. 
- acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckFilestoreInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccFilestoreInstance_nfsExportOptionsNetwork_update(name, location, tier, ""), - Check: resource.TestCheckResourceAttr("google_filestore_instance.instance", "file_shares.0.nfs_export_options.0.network", ""), - }, - { - ResourceName: "google_filestore_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"zone"}, - }, - { - Config: testAccFilestoreInstance_nfsExportOptionsNetwork_update(name, location, tier, "default"), - Check: resource.TestCheckResourceAttr("google_filestore_instance.instance", "file_shares.0.nfs_export_options.0.network", "default"), - }, - { - ResourceName: "google_filestore_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"zone"}, - }, - }, - }) -} - -func testAccFilestoreInstance_nfsExportOptionsNetwork_update(name, location, tier, network string) string { - return fmt.Sprintf(` -resource "google_filestore_instance" "instance" { - provider = google-beta - name = "%s" - zone = "%s" - tier = "%s" - description = "An instance created during testing." 
- - file_shares { - capacity_gb = 1024 - name = "share" - - nfs_export_options { - ip_ranges = ["70.0.0.1/24"] - network = "%s" - } - } - - networks { - network = "default" - modes = ["MODE_IPV4"] - } -} -`, name, location, tier, network) -} - -{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl index 6b735ae83e81..0755b2f3740a 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl @@ -139,7 +139,7 @@ func (d *GoogleFirebaseAndroidAppConfigDataSource) Read(ctx context.Context, req appName := fmt.Sprintf("projects/%s/androidApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl index e64e5949608f..8dfdf61f9dc2 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl @@ -137,7 +137,7 @@ func (d *GoogleFirebaseAppleAppConfigDataSource) Read(ctx context.Context, req d appName := 
fmt.Sprintf("projects/%s/iosApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl index 29891e565551..7626b3d5b902 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl @@ -184,7 +184,7 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Read(ctx context.Context, req dat appName := fmt.Sprintf("projects/%s/webApps/%s/config", data.Project.ValueString(), data.WebAppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl index 9a518159c6f5..dfd7901eff82 100644 --- 
a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl @@ -77,12 +77,14 @@ resource "google_project_service" "firebase" { provider = google-beta project = google_project.default.project_id service = "firebase.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "database" { provider = google-beta project = google_project.default.project_id service = "firebasedatabase.googleapis.com" + disable_on_destroy = false depends_on = [ google_project_service.firebase, ] @@ -92,6 +94,7 @@ resource "google_project_service" "appcheck" { provider = google-beta project = google_project.default.project_id service = "firebaseappcheck.googleapis.com" + disable_on_destroy = false depends_on = [ google_project_service.database, ] diff --git a/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go b/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go index 8dea93b53594..26c5bad11741 100644 --- a/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go +++ b/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go @@ -61,6 +61,7 @@ func testAccFirebaseDataConnectService_update(context map[string]interface{}, di resource "google_project_service" "fdc" { project = "%{project_id}" service = "firebasedataconnect.googleapis.com" + disable_on_destroy = false } # Create an FDC service diff --git a/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go b/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go deleted file mode 100644 index 6799e0b41cba..000000000000 --- 
a/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document.go +++ /dev/null @@ -1,51 +0,0 @@ -package firestore - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleFirestoreDocument() *schema.Resource { - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceFirestoreDocument().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "collection") - tpgresource.AddRequiredFieldsToSchema(dsSchema, "document_id") - tpgresource.AddRequiredFieldsToSchema(dsSchema, "database") - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: DataSourceGoogleFirestoreDocumentRead, - Schema: dsSchema, - } -} - -func DataSourceGoogleFirestoreDocumentRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - collection := d.Get("collection").(string) - document_id := d.Get("document_id").(string) - database := d.Get("database").(string) - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error fetching project: %s", err) - } - - name := fmt.Sprintf("projects/%s/databases/%s/documents/%s/%s", project, database, collection, document_id) - d.SetId(name) - if err = d.Set("name", name); err != nil { - return err - } - err = resourceFirestoreDocumentRead(d, meta) - if err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", name) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go b/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go deleted file mode 100644 index 5c7d40638477..000000000000 --- 
a/mmv1/third_party/terraform/services/firestore/data_source_google_firestore_document_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package firestore_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccDatasourceFirestoreDocument_simple(t *testing.T) { - t.Parallel() - - orgId := envvar.GetTestOrgFromEnv(t) - randomSuffix := acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccDatasourceFirestoreDocument_simple(randomSuffix, orgId, "doc-id-1", "val1"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.google_firestore_document.instance", "fields", - "{\"something\":{\"mapValue\":{\"fields\":{\"yo\":{\"stringValue\":\"val1\"}}}}}"), - resource.TestCheckResourceAttr("data.google_firestore_document.instance", - "id", fmt.Sprintf("projects/tf-test-%s/databases/(default)/documents/somenewcollection/doc-id-1", randomSuffix)), - resource.TestCheckResourceAttr("data.google_firestore_document.instance", - "name", fmt.Sprintf("projects/tf-test-%s/databases/(default)/documents/somenewcollection/doc-id-1", randomSuffix)), - resource.TestCheckResourceAttr("data.google_firestore_document.instance", - "collection", "somenewcollection"), - resource.TestCheckResourceAttr("data.google_firestore_document.instance", - "database", "(default)"), - resource.TestCheckResourceAttrSet("data.google_firestore_document.instance", "path"), - resource.TestCheckResourceAttrSet("data.google_firestore_document.instance", "create_time"), - resource.TestCheckResourceAttrSet("data.google_firestore_document.instance", 
"update_time"), - ), - }, - }, - }) -} - -func testAccDatasourceFirestoreDocument_simple_basicDeps(randomSuffix, orgId string) string { - return fmt.Sprintf(` -resource "google_project" "project" { - project_id = "tf-test-%s" - name = "tf-test-%s" - org_id = "%s" - deletion_policy = "DELETE" -} - -resource "time_sleep" "wait_60_seconds" { - depends_on = [google_project.project] - - create_duration = "60s" -} - -resource "google_project_service" "firestore" { - project = google_project.project.project_id - service = "firestore.googleapis.com" - - # Needed for CI tests for permissions to propagate, should not be needed for actual usage - depends_on = [time_sleep.wait_60_seconds] -} - -resource "google_firestore_database" "database" { - project = google_project.project.project_id - name = "(default)" - location_id = "nam5" - type = "FIRESTORE_NATIVE" - - depends_on = [google_project_service.firestore] -} -`, randomSuffix, randomSuffix, orgId) -} - -func testAccDatasourceFirestoreDocument_simple(randomSuffix, orgId, name, val string) string { - return testAccDatasourceFirestoreDocument_simple_basicDeps(randomSuffix, orgId) + fmt.Sprintf(` -resource "google_firestore_document" "instance" { - project = google_project.project.project_id - database = google_firestore_database.database.name - collection = "somenewcollection" - document_id = "%s" - fields = "{\"something\":{\"mapValue\":{\"fields\":{\"yo\":{\"stringValue\":\"%s\"}}}}}" -} - -data "google_firestore_document" "instance" { - project = google_firestore_document.instance.project - database = google_firestore_document.instance.database - collection = google_firestore_document.instance.collection - document_id = google_firestore_document.instance.document_id -} -`, name, val) -} diff --git a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go deleted file mode 100644 index 1a17e8ba1906..000000000000 --- 
a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package firestore_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccFirestoreDatabase_tags(t *testing.T) { - t.Parallel() - - // Bootstrap shared tag key and value - tagKey := acctest.BootstrapSharedTestProjectTagKey(t, "firestore-databases-tagkey", map[string]interface{}{}) - context := map[string]interface{}{ - "pid": envvar.GetTestProjectFromEnv(), - "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestProjectTagValue(t, "firestore-databases-tagvalue", tagKey), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckFirestoreDatabaseDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccFirestoreDatabaseTags(context), - }, - { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"project", "etag", "deletion_policy", "tags"}, - }, - }, - }) -} - -func testAccFirestoreDatabaseTags(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_firestore_database" "database" { - name = "tf-test-database-%{random_suffix}" - location_id = "nam5" - type = "FIRESTORE_NATIVE" - delete_protection_state = "DELETE_PROTECTION_DISABLED" - deletion_policy = "DELETE" - tags = { - "%{pid}/%{tagKey}" = "%{tagValue}" - } - } - `, context) -} diff --git a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go.tmpl similarity index 83% rename from 
mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go rename to mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go.tmpl index 596a4ccb48ed..d867090678b0 100644 --- a/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go +++ b/mmv1/third_party/terraform/services/firestore/resource_firestore_database_update_test.go.tmpl @@ -2,9 +2,9 @@ package firestore_test import ( "fmt" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -26,18 +26,18 @@ func TestAccFirestoreDatabase_updateConcurrencyMode(t *testing.T) { Config: testAccFirestoreDatabase_concurrencyMode(projectId, randomSuffix, "OPTIMISTIC"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, { Config: testAccFirestoreDatabase_concurrencyMode(projectId, randomSuffix, "PESSIMISTIC"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, }, @@ -61,18 +61,18 @@ func TestAccFirestoreDatabase_updatePitrEnablement(t *testing.T) { Config: testAccFirestoreDatabase_pitrEnablement(projectId, randomSuffix, "POINT_IN_TIME_RECOVERY_ENABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, { Config: 
testAccFirestoreDatabase_pitrEnablement(projectId, randomSuffix, "POINT_IN_TIME_RECOVERY_DISABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, }, @@ -96,18 +96,18 @@ func TestAccFirestoreDatabase_updateDeleteProtectionState(t *testing.T) { Config: testAccFirestoreDatabase_deleteProtectionState(projectId, randomSuffix, "DELETE_PROTECTION_ENABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, { Config: testAccFirestoreDatabase_deleteProtectionState(projectId, randomSuffix, "DELETE_PROTECTION_DISABLED"), }, { - ResourceName: "google_firestore_database.database", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"etag", "project"}, }, }, diff --git a/mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go b/mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go rename to mmv1/third_party/terraform/services/gemini/iam_gemini_repository_group_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go rename to 
mmv1/third_party/terraform/services/gemini/resource_gemini_code_repository_index_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go.tmpl index f9d053b35866..d54ea63c659c 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_binding_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiCodeToolsSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "code_tools_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ @@ -109,4 +109,4 @@ resource "google_gemini_code_tools_setting_binding" "basic_binding" { product = "GEMINI_CODE_ASSIST" } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go.tmpl index 5743e8d565b6..67a0da137674 100644 --- 
a/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_code_tools_setting_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl similarity index 95% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl index ad2de07295e2..b52680727430 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_binding_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiDataSharingWithGoogleSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "data_sharing_with_google_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ @@ -59,7 +59,6 @@ resource "google_gemini_data_sharing_with_google_setting" "basic" { location = "global" labels = {"my_key" = "my_value"} enable_preview_data_sharing = true - enable_data_sharing = true } resource "google_gemini_data_sharing_with_google_setting_binding" "basic_binding" { @@ -81,7 +80,6 @@ resource 
"google_gemini_data_sharing_with_google_setting" "basic" { location = "global" labels = {"my_key" = "my_value"} enable_preview_data_sharing = true - enable_data_sharing = true } resource "google_gemini_data_sharing_with_google_setting_binding" "basic_binding" { diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go.tmpl index 6c13c4f1b57d..720fc3b8bd4a 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_data_sharing_with_google_setting_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -50,7 +50,6 @@ resource "google_gemini_data_sharing_with_google_setting" "example" { data_sharing_with_google_setting_id = "%{setting_id}" location = "global" enable_preview_data_sharing = true - enable_data_sharing = true } `, context) } @@ -61,7 +60,6 @@ resource "google_gemini_data_sharing_with_google_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_preview_data_sharing = false - enable_data_sharing = false } `, context) } diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl similarity index 92% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go rename to 
mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl index a69ef8ff388f..38b5fd383fa4 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_binding_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiGeminiGcpEnablementSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "gemini_gcp_enablement_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ @@ -59,7 +59,6 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key": "my_value"} enable_customer_data_sharing = true - web_grounding_type = "WEB_GROUNDING_FOR_ENTERPRISE" } resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { @@ -81,7 +80,6 @@ resource "google_gemini_gemini_gcp_enablement_setting" "basic" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = false - web_grounding_type = "GROUNDING_WITH_GOOGLE_SEARCH" } resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { @@ -90,7 +88,7 @@ resource "google_gemini_gemini_gcp_enablement_setting_binding" "basic_binding" { location = "global" target = "projects/${data.google_project.project.number}" labels = {"my_key" = "my_value"} - product = "GEMINI_CLOUD_ASSIST" + product = "GEMINI_IN_BIGQUERY" } `, context) } diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go 
b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl similarity index 95% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl index 93eb4a8f5afe..2f0edee86fe3 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_gemini_gcp_enablement_setting_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -51,7 +51,6 @@ resource "google_gemini_gemini_gcp_enablement_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = true - web_grounding_type = "WEB_GROUNDING_FOR_ENTERPRISE" } `, context) } @@ -62,7 +61,6 @@ resource "google_gemini_gemini_gcp_enablement_setting" "example" { location = "global" labels = {"my_key" = "my_value"} enable_customer_data_sharing = false - web_grounding_type = "GROUNDING_WITH_GOOGLE_SEARCH" } `, context) } diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go.tmpl index dd0368926e09..363c71a9b2a6 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_logging_setting_binding_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -91,4 +91,4 @@ resource "google_gemini_logging_setting_binding" "basic_binding" { product = "GEMINI_CODE_ASSIST" } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go.tmpl similarity index 96% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go.tmpl index 8012ed347746..a68853e1714a 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_binding_test.go.tmpl @@ -1,7 +1,7 @@ package gemini_test import ( - "fmt" + "fmt" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -15,7 +15,7 @@ func TestAccGeminiReleaseChannelSettingBinding_update(t *testing.T) { context := map[string]interface{}{ "release_channel_setting_id": fmt.Sprintf("tf-test-ls-%s", acctest.RandString(t, 10)), - "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), + "setting_binding_id": fmt.Sprintf("tf-test-lsb-%s", acctest.RandString(t, 10)), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_release_channel_setting_test.go.tmpl diff --git 
a/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go b/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go rename to mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl index eac176d96b7e..bb4547d3f545 100644 --- a/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go +++ b/mmv1/third_party/terraform/services/gemini/resource_gemini_repository_group_test.go.tmpl @@ -13,7 +13,7 @@ import ( // More details: https://cloud.google.com/developer-connect/docs/connect-github-repo#before_you_begin func TestAccGeminiRepositoryGroup_update(t *testing.T) { - codeRepositoryIndexId := acctest.BootstrapSharedCodeRepositoryIndex(t, "basic", "us-central1", "", map[string]string{"ccfe_debug_note": "terraform_e2e_do_not_delete"}) + codeRepositoryIndexId := acctest.BootstrapSharedCodeRepositoryIndex(t, "basic-rg-test", "us-central1", "", map[string]string{"ccfe_debug_note": "terraform_e2e_do_not_delete"}) context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), "project_id": os.Getenv("GOOGLE_PROJECT"), diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_channel_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go.tmpl similarity index 90% rename from 
mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go.tmpl index 658e79ce5b0b..613c30efa76c 100644 --- a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go +++ b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_backup_plan_test.go.tmpl @@ -28,54 +28,54 @@ func TestAccGKEBackupBackupPlan_update(t *testing.T) { Config: testAccGKEBackupBackupPlan_basic(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, - { + { Config: testAccGKEBackupBackupPlan_permissive(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testAccGKEBackupBackupPlan_full(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, - { + { Config: testAccGKEBackupBackupPlan_rpo_daily_window(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testAccGKEBackupBackupPlan_rpo_weekly_window(context), }, { - ResourceName: 
"google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, - { + { Config: testAccGKEBackupBackupPlan_full(context), }, { - ResourceName: "google_gke_backup_backup_plan.backupplan", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, }, diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_channel_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go rename to mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go.tmpl index 04c53ad5f927..cde094427e20 100644 --- a/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go +++ b/mmv1/third_party/terraform/services/gkebackup/resource_gke_backup_restore_plan_test.go.tmpl @@ -1,13 +1,16 @@ + + package gkebackup_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" ) + func TestAccGKEBackupRestorePlan_update(t *testing.T) { t.Parallel() @@ -32,7 +35,7 @@ func TestAccGKEBackupRestorePlan_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, }, - { + { Config: testAccGKEBackupRestorePlan_update(context), }, { @@ -201,4 +204,4 @@ resource "google_gke_backup_restore_plan" "restore_plan" { } } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go b/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go deleted file mode 100644 index 94fad0369a13..000000000000 --- a/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership.go +++ /dev/null @@ -1,39 +0,0 @@ -package gkehub - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleGkeHubMembership() *schema.Resource { - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceGKEHubMembership().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "membership_id") - tpgresource.AddRequiredFieldsToSchema(dsSchema, "location") - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - return &schema.Resource{ - Read: dataSourceGoogleGkeHubMembershipRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleGkeHubMembershipRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/memberships/{{membership_id}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - err = resourceGKEHubMembershipRead(d, meta) - if err != nil { - 
return err - } - - // No labels or annotations for Membership datasource - return nil -} diff --git a/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go b/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go deleted file mode 100644 index 9dac0b1556df..000000000000 --- a/mmv1/third_party/terraform/services/gkehub/data_source_google_gke_hub_membership_test.go +++ /dev/null @@ -1,144 +0,0 @@ -package gkehub_test - -import ( - "fmt" - "strings" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func TestAccDataSourceGoogleGkeHubMembership_basic(t *testing.T) { - t.Parallel() - - project := envvar.GetTestProjectFromEnv() - gkeClusterRegion := "us-central1" - gkeClusterZone := "us-central1-a" - membershipLocation := "global" - randomSuffix := acctest.RandString(t, 10) - - // Define unique names for network and subnetwork for this test run - networkName := fmt.Sprintf("tf-test-mem-ds-net-%s", randomSuffix) - subnetworkName := fmt.Sprintf("tf-test-mem-ds-sub-%s", randomSuffix) - - context := map[string]interface{}{ - "project": project, - "gke_cluster_region": gkeClusterRegion, - "gke_cluster_zone": gkeClusterZone, - "membership_location": membershipLocation, - "random_suffix": randomSuffix, - "network_name": networkName, - "subnetwork_name": subnetworkName, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGoogleGkeHubMembershipDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccDataSourceGoogleGkeHubMembership_basic_config(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceState("data.google_gke_hub_membership.example", "google_gke_hub_membership.example"), - ), - }, - }, - }) -} - -func testAccDataSourceGoogleGkeHubMembership_basic_config(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_compute_network" "default" { - project = "%{project}" - name = "%{network_name}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "default" { - project = "%{project}" - name = "%{subnetwork_name}" - ip_cidr_range = "10.2.0.0/16" // Example CIDR - region = "%{gke_cluster_region}" - network = google_compute_network.default.id -} - -resource "google_container_cluster" "primary" { - project = "%{project}" - name = "tf-test-mem-ds-cl-%{random_suffix}" - location = "%{gke_cluster_zone}" - initial_node_count = 1 - deletion_protection = false - network = google_compute_network.default.id - subnetwork = google_compute_subnetwork.default.id - - master_auth { - client_certificate_config { - issue_client_certificate = false - } - } -} - -resource "google_gke_hub_membership" "example" { - project = "%{project}" - membership_id = "tf-test-mem-%{random_suffix}" - location = "%{membership_location}" - - endpoint { - gke_cluster { - resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" - } - } - - depends_on = [google_container_cluster.primary] -} - -data "google_gke_hub_membership" "example" { - project = google_gke_hub_membership.example.project - location = google_gke_hub_membership.example.location - membership_id = google_gke_hub_membership.example.membership_id -} -`, context) -} - -func testAccCheckGoogleGkeHubMembershipDestroyProducer(t *testing.T) func(s *terraform.State) error { - return func(s *terraform.State) error { - for name, rs := range s.RootModule().Resources { - if rs.Type != "google_gke_hub_membership" { - 
continue - } - if strings.HasPrefix(name, "data.") { - continue - } - - config := acctest.GoogleProviderConfig(t) - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{GKEHub2BasePath}}projects/{{project}}/locations/{{location}}/memberships/{{membership_id}}") - if err != nil { - return fmt.Errorf("Error constructing URL for GKE Hub Membership: %s", err) - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - RawURL: url, - UserAgent: config.UserAgent, - Project: billingProject, - }) - - if err == nil { - return fmt.Errorf("GKEHubMembership still exists at %s", url) - } - } - return nil - } -} diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl index bb160974eefd..778b7227674b 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl @@ -8,6 +8,7 @@ api_version: 'v1' {{- end }} api_resource_type_kind: 'Feature' fields: + - field: 'configmanagement.binauthz.enabled' - field: 'configmanagement.config_sync.enabled' - field: 'configmanagement.config_sync.git.gcp_service_account_email' - field: 'configmanagement.config_sync.git.https_proxy' diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl index 8f2b784f4b21..3b3c03cafa27 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl @@ -109,7 +109,7 @@ resource 
"google_gke_hub_feature_membership" "feature_member_1" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "hierarchy" @@ -138,7 +138,7 @@ resource "google_gke_hub_feature_membership" "feature_member_2" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_second.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "hierarchy" @@ -209,6 +209,7 @@ resource "google_gke_hub_feature_membership" "feature_member_1" { } } } + management = "MANAGEMENT_AUTOMATIC" } } @@ -218,7 +219,7 @@ resource "google_gke_hub_feature_membership" "feature_member_2" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_second.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "hierarchy" @@ -251,7 +252,7 @@ resource "google_gke_hub_feature_membership" "feature_member_2" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_second.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "unstructured" @@ -274,7 +275,7 @@ resource "google_gke_hub_feature_membership" "feature_member_3" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_third.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "hierarchy" @@ -297,7 +298,7 @@ resource "google_gke_hub_feature_membership" "feature_member_4" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_fourth.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" } } `, context) @@ -322,7 
+323,7 @@ resource "google_gke_hub_feature_membership" "feature_member_3" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_third.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" } } `, context) @@ -490,7 +491,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true git { @@ -552,7 +553,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true git { @@ -645,7 +646,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_acmoci.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "unstructured" @@ -687,7 +688,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_acmoci.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" config_sync { enabled = true source_format = "hierarchy" @@ -729,7 +730,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { feature = google_gke_hub_feature.feature.name membership = google_gke_hub_membership.membership_acmoci.membership_id configmanagement { - version = "1.21.0" + version = "1.18.2" } } `, context) @@ -1086,7 +1087,7 @@ resource "google_gke_hub_feature_membership" "feature_member" { } } } - version = "1.20.0" + version = "1.17.0" } } `, context) @@ -1136,7 +1137,7 @@ resource "google_gke_hub_feature_membership" 
"feature_member" { } } } - version = "1.20.0" + version = "1.17.0" } } `, context) @@ -1308,6 +1309,7 @@ resource "google_project" "project" { resource "google_project_service" "anthos" { project = google_project.project.project_id service = "anthos.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "mesh" { @@ -1338,16 +1340,19 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } // It needs waiting until the API services are really activated. diff --git a/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go b/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go index c585194fc0a0..da4cf8be7dfa 100644 --- a/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/data_source_google_gke_hub_feature_test.go @@ -20,9 +20,9 @@ func TestAccDataSourceGoogleGkeHubFeature_basic(t *testing.T) { } acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGoogleGkeHubFeatureDestroyProducer(t), + PreCheck: func() { acctest.AccTestPreCheck(t) }, + Providers: acctest.TestAccProviders, + CheckDestroy: testAccCheckGoogleGkeHubFeatureDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccDataSourceGoogleGkeHubFeature_basic(context), diff --git a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go 
b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go rename to mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go.tmpl index c9133ec25c35..48eb09f11440 100644 --- a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go.tmpl @@ -137,6 +137,7 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -173,6 +174,7 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -222,6 +224,7 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -259,6 +262,7 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -295,6 +299,7 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" diff --git 
a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index c1bb69f57339..88b1330b8c9b 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -178,18 +178,21 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" + disable_on_destroy = false provider = google-beta } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" + disable_on_destroy = false provider = google-beta } resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false provider = google-beta } `, context) @@ -489,6 +492,14 @@ func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) ImportState: true, ImportStateVerify: true, }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementEnableAutomaticManagementUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, { Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementRemovalUpdate(context), }, @@ -497,10 +508,37 @@ func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) ImportState: true, ImportStateVerify: true, }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementAutomaticManagement(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, }, }) } +func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementAutomaticManagement(context map[string]interface{}) string { + return 
gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "configmanagement" + location = "global" + fleet_default_member_config { + configmanagement { + management = "MANAGEMENT_AUTOMATIC" + config_sync { + enabled = true + } + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] + project = google_project.project.project_id +} +`, context) +} + func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(context map[string]interface{}) string { return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` resource "google_gke_hub_feature" "feature" { @@ -508,7 +546,7 @@ resource "google_gke_hub_feature" "feature" { location = "global" fleet_default_member_config { configmanagement { - version = "1.21.2" + version = "1.19.1" config_sync { source_format = "hierarchy" git { @@ -535,7 +573,7 @@ resource "google_gke_hub_feature" "feature" { location = "global" fleet_default_member_config { configmanagement { - version = "1.21.3" + version = "1.19.2" management = "MANAGEMENT_MANUAL" config_sync { enabled = true @@ -558,6 +596,33 @@ resource "google_gke_hub_feature" "feature" { `, context) } +func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementEnableAutomaticManagementUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "configmanagement" + location = "global" + fleet_default_member_config { + configmanagement { + management = "MANAGEMENT_AUTOMATIC" + config_sync { + prevent_drift = true + source_format = "unstructured" + oci { + sync_repo = "us-central1-docker.pkg.dev/corp-gke-build-artifacts/acm/configs:latest" + policy_dir = "/acm/nonprod-root/" + secret_type = "gcpserviceaccount" + sync_wait_secs = "15" + gcp_service_account_email = "gke-cluster@gke-foo-nonprod.iam.gserviceaccount.com" + } + } + } + } + 
depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] + project = google_project.project.project_id +} +`, context) +} + func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementRemovalUpdate(context map[string]interface{}) string { return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` resource "google_gke_hub_feature" "feature" { @@ -913,76 +978,6 @@ resource "google_gke_hub_feature" "feature" { `, context) } -func TestAccGKEHubFeature_Rbacrolebindingactuation(t *testing.T) { - // VCR fails to handle batched project services - acctest.SkipIfVcr(t) - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGKEHubFeature_Rbacrolebindingactuation(context), - }, - { - ResourceName: "google_gke_hub_feature.feature", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"project", "labels", "terraform_labels"}, - }, - { - Config: testAccGKEHubFeature_RbacrolebindingactuationUpdate(context), - }, - { - ResourceName: "google_gke_hub_feature.feature", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccGKEHubFeature_Rbacrolebindingactuation(context map[string]interface{}) string { - return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_feature" "feature" { - name = "rbacrolebindingactuation" - location = "global" - spec { - rbacrolebindingactuation { - allowed_custom_roles = ["custom-role1","custom-role2","custom-role3"] - } - } - 
depends_on = [google_project_service.anthos, google_project_service.gkehub] - project = google_project.project.project_id -} -`, context) -} - -func testAccGKEHubFeature_RbacrolebindingactuationUpdate(context map[string]interface{}) string { - return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_feature" "feature" { - name = "rbacrolebindingactuation" - location = "global" - spec { - rbacrolebindingactuation { - allowed_custom_roles = ["custom-role1","custom-role2","custom-role3","custom-role4"] - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub] - project = google_project.project.project_id -} -`, context) -} - func gkeHubFeatureProjectSetupForGA(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_project" "project" { @@ -1021,11 +1016,13 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "anthos" { @@ -1036,6 +1033,7 @@ resource "google_project_service" "anthos" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } resource "google_project" "project_2" { @@ -1049,16 +1047,19 @@ resource "google_project" "project_2" { resource "google_project_service" "compute_2" { project = google_project.project_2.project_id service = "compute.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "container_2" { project = google_project.project_2.project_id service = "container.googleapis.com" + disable_on_destroy = false } resource "google_project_service" "gkehub_2" { project = 
google_project.project_2.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false } `, context) } diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl index 8127202e4e94..7fb8d5170a94 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl @@ -124,12 +124,14 @@ resource "google_project" "project" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" + disable_on_destroy = false depends_on = [google_project_service.anthos] } resource "google_project_service" "anthos" { project = google_project.project.project_id service = "anthos.googleapis.com" + disable_on_destroy = false } resource "time_sleep" "wait_for_gkehub_enablement" { diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go index ae358d84ebb7..8efb36aeb073 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" ) @@ -12,16 +13,13 @@ func TestAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacRoleBindingBasicExample_u t.Parallel() context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "project": 
envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGKEHub2ScopeRBACRoleBindingDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacRoleBindingBasicExample_basic(context), @@ -53,7 +51,7 @@ resource "google_gke_hub_scope" "scoperbacrolebinding" { resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" - scope_id = google_gke_hub_scope.scoperbacrolebinding.scope_id + scope_id = "tf-test-scope%{random_suffix}" user = "test-email@gmail.com" role { predefined_role = "ADMIN" @@ -61,6 +59,7 @@ resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { labels = { key = "value" } + depends_on = [google_gke_hub_scope.scoperbacrolebinding] } `, context) } @@ -73,7 +72,7 @@ resource "google_gke_hub_scope" "scoperbacrolebinding" { resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" - scope_id = google_gke_hub_scope.scoperbacrolebinding.scope_id + scope_id = "tf-test-scope%{random_suffix}" group = "test-email2@gmail.com" role { predefined_role = "VIEW" @@ -81,132 +80,7 @@ resource "google_gke_hub_scope_rbac_role_binding" "scoperbacrolebinding" { labels = { key = "updated_value" } -} -`, context) -} - -func TestAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_update(t *testing.T) { - // VCR fails to handle batched project services - acctest.SkipIfVcr(t) - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), - "billing_account": 
envvar.GetTestBillingAccountFromEnv(t), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGKEHub2ScopeRBACRoleBindingDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_basic(context), - }, - { - ResourceName: "google_gke_hub_scope_rbac_role_binding.scope_rbac_custom_role_binding", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "scope_id", "scope_rbac_role_binding_id", "terraform_labels"}, - }, - { - Config: testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_update(context), - }, - { - ResourceName: "google_gke_hub_scope_rbac_role_binding.scope_rbac_custom_role_binding", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"scope_rbac_role_binding_id", "scope_id", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_basic(context map[string]interface{}) string { - return gkeHubRRBActuationProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_scope" "scope" { - scope_id = "tf-test-scope%{random_suffix}" - depends_on = [google_project_service.anthos, google_project_service.gkehub] -} - -resource "google_gke_hub_feature" "rbacrolebindingactuation" { - name = "rbacrolebindingactuation" - location = "global" - spec { - rbacrolebindingactuation { - allowed_custom_roles = ["my-custom-role", "my-custom-role-2"] - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub] -} - -resource "google_gke_hub_scope_rbac_role_binding" "scope_rbac_custom_role_binding" { - scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" - scope_id = google_gke_hub_scope.scope.scope_id - user = 
"test-email@gmail.com" - role { - custom_role = "my-custom-role" - } - labels = { - key = "value" - } - depends_on = [google_gke_hub_feature.rbacrolebindingactuation] -} -`, context) -} - -func testAccGKEHub2ScopeRBACRoleBinding_gkehubScopeRbacCustomRoleBindingBasicExample_update(context map[string]interface{}) string { - return gkeHubRRBActuationProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_scope" "scope" { - scope_id = "tf-test-scope%{random_suffix}" -} - -resource "google_gke_hub_feature" "rbacrolebindingactuation" { - name = "rbacrolebindingactuation" - location = "global" - spec { - rbacrolebindingactuation { - allowed_custom_roles = ["my-custom-role", "my-custom-role-2"] - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub] -} - -resource "google_gke_hub_scope_rbac_role_binding" "scope_rbac_custom_role_binding" { - scope_rbac_role_binding_id = "tf-test-scope-rbac-role-binding%{random_suffix}" - scope_id = google_gke_hub_scope.scope.scope_id - user = "test-email@gmail.com" - role { - custom_role = "my-custom-role-2" - } - labels = { - key = "value" - } - depends_on = [google_gke_hub_feature.rbacrolebindingactuation] -} -`, context) -} - -func gkeHubRRBActuationProjectSetupForGA(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - name = "tf-test-gkehub%{random_suffix}" - project_id = "tf-test-gkehub%{random_suffix}" - org_id = "%{org_id}" - billing_account = "%{billing_account}" - deletion_policy = "DELETE" -} - -resource "google_project_service" "anthos" { - project = google_project.project.project_id - service = "anthos.googleapis.com" -} - -resource "google_project_service" "gkehub" { - project = google_project.project.project_id - service = "gkehub.googleapis.com" + depends_on = [google_gke_hub_scope.scoperbacrolebinding] } `, context) } diff --git a/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go 
b/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go rename to mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.tmpl index b255c5011634..255f43439b96 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go +++ b/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.tmpl @@ -99,10 +99,10 @@ func (w *gkeonpremOperationWaiter) QueryOp() (interface{}, error) { url := fmt.Sprintf("%s%s", w.Config.GkeonpremBasePath, w.Op.Name) return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, UserAgent: w.UserAgent, }) } diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go.tmpl similarity index 79% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go.tmpl index 692a58875fd1..7fa9b1323f19 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_cluster_test.go.tmpl @@ -8,105 +8,105 @@ import ( ) func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBasic(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: 
testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", - ImportState: true, 
- ImportStateVerify: true, - }, - { - Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-metallb" { name = "cluster-metallb%{random_suffix}" @@ -184,7 +184,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-metallb" { name = "cluster-metallb%{random_suffix}" @@ -260,7 +260,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context map[ } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-manuallb" { name = "cluster-manuallb%{random_suffix}" 
@@ -339,7 +339,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(contex } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-manuallb" { name = "cluster-manuallb%{random_suffix}" @@ -407,7 +407,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context map } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-bgplb" { name = "cluster-bgplb%{random_suffix}" @@ -453,7 +453,6 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context m "10.200.0.14/32", "fd00:1::12/128" ] - manual_assign = true } load_balancer_node_pool_config { node_pool_config { @@ -492,7 +491,7 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context m } func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster-bgplb" { name = "cluster-bgplb%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go.tmpl similarity index 81% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go.tmpl index e0e430b79f3a..da81a36fe5cc 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go +++ 
b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_bare_metal_node_pool_test.go.tmpl @@ -8,41 +8,41 @@ import ( ) func TestAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremBareMetalNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context), - }, - { - ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + 
}, + }, + }) } func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -134,7 +134,7 @@ func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context map[ } func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_bare_metal_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go.tmpl similarity index 77% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go.tmpl index 4f495496da86..8e380005fb16 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_cluster_test.go.tmpl @@ -8,107 +8,107 @@ import ( ) func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateBasic(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context), - }, - { - ResourceName: 
"google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateF5Lb(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: 
testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(t *testing.T) { - // VCR fails to handle batched project services - acctest.SkipIfVcr(t) - t.Parallel() + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context), - }, - { - ResourceName: "google_gkeonprem_vmware_cluster.cluster", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) + acctest.VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -156,7 +156,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context map[s } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -204,7 +204,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context map[string } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -251,7 +251,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context map[stri } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -298,7 
+298,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context map[string]in } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -392,7 +392,7 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context map[ } func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go.tmpl similarity index 81% rename from mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go rename to mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go.tmpl index b89cdaff1768..37b54ef28f53 100644 --- a/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go +++ b/mmv1/third_party/terraform/services/gkeonprem/resource_gkeonprem_vmware_node_pool_test.go.tmpl @@ -8,41 +8,41 @@ import ( ) func TestAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckGkeonpremVmwareNodePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context), - }, - { - ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - { - Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context), - }, - { - ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context), + }, + { + ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) } func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" @@ -127,7 +127,7 @@ func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context map[string } func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_gkeonprem_vmware_cluster" "cluster" { name = "tf-test-cluster-%{random_suffix}" diff --git a/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go 
b/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go rename to mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go.tmpl index 3725784c4660..c1227b201afa 100644 --- a/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go +++ b/mmv1/third_party/terraform/services/iam2/resource_iam_deny_policy_test.go.tmpl @@ -76,7 +76,7 @@ func TestAccIAM2DenyPolicy_iamDenyPolicyFolderParent(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"name", "parent"}, }, - { + { Config: testAccIAM2DenyPolicy_iamDenyPolicyFolderUpdate(context), }, { diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go b/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go rename to mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go.tmpl index 6a4856956932..8dedbe2d98f7 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_folders_policy_binding_test.go.tmpl @@ -21,7 +21,7 @@ func TestAccIAM3FoldersPolicyBinding_iamFoldersPolicyBindingExample_update(t *te PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, + "time": {}, }, CheckDestroy: testAccCheckIAM3FoldersPolicyBindingDestroyProducer(t), Steps: []resource.TestStep{ diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go b/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go.tmpl similarity index 98% rename from 
mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go rename to mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go.tmpl index 6eccb406b3f4..0e77227913ab 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_organizations_policy_binding_test.go.tmpl @@ -21,7 +21,7 @@ func TestAccIAM3OrganizationsPolicyBinding_iam3OrganizationsPolicyBindingExample PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckIAM3OrganizationsPolicyBindingDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ + ExternalProviders: map[string]resource.ExternalProvider{ "time": {}, }, Steps: []resource.TestStep{ diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go b/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go rename to mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go.tmpl index 8aa95953b7a3..5603b10a2b9f 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_principal_access_boundary_policy_test.go.tmpl @@ -1,5 +1,4 @@ package iam3_test - import ( "testing" diff --git a/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go b/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go.tmpl similarity index 96% rename from mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go rename to 
mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go.tmpl index 3e2ade2da282..70776bedcd6c 100644 --- a/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go +++ b/mmv1/third_party/terraform/services/iam3/resource_iam_projects_policy_binding_test.go.tmpl @@ -13,14 +13,14 @@ func TestAccIAM3ProjectsPolicyBinding_iamProjectsPolicyBindingExample_update(t * t.Parallel() context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckIAM3ProjectsPolicyBindingDestroyProducer(t), + CheckDestroy: testAccCheckIAM3ProjectsPolicyBindingDestroyProducer(t), ExternalProviders: map[string]resource.ExternalProvider{ "time": {}, }, @@ -52,6 +52,7 @@ func TestAccIAM3ProjectsPolicyBinding_iamProjectsPolicyBindingExample_update(t * ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "location", "policy_binding_id"}, }, + }, }) } diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.tmpl index 83c712a330cc..2a5d5d148483 100644 --- a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go +++ b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.tmpl @@ -1,8 +1,8 @@ package iambeta_test 
import ( - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.tmpl index 0a303d908299..da0d26fa2c88 100644 --- a/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go +++ b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.tmpl @@ -1,8 +1,8 @@ package iambeta_test import ( - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl deleted file mode 100644 index c7f8beb10f52..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_id_test.go.tmpl +++ 
/dev/null @@ -1,38 +0,0 @@ -{{- if ne $.TargetVersionName "ga" -}} -package iambeta_test - -import ( - "strings" - "testing" - - "github.com/hashicorp/terraform-provider-google/google/services/iambeta" - "github.com/hashicorp/terraform-provider-google/google/verify" -) - -func TestValidateWorkloadIdentityPoolManagedIdentityId(t *testing.T) { - x := []verify.StringValidationTestCase{ - // No errors - {TestName: "basic", Value: "foobar"}, - {TestName: "with numbers", Value: "foobar123"}, - {TestName: "short", Value: "foos"}, - {TestName: "long", Value: "12345678901234567890123456789012"}, - {TestName: "has a hyphen", Value: "foo-bar"}, - - // With errors - {TestName: "empty", Value: "", ExpectError: true}, - {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, - {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, - {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, - {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, - {TestName: "too short", Value: "f", ExpectError: true}, - {TestName: "too long", Value: strings.Repeat("f", 64), ExpectError: true}, - {TestName: "starts with non-alphanumeric", Value: "-foobar", ExpectError: true}, - {TestName: "ends with non-alphanumeric", Value: "foobar-", ExpectError: true}, - } - - es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolManagedIdentityId) - if len(es) > 0 { - t.Errorf("Failed to validate WorkloadIdentityPoolManagedIdentity names: %v", es) - } -} -{{- end -}} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl deleted file mode 100644 index 35f20fa9041b..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_managed_identity_test.go.tmpl +++ /dev/null @@ -1,180 +0,0 @@ -{{- if ne $.TargetVersionName "ga" 
-}} -package iambeta_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolManagedIdentityDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(context), - }, - { - ResourceName: "google_iam_workload_identity_pool_managed_identity.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, - }, - { - Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_updated(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_iam_workload_identity_pool_managed_identity.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_iam_workload_identity_pool_managed_identity.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, - }, - }, - }) -} - -func TestAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectNumberFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - 
acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolManagedIdentityDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(context), - }, - { - ResourceName: "google_iam_workload_identity_pool_managed_identity.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, - }, - { - Config: testAccIAMBetaWorkloadIdentityPoolManagedIdentity_updated(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_iam_workload_identity_pool_managed_identity.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_iam_workload_identity_pool_managed_identity.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id", "workload_identity_pool_managed_identity_id"}, - }, - }, - }) -} - -func testAccIAMBetaWorkloadIdentityPoolManagedIdentity_minimal(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "ns" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" -} - -resource "google_iam_workload_identity_pool_managed_identity" "example" { - provider = google-beta - - workload_identity_pool_id = 
google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id - workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" -} -`, context) -} - -func testAccIAMBetaWorkloadIdentityPoolManagedIdentity_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "ns" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" -} - -resource "google_iam_workload_identity_pool_managed_identity" "example" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id - workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" - description = "Example Managed Identity in a Workload Identity Pool Namespace" - disabled = true - attestation_rules { - google_cloud_resource = "//compute.googleapis.com/projects/%{project}/uid/zones/us-central1-a/instances/12345678" - } - attestation_rules { - google_cloud_resource = "//run.googleapis.com/projects/%{project}/name/locations/us-east1/services/my-service" - } -} -`, context) -} - -func testAccIAMBetaWorkloadIdentityPoolManagedIdentity_updated(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" - mode 
= "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "ns" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" -} - -resource "google_iam_workload_identity_pool_managed_identity" "example" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = google_iam_workload_identity_pool_namespace.ns.workload_identity_pool_namespace_id - workload_identity_pool_managed_identity_id = "tf-test-example-managed-identity%{random_suffix}" - description = "Updated Managed Identity in a Workload Identity Pool Namespace" - disabled = false -} -`, context) -} -{{- end -}} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl deleted file mode 100644 index 5b6b9b3ae8a9..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_id_test.go.tmpl +++ /dev/null @@ -1,38 +0,0 @@ -{{- if ne $.TargetVersionName "ga" -}} -package iambeta_test - -import ( - "strings" - "testing" - - "github.com/hashicorp/terraform-provider-google/google/services/iambeta" - "github.com/hashicorp/terraform-provider-google/google/verify" -) - -func TestValidateWorkloadIdentityPoolNamespaceId(t *testing.T) { - x := []verify.StringValidationTestCase{ - // No errors - {TestName: "basic", Value: "foobar"}, - {TestName: "with numbers", Value: "foobar123"}, - {TestName: "short", Value: "foos"}, - {TestName: "long", Value: "12345678901234567890123456789012"}, - {TestName: "has a hyphen", Value: "foo-bar"}, - - // With errors - {TestName: "empty", Value: "", ExpectError: true}, - {TestName: "starts with a gcp-", Value: 
"gcp-foobar", ExpectError: true}, - {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, - {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, - {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, - {TestName: "too short", Value: "f", ExpectError: true}, - {TestName: "too long", Value: strings.Repeat("f", 64), ExpectError: true}, - {TestName: "starts with non-alphanumeric", Value: "-foobar", ExpectError: true}, - {TestName: "ends with non-alphanumeric", Value: "foobar-", ExpectError: true}, - } - - es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolNamespaceId) - if len(es) > 0 { - t.Errorf("Failed to validate WorkloadIdentityPoolNamespace names: %v", es) - } -} -{{- end -}} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl deleted file mode 100644 index 9c5b5e4d069a..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_namespace_test.go.tmpl +++ /dev/null @@ -1,148 +0,0 @@ -{{- if ne $.TargetVersionName "ga" -}} -package iambeta_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccIAMBetaWorkloadIdentityPoolNamespace_minimal(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolNamespaceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPoolNamespace_minimal(context), - }, - { 
- ResourceName: "google_iam_workload_identity_pool_namespace.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, - }, - { - Config: testAccIAMBetaWorkloadIdentityPoolNamespace_updated(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_iam_workload_identity_pool_namespace.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_iam_workload_identity_pool_namespace.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, - }, - }, - }) -} - -func TestAccIAMBetaWorkloadIdentityPoolNamespace_full(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolNamespaceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPoolNamespace_full(context), - }, - { - ResourceName: "google_iam_workload_identity_pool_namespace.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, - }, - { - Config: testAccIAMBetaWorkloadIdentityPoolNamespace_updated(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_iam_workload_identity_pool_namespace.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_iam_workload_identity_pool_namespace.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"workload_identity_pool_id", "workload_identity_pool_namespace_id"}, - }, - }, - }) -} - -func testAccIAMBetaWorkloadIdentityPoolNamespace_minimal(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "example" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" -} -`, context) -} - -func testAccIAMBetaWorkloadIdentityPoolNamespace_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "example" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" - description = "Example Namespace in a Workload Identity Pool" - disabled = true -} -`, context) -} - -func testAccIAMBetaWorkloadIdentityPoolNamespace_updated(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workload_identity_pool" "pool" { - provider = google-beta - - workload_identity_pool_id = "tf-test-example-pool%{random_suffix}" - mode = "TRUST_DOMAIN" -} - -resource "google_iam_workload_identity_pool_namespace" "example" { - provider = google-beta - - workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id - workload_identity_pool_namespace_id = "tf-test-example-namespace%{random_suffix}" - description = "Updated Namespace in a 
Workload Identity Pool" - disabled = false -} -`, context) -} -{{- end -}} diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go.tmpl index 89c530f5caf5..dc07534ed47d 100644 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_test.go.tmpl @@ -1,8 +1,8 @@ package iambeta_test import ( - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -259,7 +259,7 @@ func TestAccIAMBetaWorkloadIdentityPoolProvider_x509(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"workload_identity_pool_id", "workload_identity_pool_provider_id"}, }, - { + { Config: testAccIAMBetaWorkloadIdentityPoolProvider_x509_update(context), }, { diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl 
b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl index b1faf0591677..52113b646693 100644 --- a/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_test.go.tmpl @@ -2,9 +2,6 @@ package iambeta_test import ( "fmt" - {{if ne $.TargetVersionName "ga" -}} - "github.com/hashicorp/terraform-plugin-testing/plancheck" - {{- end }} "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" @@ -71,56 +68,6 @@ func TestAccIAMBetaWorkloadIdentityPool_minimal(t *testing.T) { }) } -{{if ne $.TargetVersionName "ga" -}} -func TestAccIAMBetaWorkloadIdentityPool_beta_update(t *testing.T) { - t.Parallel() - - randomSuffix := acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMBetaWorkloadIdentityPool_beta_full(randomSuffix), - }, - { - ResourceName: "google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccIAMBetaWorkloadIdentityPool_beta_update(randomSuffix), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_iam_workload_identity_pool.my_pool", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccIAMBetaWorkloadIdentityPool_beta_minimum(randomSuffix), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_iam_workload_identity_pool.my_pool", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: 
"google_iam_workload_identity_pool.my_pool", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} -{{- end }} - func testAccIAMBetaWorkloadIdentityPool_full(suffix string) string { return fmt.Sprintf(` resource "google_iam_workload_identity_pool" "my_pool" { @@ -150,90 +97,3 @@ resource "google_iam_workload_identity_pool" "my_pool" { } `, suffix) } - -{{if ne $.TargetVersionName "ga" -}} -func testAccIAMBetaWorkloadIdentityPool_beta_full(suffix string) string { - return fmt.Sprintf(` -resource "google_iam_workload_identity_pool" "my_pool" { - provider = google-beta - - workload_identity_pool_id = "my-pool-%s" - display_name = "Name of the pool" - description = "Identity pool operates in TRUST_DOMAIN mode" - disabled = true - mode = "TRUST_DOMAIN" - inline_certificate_issuance_config { - ca_pools = { - "us-central1" : "projects/project-bar/locations/us-central1/caPools/ca-pool-bar" - "asia-east2" : "projects/project-foo/locations/asia-east2/caPools/ca-pool-foo" - } - lifetime = "86400s" - rotation_window_percentage = 50 - key_algorithm = "ECDSA_P256" - } - inline_trust_config { - additional_trust_bundles { - trust_domain = "ca-pool-foo.global.project-foo.workload.id.goog" - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_1.pem") - } - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_2.pem") - } - } - additional_trust_bundles { - trust_domain = "ca-pool-bar.global.project-bar.workload.id.goog" - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_3.pem") - } - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_4.pem") - } - } - } -} -`, suffix) -} - -func testAccIAMBetaWorkloadIdentityPool_beta_update(suffix string) string { - return fmt.Sprintf(` -resource "google_iam_workload_identity_pool" "my_pool" { - provider = google-beta - - workload_identity_pool_id = "my-pool-%s" - display_name = "Updated name of the pool" - description = "Updated identity pool operates in TRUST_DOMAIN 
mode" - disabled = false - mode = "TRUST_DOMAIN" - inline_certificate_issuance_config { - ca_pools = { - "us-central2" : "projects/project-bar/locations/us-central2/caPools/ca-pool-bar" - "asia-east1" : "projects/project-foo/locations/asia-east1/caPools/ca-pool-foo" - } - lifetime = "36000s" - rotation_window_percentage = 75 - key_algorithm = "RSA_4096" - } - inline_trust_config { - additional_trust_bundles { - trust_domain = "ca-pool-baz.global.project-baz.workload.id.goog" - trust_anchors { - pem_certificate = file("test-fixtures/trust_anchor_updated.pem") - } - } - } -} -`, suffix) -} - -func testAccIAMBetaWorkloadIdentityPool_beta_minimum(suffix string) string { - return fmt.Sprintf(` -resource "google_iam_workload_identity_pool" "my_pool" { - provider = google-beta - - workload_identity_pool_id = "my-pool-%s" - mode = "TRUST_DOMAIN" -} -`, suffix) -} -{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem deleted file mode 100644 index 0c7e92db772c..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_1.pem +++ /dev/null @@ -1,3 +0,0 @@ ------BEGIN CERTIFICATE----- 
-MIID9jCCAt6gAwIBAgIJALDL1dNMR+H8MA0GCSqGSIb3DQEBCwUAMIGeMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTESMBAGA1UEBwwJU3Vubnl2YWxlMRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMSAwHgYDVQQLDBdQcm9kdWN0aW9uIFdlYiBTZXJ2aWNlczEpMCcGA1UEAwwgdXMtd2VzdDIucHJvZC53ZWJhcHAuZXhhbXBsZS5jb20wHhcNMjUwNDI5MjMxMTAyWhcNMzUwNDI3MjMxMTAyWjCBnjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcMCVN1bm55dmFsZTEZMBcGA1UECgwQR29vZ2xlIFRlc3QgQ29ycDEgMB4GA1UECwwXUHJvZHVjdGlvbiBXZWIgU2VydmljZXMxKTAnBgNVBAMMIHVzLXdlc3QyLnByb2Qud2ViYXBwLmV4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx1YFuo6mMlv+2e9r0LROY3bMwKUyUpaD1Jlf6fVFcTDXTHFYAU8uqjA6bxZDDaIfXIuzUbvXfnVsX7U5yWjDfYf0oRV9QDv/TbagdzBNvIIjIs1kxskO6wBrTmJNkWP3rlnlQhEnTai5X/uARZShajTbKU9yfQFPQj9aG0pptuqwWZQ7DGCpybfuFBQ296Zznul1Sunu090SE7InTsoJtthhUdPZ4krk6EH7bV/59+vjJjOF2rsAFEf9CmN4pLdK0+c003s6fZc/pkja40jwyKgRtRzh9SrDPgnF3Qy/hDGTG+BBGkvQRyBJ4EqtbuE05IUg1Ek58QiF3ET4nB9lqQIDAQABozUwMzAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwICBDATBgNVHSUEDDAKBggrBgEFBQcDAjANBgkqhkiG9w0BAQsFAAOCAQEAFv+inLwh8s5XOcM8GVSUwvxGei1WTSntt/ia2AUQa5iM1MIGLpu7EUYZId3Zmc4YCpfzXinnf8aCmWfNXhCXbrYVBZNgUTo3dBYyHR4lSQqVygmxFJ4Hwx8esP1+8W0yG+t7nmmwvMQaLElDl9I8B19VZP6IQwddYOmD/0eFdcrbVh00zvUZPNiuGRvfwTxPphaRDEd/VUWRkTLegRzL5WtJlCBJoP62M9EtHjoYjzUUrRarapgPMZJpO7DRHJwsLUNwHCHvyM+B2gDVmawLCvcxzMZUIBaxoGpyuOI9zbOK2wdGl2fLU48mm5qQQiw7toMcnG/I6Offj4Mu4m90bw== ------END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem deleted file mode 100644 index 0775e9046990..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_2.pem +++ /dev/null @@ -1,3 +0,0 @@ ------BEGIN CERTIFICATE----- 
-MIID8jCCAtqgAwIBAgIJAPMKxdVc8n0fMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYDVQQGEwJVUzEQMA4GA1UECAwHQXJpem9uYTEQMA4GA1UEBwwHUGhvZW5peDEZMBcGA1UECgwQR29vZ2xlIFRlc3QgQ29ycDEgMB4GA1UECwwXUHJvZHVjdGlvbiBXZWIgU2VydmljZXMxLDAqBgNVBAMMI3VzLWNlbnRyYWwxLnByb2Qud2ViYXBwLmV4YW1wbGUuY29tMB4XDTI1MDQyOTIzMTUwOFoXDTM1MDQyNzIzMTUwOFowgZwxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdBcml6b25hMRAwDgYDVQQHDAdQaG9lbml4MRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMSAwHgYDVQQLDBdQcm9kdWN0aW9uIFdlYiBTZXJ2aWNlczEsMCoGA1UEAwwjdXMtY2VudHJhbDEucHJvZC53ZWJhcHAuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCxzfupfYd1r0DPwAPoD7YvqEmgOhZA3TAbtsPEjo7YeOj93gvTEXpAo7x9X2AAAdTKrgPiVlVKMKuLNEXXTQPOBaLkt08w92FnC2MS9AuihfrLNOei5ImEKCKkzsWquRU1spbRzMnVKW1i6AmZSsElZvxB6F6fW89+sclBMkII0FZPpsiC5q44OvYPgCOXBh4FE6lQddh/EnfAmQfJkPgjTWA/jTzCqKMe+fTU/x29psgEaB/Fa1fQ2P4wWEzzhlxEDEKudBgtRc9VjYECnYK9O72DlzN2K/Gv7M37ipCK2AF96/cPv3R13lk4LbHhSma7xj9VXjjoG3h5jpPJ5tuZAgMBAAGjNTAzMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgIEMBMGA1UdJQQMMAoGCCsGAQUFBwMCMA0GCSqGSIb3DQEBCwUAA4IBAQAllMFpvcMb4DqK6Jukjlw473EvP22MzzF8+kfLWr4i/7KJQDou6sVbaKmSl2SNHCZNanIHANT9JvEr3UqvpujfOOg43M4O0o8vvfZgzcvQHBE1qgRkIPu3TVX/so9TCIvyOL1y0f15AEaRAdY5lMC1G5tjjZqkpJ4OsmVch9zK1SljteRsAJuNKpAyfiAV60YCR1e1gOYADd9kv17imHP89WFwmAL/c6pk0jne8w7Y7A/F122TlAyp+P+gne+EOXQwvBDCwMM4lsb3jS2js9XjKSvlNcRAw1B7cl2qmV79Qg48MlQaad7Ac//2fIlFGOwkWQBmHrVd59wxYzBJk+4o ------END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem deleted file mode 100644 index 165d362c24b1..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_3.pem +++ /dev/null @@ -1,3 +0,0 @@ ------BEGIN CERTIFICATE----- 
-MIIEADCCAuigAwIBAgIJAM98d8EGv17jMA0GCSqGSIb3DQEBCwUAMIGjMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTESMBAGA1UEBwwJU3Vubnl2YWxlMRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMRwwGgYDVQQLDBNTdGFnaW5nIEVudmlyb25tZW50MTIwMAYDVQQDDCl1cy13ZXN0Mi5zdGFnaW5nLWFwaS5pbnRlcm5hbC5leGFtcGxlLm5ldDAeFw0yNTA0MjkyMzI1MTJaFw0zNTA0MjcyMzI1MTJaMIGjMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTESMBAGA1UEBwwJU3Vubnl2YWxlMRkwFwYDVQQKDBBHb29nbGUgVGVzdCBDb3JwMRwwGgYDVQQLDBNTdGFnaW5nIEVudmlyb25tZW50MTIwMAYDVQQDDCl1cy13ZXN0Mi5zdGFnaW5nLWFwaS5pbnRlcm5hbC5leGFtcGxlLm5ldDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLunz2AHYl4MYAlrZvSpRycgggsS+oOx/rJHAgb8jxuJSqGWb2aCnlKD5oC/P+qdthra7DRHR8zHGnrzCHKmsWwsaWqMpMh6VoQP0IyXvpQuhMBnjg7YiaaZ5+vegTIOqW1wWgUPPejVicROiWN7bbTaesoW+VwecvMvyGnlQWCLnSMUKzqUhvKA1nUWd+bPALDCvNtvFKUUA4gfhGRJBh/7aj+/OAIk3TcO1Io4peusvDpIAnVdTbiF3I9F7wHuyDs/nCt4+T/59khQoxOpqBHsmRqDUJbz1ZH/c9/Qmh5B+vPLvAN30K0LDx9l1B3xEy6aw7Rcf1I6MYSR384n5MCAwEAAaM1MDMwDwYDVR0TAQH/BAUwAwEB/zALBgNVHQ8EBAMCAgQwEwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQELBQADggEBAKVIWlkF/5aEVgY1jdREuIoxS0hexH3C5vqLG6jIiGkR3t89MAE60f0+aaR+cGvCnDJiYU0E+c4jDbvv3gIkhh4kvu3yhyFFTX583Zk4NldocwXDubR079AlE16pDnHdKUPdc2Tsxb1g+CWumPF2cjNi14P7eEQ3KdQOO5nV5tEpybsruGIspUZrH6hnc2q7dWRq+Ix6dfJYHIOnyLhkpfIRJ/6rVq6moizGuUAIuRgPrw9U8mSGEE349ZqC5x/sHzWpIgUdIaWLYbwe3PH+mxT4PlWRBmrmBe4BnJ+tl0P1TBTKNRjC2xUvRQGHss7VWbJkfZQhALw54aq1kKk96ns= ------END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem b/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem deleted file mode 100644 index 34fe5fa2d8e3..000000000000 --- a/mmv1/third_party/terraform/services/iambeta/test-fixtures/trust_anchor_4.pem +++ /dev/null @@ -1,3 +0,0 @@ ------BEGIN CERTIFICATE----- 
-MIID/DCCAuSgAwIBAgIJAMOZdoK0Z53fMA0GCSqGSIb3DQEBCwUAMIGhMQswCQYDVQQGEwJVUzEQMA4GA1UECAwHQXJpem9uYTEQMA4GA1UEBwwHUGhvZW5peDEZMBcGA1UECgwQR29vZ2xlIFRlc3QgQ29ycDEcMBoGA1UECwwTU3RhZ2luZyBFbnZpcm9ubWVudDE1MDMGA1UEAwwsdXMtY2VudHJhbDEuc3RhZ2luZy1hcGkuaW50ZXJuYWwuZXhhbXBsZS5uZXQwHhcNMjUwNDI5MjMyNDMxWhcNMzUwNDI3MjMyNDMxWjCBoTELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0FyaXpvbmExEDAOBgNVBAcMB1Bob2VuaXgxGTAXBgNVBAoMEEdvb2dsZSBUZXN0IENvcnAxHDAaBgNVBAsME1N0YWdpbmcgRW52aXJvbm1lbnQxNTAzBgNVBAMMLHVzLWNlbnRyYWwxLnN0YWdpbmctYXBpLmludGVybmFsLmV4YW1wbGUubmV0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5EJGDTTfLz8yLXMqEUuocPJR5rGVF/JvMnmhU36bPRNxuaMt2uBNJQjoVTgsPjR77s9+nxQlls1ad6RPYGCqCXr43qrTYHM5grHcn0uwNgF7lmpTA2p56yQDO0dTko784o9O0eOFsmbtuXkVrYGYYZkoACzRayP7P/kIvfevtFXCP+acNNfMufLJ1ptm9vQrT91McnBM1qf35956M2MAZvVsVQxhfis8bN+qEOunMXOkvqhtYBLNicQV0dVb1osFwNmnEGrBgkrt7ov/1SXi1hoZY0QrUMSFwhSi+Iq7NUsnoP6SQ0MbNOr2thc7tRuDH72TT987zNrmBF9foj1Z2wIDAQABozUwMzAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwICBDATBgNVHSUEDDAKBggrBgEFBQcDAjANBgkqhkiG9w0BAQsFAAOCAQEA3siZEVRWCDzo2qpBMQpkPSTBVgcwIFbbGi0ZmdCatOn6nUPCvfSCIjcPcRVo8lUol6j7yHYDzdLj1ANdwE3IKwSJ4BRd5KrGULc+nCD1RB6Gj6VMHQ0TNgs3Ac36pcxWk6qf+2FDhmFfNu2PuUkSFlQyFbNy48w7Bzxzcy65PrGk8nqRrG2aqiYj3SUAlSSkFvWzK9CYy+ze5glTsP2IjaGaZmx4thThYhdCMI80RfzDFAyZqgJDNU9iVYw2uh/dSHwDwpOdPDfXUYLlLGE0dGIGHb/sMu13rLeT0FEKVABbG4hJPG5+Ajw7jrNwS5CDtXSjBLyLcsFST17R7ehVhg== ------END CERTIFICATE----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go.tmpl index 82167e5a58de..3d1dacc3ca48 100644 --- 
a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_credential_test.go.tmpl @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-provider-google/google/acctest" ) + func TestAccIAMWorkforcePoolOauthClientCredential_full(t *testing.T) { t.Parallel() @@ -130,4 +131,4 @@ resource "google_iam_oauth_client_credential" "example" { oauth_client_credential_id = "tf-test-cred-id%{random_suffix}" } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go.tmpl index 506fadd3c7ec..8e5ed6a79e0f 100644 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_oauth_client_test.go.tmpl @@ -30,7 +30,7 @@ func TestAccIAMWorkforcePoolOauthClient_full(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"location", "oauth_client_id"}, }, - { + { Config: testAccIAMWorkforcePoolOauthClient_full_update(context), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ @@ -106,4 +106,4 @@ resource "google_iam_oauth_client" "example" { client_type = "CONFIDENTIAL_CLIENT" } `, context) -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go deleted file mode 100644 index a9d23a2d1c1f..000000000000 --- 
a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_key_test.go +++ /dev/null @@ -1,136 +0,0 @@ -package iamworkforcepool_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccIAMWorkforcePoolWorkforcePoolProviderKey_update(t *testing.T) { - t.Parallel() - - random_suffix := acctest.RandString(t, 10) - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": random_suffix, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckIAMWorkforcePoolWorkforcePoolProviderKeyDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccIAMWorkforcePoolWorkforcePoolProviderKey_basic(context), - }, - { - ResourceName: "google_iam_workforce_pool_provider_key.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccIAMWorkforcePoolWorkforcePoolProviderKey_update(context), - }, - { - ResourceName: "google_iam_workforce_pool_provider_key.default", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccIAMWorkforcePoolWorkforcePoolProviderKey_destroy(context), - }, - }, - }) -} - -func testAccIAMWorkforcePoolWorkforcePoolProviderKey_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workforce_pool" "default" { - workforce_pool_id = "my-pool-%{random_suffix}" - parent = "organizations/%{org_id}" - location = "global" -} - -resource "google_iam_workforce_pool_provider" "default" { - workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id - location = google_iam_workforce_pool.default.location - provider_id = "my-provider-%{random_suffix}" - attribute_mapping = { - 
"google.subject" = "assertion.sub" - } - saml { - idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" - } -} - -resource "google_iam_workforce_pool_provider_key" "default" { - workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id - location = google_iam_workforce_pool.default.location - provider_id = google_iam_workforce_pool_provider.default.provider_id - key_id = "my-key-%{random_suffix}" - - key_data { - key_spec = "RSA_2048" - } - use = "ENCRYPTION" -} -`, context) -} - -func testAccIAMWorkforcePoolWorkforcePoolProviderKey_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workforce_pool" "default" { - workforce_pool_id = "my-pool-%{random_suffix}" - parent = "organizations/%{org_id}" - location = 
"global" -} - -resource "google_iam_workforce_pool_provider" "default" { - workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id - location = google_iam_workforce_pool.default.location - provider_id = "my-provider-%{random_suffix}" - attribute_mapping = { - "google.subject" = "assertion.sub" - } - saml { - idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" - } -} - -resource "google_iam_workforce_pool_provider_key" "default" { - workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id - location = google_iam_workforce_pool.default.location - provider_id = google_iam_workforce_pool_provider.default.provider_id - key_id = "my-other-key-%{random_suffix}" - - key_data { - key_spec = "RSA_3072" - } - use = "ENCRYPTION" -} -`, context) -} - 
-func testAccIAMWorkforcePoolWorkforcePoolProviderKey_destroy(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_iam_workforce_pool" "default" { - workforce_pool_id = "my-pool-%{random_suffix}" - parent = "organizations/%{org_id}" - location = "global" -} - -resource "google_iam_workforce_pool_provider" "default" { - workforce_pool_id = google_iam_workforce_pool.default.workforce_pool_id - location = google_iam_workforce_pool.default.location - provider_id = "my-provider-%{random_suffix}" - attribute_mapping = { - "google.subject" = "assertion.sub" - } - saml { - idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" - } -} -`, context) -} diff --git 
a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go.tmpl index 91c6c5211e3d..0a2ba96b08cb 100644 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_test.go.tmpl @@ -2,9 +2,9 @@ package iamworkforcepool_test import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" ) func TestAccIAMWorkforcePoolWorkforcePool_full(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go rename to mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_id_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go rename to 
mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go deleted file mode 100644 index 7f6d0673b7a3..000000000000 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_workforce_pool_provider_key_id_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package iamworkforcepool_test - -import ( - "strings" - "testing" - - "github.com/hashicorp/terraform-provider-google/google/services/iamworkforcepool" - "github.com/hashicorp/terraform-provider-google/google/verify" -) - -func TestValidateIAMWorkforcePoolWorkforcePoolProviderKeyId(t *testing.T) { - x := []verify.StringValidationTestCase{ - // No errors - {TestName: "with numbers", Value: "foobar123"}, - {TestName: "short", Value: "foo-"}, - {TestName: "long", Value: strings.Repeat("f", 32)}, - {TestName: "has a hyphen", Value: "foo-bar"}, - - // With errors - {TestName: "empty", Value: "", ExpectError: true}, - {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, - {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, - {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, - {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, - {TestName: "too short", Value: "foo", ExpectError: true}, - {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, - } - - es := verify.TestStringValidationCases(x, iamworkforcepool.ValidateWorkforcePoolProviderKeyId) - if len(es) > 0 { - t.Errorf("Failed to validate WorkforcePoolProviderKey names: %v", es) - } -} diff --git a/mmv1/third_party/terraform/services/iap/data_source_iap_client.go b/mmv1/third_party/terraform/services/iap/data_source_iap_client.go new file mode 100644 
index 000000000000..736e18da0fe3 --- /dev/null +++ b/mmv1/third_party/terraform/services/iap/data_source_iap_client.go @@ -0,0 +1,39 @@ +package iap + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleIapClient() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceIapClient().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "brand", "client_id") + + return &schema.Resource{ + Read: dataSourceGoogleIapClientRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleIapClientRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "{{brand}}/identityAwareProxyClients/{{client_id}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceIapClientRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} diff --git a/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go b/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go new file mode 100644 index 000000000000..5173f787a8e4 --- /dev/null +++ b/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go @@ -0,0 +1,70 @@ +package iap_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccIapClient_Datasource_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "org_domain": envvar.GetTestOrgDomainFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + 
+ acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccIapClientDatasourceConfig(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + "data.google_iap_client.project_client", + "google_iap_client.project_client", + map[string]struct{}{ + "brand": {}, + }, + ), + ), + }, + }, + }) +} + +func testAccIapClientDatasourceConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + deletion_policy = "DELETE" +} + +resource "google_project_service" "project_service" { + project = google_project.project.project_id + service = "iap.googleapis.com" +} + +resource "google_iap_brand" "project_brand" { + support_email = "support@%{org_domain}" + application_title = "Cloud IAP protected Application" + project = google_project_service.project_service.project +} + +resource "google_iap_client" "project_client" { + display_name = "Test Client" + brand = google_iap_brand.project_brand.name +} + +data "google_iap_client" "project_client" { + brand = google_iap_client.project_client.brand + client_id = google_iap_client.project_client.client_id +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go b/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go index a52fa7aaa50e..15d96cf7c946 100644 --- a/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go +++ b/mmv1/third_party/terraform/services/integrationconnectors/resource_integration_connectors_connection_test.go @@ -219,7 +219,6 @@ resource 
"google_integration_connectors_connection" "zendeskconnection" { } log_config { enabled = true - level = "DEBUG" } node_config { min_node_count = 2 diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go.tmpl similarity index 94% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go.tmpl index b500d313fd5f..1a6a79a6c201 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config.go.tmpl @@ -1,5 +1,7 @@ package kms +{{ if ne $.TargetVersionName `ga` -}} + import ( "fmt" @@ -34,3 +36,5 @@ func dataSourceGoogleKmsAutokeyConfigRead(d *schema.ResourceData, meta interface } return nil } + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go.tmpl similarity index 95% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go.tmpl index 5875c6953613..ef770fd0a44d 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_auotokey_config_test.go.tmpl @@ -1,5 +1,7 @@ package kms_test +{{ if ne $.TargetVersionName `ga` -}} + import ( "fmt" "regexp" @@ -34,3 +36,4 @@ data "google_kms_autokey_config" "kms_autokey_config" { } `, folder) } +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go 
b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go.tmpl similarity index 91% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go.tmpl index 72b7ff745873..e21865a23de5 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle.go.tmpl @@ -1,5 +1,7 @@ package kms +{{ if ne $.TargetVersionName `ga` -}} + import ( "fmt" @@ -28,9 +30,9 @@ func dataSourceGoogleKmsKeyHandleRead(d *schema.ResourceData, meta interface{}) return err } keyHandleId := KmsKeyHandleId{ - Name: d.Get("name").(string), + Name: d.Get("name").(string), Location: d.Get("location").(string), - Project: project, + Project: project, } id := keyHandleId.KeyHandleId() d.SetId(id) @@ -44,3 +46,5 @@ func dataSourceGoogleKmsKeyHandleRead(d *schema.ResourceData, meta interface{}) } return nil } + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go.tmpl similarity index 96% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go.tmpl index 0a4fd012c720..1fdeaafe082a 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handle_test.go.tmpl @@ -1,5 +1,7 @@ package kms_test +{{ if ne $.TargetVersionName `ga` -}} + import ( "fmt" "regexp" @@ -39,3 +41,4 @@ data "google_kms_key_handle" "kms_key_handle" { } `, keyHandleName, location, project) } +{{ end }} \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go.tmpl index a71e40839d14..7112a99194c5 100644 --- a/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_handles_test.go.tmpl @@ -2,6 +2,8 @@ // SPDX-License-Identifier: MPL-2.0 package kms_test +{{ if ne $.TargetVersionName `ga` -}} + import ( "errors" "fmt" @@ -70,3 +72,5 @@ data "google_kms_key_handles" "mykeyhandles" { `, project, location, filter) return str } + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl index ccacec67295e..f38115cdea1e 100644 --- a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl +++ b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go.tmpl @@ -158,7 +158,7 @@ func TestAccKmsCryptoKey_basic(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, // Test importing with a short id { @@ -166,7 +166,7 @@ func TestAccKmsCryptoKey_basic(t *testing.T) { ImportState: true, ImportStateId: fmt.Sprintf("%s/%s/%s/%s", projectId, location, keyRingName, cryptoKeyName), ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"labels", 
"terraform_labels"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. { @@ -203,28 +203,25 @@ func TestAccKmsCryptoKey_rotation(t *testing.T) { Config: testGoogleKmsCryptoKey_rotation(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, rotationPeriod), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, }, { Config: testGoogleKmsCryptoKey_rotation(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, updatedRotationPeriod), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, }, { Config: testGoogleKmsCryptoKey_rotationRemoved(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. 
{ @@ -259,19 +256,17 @@ func TestAccKmsCryptoKey_template(t *testing.T) { Config: testGoogleKmsCryptoKey_template(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, algorithm), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, }, { Config: testGoogleKmsCryptoKey_template(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, updatedAlgorithm), }, { - ResourceName: "google_kms_crypto_key.crypto_key", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation"}, + ResourceName: "google_kms_crypto_key.crypto_key", + ImportState: true, + ImportStateVerify: true, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. { @@ -307,7 +302,7 @@ func TestAccKmsCryptoKey_destroyDuration(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. 
{ @@ -349,7 +344,7 @@ func TestAccKmsCryptoKey_keyAccessJustificationsPolicy(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testGoogleKmsCryptoKey_keyAccessJustificationsPolicy(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, updatedAllowedAccessReason), @@ -358,7 +353,7 @@ func TestAccKmsCryptoKey_keyAccessJustificationsPolicy(t *testing.T) { ResourceName: "google_kms_crypto_key.crypto_key", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"skip_initial_version_creation", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. { diff --git a/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go b/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go index 708f370a5c4a..fcf750b93c86 100644 --- a/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go +++ b/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go @@ -33,7 +33,7 @@ func ResourceLoggingOrganizationSink() *schema.Resource { Type: schema.TypeBool, Optional: true, Default: false, - Description: `Whether or not to include child folders or projects in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`, + Description: `Whether or not to include children organizations in the sink export. 
If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`, } schm.Schema["intercept_children"] = &schema.Schema{ Type: schema.TypeBool, diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go deleted file mode 100644 index dc84b9dcde67..000000000000 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance.go +++ /dev/null @@ -1,76 +0,0 @@ -package lustre - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceLustreInstance() *schema.Resource { - - // Generate datasource schema from resource - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceLustreInstance().Schema) - - dsScema_zone := map[string]*schema.Schema{ - "zone": { - Type: schema.TypeString, - Optional: true, - Description: `Zone of Lustre instance`, - }, - } - - // Set 'Required' schema elements from resource - tpgresource.AddRequiredFieldsToSchema(dsSchema, "instance_id") - - // Set 'Optional' schema elements from resource - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") - - // Merge schema elements - dsSchema_m := tpgresource.MergeSchemas(dsScema_zone, dsSchema) - - return &schema.Resource{ - Read: dataSourceLustreInstanceRead, - Schema: dsSchema_m, - } -} - -func dataSourceLustreInstanceRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - // Get required fields for ID - instance_id := d.Get("instance_id").(string) - - zone, err := tpgresource.GetZone(d, config) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - // Set the ID - id := 
fmt.Sprintf("projects/%s/locations/%s/instances/%s", project, zone, instance_id) - d.SetId(id) - - // Setting location field for url_param_only field - d.Set("location", zone) - - err = resourceLustreInstanceRead(d, meta) - if err != nil { - return err - } - - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", d.Id()) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go deleted file mode 100644 index e369beca67db..000000000000 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package lustre_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccLustreInstanceDatasource_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedTestNetwork(t, "default-vpc"), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccLustreInstanceDatasource_basic(context), - Check: acctest.CheckDataSourceStateMatchesResourceState( - "data.google_lustre_instance.default", - "google_lustre_instance.instance", - ), - }, - { - ResourceName: "google_lustre_instance.instance", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccLustreInstanceDatasource_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_lustre_instance" "instance" { - instance_id = "tf-test-%{random_suffix}" - location = "us-central1-a" - filesystem = "testfs" - capacity_gib = 18000 - 
network = data.google_compute_network.lustre-network.id - gke_support_enabled = false - per_unit_storage_throughput = 1000 -} - -// This example assumes this network already exists. -// The API creates a tenant network per network authorized for a -// Lustre instance and that network is not deleted when the user-created -// network (authorized_network) is deleted, so this prevents issues -// with tenant network quota. -// If this network hasn't been created and you are using this example in your -// config, add an additional network resource or change -// this from "data"to "resource" -data "google_compute_network" "lustre-network" { - name = "%{network_name}" -} - -data "google_lustre_instance" "default" { - instance_id = google_lustre_instance.instance.instance_id - zone = "us-central1-a" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go index cb5cbc6a264f..433a68a57d68 100644 --- a/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/resource_lustre_instance_test.go @@ -54,16 +54,15 @@ func TestAccLustreInstance_update(t *testing.T) { func testAccLustreInstance_full(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_lustre_instance" "instance" { - instance_id = "tf-test-my-instance%{random_suffix}" - location = "us-central1-a" - filesystem = "testfs" - network = data.google_compute_network.lustre-network.id - gke_support_enabled = false - capacity_gib = 18000 - per_unit_storage_throughput = 1000 - timeouts { - create = "120m" - } + instance_id = "tf-test-my-instance%{random_suffix}" + location = "us-central1-a" + filesystem = "testfs" + network = data.google_compute_network.lustre-network.id + gke_support_enabled = false + capacity_gib = 18000 + timeouts { + create = "120m" + } } // This example assumes this network already 
exists. @@ -83,18 +82,17 @@ data "google_compute_network" "lustre-network" { func testAccLustreInstance_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_lustre_instance" "instance" { - instance_id = "tf-test-my-instance%{random_suffix}" - location = "us-central1-a" - filesystem = "testfs" - capacity_gib = 18000 - network = data.google_compute_network.lustre-network.id - description = "test-description" - per_unit_storage_throughput = 1000 - labels = { + instance_id = "tf-test-my-instance%{random_suffix}" + location = "us-central1-a" + filesystem = "testfs" + capacity_gib = 18000 + network = data.google_compute_network.lustre-network.id + description = "test-description" + labels = { test = "test-label" } - timeouts { - create = "120m" + timeouts { + create = "120m" } } diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go deleted file mode 100644 index b9dd9fc7a058..000000000000 --- a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_acl_test.go +++ /dev/null @@ -1,130 +0,0 @@ -package managedkafka_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccManagedKafkaAcl_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckManagedKafkaAclDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccManagedKafkaAcl_full(context), - }, - { - ResourceName: "google_managed_kafka_acl.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster", 
"location", "acl_id"}, - }, - { - Config: testAccManagedKafkaAcl_update(context), - }, - { - ResourceName: "google_managed_kafka_acl.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster", "location", "acl_id"}, - }, - }, - }) -} - -func testAccManagedKafkaAcl_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_managed_kafka_cluster" "example" { - cluster_id = "tf-test-my-cluster%{random_suffix}" - location = "us-central1" - capacity_config { - vcpu_count = 3 - memory_bytes = 3221225472 - } - gcp_config { - access_config { - network_configs { - subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" - } - } - } -} - -resource "google_managed_kafka_acl" "example" { - cluster = google_managed_kafka_cluster.example.cluster_id - acl_id = "topic/tf-test-my-acl%{random_suffix}" - location = "us-central1" - acl_entries { - principal = "User:admin@my-project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "ALL" - host = "*" - } - acl_entries { - principal = "User:producer-client@my-project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "WRITE" - host = "*" - } -} - -data "google_project" "project" { -} -`, context) -} - -func testAccManagedKafkaAcl_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_managed_kafka_cluster" "example" { - cluster_id = "tf-test-my-cluster%{random_suffix}" - location = "us-central1" - capacity_config { - vcpu_count = 3 - memory_bytes = 3221225472 - } - gcp_config { - access_config { - network_configs { - subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" - } - } - } -} - -resource "google_managed_kafka_acl" "example" { - cluster = google_managed_kafka_cluster.example.cluster_id - acl_id = "topic/tf-test-my-acl%{random_suffix}" - location = "us-central1" - acl_entries { - principal = 
"User:admin@project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "ALL" - host = "*" - } - acl_entries { - principal = "User:producer-client@my-project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "WRITE" - host = "*" - } - acl_entries { - principal = "User:producer-client@my-project.iam.gserviceaccount.com" - permission_type = "ALLOW" - operation = "CREATE" - host = "*" - } -} - -data "google_project" "project" { -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go.tmpl similarity index 63% rename from mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go rename to mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go.tmpl index 29e641cebda1..472a90c4bad8 100644 --- a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go +++ b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_cluster_test.go.tmpl @@ -37,15 +37,6 @@ func TestAccManagedKafkaCluster_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"cluster_id", "labels", "location", "terraform_labels"}, }, - { - Config: testAccManagedKafkaCluster_updateTlsConfigToEmpty(context), - }, - { - ResourceName: "google_managed_kafka_cluster.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "labels", "location", "terraform_labels"}, - }, }, }) } @@ -98,57 +89,6 @@ resource "google_managed_kafka_cluster" "example" { rebalance_config { mode = "AUTO_REBALANCE_ON_SCALE_UP" } - tls_config { - trust_config { - cas_configs { - ca_pool = google_privateca_ca_pool.ca_pool.id - } - } - ssl_principal_mapping_rules = "RULE:pattern/replacement/L,DEFAULT" - } - labels = { - key = "new-value" - } -} - -resource 
"google_privateca_ca_pool" "ca_pool" { - name = "tf-test-pool-%{random_suffix}" - location = "us-central1" - tier = "ENTERPRISE" - publishing_options { - publish_ca_cert = true - publish_crl = true - } -} - -data "google_project" "project" { -} -`, context) -} - -func testAccManagedKafkaCluster_updateTlsConfigToEmpty(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_managed_kafka_cluster" "example" { - cluster_id = "tf-test-my-cluster%{random_suffix}" - location = "us-central1" - capacity_config { - vcpu_count = 4 - memory_bytes = 4512135122 - } - gcp_config { - access_config { - network_configs { - subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" - } - } - } - rebalance_config { - mode = "AUTO_REBALANCE_ON_SCALE_UP" - } - tls_config { - trust_config { - } - } labels = { key = "new-value" } diff --git a/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go b/mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go rename to mmv1/third_party/terraform/services/managedkafka/resource_managed_kafka_topic_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go index ac36d32b96cb..4d7e1be646f1 100644 --- a/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/data_source_memorystore_instance_test.go @@ -12,6 +12,7 @@ func TestAccMemorystoreInstanceDatasourceConfig(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "memorystore-instance-ds"), } 
acctest.VcrTest(t, resource.TestCase{ @@ -21,9 +22,6 @@ func TestAccMemorystoreInstanceDatasourceConfig(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccMemorystoreInstanceDatasourceConfig(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceState("data.google_memorystore_instance.default", "google_memorystore_instance.instance-basic"), - ), }, }, }) @@ -33,14 +31,48 @@ func testAccMemorystoreInstanceDatasourceConfig(context map[string]interface{}) return acctest.Nprintf(` resource "google_memorystore_instance" "instance-basic" { instance_id = "tf-test-memorystore-instance%{random_suffix}" - shard_count = 1 + shard_count = 3 + desired_psc_auto_connections { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } location = "us-central1" deletion_protection_enabled = false + depends_on = [google_network_connectivity_service_connection_policy.default] + +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "%{network_name}-policy" + location = "us-central1" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } } + +resource "google_compute_subnetwork" "producer_subnet" { + name = "%{network_name}-sn" + ip_cidr_range = "10.0.0.248/29" + region = "us-central1" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "%{network_name}-vpc" + auto_create_subnetworks = false +} + + data "google_project" "project" { + } + data "google_memorystore_instance" "default" { instance_id = google_memorystore_instance.instance-basic.instance_id location = "us-central1" + } `, context) } diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go 
b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go index 7636dfc388e6..6de9a8508464 100644 --- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go +++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go @@ -14,7 +14,7 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -// Validate that replica count is updated for the instance: 1->2->0 +// Validate that replica count is updated for the instance func TestAccMemorystoreInstance_updateReplicaCount(t *testing.T) { t.Parallel() @@ -43,18 +43,9 @@ func TestAccMemorystoreInstance_updateReplicaCount(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - { - // update the replica count to 0 - Config: createOrUpdateMemorystoreInstance(&InstanceParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: true, zoneDistributionMode: "MULTI_ZONE", deletionProtectionEnabled: false, maintenanceDay: "MONDAY", maintenanceHours: 1, maintenanceMinutes: 0, maintenanceSeconds: 0, maintenanceNanos: 0}), - }, - { - ResourceName: "google_memorystore_instance.test", - ImportState: true, - ImportStateVerify: true, - }, { // clean up the resource - Config: createOrUpdateMemorystoreInstance(&InstanceParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE", deletionProtectionEnabled: false, maintenanceDay: "MONDAY", maintenanceHours: 1, maintenanceMinutes: 0, maintenanceSeconds: 0, maintenanceNanos: 0}), + Config: createOrUpdateMemorystoreInstance(&InstanceParams{name: name, replicaCount: 2, shardCount: 3, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE", deletionProtectionEnabled: false, maintenanceDay: "MONDAY", maintenanceHours: 1, maintenanceMinutes: 0, maintenanceSeconds: 0, maintenanceNanos: 0}), }, }, }) @@ -102,7 +93,7 @@ resource "google_memorystore_instance" "test_abc" { replica_count = 0 node_type = 
"SHARED_CORE_NANO" deletion_protection_enabled = false - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.primary_producer_net.id project_id = data.google_project.project.project_id } @@ -148,75 +139,6 @@ data "google_project" "project" { func testAccMemorystoreInstance_automatedBackupConfigWithout(context map[string]interface{}) string { return acctest.Nprintf(` // Primary instance -resource "google_memorystore_instance" "test_abc" { - instance_id = "tf-test-instance-abc-%{random_suffix}" - shard_count = 1 - location = "us-central1" - replica_count = 0 - node_type = "SHARED_CORE_NANO" - deletion_protection_enabled = false - desired_auto_created_endpoints { - network = google_compute_network.primary_producer_net.id - project_id = data.google_project.project.project_id - } - depends_on = [ google_network_connectivity_service_connection_policy.primary_policy ] -} - -resource "google_network_connectivity_service_connection_policy" "primary_policy" { - name = "tf-test-abc-policy-%{random_suffix}" - location = "us-central1" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.primary_producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.primary_producer_subnet.id] - } -} - -resource "google_compute_subnetwork" "primary_producer_subnet" { - name = "tf-test-abc-%{random_suffix}" - ip_cidr_range = "10.0.4.0/29" - region = "us-central1" - network = google_compute_network.primary_producer_net.id -} - -resource "google_compute_network" "primary_producer_net" { - name = "tf-test-abc-net-%{random_suffix}" - auto_create_subnetworks = false -} - -data "google_project" "project" { -} -`, context) -} - -func TestAccMemorystoreInstance_deprecatedDesiredPscAutoConnections(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: 
func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckMemorystoreInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccMemorystoreInstance_deprecatedDesiredPscAutoConnections(context), - }, - { - ResourceName: "google_memorystore_instance.test_abc", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccMemorystoreInstance_deprecatedDesiredPscAutoConnections(context map[string]interface{}) string { - return acctest.Nprintf(` -// Primary instance resource "google_memorystore_instance" "test_abc" { instance_id = "tf-test-instance-abc-%{random_suffix}" shard_count = 1 @@ -1111,7 +1033,7 @@ resource "google_memorystore_instance" "test_secondary" { shard_count = %d node_type = "%s" location = "us-west2" - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } @@ -1406,7 +1328,7 @@ resource "google_memorystore_instance" "test" { shard_count = %d node_type = "%s" location = "us-west2" - desired_auto_created_endpoints { + desired_psc_auto_connections { network = google_compute_network.producer_net.id project_id = data.google_project.project.project_id } @@ -1453,320 +1375,3 @@ data "google_project" "project" { } `, params.name, params.replicaCount, params.shardCount, params.nodeType, params.deletionProtectionEnabled, params.engineVersion, strBuilder.String(), zoneDistributionConfigBlock, maintenancePolicyBlock, persistenceBlock, lifecycleBlock, secondaryInstanceBlock, params.name, params.name, params.name) } - -func TestAccMemorystoreInstance_memorystoreInstanceTlsEnabled(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - // Until https://github.com/hashicorp/terraform-provider-google/issues/23619 is fixed, use regions other than us-central1 to prevent issues like 
https://github.com/hashicorp/terraform-provider-google/issues/23543 - "location": "us-east1", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckMemorystoreInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccMemorystoreInstance_memorystoreInstanceTlsEnabled(context), - Check: resource.TestCheckResourceAttrSet("google_memorystore_instance.instance-tls", "managed_server_ca.0.ca_certs.0.certificates.0"), - }, - { - ResourceName: "google_memorystore_instance.instance-tls", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"gcs_source", "instance_id", "labels", "location", "managed_backup_source", "terraform_labels"}, - }, - }, - }) -} - -func testAccMemorystoreInstance_memorystoreInstanceTlsEnabled(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_memorystore_instance" "instance-tls" { - instance_id = "tf-test-tls-instance%{random_suffix}" - shard_count = 1 - desired_auto_created_endpoints { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - location = "%{location}" - deletion_protection_enabled = false - maintenance_policy { - weekly_maintenance_window { - day = "MONDAY" - start_time { - hours = 1 - minutes = 0 - seconds = 0 - nanos = 0 - } - } - } - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] - transit_encryption_mode = "SERVER_AUTHENTICATION" -} - -resource "google_network_connectivity_service_connection_policy" "default" { - name = "tf-test-my-policy%{random_suffix}" - location = "%{location}" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet.id] - } -} - -resource 
"google_compute_subnetwork" "producer_subnet" { - name = "tf-test-my-subnet%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "%{location}" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "tf-test-my-network%{random_suffix}" - auto_create_subnetworks = false -} - -data "google_project" "project" { -} -`, context) -} - -func TestAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "location": "us-central1", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckMemorystoreInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_bothConnections(context), - }, - { - ResourceName: "google_memorystore_instance.instance-cluster-disabled", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, - }, - { - Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabledPscAutoConnections(context), - }, - { - ResourceName: "google_memorystore_instance.instance-cluster-disabled", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", 
"desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, - }, - { - Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_onlyAutoCreatedEndpoints(context), - }, - { - ResourceName: "google_memorystore_instance.instance-cluster-disabled", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, - }, - { - Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_neitherConnection(context), - }, - { - ResourceName: "google_memorystore_instance.instance-cluster-disabled", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, - }, - }, - }) -} - -func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabledPscAutoConnections(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_memorystore_instance" "instance-cluster-disabled" { - instance_id = "tf-test-instance-psc%{random_suffix}" - shard_count = 1 - desired_psc_auto_connections { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - location = "%{location}" - deletion_protection_enabled = false - mode = "CLUSTER_DISABLED" - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] -} - -resource 
"google_network_connectivity_service_connection_policy" "default" { - name = "tf-test-my-policy%{random_suffix}" - location = "%{location}" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet.id] - } -} - -resource "google_compute_subnetwork" "producer_subnet" { - name = "tf-test-my-subnet%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "%{location}" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "tf-test-my-network%{random_suffix}" - auto_create_subnetworks = false -} - -data "google_project" "project" { -} -`, context) -} - -func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_bothConnections(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_memorystore_instance" "instance-cluster-disabled" { - instance_id = "tf-test-instance-psc%{random_suffix}" - shard_count = 1 - desired_psc_auto_connections { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - desired_auto_created_endpoints { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - location = "%{location}" - deletion_protection_enabled = false - mode = "CLUSTER_DISABLED" - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] -} - -resource "google_network_connectivity_service_connection_policy" "default" { - name = "tf-test-my-policy%{random_suffix}" - location = "%{location}" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet.id] - } -} - -resource "google_compute_subnetwork" "producer_subnet" { - name = 
"tf-test-my-subnet%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "%{location}" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "tf-test-my-network%{random_suffix}" - auto_create_subnetworks = false -} - -data "google_project" "project" { -} -`, context) -} - -func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_onlyAutoCreatedEndpoints(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_memorystore_instance" "instance-cluster-disabled" { - instance_id = "tf-test-instance-psc%{random_suffix}" - shard_count = 1 - desired_auto_created_endpoints { - network = google_compute_network.producer_net.id - project_id = data.google_project.project.project_id - } - location = "%{location}" - deletion_protection_enabled = false - mode = "CLUSTER_DISABLED" - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] -} - -resource "google_network_connectivity_service_connection_policy" "default" { - name = "tf-test-my-policy%{random_suffix}" - location = "%{location}" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet.id] - } -} - -resource "google_compute_subnetwork" "producer_subnet" { - name = "tf-test-my-subnet%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "%{location}" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "tf-test-my-network%{random_suffix}" - auto_create_subnetworks = false -} - -data "google_project" "project" { -} -`, context) -} - -func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_neitherConnection(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_memorystore_instance" "instance-cluster-disabled" 
{ - instance_id = "tf-test-instance-psc%{random_suffix}" - shard_count = 1 - location = "%{location}" - deletion_protection_enabled = false - mode = "CLUSTER_DISABLED" - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] -} - -resource "google_network_connectivity_service_connection_policy" "default" { - name = "tf-test-my-policy%{random_suffix}" - location = "%{location}" - service_class = "gcp-memorystore" - description = "my basic service connection policy" - network = google_compute_network.producer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.producer_subnet.id] - } -} - -resource "google_compute_subnetwork" "producer_subnet" { - name = "tf-test-my-subnet%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "%{location}" - network = google_compute_network.producer_net.id -} - -resource "google_compute_network" "producer_net" { - name = "tf-test-my-network%{random_suffix}" - auto_create_subnetworks = false -} - -data "google_project" "project" { -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go b/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go deleted file mode 100644 index e9a41f543703..000000000000 --- a/mmv1/third_party/terraform/services/modelarmor/resource_model_armor_template_test.go +++ /dev/null @@ -1,185 +0,0 @@ -package modelarmor_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccModelArmorTemplate_basic(t *testing.T) { - t.Parallel() - - templateId := "modelarmor-test-basic-" + acctest.RandString(t, 10) - - basicContext := map[string]interface{}{ - "location": "us-central1", - "templateId": templateId, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { 
acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckModelArmorTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccModelArmorTemplate_basic_config(basicContext), - }, - { - ResourceName: "google_model_armor_template.template-basic", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccModelArmorTemplate_basic_config(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_model_armor_template" "template-basic" { - location = "%{location}" - template_id = "%{templateId}" - filter_config { - - } - template_metadata { - - } -}`, context) -} - -func TestAccModelArmorTemplate_update(t *testing.T) { - t.Parallel() - - templateId := fmt.Sprintf("modelarmor-test-update-%s", acctest.RandString(t, 5)) - - context := map[string]interface{}{ - "location": "us-central1", - "templateId": templateId, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckModelArmorTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccModelArmorTemplate_initial(context), - }, - { - ResourceName: "google_model_armor_template.test-resource", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "template_id", "terraform_labels"}, - }, - { - Config: testAccModelArmorTemplate_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_model_armor_template.test-resource", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_model_armor_template.test-resource", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "template_id", "terraform_labels"}, - }, - }, - }) -} - -func 
testAccModelArmorTemplate_initial(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_model_armor_template" "test-resource" { - location = "%{location}" - template_id = "%{templateId}" - labels = { - "test-label" = "env-testing-initial" - } - filter_config { - rai_settings { - rai_filters { - filter_type = "HATE_SPEECH" - confidence_level = "MEDIUM_AND_ABOVE" - } - } - sdp_settings { - advanced_config { - inspect_template = "projects/llm-firewall-demo/locations/us-central1/inspectTemplates/t2" - deidentify_template = "projects/llm-firewall-demo/locations/us-central1/deidentifyTemplates/t3" - } - } - pi_and_jailbreak_filter_settings { - filter_enforcement = "ENABLED" - confidence_level = "HIGH" - } - malicious_uri_filter_settings { - filter_enforcement = "ENABLED" - } - } - template_metadata { - custom_llm_response_safety_error_message = "This is a custom error message for LLM response" - log_template_operations = true - log_sanitize_operations = true - multi_language_detection { - enable_multi_language_detection = true - } - ignore_partial_invocation_failures = true - custom_prompt_safety_error_code = 400 - custom_prompt_safety_error_message = "This is a custom error message for prompt" - custom_llm_response_safety_error_code = 401 - enforcement_type = "INSPECT_ONLY" - } - } - `, context) -} - -func testAccModelArmorTemplate_update(context map[string]interface{}) string { - return acctest.Nprintf(` - resource "google_model_armor_template" "test-resource" { - location = "us-central1" - template_id = "%{templateId}" - labels = { - "test-label" = "env-testing-updated" - } - filter_config { - rai_settings { - rai_filters { - filter_type = "DANGEROUS" - confidence_level = "LOW_AND_ABOVE" - } - } - sdp_settings { - basic_config{ - filter_enforcement = "ENABLED" - } - } - pi_and_jailbreak_filter_settings { - filter_enforcement = "DISABLED" - confidence_level = "MEDIUM_AND_ABOVE" - } - malicious_uri_filter_settings { - filter_enforcement 
= "DISABLED" - } - } - template_metadata { - custom_llm_response_safety_error_message = "Updated LLM error message" - log_template_operations = false - log_sanitize_operations = false - multi_language_detection { - enable_multi_language_detection = false - } - ignore_partial_invocation_failures = false - custom_prompt_safety_error_code = 404 - custom_prompt_safety_error_message = "Updated prompt error message" - custom_llm_response_safety_error_code = 500 - enforcement_type = "INSPECT_AND_BLOCK" - } - } - `, context) -} diff --git a/mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go b/mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go deleted file mode 100644 index e30c658aa49e..000000000000 --- a/mmv1/third_party/terraform/services/modelarmorglobal/resource_model_armor_floorsetting_test.go +++ /dev/null @@ -1,131 +0,0 @@ -package modelarmorglobal_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccModelArmorGlobalFloorsetting_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_id": envvar.GetTestProjectFromEnv(), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccModelArmorGlobalFloorsetting_initial(context), - }, - { - ResourceName: "google_model_armor_floorsetting.test-resource", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "parent"}, - }, - { - Config: testAccModelArmorGlobalFloorsetting_updated(context), - }, - { - ResourceName: "google_model_armor_floorsetting.test-resource", - ImportState: true, - ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"location", "parent"}, - }, - }, - }) -} - -func testAccModelArmorGlobalFloorsetting_initial(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_model_armor_floorsetting" "test-resource" { - location = "global" - parent = "projects/%{project_id}" - - filter_config { - rai_settings { - rai_filters { - filter_type = "DANGEROUS" - confidence_level = "LOW_AND_ABOVE" - } - } - sdp_settings { - basic_config { - filter_enforcement = "ENABLED" - } - } - pi_and_jailbreak_filter_settings { - filter_enforcement = "ENABLED" - confidence_level = "MEDIUM_AND_ABOVE" - } - malicious_uri_filter_settings { - filter_enforcement = "ENABLED" - } - } - - enable_floor_setting_enforcement = true - - integrated_services = [ "AI_PLATFORM" ] - - ai_platform_floor_setting { - inspect_only = true - enable_cloud_logging = true - } - - floor_setting_metadata { - multi_language_detection { - enable_multi_language_detection = true - } - } -} -`, context) -} - -func testAccModelArmorGlobalFloorsetting_updated(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_model_armor_floorsetting" "test-resource" { - location = "global" - parent = "projects/%{project_id}" - - filter_config { - rai_settings { - rai_filters { - filter_type = "SEXUALLY_EXPLICIT" - confidence_level = "HIGH" - } - } - sdp_settings { - advanced_config { - inspect_template = "projects/modelarmor-api-test/locations/global/inspectTemplates/modelarmor-tf-test" - deidentify_template = "projects/modelarmor-api-test/locations/us-central1/deidentifyTemplates/modelarmor-tf-test" - } - } - pi_and_jailbreak_filter_settings { - filter_enforcement = "ENABLED" - confidence_level = "MEDIUM_AND_ABOVE" - } - malicious_uri_filter_settings { - filter_enforcement = "ENABLED" - } - } - - ai_platform_floor_setting { - inspect_and_block = false - enable_cloud_logging = false - } - - floor_setting_metadata { - multi_language_detection { - 
enable_multi_language_detection = false - } - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml index ca764c6567b1..8008b75c43f5 100644 --- a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml +++ b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_dashboard_meta.yaml @@ -5,5 +5,4 @@ api_version: 'v1' api_resource_type_kind: 'Dashboard' fields: - field: 'dashboard_json' - json: true - field: 'project' diff --git a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go index 55f0f94b5c0b..530ceab09f12 100644 --- a/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go +++ b/mmv1/third_party/terraform/services/monitoring/resource_monitoring_metric_descriptor_test.go @@ -17,7 +17,7 @@ func TestAccMonitoringMetricDescriptor_update(t *testing.T) { CheckDestroy: testAccCheckMonitoringMetricDescriptorDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccMonitoringMetricDescriptor_update("initial description", "initial display name", "30s", "30s"), + Config: testAccMonitoringMetricDescriptor_update("30s", "30s"), }, { ResourceName: "google_monitoring_metric_descriptor.basic", @@ -26,7 +26,7 @@ func TestAccMonitoringMetricDescriptor_update(t *testing.T) { ImportStateVerifyIgnore: []string{"metadata", "launch_stage"}, }, { - Config: testAccMonitoringMetricDescriptor_update("updated description", "updated display name", "60s", "60s"), + Config: testAccMonitoringMetricDescriptor_update("60s", "60s"), }, { ResourceName: "google_monitoring_metric_descriptor.basic", @@ -34,24 +34,15 @@ func TestAccMonitoringMetricDescriptor_update(t *testing.T) { ImportStateVerify: true, 
ImportStateVerifyIgnore: []string{"metadata", "launch_stage"}, }, - { - Config: testAccMonitoringMetricDescriptor_omittedFields(), - }, - { - ResourceName: "google_monitoring_metric_descriptor.basic", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"metadata", "launch_stage", "description", "display_name"}, - }, }, }) } -func testAccMonitoringMetricDescriptor_update(description, displayName, samplePeriod, ingestDelay string) string { +func testAccMonitoringMetricDescriptor_update(samplePeriod, ingestDelay string) string { return fmt.Sprintf(` resource "google_monitoring_metric_descriptor" "basic" { - description = "%s" - display_name = "%s" + description = "Daily sales records from all branch stores." + display_name = "daily sales" type = "custom.googleapis.com/stores/daily_sales" metric_kind = "GAUGE" value_type = "DOUBLE" @@ -67,27 +58,6 @@ resource "google_monitoring_metric_descriptor" "basic" { ingest_delay = "%s" } } -`, description, displayName, samplePeriod, ingestDelay, +`, samplePeriod, ingestDelay, ) } - -func testAccMonitoringMetricDescriptor_omittedFields() string { - return ` -resource "google_monitoring_metric_descriptor" "basic" { - type = "custom.googleapis.com/stores/daily_sales" - metric_kind = "GAUGE" - value_type = "DOUBLE" - unit = "{USD}" - labels { - key = "key" - value_type = "STRING" - description = "description" - } - launch_stage = "BETA" - metadata { - sample_period = "30s" - ingest_delay = "30s" - } -} -` -} diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go index 3f4c16cd3fb8..f6ea2a1e86d7 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_backup_test.go @@ -12,7 +12,7 @@ import ( func TestAccNetappBackup_NetappBackupFull_update(t *testing.T) { context := map[string]interface{}{ - 
"network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -20,9 +20,6 @@ func TestAccNetappBackup_NetappBackupFull_update(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, Steps: []resource.TestStep{ { Config: testAccNetappBackup_NetappBackupFromVolumeSnapshot(context), @@ -60,11 +57,6 @@ resource "google_netapp_storage_pool" "default" { network = data.google_compute_network.default.id } -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} - resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location @@ -124,11 +116,6 @@ resource "google_netapp_storage_pool" "default" { network = data.google_compute_network.default.id } -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} - resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location @@ -176,7 +163,7 @@ resource "google_netapp_backup" "test_backup" { func TestAccNetappBackup_NetappFlexBackup(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -184,9 +171,6 @@ func TestAccNetappBackup_NetappFlexBackup(t *testing.T) { PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, Steps: []resource.TestStep{ { Config: testAccNetappBackup_FlexBackup(context), @@ -217,11 +201,6 @@ resource "google_netapp_storage_pool" "default" { replica_zone = "us-east4-b" } -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} - resource "google_netapp_volume" "default" { name = "tf-test-backup-volume%{random_suffix}" location = google_netapp_storage_pool.default.location @@ -254,268 +233,11 @@ resource "google_netapp_volume_snapshot" "default" { resource "google_netapp_backup" "test_backup" { name = "tf-test-test-backup%{random_suffix}" - description = "This is a flex test backup" - source_volume = google_netapp_volume.default.id - location = google_netapp_backup_vault.default.location - vault_name = google_netapp_backup_vault.default.name - source_snapshot = google_netapp_volume_snapshot.default.id - labels = { - key= "test" - value= "backup" - } -} -`, context) -} - -func TestAccNetappBackup_NetappIntegratedBackup(t *testing.T) { - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: 
testAccCheckNetappBackupDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccNetappBackup_IntegratedBackup(context), - }, - { - ResourceName: "google_netapp_backup.test_backup", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "name", "terraform_labels", "vault_name"}, - }, - }, - }) -} - -func testAccNetappBackup_IntegratedBackup(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" -} -resource "google_netapp_storage_pool" "default" { - name = "tf-test-backup-pool%{random_suffix}" - location = "us-east4" - service_level = "PREMIUM" - capacity_gib = "2048" - network = data.google_compute_network.default.id -} -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} -resource "google_netapp_volume" "default" { - name = "tf-test-backup-volume%{random_suffix}" - location = google_netapp_storage_pool.default.location - capacity_gib = "100" - share_name = "tf-test-backup-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.default.name - protocols = ["NFSV3"] - deletion_policy = "FORCE" - backup_config { - backup_vault = google_netapp_backup_vault.default.id - } -} -resource "google_netapp_backup_vault" "default" { - name = "tf-test-backup-vault%{random_suffix}" - location = google_netapp_storage_pool.default.location - backup_vault_type = "CROSS_REGION" - backup_region = "us-west4" -} -resource "google_netapp_volume_snapshot" "default" { - depends_on = [google_netapp_volume.default] - location = google_netapp_volume.default.location - volume_name = google_netapp_volume.default.name - description = "This is a test description" - name = "testvolumesnap%{random_suffix}" - labels = { - key= "test" - value= "snapshot" - } - } -resource "google_netapp_backup" 
"test_backup" { - name = "tf-test-test-backup%{random_suffix}" - description = "This is a test integrated backup" + description = "This is a test backup" source_volume = google_netapp_volume.default.id location = google_netapp_backup_vault.default.location vault_name = google_netapp_backup_vault.default.name source_snapshot = google_netapp_volume_snapshot.default.id - labels = { - key= "test" - value= "backup" - } -} -`, context) -} - -func TestAccNetappBackup_NetappImmutableBackup(t *testing.T) { - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetappBackupDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccNetappBackup_ImmutableBackup(context), - }, - { - ResourceName: "google_netapp_backup.test_backup", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "name", "terraform_labels", "vault_name"}, - }, - { - Config: testAccNetappBackup_ImmutableBackupUpdate(context), - }, - { - ResourceName: "google_netapp_backup.test_backup", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "name", "terraform_labels", "vault_name"}, - }, - }, - }) -} - -func testAccNetappBackup_ImmutableBackup(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" -} -resource "google_netapp_storage_pool" "default" { - name = "tf-test-backup-pool%{random_suffix}" - location = "us-central1" - service_level = "FLEX" - 
capacity_gib = "2048" - network = data.google_compute_network.default.id - zone = "us-central1-a" - replica_zone = "us-central1-b" -} -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} -resource "google_netapp_volume" "default" { - name = "tf-test-backup-volume%{random_suffix}" - location = "us-central1" - capacity_gib = "100" - share_name = "tf-test-backup-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.default.name - protocols = ["NFSV3"] - deletion_policy = "FORCE" - backup_config { - backup_vault = google_netapp_backup_vault.default.id - } -} -resource "google_netapp_backup_vault" "default" { - name = "tf-test-backup-vault%{random_suffix}" - location = "us-central1" - backup_retention_policy { - backup_minimum_enforced_retention_days = 2 - daily_backup_immutable = true - weekly_backup_immutable = false - monthly_backup_immutable = false - manual_backup_immutable = false - } -} -resource "google_netapp_volume_snapshot" "default" { - depends_on = [google_netapp_volume.default] - location = "us-central1" - volume_name = google_netapp_volume.default.name - description = "This is a test description" - name = "testvolumesnap%{random_suffix}" - labels = { - key= "test" - value= "snapshot" - } -} -resource "google_netapp_backup" "test_backup" { - name = "tf-test-test-backup%{random_suffix}" - description = "This is a test immutable backup" - source_volume = google_netapp_volume.default.id - location = "us-central1" - vault_name = google_netapp_backup_vault.default.name - source_snapshot = google_netapp_volume_snapshot.default.id - labels = { - key= "test" - value= "backup" - } -} -`, context) -} - -func testAccNetappBackup_ImmutableBackupUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" -} -resource "google_netapp_storage_pool" "default" { - name = "tf-test-backup-pool%{random_suffix}" 
- location = "us-central1" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - zone = "us-central1-a" - replica_zone = "us-central1-b" -} -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} -resource "google_netapp_volume" "default" { - name = "tf-test-backup-volume%{random_suffix}" - location = "us-central1" - capacity_gib = "100" - share_name = "tf-test-backup-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.default.name - protocols = ["NFSV3"] - deletion_policy = "FORCE" - backup_config { - backup_vault = google_netapp_backup_vault.default.id - } -} -resource "google_netapp_backup_vault" "default" { - name = "tf-test-backup-vault%{random_suffix}" - location = "us-central1" - backup_retention_policy { - backup_minimum_enforced_retention_days = 12 - daily_backup_immutable = true - weekly_backup_immutable = true - monthly_backup_immutable = true - manual_backup_immutable = true - } -} -resource "google_netapp_volume_snapshot" "default" { - depends_on = [google_netapp_volume.default] - location = "us-central1" - volume_name = google_netapp_volume.default.name - description = "This is a test description" - name = "testvolumesnap%{random_suffix}" - labels = { - key= "test" - value= "snapshot" - } -} -resource "google_netapp_backup" "test_backup" { - name = "tf-test-test-backup%{random_suffix}" - description = "This is a test immutable backup" - source_volume = google_netapp_volume.default.id - location = "us-central1" - vault_name = google_netapp_backup_vault.default.name - source_snapshot = google_netapp_volume_snapshot.default.id labels = { key= "test" value= "backup" diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index e5b5063c461f..9adcf1f1dc5e 100644 --- 
a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -3,9 +3,8 @@ package netapp_test import ( "testing" "time" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -13,7 +12,6 @@ func TestAccNetappStoragePool_storagePoolCreateExample_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -46,8 +44,24 @@ func TestAccNetappStoragePool_storagePoolCreateExample_update(t *testing.T) { func testAccNetappStoragePool_storagePoolCreateExample_full(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" +resource "google_compute_network" "peering_network" { + name = "tf-test-network%{random_suffix}" +} + +# Create an IP address +resource "google_compute_global_address" "private_ip_alloc" { + name = "tf-test-address%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.peering_network.id +} + +# Create a private connection +resource "google_service_networking_connection" "default" { + network = google_compute_network.peering_network.id + service = "netapp.servicenetworking.goog" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] } resource "google_netapp_storage_pool" "test_pool" { @@ -55,7 +69,7 @@ resource "google_netapp_storage_pool" "test_pool" { location = "us-central1" service_level = "PREMIUM" capacity_gib = "2048" - network 
= data.google_compute_network.default.id + network = google_compute_network.peering_network.id active_directory = "" description = "this is a test description" kms_config = "" @@ -72,8 +86,24 @@ resource "google_netapp_storage_pool" "test_pool" { func testAccNetappStoragePool_storagePoolCreateExample_update(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" +resource "google_compute_network" "peering_network" { + name = "tf-test-network%{random_suffix}" +} + +# Create an IP address +resource "google_compute_global_address" "private_ip_alloc" { + name = "tf-test-address%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.peering_network.id +} + +# Create a private connection +resource "google_service_networking_connection" "default" { + network = google_compute_network.peering_network.id + service = "netapp.servicenetworking.goog" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] } resource "google_netapp_storage_pool" "test_pool" { @@ -81,7 +111,7 @@ resource "google_netapp_storage_pool" "test_pool" { location = "us-central1" service_level = "PREMIUM" capacity_gib = "4096" - network = data.google_compute_network.default.id + network = google_compute_network.peering_network.id active_directory = "" description = "this is test" kms_config = "" @@ -95,9 +125,10 @@ resource "google_netapp_storage_pool" "test_pool" { `, context) } + func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", 
acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -118,48 +149,30 @@ func TestAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(t *testi ImportStateVerify: true, ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, }, - { - Config: testAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(context), - }, - { - ResourceName: "google_netapp_storage_pool.test_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, - }, }, }) } func testAccNetappStoragePool_autoTieredStoragePoolCreateExample_full(context map[string]interface{}) string { return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" +resource "google_compute_network" "peering_network" { + name = "tf-test-network%{random_suffix}" } -resource "google_netapp_storage_pool" "test_pool" { - name = "tf-test-pool%{random_suffix}" - location = "us-east4" - service_level = "PREMIUM" - capacity_gib = "2048" - network = data.google_compute_network.default.id - active_directory = "" - description = "this is a test description" - kms_config = "" - labels = { - key= "test" - value= "pool" - } - ldap_enabled = false - allow_auto_tiering = false -} -`, context) +# Create an IP address +resource "google_compute_global_address" "private_ip_alloc" { + name = "tf-test-address%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.peering_network.id } -func testAccNetappStoragePool_autoTieredStoragePoolCreateExample_update(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - name = "%{network_name}" +# Create a private connection +resource "google_service_networking_connection" "default" { + network = google_compute_network.peering_network.id + service = 
"netapp.servicenetworking.goog" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] } resource "google_netapp_storage_pool" "test_pool" { @@ -167,7 +180,7 @@ resource "google_netapp_storage_pool" "test_pool" { location = "us-east4" service_level = "PREMIUM" capacity_gib = "2048" - network = data.google_compute_network.default.id + network = google_compute_network.peering_network.id active_directory = "" description = "this is a test description" kms_config = "" @@ -181,152 +194,9 @@ resource "google_netapp_storage_pool" "test_pool" { `, context) } -{{ if ne $.TargetVersionName `ga` -}} -func TestAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update(t *testing.T) { - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-2", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_full(context), - }, - { - ResourceName: "google_netapp_storage_pool.test_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"enable_hot_tier_auto_resize", "location", "name", "labels", "terraform_labels"}, - }, - { - Config: testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update(context), - }, - { - ResourceName: "google_netapp_storage_pool.test_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"enable_hot_tier_auto_resize", "location", "name", "labels", "terraform_labels"}, - }, - - { - Config: 
testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update_2(context), - }, - { - ResourceName: "google_netapp_storage_pool.test_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"enable_hot_tier_auto_resize", "location", "name", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_full(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - provider = google-beta - name = "%{network_name}" -} - -resource "google_netapp_storage_pool" "test_pool" { - provider = google-beta - name = "tf-test-pool%{random_suffix}" - location = "us-south1-a" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - active_directory = "" - description = "this is a test description" - kms_config = "" - labels = { - key= "test" - value= "pool" - } - ldap_enabled = false - allow_auto_tiering = true - custom_performance_enabled = true - total_throughput_mibps = "64" - total_iops = "1024" - hot_tier_size_gib = "1024" - enable_hot_tier_auto_resize = false -} -`, context) -} - -func testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - provider = google-beta - name = "%{network_name}" -} - -resource "google_netapp_storage_pool" "test_pool" { - provider = google-beta - name = "tf-test-pool%{random_suffix}" - location = "us-south1-a" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - active_directory = "" - description = "this is a test description" - kms_config = "" - labels = { - key= "test" - value= "pool" - } - ldap_enabled = false - allow_auto_tiering = true - custom_performance_enabled = true - total_throughput_mibps = "64" - total_iops = "1024" - hot_tier_size_gib = "1500" - 
enable_hot_tier_auto_resize = true -} -`, context) -} - -func testAccNetappStoragePool_flexAutoTierStoragePoolCreateExample_update_2(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_compute_network" "default" { - provider = google-beta - name = "%{network_name}" -} - -resource "google_netapp_storage_pool" "test_pool" { - provider = google-beta - name = "tf-test-pool%{random_suffix}" - location = "us-south1-a" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - active_directory = "" - description = "this is a test description" - kms_config = "" - labels = { - key= "test" - value= "pool" - } - ldap_enabled = false - allow_auto_tiering = true - custom_performance_enabled = true - total_throughput_mibps = "64" - total_iops = "1024" - hot_tier_size_gib = "1500" -} -`, context) -} -{{ end }} - func TestAccNetappStoragePool_FlexRegionalStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -449,7 +319,7 @@ data "google_compute_network" "default" { func TestAccNetappStoragePool_FlexRegionalStoragePoolNoZone(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -495,15 +365,16 @@ data 
"google_compute_network" "default" { `, context) } +{{ if ne $.TargetVersionName `ga` -}} func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), Steps: []resource.TestStep{ { @@ -531,6 +402,7 @@ func TestAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(t func testAccNetappStoragePool_customPerformanceStoragePoolCreateExample_full(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_netapp_storage_pool" "test_pool" { + provider = google-beta name = "tf-test-pool%{random_suffix}" location = "us-east4-a" service_level = "FLEX" @@ -543,6 +415,7 @@ resource "google_netapp_storage_pool" "test_pool" { } data "google_compute_network" "default" { + provider = google-beta name = "%{network_name}" } `, context) @@ -551,6 +424,7 @@ data "google_compute_network" "default" { func testAccNetappStoragePool_customPerformanceStoragePoolCreateExample_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_netapp_storage_pool" "test_pool" { + provider = google-beta name = "tf-test-pool%{random_suffix}" location = "us-east4-a" service_level = "FLEX" @@ -563,79 +437,9 @@ resource "google_netapp_storage_pool" "test_pool" { } data "google_compute_network" "default" { + 
provider = google-beta name = "%{network_name}" } `, context) } - -func TestAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(t *testing.T) { - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_full(context), - }, - { - ResourceName: "google_netapp_storage_pool.test_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, - }, - { - Config: testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(context), - }, - { - ResourceName: "google_netapp_storage_pool.test_pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_netapp_storage_pool" "test_pool" { - name = "tf-test-pool%{random_suffix}" - location = "us-east4-a" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - description = "this is a test description" - custom_performance_enabled = true - total_throughput_mibps = "200" -} - -data "google_compute_network" "default" { - name = "%{network_name}" -} -`, context) -} - -func testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(context 
map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_netapp_storage_pool" "test_pool" { - name = "tf-test-pool%{random_suffix}" - location = "us-east4-a" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - description = "this is updated test description" - custom_performance_enabled = true - total_throughput_mibps = "200" - total_iops = "3500" -} - -data "google_compute_network" "default" { - name = "%{network_name}" -} -`, context) -} \ No newline at end of file +{{ end }} diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go index 30c3374d57f4..804dcb37b292 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_quotaRule_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeQuotaRule_netappVolumeQuotaRuleBasicExample_update(t *te t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go index 8e17171ff727..0a111b829a9a 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_replication_test.go @@ -15,7 +15,7 @@ func 
TestAccNetappVolumeReplication_NetappVolumeReplicationCreateExample_update( t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go index cef8f5ce594a..f1ae861a60c7 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_snapshot_test.go @@ -15,7 +15,7 @@ func TestAccNetappVolumeSnapshot_volumeSnapshotCreateExample_update(t *testing.T t.Parallel() context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go similarity index 76% rename from mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl rename to mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go index a8d8f2b9b5d9..6925ec048983 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go @@ -20,7 +20,7 @@ import ( func TestAccNetappVolume_NetappVolumeBasicExample_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -599,7 +599,7 @@ func testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, if !ok { return fmt.Errorf("Not found: %v", vault) } - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}NetappBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/backupVaults/{{"{{"}}name{{"}}"}}/backups") + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{NetappBasePath}}projects/{{project}}/locations/{{location}}/backupVaults/{{name}}/backups") if err != nil { return fmt.Errorf("Error : %v", err) } @@ -636,7 +636,7 @@ func testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, return backupDataList[i].createTime.After(backupDataList[j].createTime) }) for i := range backupDataList { - baseUrl, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}NetappBasePath{{"}}"}}") + baseUrl, err := tpgresource.ReplaceVarsForTest(config, rs, "{{NetappBasePath}}") if err != nil { return fmt.Errorf("Error : %v", err) } @@ -661,7 +661,7 @@ func testAccNetappVolume_volumeBasicExample_cleanupScheduledBackup(t *testing.T, func TestAccNetappVolume_autoTieredNetappVolume_update(t *testing.T) { context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "network_name": 
acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-1", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), "random_suffix": acctest.RandString(t, 10), } @@ -705,10 +705,6 @@ resource "google_netapp_storage_pool" "default" { network = data.google_compute_network.default.id allow_auto_tiering = true } -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} resource "google_netapp_volume" "test_volume" { location = "us-west4" name = "tf-test-volume%{random_suffix}" @@ -737,122 +733,9 @@ resource "google_netapp_storage_pool" "default" { network = data.google_compute_network.default.id allow_auto_tiering = true } -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.default] - create_duration = "3m" -} -resource "google_netapp_volume" "test_volume" { - location = "us-west4" - name = "tf-test-volume%{random_suffix}" - capacity_gib = "100" - share_name = "tf-test-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.default.name - protocols = ["NFSV3"] - tiering_policy { - cooling_threshold_days = 20 - tier_action = "ENABLED" - } -} - -data "google_compute_network" "default" { - name = "%{network_name}" -} -`, context) -} - -{{ if ne $.TargetVersionName `ga` -}} -func TestAccNetappVolume_flexAutoTierNetappVolume_update(t *testing.T) { - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckNetappVolumeDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { 
- Config: testAccNetappVolume_flexAutoTierVolume_default(context), - }, - { - ResourceName: "google_netapp_volume.test_volume", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, - }, - { - Config: testAccNetappVolume_flexAutoTierVolume_update(context), - }, - { - ResourceName: "google_netapp_volume.test_volume", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccNetappVolume_flexAutoTierVolume_default(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_netapp_storage_pool" "default" { - provider = google-beta - name = "tf-test-pool%{random_suffix}" - location = "us-south1-a" - service_level = "FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - allow_auto_tiering = true - custom_performance_enabled = true - total_throughput_mibps = "64" - total_iops = "1024" - hot_tier_size_gib = "1024" - enable_hot_tier_auto_resize = true -} -resource "google_netapp_volume" "test_volume" { - provider = google-beta - location = "us-south1-a" - name = "tf-test-volume%{random_suffix}" - capacity_gib = "100" - share_name = "tf-test-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.default.name - protocols = ["NFSV3"] - tiering_policy { - cooling_threshold_days = 31 - tier_action = "ENABLED" - hot_tier_bypass_mode_enabled = false - } -} -data "google_compute_network" "default" { - provider = google-beta - name = "%{network_name}" -} -`, context) -} -func testAccNetappVolume_flexAutoTierVolume_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_netapp_storage_pool" "default" { - provider = google-beta - name = "tf-test-pool%{random_suffix}" - location = "us-south1-a" - service_level = 
"FLEX" - capacity_gib = "2048" - network = data.google_compute_network.default.id - allow_auto_tiering = true - custom_performance_enabled = true - total_throughput_mibps = "64" - total_iops = "1024" - hot_tier_size_gib = "1024" - enable_hot_tier_auto_resize = true -} resource "google_netapp_volume" "test_volume" { - provider = google-beta - location = "us-south1-a" + location = "us-west4" name = "tf-test-volume%{random_suffix}" capacity_gib = "100" share_name = "tf-test-volume%{random_suffix}" @@ -861,113 +744,11 @@ resource "google_netapp_volume" "test_volume" { tiering_policy { cooling_threshold_days = 20 tier_action = "ENABLED" - hot_tier_bypass_mode_enabled = true } } -data "google_compute_network" "default" { - provider = google-beta - name = "%{network_name}" -} -`, context) -} - -func TestAccNetappStoragePool_ManualQos(t *testing.T) { - context := map[string]interface{}{ - "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetappVolumeDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccNetappVolume_ManualQosAuto(context), - }, - { - ResourceName: "google_netapp_volume.test_volume", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, - }, - { - Config: testAccNetappVolume_ManualQosManual(context), - }, - { - ResourceName: "google_netapp_volume.test_volume", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", 
"deletion_policy", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccNetappVolume_ManualQosAuto(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_netapp_storage_pool" "test_pool" { - name = "tf-test-pool%{random_suffix}" - location = "us-east4" - service_level = "EXTREME" - capacity_gib = "2048" - network = data.google_compute_network.default.id - qos_type = "AUTO" -} - -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.test_pool] - create_duration = "3m" -} - -resource "google_netapp_volume" "test_volume" { - location = "us-east4" - name = "tf-test-test-volume%{random_suffix}" - capacity_gib = "100" - share_name = "tf-test-test-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.test_pool.name - protocols = ["NFSV3"] -} - -data "google_compute_network" "default" { - name = "%{network_name}" -} -`, context) -} - -func testAccNetappVolume_ManualQosManual(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_netapp_storage_pool" "test_pool" { - name = "tf-test-pool%{random_suffix}" - location = "us-east4" - service_level = "EXTREME" - capacity_gib = "2048" - network = data.google_compute_network.default.id - qos_type = "MANUAL" -} - -resource "time_sleep" "wait_3_minutes" { - depends_on = [google_netapp_storage_pool.test_pool] - create_duration = "3m" -} - -resource "google_netapp_volume" "test_volume" { - location = "us-east4" - name = "tf-test-test-volume%{random_suffix}" - capacity_gib = "100" - description = "This is a test description for manual qos volume" - share_name = "tf-test-test-volume%{random_suffix}" - storage_pool = google_netapp_storage_pool.test_pool.name - protocols = ["NFSV3"] - throughput_mibps = 12.5 -} data "google_compute_network" "default" { name = "%{network_name}" } `, context) } -{{ end }} diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go 
b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go index aa862d81ed18..5c549630356c 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_internal_range_test.go @@ -337,61 +337,3 @@ resource "google_compute_network" "default" { } `, context) } - -func TestAccNetworkConnectivityInternalRange_networkConnectivityInternalRangesImmutableExample_full(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - resourceName := "google_network_connectivity_internal_range.default" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetworkConnectivityInternalRangeDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetworkConnectivityInternalRange_networkConnectivityInternalRangesImmutableExample_full(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - resourceName, "description", "Test internal range Immutable"), - resource.TestCheckResourceAttr( - resourceName, "ip_cidr_range", "11.11.20.0/24"), - resource.TestCheckResourceAttr( - resourceName, "usage", "FOR_VPC"), - resource.TestCheckResourceAttr( - resourceName, "peering", "FOR_SELF"), - resource.TestCheckResourceAttr( - resourceName, "immutable", "true"), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "network", "labels", "terraform_labels"}, - }, - }, - }) -} - -func testAccNetworkConnectivityInternalRange_networkConnectivityInternalRangesImmutableExample_full(context map[string]interface{}) string { - return acctest.Nprintf(` -resource 
"google_network_connectivity_internal_range" "default" { - name = "basic%{random_suffix}" - description = "Test internal range Immutable" - network = google_compute_network.default.name - ip_cidr_range = "11.11.20.0/24" - usage = "FOR_VPC" - peering = "FOR_SELF" - immutable = true -} - -resource "google_compute_network" "default" { - name = "tf-test-internal-ranges%{random_suffix}" - auto_create_subnetworks = false -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go index 67c15b84cbe4..2ce8136a7fdd 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_service_connection_policies_test.go @@ -6,14 +6,12 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccNetworkConnectivityServiceConnectionPolicy_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), "networkProducerName": fmt.Sprintf("tf-test-network-%s", acctest.RandString(t, 10)), "subnetworkProducerName1": fmt.Sprintf("tf-test-subnet-producer-%s", acctest.RandString(t, 10)), "subnetworkProducerName2": fmt.Sprintf("tf-test-subnet-producer-%s", acctest.RandString(t, 10)), @@ -103,12 +101,8 @@ resource "google_network_connectivity_service_connection_policy" "default" { service_class = "gcp-memorystore-redis" network = google_compute_network.producer_net.id psc_config { - producer_instance_location = "CUSTOM_RESOURCE_HIERARCHY_LEVELS" - subnetworks = [google_compute_subnetwork.producer_subnet1.id] - limit 
= 4 - allowed_google_producers_resource_hierarchy_level = [ - "organizations/%{org_id}", - ] + subnetworks = [google_compute_subnetwork.producer_subnet1.id] + limit = 4 } labels = { foo = "bar" diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go index 07b490cff4c5..ca155dd7024d 100644 --- a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" ) @@ -115,11 +114,6 @@ func TestAccNetworkConnectivitySpoke_RouterApplianceHandWritten(t *testing.T) { }, { Config: testAccNetworkConnectivitySpoke_RouterApplianceHandWrittenUpdate1(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_network_connectivity_spoke.primary", plancheck.ResourceActionUpdate), - }, - }, }, { ResourceName: "google_network_connectivity_spoke.primary", @@ -362,27 +356,6 @@ resource "google_compute_instance" "router-instance1" { } } -resource "google_compute_instance" "router-instance2" { - name = "tf-test-router-instance2%{random_suffix}" - machine_type = "e2-medium" - can_ip_forward = true - zone = "%{zone}" - - boot_disk { - initialize_params { - image = "projects/debian-cloud/global/images/debian-10-buster-v20210817" - } - } - - network_interface { - subnetwork = google_compute_subnetwork.subnetwork.name - network_ip = "10.0.0.3" - access_config { - network_tier = "PREMIUM" - } - } -} - resource "google_network_connectivity_hub" 
"basic_hub" { name = "tf-test-hub%{random_suffix}" description = "A sample hub" @@ -446,27 +419,6 @@ resource "google_compute_instance" "router-instance1" { } } -resource "google_compute_instance" "router-instance2" { - name = "tf-test-router-instance2%{random_suffix}" - machine_type = "e2-medium" - can_ip_forward = true - zone = "%{zone}" - - boot_disk { - initialize_params { - image = "projects/debian-cloud/global/images/debian-10-buster-v20210817" - } - } - - network_interface { - subnetwork = google_compute_subnetwork.subnetwork.name - network_ip = "10.0.0.3" - access_config { - network_tier = "PREMIUM" - } - } -} - resource "google_network_connectivity_hub" "basic_hub" { name = "tf-test-hub%{random_suffix}" description = "A sample hub" @@ -488,7 +440,6 @@ resource "google_network_connectivity_spoke" "primary" { virtual_machine = google_compute_instance.router-instance1.self_link ip_address = "10.0.0.2" } - include_import_ranges = ["ALL_IPV4_RANGES"] site_to_site_data_transfer = true } } @@ -565,7 +516,7 @@ resource "google_network_connectivity_spoke" "primary" { location = "%{region}" description = "An UPDATED sample spoke with two linked routher appliance instances" labels = { - label-two = "value-three" + label-two = "value-two" } hub = google_network_connectivity_hub.basic_hub.id linked_router_appliance_instances { @@ -649,11 +600,11 @@ resource "google_network_connectivity_spoke" "primary" { hub = google_network_connectivity_hub.basic_hub.id linked_vpc_network { exclude_export_ranges = [ - "198.51.110.0/24", + "198.51.100.0/24", "10.10.0.0/16" ] include_export_ranges = [ - "198.51.110.0/23", + "198.51.100.0/23", "10.0.0.0/8" ] uri = google_compute_network.network.self_link diff --git a/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go b/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go deleted file mode 100644 index 
26cca8b3a065..000000000000 --- a/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run.go +++ /dev/null @@ -1,469 +0,0 @@ -package networkmanagement - -import ( - "fmt" - "log" - "net/http" - "reflect" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleNetworkManagementTestRun() *schema.Resource { - return &schema.Resource{ - Read: dataSourceGoogleNetworkManagementTestRun, - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `Unique name for the connectivity test.`, - }, - "reachability_details": { - Type: schema.TypeList, - Computed: true, - Description: `Connectivity test reachability details.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "result": { - Type: schema.TypeString, - Computed: true, - Description: `Status of the connectivity test: RESULT_UNSPECIFIED, REACHABLE, UNREACHABLE, AMBIGUOUS or UNDETERMINED.`, - }, - "traces": { - Type: schema.TypeList, - Computed: true, - Description: `List of connectivity test traces.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "endpoint_info": { - Type: schema.TypeList, - Computed: true, - Description: `Derived from the source and destination endpoints definition specified by user request, and validated by the data plane model.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "destination_ip": { - Type: schema.TypeString, - Computed: true, - Description: `Destination IP address.`, - }, - "destination_network_uri": { - Type: schema.TypeString, - Computed: true, - Description: `URI of the network where this packet is sent to.`, - }, - "destination_port": { - Type: schema.TypeInt, - Computed: true, - Description: `Destination port. 
Only valid when protocol is TCP or UDP.`, - }, - "protocol": { - Type: schema.TypeString, - Computed: true, - Description: `IP protocol in string format, for example: "TCP", "UDP", "ICMP".`, - }, - "source_agent_uri": { - Type: schema.TypeString, - Computed: true, - Description: `URI of the source telemetry agent this packet originates from.`, - }, - "source_ip": { - Type: schema.TypeString, - Computed: true, - Description: `Source IP address.`, - }, - "source_network_uri": { - Type: schema.TypeString, - Computed: true, - Description: `URI of the network where this packet originates from.`, - }, - "source_port": { - Type: schema.TypeInt, - Computed: true, - Description: `Source port. Only valid when protocol is TCP or UDP.`, - }, - }, - }, - }, - "forward_trace_id": { - Type: schema.TypeInt, - Computed: true, - Description: `ID of the trace.`, - }, - "steps": { - Type: schema.TypeList, - Computed: true, - Description: `A trace of a test contains multiple steps from the initial state to the final state (delivered, dropped, forwarded, or aborted).`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "causes_drop": { - Type: schema.TypeBool, - Computed: true, - Description: `If this step leads to the final state Drop.`, - }, - "description": { - Type: schema.TypeString, - Computed: true, - Description: `Description of the connectivity test step.`, - }, - "project_id": { - Type: schema.TypeString, - Computed: true, - Description: `Project ID of the connectivity test step.`, - }, - "state": { - Type: schema.TypeString, - Computed: true, - Description: `State of the connectivity test step.`, - }, - }, - }, - }, - }, - }, - }, - "verify_time": { - Type: schema.TypeString, - Computed: true, - Description: `Time when reachability details were determined. An RFC3339 timestamp in UTC time. 
-This in the format of yyyy-MM-ddTHH:mm:ss.SSSZ.`, - }, - }, - }, - }, - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - }, - }, - UseJSONNumber: true, - } -} - -func dataSourceGoogleNetworkManagementTestRun(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - obj := make(map[string]interface{}) - nameProp, err := expandNetworkManagementConnectivityTestRunName(d.Get("name"), d, config) - if err != nil { - return err - } else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { - obj["name"] = nameProp - } - - url, err := tpgresource.ReplaceVars(d, config, "{{NetworkManagementBasePath}}projects/{{project}}/locations/global/connectivityTests/{{name}}:rerun") - if err != nil { - return err - } - - log.Printf("[DEBUG] Rerunning ConnectivityTestRun: %#v", obj) - billingProject := "" - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error fetching project for ConnectivityTestRun: %s", err) - } - billingProject = project - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - headers := make(http.Header) - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: d.Timeout(schema.TimeoutCreate), - Headers: headers, - }) - if err != nil { - return fmt.Errorf("Error rerunning ConnectivityTestRun: %s", err) - } - - // Store the ID now - id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/global/connectivityTests/{{name}}") - if err != nil { - return fmt.Errorf("Error constructing 
id: %s", err) - } - d.SetId(id) - - err = NetworkManagementOperationWaitTime( - config, res, project, "Rerunning ConnectivityTestRun", userAgent, - d.Timeout(schema.TimeoutCreate)) - - if err != nil { - // The resource didn't actually create - d.SetId("") - return fmt.Errorf("Error waiting to rerun ConnectivityTestRun: %s", err) - } - - log.Printf("[DEBUG] Finished rerunning ConnectivityTestRun %q: %#v", d.Id(), res) - - return dataSourceGoogleNetworkManagementTestRunRead(d, meta) -} - -func dataSourceGoogleNetworkManagementTestRunRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{NetworkManagementBasePath}}projects/{{project}}/locations/global/connectivityTests/{{name}}") - if err != nil { - return err - } - - billingProject := "" - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return fmt.Errorf("Error fetching project for ConnectivityTestRun: %s", err) - } - billingProject = project - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - headers := make(http.Header) - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - Headers: headers, - }) - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NetworkManagementConnectivityTestRun %q", d.Id())) - } - - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error reading ConnectivityTestRun: %s", err) - } - - if err := d.Set("name", flattenNetworkManagementConnectivityTestRunName(res["name"], d, config)); err != nil { - return fmt.Errorf("Error reading ConnectivityTestRun: %s", err) - } - if err := 
d.Set("reachability_details", flattenNetworkManagementConnectivityTestRunReachabilityDetails(res["reachabilityDetails"], d, config)); err != nil { - return fmt.Errorf("Error reading ConnectivityTestRun: %s", err) - } - - return nil -} - -func flattenNetworkManagementConnectivityTestRunName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetails(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return nil - } - original := v.(map[string]interface{}) - if len(original) == 0 { - return nil - } - transformed := make(map[string]interface{}) - transformed["result"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsResult(original["result"], d, config) - transformed["verify_time"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsVerifyTime(original["verifyTime"], d, config) - transformed["traces"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTraces(original["traces"], d, config) - return []interface{}{transformed} -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsResult(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsVerifyTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTraces(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - transformed := make([]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - 
continue - } - transformed = append(transformed, map[string]interface{}{ - "endpoint_info": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfo(original["endpointInfo"], d, config), - "steps": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesSteps(original["steps"], d, config), - "forward_trace_id": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesForwardTraceId(original["forwardTraceId"], d, config), - }) - } - return transformed -} -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return nil - } - original := v.(map[string]interface{}) - if len(original) == 0 { - return nil - } - transformed := make(map[string]interface{}) - transformed["source_ip"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceIp(original["sourceIp"], d, config) - transformed["destination_ip"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationIp(original["destinationIp"], d, config) - transformed["protocol"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoProtocol(original["protocol"], d, config) - transformed["source_port"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourcePort(original["sourcePort"], d, config) - transformed["destination_port"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationPort(original["destinationPort"], d, config) - transformed["source_network_uri"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceNetworkUri(original["sourceNetworkUri"], d, config) - transformed["destination_network_uri"] = - 
flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationNetworkUri(original["destinationNetworkUri"], d, config) - transformed["source_agent_uri"] = - flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceAgentUri(original["sourceAgentUri"], d, config) - return []interface{}{transformed} -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceIp(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationIp(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoProtocol(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourcePort(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // Handles the string fixed64 format - if strVal, ok := v.(string); ok { - if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { - return intVal - } - } - - // number values are represented as float64 - if floatVal, ok := v.(float64); ok { - intVal := int(floatVal) - return intVal - } - - return v // let terraform core handle it otherwise -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationPort(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // Handles the string fixed64 format - if strVal, ok := v.(string); ok { - if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { - return intVal - } - } - - // number values are represented as float64 - if floatVal, ok := v.(float64); ok { - intVal := int(floatVal) - return intVal - } - - return v // let 
terraform core handle it otherwise -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceNetworkUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoDestinationNetworkUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesEndpointInfoSourceAgentUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesSteps(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - transformed := make([]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - transformed = append(transformed, map[string]interface{}{ - "description": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsDescription(original["description"], d, config), - "state": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsState(original["state"], d, config), - "causes_drop": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsCausesDrop(original["causesDrop"], d, config), - "project_id": flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsProjectId(original["projectId"], d, config), - }) - } - return transformed -} -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsState(v 
interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsCausesDrop(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesStepsProjectId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenNetworkManagementConnectivityTestRunReachabilityDetailsTracesForwardTraceId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // Handles the string fixed64 format - if strVal, ok := v.(string); ok { - if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { - return intVal - } - } - - // number values are represented as float64 - if floatVal, ok := v.(float64); ok { - intVal := int(floatVal) - return intVal - } - - return v // let terraform core handle it otherwise -} - -func expandNetworkManagementConnectivityTestRunName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - f, err := tpgresource.ParseGlobalFieldValue("tests", v.(string), "project", d, config, true) - if err != nil { - return nil, fmt.Errorf("Invalid value for zone: %s", err) - } - return f.RelativeLink(), nil -} diff --git a/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go b/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go deleted file mode 100644 index e289e7e35975..000000000000 --- a/mmv1/third_party/terraform/services/networkmanagement/data_source_network_management_connectivity_test_run_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package networkmanagement_test - -import ( - "fmt" - "regexp" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - 
"github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccNetworkManagementConnectivityTestRun_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetworkManagementConnectivityTestDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetworkManagementConnectivityTestRun_instanceToInstance(context), - Check: resource.ComposeTestCheckFunc( - resource.TestMatchResourceAttr("data.google_network_management_connectivity_test_run.conn-test", - "reachability_details.0.result", regexp.MustCompile("REACHABLE")), - ), - }, - }, - }) -} - -func testAccNetworkManagementConnectivityTestRun_instanceToInstance(context map[string]interface{}) string { - connTestCfg := acctest.Nprintf(` -data "google_network_management_connectivity_test_run" "conn-test" { - name = google_network_management_connectivity_test.conn-test.name -} - -resource "google_network_management_connectivity_test" "conn-test" { - name = "tf-test-conntest%{random_suffix}" - source { - instance = google_compute_instance.vm1.id - } - - destination { - instance = google_compute_instance.vm2.id - } - - protocol = "TCP" -} -`, context) - return fmt.Sprintf("%s\n\n%s\n\n", connTestCfg, testAccNetworkManagementConnectivityTestRun_baseResources(context)) -} - -func testAccNetworkManagementConnectivityTestRun_baseResources(context map[string]interface{}) string { - return acctest.Nprintf(` - -resource "google_compute_address" "addr" { - name = "tf-test-addr%{random_suffix}" - subnetwork = google_compute_subnetwork.subnet.id - address_type = "INTERNAL" - address = "10.0.43.43" - region = "us-central1" -} - -resource "google_compute_instance" "vm1" { - name = "tf-test-src-vm%{random_suffix}" - machine_type = "e2-medium" - boot_disk 
{ - initialize_params { - image = data.google_compute_image.debian_11.id - } - } - network_interface { - network = google_compute_network.vpc.id - } -} - -resource "google_compute_instance" "vm2" { - name = "tf-test-vm-dest%{random_suffix}" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.debian_11.id - } - } - - network_interface { - network = google_compute_network.vpc.id - - } -} - -resource "google_compute_network" "vpc" { - name = "tf-test-connnet%{random_suffix}" -} - -resource "google_compute_subnetwork" "subnet" { - name = "tf-test-connet%{random_suffix}" - ip_cidr_range = "10.0.0.0/16" - region = "us-central1" - network = google_compute_network.vpc.id -} - -data "google_compute_image" "debian_11" { - family = "debian-11" - project = "debian-cloud" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl index 12c083a79f4f..bb1b5d1a1396 100644 --- a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_vpc_flow_logs_config_test.go.tmpl @@ -9,41 +9,41 @@ import ( ) func TestAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccNetworkManagementVpcFlowLogsConfig_fullInterconnect(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, - }, - { - Config: testAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkManagementVpcFlowLogsConfig_fullInterconnect(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, + }, + { + Config: testAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(context), + }, + { + ResourceName: "google_network_management_vpc_flow_logs_config.interconnect-test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, + }, + }, + }) } func testAccNetworkManagementVpcFlowLogsConfig_fullInterconnect(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` data "google_project" "project" { } @@ -77,7 +77,7 @@ resource "google_compute_interconnect_attachment" "attachment" { } func testAccNetworkManagementVpcFlowLogsConfig_updateInterconnect(context map[string]interface{}) 
string { - return acctest.Nprintf(` + return acctest.Nprintf(` data "google_project" "project" { } @@ -136,7 +136,7 @@ func TestAccNetworkManagementVpcFlowLogsConfig_updateVpn(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels", "vpc_flow_logs_config_id"}, }, - { + { Config: testAccNetworkManagementVpcFlowLogsConfig_updateVpn(context), }, { @@ -160,7 +160,7 @@ resource "google_network_management_vpc_flow_logs_config" "example" { vpn_tunnel = "projects/${data.google_project.project.number}/regions/us-central1/vpnTunnels/${google_compute_vpn_tunnel.tunnel.name}" } `, context) - return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) + return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) } func testAccNetworkManagementVpcFlowLogsConfig_updateVpn(context map[string]interface{}) string { @@ -179,196 +179,8 @@ resource "google_network_management_vpc_flow_logs_config" "example" { metadata = "EXCLUDE_ALL_METADATA" } `, context) - return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) -} - -{{ if ne $.TargetVersionName "ga" -}} -func TestAccNetworkManagementVpcFlowLogsConfig_network(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetworkManagementVpcFlowLogsConfig_network(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.network-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", 
"vpc_flow_logs_config_id"}, - }, - { - Config: testAccNetworkManagementVpcFlowLogsConfig_networkUpdate(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.network-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, - }, - }, - }) -} - -func testAccNetworkManagementVpcFlowLogsConfig_network(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { - provider = google-beta -} - -resource "google_compute_network" "network" { - provider = google-beta - name = "tf-test-flow-logs-network-%{random_suffix}" -} - -resource "google_network_management_vpc_flow_logs_config" "network-test" { - provider = google-beta - vpc_flow_logs_config_id = "tf-test-network-id-%{random_suffix}" - location = "global" - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.network.name}" - state = "ENABLED" -} -`, context) -} - -func testAccNetworkManagementVpcFlowLogsConfig_networkUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { - provider = google-beta -} - -resource "google_compute_network" "network" { - provider = google-beta - name = "tf-test-flow-logs-network-%{random_suffix}" -} - -resource "google_compute_network" "network_update" { - provider = google-beta - name = "tf-test-flow-logs-network-update-%{random_suffix}" -} - -resource "google_network_management_vpc_flow_logs_config" "network-test" { - provider = google-beta - vpc_flow_logs_config_id = "tf-test-network-id-%{random_suffix}" - location = "global" - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.network_update.name}" - state = "DISABLED" - aggregation_interval = "INTERVAL_10_MIN" - flow_sampling = 0.05 - metadata = "INCLUDE_ALL_METADATA" - description = "Updated description for network test" -} -`, context) -} - -func 
TestAccNetworkManagementVpcFlowLogsConfig_subnet(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetworkManagementVpcFlowLogsConfig_subnet(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.subnet-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, - }, - { - Config: testAccNetworkManagementVpcFlowLogsConfig_subnetUpdate(context), - }, - { - ResourceName: "google_network_management_vpc_flow_logs_config.subnet-test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "vpc_flow_logs_config_id"}, - }, - }, - }) -} - -func testAccNetworkManagementVpcFlowLogsConfig_subnet(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { - provider = google-beta -} - -resource "google_compute_network" "network" { - name = "tf-test-subnet-network-%{random_suffix}" - auto_create_subnetworks = false - provider = google-beta -} - -resource "google_compute_subnetwork" "subnet" { - provider = google-beta - name = "tf-test-flow-logs-subnet-%{random_suffix}" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.network.id -} - -resource "google_network_management_vpc_flow_logs_config" "subnet-test" { - provider = google-beta - vpc_flow_logs_config_id = "tf-test-subnet-id-%{random_suffix}" - location = "global" - subnet = "projects/${data.google_project.project.number}/regions/${google_compute_subnetwork.subnet.region}/subnetworks/${google_compute_subnetwork.subnet.name}" -} -`, context) -} - 
-func testAccNetworkManagementVpcFlowLogsConfig_subnetUpdate(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { - provider = google-beta -} - -resource "google_compute_network" "network" { - provider = google-beta - name = "tf-test-subnet-network-%{random_suffix}" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "subnet" { - provider = google-beta - name = "tf-test-flow-logs-subnet-%{random_suffix}" - ip_cidr_range = "10.2.0.0/16" - region = "us-central1" - network = google_compute_network.network.id -} - -resource "google_compute_subnetwork" "subnet_update" { - provider = google-beta - name = "tf-test-flow-logs-subnet-update-%{random_suffix}" - ip_cidr_range = "10.3.0.0/16" - region = "us-central1" - network = google_compute_network.network.id -} - -resource "google_network_management_vpc_flow_logs_config" "subnet-test" { - provider = google-beta - vpc_flow_logs_config_id = "tf-test-subnet-id-%{random_suffix}" - location = "global" - subnet = "projects/${data.google_project.project.number}/regions/${google_compute_subnetwork.subnet_update.region}/subnetworks/${google_compute_subnetwork.subnet_update.name}" - state = "ENABLED" -} -`, context) + return fmt.Sprintf("%s\n\n%s\n\n", vpcFlowLogsCfg, testAccNetworkManagementVpcFlowLogsConfig_baseResources(context)) } -{{ end }} func testAccNetworkManagementVpcFlowLogsConfig_baseResources(context map[string]interface{}) string { return acctest.Nprintf(` diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go.tmpl similarity index 92% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go rename to 
mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go.tmpl index 4c79cbc74ce7..015446354702 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_backend_authentication_config_test.go.tmpl @@ -1,5 +1,5 @@ package networksecurity_test - +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -18,7 +18,7 @@ func TestAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAut acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAuthenticationConfigFullExample_full(context), @@ -50,6 +50,7 @@ func TestAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAut func testAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAuthenticationConfigFullExample_full(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_certificate_manager_certificate" "certificate" { + provider = google-beta name = "tf-test-my-certificate%{random_suffix}" location = "global" self_managed { @@ -60,6 +61,7 @@ resource "google_certificate_manager_certificate" "certificate" { } resource "google_certificate_manager_trust_config" "trust_config" { + provider = google-beta name = "tf-test-my-trust-config%{random_suffix}" description = "sample description for the trust config" location = "global" @@ -75,6 +77,7 @@ resource "google_certificate_manager_trust_config" "trust_config" { } resource "google_network_security_backend_authentication_config" "default" { + provider = google-beta name = 
"tf-test-my-backend-authentication-config%{random_suffix}" location = "global" description = "my description" @@ -87,7 +90,11 @@ resource "google_network_security_backend_authentication_config" "default" { func testAccNetworkSecurityBackendAuthenticationConfig_networkSecurityBackendAuthenticationConfigFullExample_update(context map[string]interface{}) string { return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} resource "google_certificate_manager_certificate" "certificate" { + provider = google-beta name = "tf-test-my-certificate%{random_suffix}" location = "global" self_managed { @@ -98,6 +105,7 @@ resource "google_certificate_manager_certificate" "certificate" { } resource "google_certificate_manager_trust_config" "trust_config" { + provider = google-beta name = "tf-test-my-trust-config%{random_suffix}" description = "sample description for the trust config" location = "global" @@ -113,6 +121,7 @@ resource "google_certificate_manager_trust_config" "trust_config" { } resource "google_network_security_backend_authentication_config" "default" { + provider = google-beta name = "tf-test-my-backend-authentication-config%{random_suffix}" location = "global" description = "updated description" @@ -122,3 +131,5 @@ resource "google_network_security_backend_authentication_config" "default" { } `, context) } + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl index e22f5dbeeb99..47ff9ffad9d2 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_client_tls_policy_test.go.tmpl @@ -84,6 +84,11 @@ func testAccNetworkSecurityClientTlsPolicy_update(clientTlsPolicyName string) 
st target_uri = "unix:mypath1" } } + server_validation_ca { + grpc_endpoint { + target_uri = "unix:mypath2" + } + } } `, clientTlsPolicyName) } diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go index 22823730a924..a4e083fa8815 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_gateway_security_policy_rule_test.go @@ -49,6 +49,8 @@ func TestAccNetworkSecurityGatewaySecurityPolicyRule_update(t *testing.T) { } func TestAccNetworkSecurityGatewaySecurityPolicyRule_multiple(t *testing.T) { + // Skip for now to avoid leaking resources until the fix for b/400293188 rolls out + t.Skip() t.Parallel() context := map[string]interface{}{ diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_generated_test.go.tmpl similarity index 74% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_test.go rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_generated_test.go.tmpl index 932d25f63a97..8b16becf4d43 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_generated_test.go.tmpl @@ -1,4 +1,5 @@ package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -18,7 +19,7 @@ func TestAccNetworkSecurityInterceptDeployment_update(t *testing.T) { acctest.VcrTest(t, 
resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptDeployment_basic(context), @@ -50,11 +51,13 @@ func TestAccNetworkSecurityInterceptDeployment_update(t *testing.T) { func testAccNetworkSecurityInterceptDeployment_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { + provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "subnetwork" { + provider = google-beta name = "tf-test-example-subnet%{random_suffix}" region = "us-central1" ip_cidr_range = "10.1.0.0/16" @@ -62,14 +65,16 @@ resource "google_compute_subnetwork" "subnetwork" { } resource "google_compute_region_health_check" "health_check" { - name = "tf-test-example-hc%{random_suffix}" - region = "us-central1" + provider = google-beta + name = "tf-test-example-hc%{random_suffix}" + region = "us-central1" http_health_check { port = 80 } } resource "google_compute_region_backend_service" "backend_service" { + provider = google-beta name = "tf-test-example-bs%{random_suffix}" region = "us-central1" health_checks = [google_compute_region_health_check.health_check.id] @@ -78,23 +83,26 @@ resource "google_compute_region_backend_service" "backend_service" { } resource "google_compute_forwarding_rule" "forwarding_rule" { - name = "tf-test-example-fwr%{random_suffix}" - region = "us-central1" - network = google_compute_network.network.name - subnetwork = google_compute_subnetwork.subnetwork.name - backend_service = google_compute_region_backend_service.backend_service.id - load_balancing_scheme = "INTERNAL" - ports = [6081] - ip_protocol = "UDP" + provider = google-beta + name = "tf-test-example-fwr%{random_suffix}" + region = "us-central1" + network = 
google_compute_network.network.name + subnetwork = google_compute_subnetwork.subnetwork.name + backend_service = google_compute_region_backend_service.backend_service.id + load_balancing_scheme = "INTERNAL" + ports = [6081] + ip_protocol = "UDP" } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_deployment" "default" { + provider = google-beta intercept_deployment_id = "tf-test-example-deployment%{random_suffix}" location = "us-central1-a" forwarding_rule = google_compute_forwarding_rule.forwarding_rule.id @@ -110,11 +118,13 @@ resource "google_network_security_intercept_deployment" "default" { func testAccNetworkSecurityInterceptDeployment_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { + provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "subnetwork" { + provider = google-beta name = "tf-test-example-subnet%{random_suffix}" region = "us-central1" ip_cidr_range = "10.1.0.0/16" @@ -122,14 +132,16 @@ resource "google_compute_subnetwork" "subnetwork" { } resource "google_compute_region_health_check" "health_check" { - name = "tf-test-example-hc%{random_suffix}" - region = "us-central1" + provider = google-beta + name = "tf-test-example-hc%{random_suffix}" + region = "us-central1" http_health_check { port = 80 } } resource "google_compute_region_backend_service" "backend_service" { + provider = google-beta name = "tf-test-example-bs%{random_suffix}" region = "us-central1" health_checks = [google_compute_region_health_check.health_check.id] @@ -138,23 +150,26 @@ resource "google_compute_region_backend_service" "backend_service" { } resource "google_compute_forwarding_rule" 
"forwarding_rule" { - name = "tf-test-example-fwr%{random_suffix}" - region = "us-central1" - network = google_compute_network.network.name - subnetwork = google_compute_subnetwork.subnetwork.name - backend_service = google_compute_region_backend_service.backend_service.id - load_balancing_scheme = "INTERNAL" - ports = [6081] - ip_protocol = "UDP" + provider = google-beta + name = "tf-test-example-fwr%{random_suffix}" + region = "us-central1" + network = google_compute_network.network.name + subnetwork = google_compute_subnetwork.subnetwork.name + backend_service = google_compute_region_backend_service.backend_service.id + load_balancing_scheme = "INTERNAL" + ports = [6081] + ip_protocol = "UDP" } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_deployment" "default" { + provider = google-beta intercept_deployment_id = "tf-test-example-deployment%{random_suffix}" location = "us-central1-a" forwarding_rule = google_compute_forwarding_rule.forwarding_rule.id @@ -166,3 +181,5 @@ resource "google_network_security_intercept_deployment" "default" { } `, context) } + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_generated_test.go.tmpl similarity index 90% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_test.go rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_generated_test.go.tmpl index 614a06f28b81..5911b17c1bc3 100644 --- 
a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_deployment_group_generated_test.go.tmpl @@ -1,4 +1,5 @@ package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -18,7 +19,7 @@ func TestAccNetworkSecurityInterceptDeploymentGroup_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptDeploymentGroup_basic(context), @@ -50,11 +51,13 @@ func TestAccNetworkSecurityInterceptDeploymentGroup_update(t *testing.T) { func testAccNetworkSecurityInterceptDeploymentGroup_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { + provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "default" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id @@ -69,11 +72,13 @@ resource "google_network_security_intercept_deployment_group" "default" { func testAccNetworkSecurityInterceptDeploymentGroup_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { + provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "default" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id @@ -84,3 +89,5 @@ 
resource "google_network_security_intercept_deployment_group" "default" { } `, context) } + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl similarity index 80% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_test.go rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl index 921ca4406600..cc581eb932dc 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_association_generated_test.go.tmpl @@ -1,4 +1,5 @@ package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -18,7 +19,7 @@ func TestAccNetworkSecurityInterceptEndpointGroupAssociation_update(t *testing.T acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptEndpointGroupAssociation_basic(context), @@ -50,28 +51,33 @@ func TestAccNetworkSecurityInterceptEndpointGroupAssociation_update(t *testing.T func testAccNetworkSecurityInterceptEndpointGroupAssociation_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "producer_network" { + provider = google-beta name = "tf-test-example-prod-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_network" "consumer_network" { + provider = 
google-beta name = "tf-test-example-cons-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.producer_network.id } resource "google_network_security_intercept_endpoint_group" "endpoint_group" { - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + provider = google-beta + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id } resource "google_network_security_intercept_endpoint_group_association" "default" { + provider = google-beta intercept_endpoint_group_association_id = "tf-test-example-ega%{random_suffix}" location = "global" network = google_compute_network.consumer_network.id @@ -86,28 +92,33 @@ resource "google_network_security_intercept_endpoint_group_association" "default func testAccNetworkSecurityInterceptEndpointGroupAssociation_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "producer_network" { + provider = google-beta name = "tf-test-example-prod-network%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_network" "consumer_network" { + provider = google-beta name = "tf-test-example-cons-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.producer_network.id } resource "google_network_security_intercept_endpoint_group" 
"endpoint_group" { - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + provider = google-beta + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id } resource "google_network_security_intercept_endpoint_group_association" "default" { + provider = google-beta intercept_endpoint_group_association_id = "tf-test-example-ega%{random_suffix}" location = "global" network = google_compute_network.consumer_network.id @@ -118,3 +129,5 @@ resource "google_network_security_intercept_endpoint_group_association" "default } `, context) } + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_generated_test.go.tmpl similarity index 75% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_test.go rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_generated_test.go.tmpl index de889329df01..70c9b750c9d3 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_intercept_endpoint_group_generated_test.go.tmpl @@ -1,4 +1,5 @@ package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} import ( "testing" @@ -18,7 +19,7 @@ func TestAccNetworkSecurityInterceptEndpointGroup_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), Steps: []resource.TestStep{ { Config: testAccNetworkSecurityInterceptEndpointGroup_basic(context), @@ -50,21 +51,24 @@ func TestAccNetworkSecurityInterceptEndpointGroup_update(t *testing.T) { func testAccNetworkSecurityInterceptEndpointGroup_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { + provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource "google_network_security_intercept_endpoint_group" "default" { - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id - description = "initial description" + provider = google-beta + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + description = "initial description" labels = { foo = "bar" } @@ -75,24 +79,29 @@ resource "google_network_security_intercept_endpoint_group" "default" { func testAccNetworkSecurityInterceptEndpointGroup_update(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_network" "network" { + provider = google-beta name = "tf-test-example-network%{random_suffix}" auto_create_subnetworks = false } resource "google_network_security_intercept_deployment_group" "deployment_group" { + provider = google-beta intercept_deployment_group_id = "tf-test-example-dg%{random_suffix}" location = "global" network = google_compute_network.network.id } resource 
"google_network_security_intercept_endpoint_group" "default" { - intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" - location = "global" - intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id - description = "updated description" + provider = google-beta + intercept_endpoint_group_id = "tf-test-example-eg%{random_suffix}" + location = "global" + intercept_deployment_group = google_network_security_intercept_deployment_group.deployment_group.id + description = "updated description" labels = { foo = "goo" } } `, context) } + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_group_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go rename to mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go.tmpl index bdf6ae2e66dc..f0ffccc8d9df 100644 --- a/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go +++ b/mmv1/third_party/terraform/services/networksecurity/resource_network_security_security_profile_test.go.tmpl @@ -4,8 +4,8 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" 
"github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" ) diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go index 32de5a3a8d93..524969b82a63 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_origin_test.go @@ -51,9 +51,6 @@ func testAccNetworkServicesEdgeCacheOrigin_update_0(name string) string { timeout { connect_timeout = "10s" } - flex_shielding { - flex_shielding_regions = ["AFRICA_SOUTH1"] - } } `, name) } @@ -71,9 +68,6 @@ func testAccNetworkServicesEdgeCacheOrigin_update_1(name string) string { response_timeout = "29s" read_timeout = "13s" } - flex_shielding { - flex_shielding_regions = ["ME_CENTRAL1"] - } } `, name) } diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go index 269b10fb2cf6..82b3329e5889 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_edge_cache_service_test.go @@ -147,204 +147,3 @@ resource "google_network_services_edge_cache_service" "served" { } `, bktName, originName, serviceName) } - -func TestAccNetworkServicesEdgeCacheService_cacheModeAndTtl(t *testing.T) { - t.Parallel() - namebkt := "tf-test-bucket-" + acctest.RandString(t, 10) - nameorigin := "tf-test-origin-" + acctest.RandString(t, 10) - 
nameservice := "tf-test-service-" + acctest.RandString(t, 10) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckNetworkServicesEdgeCacheServiceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_0(namebkt, nameorigin, nameservice), - }, - { - ResourceName: "google_network_services_edge_cache_service.served", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_1(namebkt, nameorigin, nameservice), - }, - { - ResourceName: "google_network_services_edge_cache_service.served", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_2(namebkt, nameorigin, nameservice), - }, - { - ResourceName: "google_network_services_edge_cache_service.served", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_0(bktName, originName, serviceName string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "dest" { - name = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true -} -resource "google_network_services_edge_cache_origin" "instance" { - name = "%s" - origin_address = google_storage_bucket.dest.url - description = "The default bucket for media edge test" - max_attempts = 2 - timeout { - connect_timeout = "10s" - } -} -resource "google_network_services_edge_cache_service" "served" { - name = "%s" - description = "some description" - routing { - host_rule { - description = "host rule description" - hosts = ["sslcert.tf-test.club"] - path_matcher = "routes" - } - path_matcher { - name = "routes" - route_rule { - description = "a route rule to match against" - priority = 1 - match_rule { - prefix_match = "/" - } - origin = 
google_network_services_edge_cache_origin.instance.name - route_action { - cdn_policy { - cache_mode = "CACHE_ALL_STATIC" - default_ttl = "1000s" - max_ttl = "2000s" - } - compression_mode = "AUTOMATIC" - } - header_action { - response_header_to_add { - header_name = "x-cache-status" - header_value = "{cdn_cache_status}" - } - } - } - } - } -} -`, bktName, originName, serviceName) -} - -func testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_1(bktName, originName, serviceName string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "dest" { - name = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true -} -resource "google_network_services_edge_cache_origin" "instance" { - name = "%s" - origin_address = google_storage_bucket.dest.url - description = "The default bucket for media edge test" - max_attempts = 2 - timeout { - connect_timeout = "10s" - } -} -resource "google_network_services_edge_cache_service" "served" { - name = "%s" - description = "some description" - routing { - host_rule { - description = "host rule description" - hosts = ["sslcert.tf-test.club"] - path_matcher = "routes" - } - path_matcher { - name = "routes" - route_rule { - description = "a route rule to match against" - priority = 1 - match_rule { - prefix_match = "/" - } - origin = google_network_services_edge_cache_origin.instance.name - route_action { - cdn_policy { - cache_mode = "FORCE_CACHE_ALL" - default_ttl = "1100s" - } - } - header_action { - response_header_to_add { - header_name = "x-cache-status" - header_value = "{cdn_cache_status}" - } - } - } - } - } -} -`, bktName, originName, serviceName) -} - -func testAccNetworkServicesEdgeCacheService_cacheModeAndTtl_2(bktName, originName, serviceName string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "dest" { - name = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true -} -resource "google_network_services_edge_cache_origin" "instance" 
{ - name = "%s" - origin_address = google_storage_bucket.dest.url - description = "The default bucket for media edge test" - max_attempts = 2 - timeout { - connect_timeout = "10s" - } -} -resource "google_network_services_edge_cache_service" "served" { - name = "%s" - description = "some description" - routing { - host_rule { - description = "host rule description" - hosts = ["sslcert.tf-test.club"] - path_matcher = "routes" - } - path_matcher { - name = "routes" - route_rule { - description = "a route rule to match against" - priority = 1 - match_rule { - prefix_match = "/" - } - origin = google_network_services_edge_cache_origin.instance.name - route_action { - cdn_policy { - cache_mode = "BYPASS_CACHE" - } - } - header_action { - response_header_to_add { - header_name = "x-cache-status" - header_value = "{cdn_cache_status}" - } - } - } - } - } -} -`, bktName, originName, serviceName) -} diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go index 08275ca4125e..c2e15bffad71 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go @@ -943,7 +943,6 @@ resource "google_network_services_gateway" "foobar" { location = "us-central1" addresses = ["10.128.0.99"] type = "SECURE_WEB_GATEWAY" - routing_mode = "EXPLICIT_ROUTING_MODE" ports = [443] description = "my description" gateway_security_policy = google_network_security_gateway_security_policy.default.id diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl index a435f330b1ad..74294cc7ed66 100644 --- 
a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_gpu_test.go.tmpl @@ -44,8 +44,8 @@ resource "google_notebooks_instance" "test" { terraform = "true" } vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-gpu" } install_gpu_driver = true accelerator_config { diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl index 8c872bc37fc2..e50bf5b9d2c2 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_state_test.go.tmpl @@ -59,8 +59,8 @@ resource "google_notebooks_instance" "test" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } desired_state = "ACTIVE" } @@ -74,8 +74,8 @@ resource "google_notebooks_instance" "test" { location = "us-west1-a" machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } desired_state = "STOPPED" } diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go.tmpl similarity index 89% rename from mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go rename to mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go.tmpl index 84ac3f4f66d1..1a8a2ac91443 100644 --- 
a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go.tmpl @@ -15,7 +15,7 @@ func TestAccNotebooksInstance_create_vm_image(t *testing.T) { prefix := fmt.Sprintf("%d", acctest.RandInt(t)) name := fmt.Sprintf("tf-%s", prefix) - acctest.VcrTest(t, resource.TestCase{ + acctest.VcrTest(t, resource.TestCase{ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -25,15 +25,13 @@ func TestAccNotebooksInstance_create_vm_image(t *testing.T) { ResourceName: "google_notebooks_instance.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"vm_image", "metadata", "update_time"}, + ImportStateVerifyIgnore: []string{"vm_image", "metadata"}, }, }, }) } func TestAccNotebooksInstance_update(t *testing.T) { - t.Skip() - context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), } @@ -91,8 +89,8 @@ resource "google_notebooks_instance" "test" { } vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } } `, name) @@ -106,8 +104,8 @@ resource "google_notebooks_instance" "instance" { machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } metadata = { @@ -132,8 +130,8 @@ resource "google_notebooks_instance" "instance" { machine_type = "e2-medium" vm_image { - project = "cloud-notebooks-managed" - image_family = "workbench-instances" + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" } metadata = { diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.tmpl similarity index 99% rename from 
mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go rename to mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.tmpl index de8cff548f05..d9bc74aace88 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.tmpl @@ -3,8 +3,8 @@ package notebooks_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccNotebooksRuntime_update(t *testing.T) { @@ -48,6 +48,7 @@ func TestAccNotebooksRuntime_update(t *testing.T) { }) } + func testAccNotebooksRuntime_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_notebooks_runtime" "runtime" { diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go index 7cf17999d4e4..fce8fe50cf32 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_database_test.go @@ -34,7 +34,7 @@ func testAccOracleDatabaseAutonomousDatabase_basic() string { data "google_oracle_database_autonomous_database" "my-adb"{ autonomous_database_id = "do-not-delete-tf-adb" location = "us-east4" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go index 2ee722c0252c..5048247fe538 100644 --- 
a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_autonomous_databases_test.go @@ -35,7 +35,7 @@ func testAccOracleDatabaseAutonomousDatabases_basic() string { return fmt.Sprintf(` data "google_oracle_database_autonomous_databases" "my-adbs"{ location = "us-east4" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go index c2dcb5dc6e17..af95923d4a8d 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructure_test.go @@ -21,7 +21,7 @@ func TestAccOracleDatabaseCloudExadataInfrastructure_basic(t *testing.T) { resource.TestCheckResourceAttrSet("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "gcp_oracle_zone"), resource.TestCheckResourceAttrSet("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.#"), resource.TestCheckResourceAttrSet("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.0.compute_count"), - resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "display_name", "ofake-do-not-delete-tf-exadata"), + resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "display_name", "ofake-do-not-delete-tf-exadata display name"), resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "gcp_oracle_zone", "us-east4-b-r1"), 
resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.0.state", "AVAILABLE"), resource.TestCheckResourceAttr("data.google_oracle_database_cloud_exadata_infrastructure.my-exadata", "properties.0.shape", "Exadata.X9M"), @@ -35,7 +35,7 @@ func testAccOracleDatabaseCloudExadataInfrastructure_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_exadata_infrastructure" "my-exadata"{ cloud_exadata_infrastructure_id = "ofake-do-not-delete-tf-exadata" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" location = "us-east4" } `) diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go index 5a9752323426..46c1b272d08d 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_exadata_infrastructures_test.go @@ -36,7 +36,7 @@ func testAccOracleDatabaseCloudExadataInfrastructures_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_exadata_infrastructures" "my_cloud_exadatas"{ location = "us-east4" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go index 5c1a37bd38ad..4d5f55afba42 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_cluster_test.go @@ -37,7 +37,7 @@ func 
testAccOracleDatabaseCloudVmCluster_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_vm_cluster" "my-vmcluster"{ cloud_vm_cluster_id = "ofake-do-not-delete-tf-vmcluster" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" location = "us-east4" } `) diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go index db9df1244d32..4a38d480cbbf 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_cloud_vm_clusters_test.go @@ -30,7 +30,7 @@ func testAccOracleDatabaseCloudVmClusters_basic() string { return fmt.Sprintf(` data "google_oracle_database_cloud_vm_clusters" "my_vmclusters"{ location = "us-east4" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" } `) } diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go index 7cb74bf8ffb6..fce3340670aa 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_nodes_test.go @@ -34,7 +34,7 @@ func testAccOracleDatabaseDbNodesConfig() string { return fmt.Sprintf(` data "google_oracle_database_db_nodes" "my_db_nodes"{ location = "us-east4" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" cloud_vm_cluster = "ofake-do-not-delete-tf-vmcluster" } `) diff --git a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go 
b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go index e81972c4c9b2..fc91c97ef3b1 100644 --- a/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go +++ b/mmv1/third_party/terraform/services/oracledatabase/data_source_oracle_database_db_servers_test.go @@ -33,7 +33,7 @@ func TestAccOracleDatabaseDbServers_basic(t *testing.T) { const testAccOracleDatabaseDbServers_basic = ` data "google_oracle_database_db_servers" "my_db_servers"{ location = "us-east4" - project = "oci-terraform-testing-prod" + project = "oci-terraform-testing" cloud_exadata_infrastructure = "ofake-do-not-delete-tf-exadata" } ` diff --git a/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go b/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go rename to mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go.tmpl index 87c14932c638..432d77563837 100644 --- a/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go +++ b/mmv1/third_party/terraform/services/orgpolicy/resource_org_policy_custom_constraint_test.go.tmpl @@ -1,9 +1,9 @@ package orgpolicy_test import ( + "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go index ad832cd01496..9ea349cdbce8 100644 --- a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go +++ 
b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go @@ -1445,9 +1445,9 @@ func resourceOSConfigOSPolicyAssignmentDelete(d *schema.ResourceData, meta inter func resourceOSConfigOSPolicyAssignmentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", + "projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", }, d, config); err != nil { return nil, err } @@ -1485,7 +1485,7 @@ func flattenOSConfigOSPolicyAssignmentName(v interface{}, d *schema.ResourceData if v == nil { return v } - return tpgresource.GetResourceNameFromSelfLink(v.(string)) + return tpgresource.NameFromSelfLinkStateFunc(v) } func flattenOSConfigOSPolicyAssignmentDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl index 7f071363c4f3..142d7b1ab359 100644 --- a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl +++ b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_folder_test.go.tmpl @@ -1,6 +1,5 @@ package osconfigv2_test -{{ if ne $.TargetVersionName `ga` -}} import ( "testing" @@ -289,4 +288,3 @@ resource "google_os_config_v2_policy_orchestrator_for_folder" "policy_orchestrat } `, context) } -{{- end }} diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go 
b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl similarity index 96% rename from mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go rename to mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl index e34839ff36e1..d247ec7ea7cd 100644 --- a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go +++ b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_for_organization_test.go.tmpl @@ -24,10 +24,11 @@ func TestAccOSConfigV2PolicyOrchestratorForOrganization_basic(t *testing.T) { }, }) + context := map[string]interface{}{ - "org_id": envvar.GetTestOrgTargetFromEnv(t), - "zone": envvar.GetTestZoneFromEnv(), - "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgTargetFromEnv(t), + "zone": envvar.GetTestZoneFromEnv(), + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go rename to mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go.tmpl index 4aed5eeac4ef..a51797992db9 100644 --- a/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go +++ b/mmv1/third_party/terraform/services/osconfigv2/resource_os_config_v2_policy_orchestrator_test.go.tmpl @@ -32,6 +32,7 @@ func TestAccOSConfigV2PolicyOrchestrator_basic(t *testing.T) { "random_suffix": acctest.RandString(t, 10), } + acctest.VcrTest(t, 
resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_render_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go.tmpl index 581fb8758070..dcd4666ea561 100644 --- a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go +++ b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameter_version_test.go.tmpl @@ -198,7 +198,7 @@ func 
TestAccDataSourceParameterManagerParameterVersion_withKmsKey(t *testing.T) }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go b/mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go rename to mmv1/third_party/terraform/services/parametermanager/data_source_parameter_manager_parameters_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go b/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go.tmpl similarity index 95% rename from mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go rename to mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go.tmpl index 627cf6d03178..d7104eba2062 100644 --- a/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go +++ b/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_test.go.tmpl @@ -3,8 +3,8 @@ package parametermanager_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -113,8 +113,8 @@ func 
TestAccParameterManagerParameter_kmsKeyUpdate(t *testing.T) { }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, - "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-2").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-1").CryptoKey.Name, + "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-parameter-manager-managed-2").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go b/mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go rename to mmv1/third_party/terraform/services/parametermanager/resource_parameter_manager_parameter_version_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json b/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json deleted file mode 100644 index aee129d0f4d9..000000000000 --- a/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_json_format.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "db_host": "localhost", - "db_name": "testdb", - "db_user": "testuser", - "db_port": 5432 -} diff --git a/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml b/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml deleted file mode 100644 index 
d633d82a6ea8..000000000000 --- a/mmv1/third_party/terraform/services/parametermanager/test-fixtures/parameter_data_yaml_format.yaml +++ /dev/null @@ -1,4 +0,0 @@ -db_host: localhost -db_port: 5432 -db_name: testdb -db_user: testuser diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_render_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go.tmpl index 28ba96aa4166..9e9083e19dbe 100644 --- 
a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go +++ b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameter_version_test.go.tmpl @@ -204,7 +204,7 @@ func TestAccDataSourceParameterManagerRegionalRegionalParameterVersion_withKmsKe }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go b/mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go rename to mmv1/third_party/terraform/services/parametermanagerregional/data_source_parameter_manager_regional_parameters_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go b/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go.tmpl similarity index 96% rename from mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go rename to mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go.tmpl index b49304df14ef..046c0ea269ae 100644 --- 
a/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go +++ b/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_test.go.tmpl @@ -3,8 +3,8 @@ package parametermanagerregional_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -160,8 +160,8 @@ func TestAccParameterManagerRegionalRegionalParameter_kmskeyUpdate(t *testing.T) }) context := map[string]interface{}{ - "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, - "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key2").CryptoKey.Name, + "kms_key": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key1").CryptoKey.Name, + "kms_key_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-parameter-manager-managed-central-key2").CryptoKey.Name, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go b/mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go rename to mmv1/third_party/terraform/services/parametermanagerregional/resource_parameter_manager_regional_parameter_version_test.go.tmpl diff --git 
a/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json b/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json deleted file mode 100644 index aee129d0f4d9..000000000000 --- a/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_json_format.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "db_host": "localhost", - "db_name": "testdb", - "db_user": "testuser", - "db_port": 5432 -} diff --git a/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml b/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml deleted file mode 100644 index d633d82a6ea8..000000000000 --- a/mmv1/third_party/terraform/services/parametermanagerregional/test-fixtures/regional_parameter_data_yaml_format.yaml +++ /dev/null @@ -1,4 +0,0 @@ -db_host: localhost -db_port: 5432 -db_name: testdb -db_user: testuser diff --git a/mmv1/third_party/terraform/services/privateca/privateca_utils.go b/mmv1/third_party/terraform/services/privateca/privateca_utils.go index 10d85abdfa43..51211be49454 100644 --- a/mmv1/third_party/terraform/services/privateca/privateca_utils.go +++ b/mmv1/third_party/terraform/services/privateca/privateca_utils.go @@ -8,12 +8,8 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -// This file contains shared flatteners between PrivateCA Certificate, CaPool, CertificateTemplate and -// CertificateAuthority. These resources share the x509Config (Certificate, CertificateAuthority)/ -// baselineValues (CaPool) object. CertificateTemplate contains the predefinedValues object, which is slightly -// different from the other two, and so requires its own functions to process. These functions are also contained -// in this file. 
-// +// This file contains shared flatteners between PrivateCA Certificate, CaPool and CertificateAuthority. +// These resources share the x509Config (Certificate, CertificateAuthorty)/baselineValues (CaPool) object. // The API does not return this object if it only contains booleans with the default (false) value. This // causes problems if a user specifies only default values, as Terraform detects that the object has been // deleted on the API-side. This flattener creates default objects for sub-objects that match this pattern @@ -68,50 +64,6 @@ func expandPrivatecaCertificateConfigX509ConfigCaOptions(v interface{}, d tpgres return transformed, nil } -func expandPrivatecaCertificateTemplateConfigX509ConfigCaOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - // Similar to expandPrivatecaCertificateConfigX509ConfigCaOptions, but only for use in - // Certificate Templates, which use a null_ca field instead of the non_ca field. - // Fields null_ca, zero_max_issuer_path_length are used to distinguish between - // unset booleans and booleans set with a default value. - // Unset is_ca or unset max_issuer_path_length either allow any values for these fields when - // used in an issuance policy, or allow the API to use default values when used in a - // certificate config. A default value of is_ca=false means that issued certificates cannot - // be CA certificates. A default value of max_issuer_path_length=0 means that the CA cannot - // issue CA certificates. 
- if v == nil { - return nil, nil - } - l := v.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil, nil - } - raw := l[0] - original := raw.(map[string]interface{}) - - nullCa := original["null_ca"].(bool) - isCa := original["is_ca"].(bool) - - zeroPathLength := original["zero_max_issuer_path_length"].(bool) - maxIssuerPathLength := original["max_issuer_path_length"].(int) - - transformed := make(map[string]interface{}) - - if nullCa && isCa { - return nil, fmt.Errorf("null_ca, is_ca can not be set to true at the same time.") - } - if zeroPathLength && maxIssuerPathLength > 0 { - return nil, fmt.Errorf("zero_max_issuer_path_length can not be set to true while max_issuer_path_length being set to a positive integer.") - } - - if !nullCa { - transformed["isCa"] = original["is_ca"] - } - if maxIssuerPathLength > 0 || zeroPathLength { - transformed["maxIssuerPathLength"] = original["max_issuer_path_length"] - } - return transformed, nil -} - func expandPrivatecaCertificateConfigX509ConfigKeyUsage(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { if v == nil { return v, nil @@ -409,33 +361,6 @@ func flattenPrivatecaCertificateConfigX509ConfigCaOptions(v interface{}, d *sche return []interface{}{transformed} } - -func flattenPrivatecaCertificateTemplateConfigX509ConfigCaOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // Special case here as the CaPool API returns an empty object rather than nil unlike the Certificate - // and CertificateAuthority APIs. 
- if v == nil || len(v.(map[string]interface{})) == 0 { - v = make(map[string]interface{}) - } - original := v.(map[string]interface{}) - transformed := make(map[string]interface{}) - - val, exists := original["isCa"] - transformed["is_ca"] = - flattenPrivatecaCertificateConfigX509ConfigCaOptionsIsCa(val, d, config) - if !exists { - transformed["null_ca"] = true - } - - val, exists = original["maxIssuerPathLength"] - transformed["max_issuer_path_length"] = - flattenPrivatecaCertificateConfigX509ConfigCaOptionsMaxIssuerPathLength(val, d, config) - if exists && int(val.(float64)) == 0 { - transformed["zero_max_issuer_path_length"] = true - } - - return []interface{}{transformed} -} - func flattenPrivatecaCertificateConfigX509ConfigCaOptionsIsCa(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return v } diff --git a/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go b/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go index ff7e486ee91c..35dccbfb4d1c 100644 --- a/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go +++ b/mmv1/third_party/terraform/services/privateca/resource_privateca_certificate_template_test.go @@ -81,60 +81,6 @@ func TestAccPrivatecaCertificateTemplate_BasicCertificateTemplateLongForm(t *tes }) } -func TestAccPrivatecaCertificateTemplate_updateCaOption(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project_name": envvar.GetTestProjectFromEnv(), - "region": envvar.GetTestRegionFromEnv(), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckPrivatecaCertificateTemplateDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: 
testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsTrueAndMaxPathIsPositive(context), - }, - { - ResourceName: "google_privateca_certificate_template.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, - }, - { - Config: testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsFalse(context), - }, - { - ResourceName: "google_privateca_certificate_template.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, - }, - { - Config: testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsNull(context), - }, - { - ResourceName: "google_privateca_certificate_template.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, - }, - { - Config: testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionMaxIssuerPathLenghIsZero(context), - }, - { - ResourceName: "google_privateca_certificate_template.primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"predefined_values.0.key_usage.0.extended_key_usage", "labels", "terraform_labels", "project", "location", "name"}, - }, - }, - }) -} - func testAccPrivatecaCertificateTemplate_BasicCertificateTemplate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_privateca_certificate_template" "primary" { @@ -484,290 +430,3 @@ resource "google_privateca_certificate_template" "primary" { `, context) } - -func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsTrueAndMaxPathIsPositive(context map[string]interface{}) string { - return 
acctest.Nprintf(` -resource "google_privateca_certificate_template" "primary" { - location = "%{region}" - name = "tf-test-template%{random_suffix}" - maximum_lifetime = "86400s" - description = "A sample certificate template" - identity_constraints { - allow_subject_alt_names_passthrough = true - allow_subject_passthrough = true - cel_expression { - description = "Always true" - expression = "true" - location = "any.file.anywhere" - title = "Sample expression" - } - } - passthrough_extensions { - additional_extensions { - object_id_path = [1, 6] - } - known_extensions = ["EXTENDED_KEY_USAGE"] - } - predefined_values { - additional_extensions { - object_id { - object_id_path = [1, 6] - } - value = "c3RyaW5nCg==" - critical = true - } - aia_ocsp_servers = ["string"] - ca_options { - is_ca = true - max_issuer_path_length = 6 - } - key_usage { - base_key_usage { - cert_sign = false - content_commitment = true - crl_sign = false - data_encipherment = true - decipher_only = true - digital_signature = true - encipher_only = true - key_agreement = true - key_encipherment = true - } - extended_key_usage { - client_auth = true - code_signing = true - email_protection = true - ocsp_signing = true - server_auth = true - time_stamping = true - } - unknown_extended_key_usages { - object_id_path = [1, 6] - } - } - policy_ids { - object_id_path = [1, 6] - } - } - project = "%{project_name}" - labels = { - label-two = "value-two" - } -} -`, context) -} - -func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsFalse(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_privateca_certificate_template" "primary" { - location = "%{region}" - name = "tf-test-template%{random_suffix}" - maximum_lifetime = "86400s" - description = "An updated sample certificate template" - identity_constraints { - allow_subject_alt_names_passthrough = true - allow_subject_passthrough = true - cel_expression { - description = "Always true" - expression = 
"true" - location = "any.file.anywhere" - title = "Sample expression" - } - } - passthrough_extensions { - additional_extensions { - object_id_path = [1, 6] - } - known_extensions = ["EXTENDED_KEY_USAGE"] - } - predefined_values { - additional_extensions { - object_id { - object_id_path = [1, 6] - } - value = "c3RyaW5nCg==" - critical = true - } - aia_ocsp_servers = ["string"] - ca_options { - is_ca = false - } - key_usage { - base_key_usage { - cert_sign = false - content_commitment = true - crl_sign = false - data_encipherment = true - decipher_only = true - digital_signature = true - encipher_only = true - key_agreement = true - key_encipherment = true - } - extended_key_usage { - client_auth = true - code_signing = true - email_protection = true - ocsp_signing = true - server_auth = true - time_stamping = true - } - unknown_extended_key_usages { - object_id_path = [1, 6] - } - } - policy_ids { - object_id_path = [1, 6] - } - } - project = "%{project_name}" - labels = { - label-two = "value-two" - } -} -`, context) -} - -func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionIsCaIsNull(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_privateca_certificate_template" "primary" { - location = "%{region}" - name = "tf-test-template%{random_suffix}" - maximum_lifetime = "86400s" - description = "An updated sample certificate template" - identity_constraints { - allow_subject_alt_names_passthrough = true - allow_subject_passthrough = true - cel_expression { - description = "Always true" - expression = "true" - location = "any.file.anywhere" - title = "Sample expression" - } - } - passthrough_extensions { - additional_extensions { - object_id_path = [1, 6] - } - known_extensions = ["EXTENDED_KEY_USAGE"] - } - predefined_values { - additional_extensions { - object_id { - object_id_path = [1, 6] - } - value = "c3RyaW5nCg==" - critical = true - } - aia_ocsp_servers = ["string"] - ca_options { - null_ca = true - is_ca = 
false - } - key_usage { - base_key_usage { - cert_sign = false - content_commitment = true - crl_sign = false - data_encipherment = true - decipher_only = true - digital_signature = true - encipher_only = true - key_agreement = true - key_encipherment = true - } - extended_key_usage { - client_auth = true - code_signing = true - email_protection = true - ocsp_signing = true - server_auth = true - time_stamping = true - } - unknown_extended_key_usages { - object_id_path = [1, 6] - } - } - policy_ids { - object_id_path = [1, 6] - } - } - project = "%{project_name}" - labels = { - label-two = "value-two" - } -} -`, context) -} - -func testAccPrivatecaCertificateTemplate_CertificateTemplateCaOptionMaxIssuerPathLenghIsZero(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_privateca_certificate_template" "primary" { - location = "%{region}" - name = "tf-test-template%{random_suffix}" - maximum_lifetime = "86400s" - description = "Another updated sample certificate template" - identity_constraints { - allow_subject_alt_names_passthrough = true - allow_subject_passthrough = true - cel_expression { - description = "Always true" - expression = "true" - location = "any.file.anywhere" - title = "Sample expression" - } - } - passthrough_extensions { - additional_extensions { - object_id_path = [1, 6] - } - known_extensions = ["EXTENDED_KEY_USAGE"] - } - predefined_values { - additional_extensions { - object_id { - object_id_path = [1, 6] - } - value = "c3RyaW5nCg==" - critical = true - } - aia_ocsp_servers = ["string"] - ca_options { - zero_max_issuer_path_length = true - max_issuer_path_length = 0 - } - key_usage { - base_key_usage { - cert_sign = false - content_commitment = true - crl_sign = false - data_encipherment = true - decipher_only = true - digital_signature = true - encipher_only = true - key_agreement = true - key_encipherment = true - } - extended_key_usage { - client_auth = true - code_signing = true - email_protection = true - 
ocsp_signing = true - server_auth = true - time_stamping = true - } - unknown_extended_key_usages { - object_id_path = [1, 6] - } - } - policy_ids { - object_id_path = [1, 6] - } - } - project = "%{project_name}" - labels = { - label-two = "value-two" - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go b/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go rename to mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go.tmpl index fc1b2d8d4cd3..55fa1c1a6ff3 100644 --- a/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go +++ b/mmv1/third_party/terraform/services/privilegedaccessmanager/resource_privileged_access_manager_entitlement_test.go.tmpl @@ -6,7 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementProjectExample_update(t *testing.T) { diff --git a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go index 51d3184dda3d..932cad28804a 100644 --- a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go +++ b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go @@ -539,89 +539,6 @@ func TestAccPubsubSubscription_filter(t *testing.T) { }) } -func 
TestAccPubsubSubscription_javascriptUdfUpdate(t *testing.T) { - t.Parallel() - - topic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) - subscriptionShort := fmt.Sprintf("tf-test-sub-%s", acctest.RandString(t, 10)) - functionName := "my_func" - code := "function my_func(message, metadata) {return null;}" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckPubsubSubscriptionDestroyProducer(t), - Steps: []resource.TestStep{ - // Initial transform - { - Config: testAccPubsubSubscription_javascriptUdfSettings(topic, subscriptionShort, functionName, code), - }, - { - ResourceName: "google_pubsub_subscription.foo", - ImportStateId: subscriptionShort, - ImportState: true, - ImportStateVerify: true, - }, - { - // Remove non-required field - Config: testAccPubsubSubscription_javascriptUdfSettings_noEnabled(topic, subscriptionShort, functionName, code), - }, - { - ResourceName: "google_pubsub_subscription.foo", - ImportStateId: subscriptionShort, - ImportState: true, - ImportStateVerify: true, - }, - // Destroy transform - { - ResourceName: "google_pubsub_topic.foo", - ImportStateId: topic, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccPubsubSubscription_javascriptUdfSettings(topic, subscription, functionName, code string) string { - return fmt.Sprintf(` -resource "google_pubsub_topic" "foo" { - name = "%s" -} - -resource "google_pubsub_subscription" "foo" { - name = "%s" - topic = google_pubsub_topic.foo.id - message_transforms { - disabled = true - javascript_udf { - function_name = "%s" - code = "%s" - } - } -} -`, topic, subscription, functionName, code) -} - -func testAccPubsubSubscription_javascriptUdfSettings_noEnabled(topic, subscription, functionName, code string) string { - return fmt.Sprintf(` -resource "google_pubsub_topic" "foo" { - name = "%s" -} - -resource 
"google_pubsub_subscription" "foo" { - name = "%s" - topic = google_pubsub_topic.foo.id - message_transforms { - javascript_udf { - function_name = "%s" - code = "%s" - } - } -} -`, topic, subscription, functionName, code) -} - func testAccPubsubSubscription_emptyTTL(topic, subscription string) string { return fmt.Sprintf(` resource "google_pubsub_topic" "foo" { diff --git a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go index 007a2384b471..a4efa7568eda 100644 --- a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go +++ b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_topic_test.go @@ -583,51 +583,3 @@ resource "google_pubsub_topic" "foo" { } `, topic) } -func TestAccPubsubTopic_javascriptUdfUpdate(t *testing.T) { - t.Parallel() - - topic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) - - functionName := "my_func" - code := "function my_func(message, metadata) {return null;}" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckPubsubTopicDestroyProducer(t), - Steps: []resource.TestStep{ - // Initial transform - { - Config: testAccPubsubTopic_javascriptUdfSettings(topic, functionName, code), - }, - { - ResourceName: "google_pubsub_topic.foo", - ImportStateId: topic, - ImportState: true, - ImportStateVerify: true, - }, - // Destroy transform - { - ResourceName: "google_pubsub_topic.foo", - ImportStateId: topic, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccPubsubTopic_javascriptUdfSettings(topic, functionName, code string) string { - return fmt.Sprintf(` -resource "google_pubsub_topic" "foo" { - name = "%s" - - message_transforms { - javascript_udf { - function_name = "%s" - code = "%s" - } - } -} - `, topic, functionName, code) -} diff --git 
a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go deleted file mode 100644 index 685428eca2de..000000000000 --- a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go +++ /dev/null @@ -1,383 +0,0 @@ -package pubsublite - -import ( - "context" - "fmt" - "net/http" - "strings" - - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-log/tflog" - - "github.com/hashicorp/terraform-provider-google/google/fwmodels" - "github.com/hashicorp/terraform-provider-google/google/fwresource" - "github.com/hashicorp/terraform-provider-google/google/fwtransport" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "google.golang.org/api/pubsublite/v1" -) - -// Ensure the implementation satisfies the expected interfaces. -var ( - _ resource.Resource = &GooglePubsubLiteReservationFWResource{} - _ resource.ResourceWithConfigure = &GooglePubsubLiteReservationFWResource{} -) - -// NewGooglePubsubLiteReservationResource is a helper function to simplify the provider implementation. -func NewGooglePubsubLiteReservationFWResource() resource.Resource { - return &GooglePubsubLiteReservationFWResource{} -} - -// GooglePubsubLiteReservationResource is the resource implementation. -type GooglePubsubLiteReservationFWResource struct { - client *pubsublite.Service - providerConfig *transport_tpg.Config -} - -type GooglePubsubLiteReservationModel struct { - Id types.String `tfsdk:"id"` - Project types.String `tfsdk:"project"` - Region types.String `tfsdk:"region"` - Name types.String `tfsdk:"name"` - ThroughputCapacity types.Int64 `tfsdk:"throughput_capacity"` -} - -// Metadata returns the resource type name. 
-func (d *GooglePubsubLiteReservationFWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_fwprovider_pubsub_lite_reservation" -} - -func (d *GooglePubsubLiteReservationFWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - p, ok := req.ProviderData.(*transport_tpg.Config) - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - return - } - - d.providerConfig = p -} - -// Schema defines the schema for the data source. -func (d *GooglePubsubLiteReservationFWResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "Pubsub Lite Reservation resource description", - - Attributes: map[string]schema.Attribute{ - "project": schema.StringAttribute{ - Description: "The project id of the Pubsub Lite Reservation.", - MarkdownDescription: "The project id of the Pubsub Lite Reservation.", - Required: true, - }, - "region": schema.StringAttribute{ - Description: "The region of the Pubsub Lite Reservation.", - MarkdownDescription: "The region of the Pubsub Lite Reservation.", - Required: true, - }, - "name": schema.StringAttribute{ - Description: `The display name of the project.`, - MarkdownDescription: `The display name of the project.`, - Required: true, - }, - "throughput_capacity": schema.Int64Attribute{ - Description: `The reserved throughput capacity. Every unit of throughput capacity is equivalent to 1 MiB/s of published messages or 2 MiB/s of subscribed messages.`, - MarkdownDescription: `The reserved throughput capacity. 
Every unit of throughput capacity is equivalent to 1 MiB/s of published messages or 2 MiB/s of subscribed messages.`, - Required: true, - }, - // This is included for backwards compatibility with the original, SDK-implemented data source. - "id": schema.StringAttribute{ - Description: "Project identifier", - MarkdownDescription: "Project identifier", - Computed: true, - }, - }, - } -} - -func (d *GooglePubsubLiteReservationFWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data GooglePubsubLiteReservationModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) - - obj := make(map[string]interface{}) - - obj["throughputCapacity"] = data.ThroughputCapacity.ValueInt64() - - data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - billingProject := data.Project - - var schemaDefaultVals fwtransport.DefaultVars - schemaDefaultVals.Project = data.Project - schemaDefaultVals.Region = data.Region - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubsubLiteBasePath}}projects/{{project}}/locations/{{region}}/reservations?reservationId={{name}}") - if resp.Diagnostics.HasError() { - return - } - tflog.Trace(ctx, fmt.Sprintf("[DEBUG] 
Creating new Reservation: %#v", obj)) - - headers := make(http.Header) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: d.providerConfig, - Method: "POST", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Body: obj, - Headers: headers, - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, "create fwprovider google_pubsub_lite resource") - - // Put data in model - data.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/reservations/%s", data.Project.ValueString(), data.Region.ValueString(), data.Name.ValueString())) - data.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -// Read refreshes the Terraform state with the latest data. -func (d *GooglePubsubLiteReservationFWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data GooglePubsubLiteReservationModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
- if resp.Diagnostics.HasError() { - return - } - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) - - data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - billingProject := data.Project - - var schemaDefaultVals fwtransport.DefaultVars - schemaDefaultVals.Project = data.Project - schemaDefaultVals.Region = data.Region - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") - - if resp.Diagnostics.HasError() { - return - } - - headers := make(http.Header) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: d.providerConfig, - Method: "GET", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Headers: headers, - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, "read fwprovider google_pubsub_lite resource") - - // Put data in model - data.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/instances/%s", data.Project.ValueString(), data.Region.ValueString(), data.Name.ValueString())) - data.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
-} - -func (d *GooglePubsubLiteReservationFWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var plan, state GooglePubsubLiteReservationModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &state)...) - if resp.Diagnostics.HasError() { - return - } - - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) - - obj := make(map[string]interface{}) - - obj["throughputCapacity"] = plan.ThroughputCapacity.ValueInt64() - - plan.Project = fwresource.GetProjectFramework(plan.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - plan.Region = fwresource.GetRegionFramework(plan.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - billingProject := plan.Project - - var schemaDefaultVals fwtransport.DefaultVars - schemaDefaultVals.Project = plan.Project - schemaDefaultVals.Region = plan.Region - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") - - if resp.Diagnostics.HasError() { - return - } - tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Updating Reservation: %#v", obj)) - - headers := make(http.Header) - - updateMask := []string{} - if !plan.ThroughputCapacity.Equal(state.ThroughputCapacity) { - updateMask = append(updateMask, "throughputCapacity") - } - - // updateMask 
is a URL parameter but not present in the schema, so ReplaceVars - // won't set it - var err error - url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) - if err != nil { - resp.Diagnostics.AddError("Error when sending HTTP request: ", err.Error()) - return - } - - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: d.providerConfig, - Method: "PATCH", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Body: obj, - Headers: headers, - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, "update fwprovider google_pubsub_lite resource") - - // Put data in model - plan.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/instances/%s", plan.Project.ValueString(), plan.Region.ValueString(), plan.Name.ValueString())) - plan.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) -} -func (d *GooglePubsubLiteReservationFWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data GooglePubsubLiteReservationModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
- if resp.Diagnostics.HasError() { - return - } - // Use provider_meta to set User-Agent - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) - - obj := make(map[string]interface{}) - - data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - billingProject := data.Project - - var schemaDefaultVals fwtransport.DefaultVars - schemaDefaultVals.Project = data.Project - schemaDefaultVals.Region = data.Region - - url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") - - if resp.Diagnostics.HasError() { - return - } - tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleting Reservation: %#v", obj)) - - headers := make(http.Header) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ - Config: d.providerConfig, - Method: "DELETE", - Project: billingProject.ValueString(), - RawURL: url, - UserAgent: userAgent, - Body: obj, - Headers: headers, - }, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleted Reservation: %#v", res)) -} diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go deleted file mode 100644 index e4507dfaec41..000000000000 --- a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go +++ /dev/null @@ -1,56 +0,0 @@ -package pubsublite_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - 
"github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccResourceFWPubsubLiteReservation_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccResourceFWPubsubLiteReservation_basic(context), - }, - { - Config: testAccResourceFWPubsubLiteReservation_upgrade(context), - }, - }, - }) -} - -func testAccResourceFWPubsubLiteReservation_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_fwprovider_pubsub_lite_reservation" "basic" { - name = "tf-test-example-reservation%{random_suffix}" - region = "us-central1" - project = data.google_project.project.number - throughput_capacity = 2 -} - -data "google_project" "project" { -} -`, context) -} - -func testAccResourceFWPubsubLiteReservation_upgrade(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_fwprovider_pubsub_lite_reservation" "basic" { - name = "tf-test-example-reservation%{random_suffix}" - region = "us-central1" - project = data.google_project.project.number - throughput_capacity = 3 -} - -data "google_project" "project" { -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go b/mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go deleted file mode 100644 index 2054bb8aacf7..000000000000 --- a/mmv1/third_party/terraform/services/redis/data_source_redis_cluster.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 -package redis - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceRedisCluster() *schema.Resource { - // Generate datasource schema from resource - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceRedisCluster().Schema) - - // Set 'Required' schema elements - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") - // Set 'Optional' schema elements - tpgresource.AddOptionalFieldsToSchema(dsSchema, "project", "region") - - return &schema.Resource{ - Read: dataSourceRedisClusterRead, - Schema: dsSchema, - } -} - -func dataSourceRedisClusterRead(d *schema.ResourceData, meta interface{}) error { - id, err := tpgresource.ReplaceVars(d, meta.(*transport_tpg.Config), "projects/{{project}}/locations/{{region}}/clusters/{{name}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - err = resourceRedisClusterRead(d, meta) - if err != nil { - return err - } - - if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - return nil -} diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go b/mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go deleted file mode 100644 index 96a6843297a0..000000000000 --- a/mmv1/third_party/terraform/services/redis/data_source_redis_cluster_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package redis_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccRedisClusterDatasource(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), 
- } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccRedisClusterDatasourceConfig(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceState("data.google_redis_cluster.default", "google_redis_cluster.cluster"), - ), - }, - }, - }) -} - -func testAccRedisClusterDatasourceConfig(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_redis_cluster" "cluster" { - name = "tf-test-redis-cluster-%{random_suffix}" - shard_count = 1 - region = "us-central1" - deletion_protection_enabled = false - -} - -data "google_redis_cluster" "default" { - name = google_redis_cluster.cluster.name - region = "us-central1" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go b/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go index cd96ec0f141d..1d16a177a860 100644 --- a/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go @@ -39,26 +39,9 @@ func dataSourceGoogleRedisInstanceRead(d *schema.ResourceData, meta interface{}) if err := tpgresource.SetDataSourceLabels(d); err != nil { return err } - // added to resolve a null value for reserved_ip_range. 
This was not getting populated due to the addtion of ignore_read - if err := SetDataSourceReservedIpRange(d); err != nil { - return err - } if d.Id() == "" { return fmt.Errorf("%s not found", id) } return nil } - -func SetDataSourceReservedIpRange(d *schema.ResourceData) error { - effectiveReservedIpRange := d.Get("effective_reserved_ip_range") - if effectiveReservedIpRange == nil { - return nil - } - - if err := d.Set("reserved_ip_range", effectiveReservedIpRange); err != nil { - return fmt.Errorf("Error setting reserved_ip_range in data source: %s", err) - } - - return nil -} diff --git a/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go b/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go index e50eed765216..e1486ada8327 100644 --- a/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_instance_test.go @@ -19,7 +19,6 @@ func TestAccRedisInstanceDatasource_basic(t *testing.T) { Config: testAccRedisInstanceDatasourceConfig(acctest.RandString(t, 10)), Check: resource.ComposeTestCheckFunc( acctest.CheckDataSourceStateMatchesResourceState("data.google_redis_instance.redis", "google_redis_instance.redis"), - resource.TestCheckResourceAttrSet("data.google_redis_instance.redis", "reserved_ip_range"), ), }, }, diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go b/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go.tmpl similarity index 94% rename from mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go rename to mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go.tmpl index e8938fa71046..cec59af347fe 100644 --- a/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go +++ b/mmv1/third_party/terraform/services/redis/resource_redis_cluster_test.go.tmpl @@ -3,17 +3,17 @@ package redis_test import ( "fmt" "log" - "strings" 
"testing" "time" + "strings" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/services/redis" + "github.com/hashicorp/terraform-provider-google/google/acctest" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) - func TestAccRedisCluster_createUpdateClusterWithNodeType(t *testing.T) { t.Parallel() @@ -1250,94 +1250,3 @@ func createRedisClusterResourceConfig(params *ClusterParams, isSecondaryCluster crossClusterReplicationConfigBlock, dependsOnBlock) } - -func TestAccRedisCluster_redisClusterTlsEnabled(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "deletion_protection_enabled": false, - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccRedisCluster_redisClusterTlsEnabled(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_redis_cluster.cluster-tls", "managed_server_ca.0.ca_certs.0.certificates.0"), - ), - }, - { - ResourceName: "google_redis_cluster.cluster-tls", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"gcs_source", "managed_backup_source", "name", "psc_configs", "region"}, - }, - }, - }) -} - -func testAccRedisCluster_redisClusterTlsEnabled(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_redis_cluster" "cluster-tls" { - name = "tf-test-tls-cluster%{random_suffix}" - shard_count = 3 - psc_configs { - network = google_compute_network.consumer_net.id - } - region = "us-central1" - replica_count = 1 - node_type = "REDIS_SHARED_CORE_NANO" 
- transit_encryption_mode = "TRANSIT_ENCRYPTION_MODE_SERVER_AUTHENTICATION" - authorization_mode = "AUTH_MODE_DISABLED" - redis_configs = { - maxmemory-policy = "volatile-ttl" - } - deletion_protection_enabled = %{deletion_protection_enabled} - - zone_distribution_config { - mode = "MULTI_ZONE" - } - maintenance_policy { - weekly_maintenance_window { - day = "MONDAY" - start_time { - hours = 1 - minutes = 0 - seconds = 0 - nanos = 0 - } - } - } - depends_on = [ - google_network_connectivity_service_connection_policy.default - ] -} - -resource "google_network_connectivity_service_connection_policy" "default" { - name = "tf-test-my-policy%{random_suffix}" - location = "us-central1" - service_class = "gcp-memorystore-redis" - description = "my basic service connection policy" - network = google_compute_network.consumer_net.id - psc_config { - subnetworks = [google_compute_subnetwork.consumer_subnet.id] - } -} - -resource "google_compute_subnetwork" "consumer_subnet" { - name = "tf-test-my-subnet%{random_suffix}" - ip_cidr_range = "10.0.0.248/29" - region = "us-central1" - network = google_compute_network.consumer_net.id -} - -resource "google_compute_network" "consumer_net" { - name = "tf-test-my-network%{random_suffix}" - auto_create_subnetworks = false -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go b/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go index b0ef69bf5be0..d5a0c6d0ae30 100644 --- a/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go +++ b/mmv1/third_party/terraform/services/redis/resource_redis_instance_test.go @@ -255,6 +255,40 @@ func TestAccRedisInstance_redisInstanceAuthEnabled(t *testing.T) { }) } +func TestAccRedisInstance_selfServiceUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRedisInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRedisInstance_selfServiceUpdate20240411_00_00(context), + }, + { + ResourceName: "google_redis_instance.cache", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + { + Config: testAccRedisInstance_selfServiceUpdate20240503_00_00(context), + }, + { + ResourceName: "google_redis_instance.cache", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + }, + }) +} + func TestAccRedisInstance_downgradeRedisVersion(t *testing.T) { t.Parallel() @@ -374,6 +408,26 @@ resource "google_redis_instance" "cache" { `, context) } +func testAccRedisInstance_selfServiceUpdate20240411_00_00(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_redis_instance" "cache" { + name = "tf-test-memory-cache%{random_suffix}" + memory_size_gb = 1 + maintenance_version = "20240411_00_00" +} +`, context) +} + +func testAccRedisInstance_selfServiceUpdate20240503_00_00(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_redis_instance" "cache" { + name = "tf-test-memory-cache%{random_suffix}" + memory_size_gb = 1 + maintenance_version = "20240503_00_00" +} +`, context) +} + func testAccRedisInstance_redis5(name string) string { return fmt.Sprintf(` resource "google_redis_instance" "test" { @@ -407,3 +461,45 @@ resource "google_redis_instance" "test" { } `, name) } + +func TestAccRedisInstance_tags(t *testing.T) { + + t.Parallel() + + tagKey := acctest.BootstrapSharedTestTagKey(t, "redis-instances-tagkey") + context := map[string]interface{}{ + "org": envvar.GetTestOrgFromEnv(t), + "tagKey": tagKey, + "tagValue": acctest.BootstrapSharedTestTagValue(t, "redis-instances-tagvalue", tagKey), + "random_suffix": acctest.RandString(t, 10), + } 
+ acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRedisInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRedisInstanceTags(context), + }, + { + ResourceName: "google_redis_instance.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"tags"}, + }, + }, + }) +} + +func testAccRedisInstanceTags(context map[string]interface{}) string { + + return acctest.Nprintf(` + resource "google_redis_instance" "test" { + name = "tf-test-instance-%{random_suffix}" + memory_size_gb = 5 + tags = { + "%{org}/%{tagKey}" = "%{tagValue}" + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go index 7c9fd0c32ab5..f644f61c84c9 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_folder.go @@ -54,15 +54,6 @@ func DataSourceGoogleFolder() *schema.Resource { Type: schema.TypeBool, Computed: true, }, - "configured_capabilities": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - "management_project": { - Type: schema.TypeString, - Computed: true, - }, }, } } diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go rename to mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go.tmpl index baf8cebe945b..ba67ce30dbf5 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go 
+++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_iam_policy.go.tmpl @@ -18,14 +18,14 @@ import ( // to express a Google Cloud IAM policy in a data resource. This is an example // of how the schema would be used in a config: // -// data "google_iam_policy" "admin" { -// binding { -// role = "roles/storage.objectViewer" -// members = [ -// "user:evanbrown@google.com", -// ] -// } -// } +// data "google_iam_policy" "admin" { +// binding { +// role = "roles/storage.objectViewer" +// members = [ +// "user:evanbrown@google.com", +// ] +// } +// } func DataSourceGoogleIamPolicy() *schema.Resource { return &schema.Resource{ Read: dataSourceGoogleIamPolicyRead, diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go index d5a45808bc43..4a4f9ceebd3d 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges.go @@ -124,63 +124,23 @@ func dataSourceGoogleNetblockIpRangesRead(d *schema.ResourceData, meta interface case "restricted-googleapis": // https://cloud.google.com/vpc/docs/private-access-options#domain-vips CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.4/30") - CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:1000::/64") - CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) 
- - if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks: %s", err) - } - if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) - } - if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) - } - case "restricted-googleapis-with-directconnectivity": - // https://cloud.google.com/vpc/docs/configure-private-google-access#config-options - CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.4/30", "34.126.0.0/18") - CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:1000::/64", "2001:4860:8040::/42") - CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) - + CidrBlocks["cidr_blocks"] = CidrBlocks["cidr_blocks_ipv4"] if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { return fmt.Errorf("Error setting cidr_blocks: %s", err) } if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) } - if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) - } case "private-googleapis": // https://cloud.google.com/vpc/docs/private-access-options#domain-vips CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.8/30") - CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:2000::/64") - CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) 
- - if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks: %s", err) - } - if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) - } - if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) - } - case "private-googleapis-with-directconnectivity": - // https://cloud.google.com/vpc/docs/private-access-options#domain-vips - CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "199.36.153.8/30", "34.126.0.0/18") - CidrBlocks["cidr_blocks_ipv6"] = append(CidrBlocks["cidr_blocks_ipv6"], "2600:2d00:0002:2000::/64", "2001:4860:8040::/42") - CidrBlocks["cidr_blocks"] = append(CidrBlocks["cidr_blocks_ipv4"], CidrBlocks["cidr_blocks_ipv6"]...) - + CidrBlocks["cidr_blocks"] = CidrBlocks["cidr_blocks_ipv4"] if err := d.Set("cidr_blocks", CidrBlocks["cidr_blocks"]); err != nil { return fmt.Errorf("Error setting cidr_blocks: %s", err) } if err := d.Set("cidr_blocks_ipv4", CidrBlocks["cidr_blocks_ipv4"]); err != nil { return fmt.Errorf("Error setting cidr_blocks_ipv4: %s", err) } - if err := d.Set("cidr_blocks_ipv6", CidrBlocks["cidr_blocks_ipv6"]); err != nil { - return fmt.Errorf("Error setting cidr_blocks_ipv6: %s", err) - } case "dns-forwarders": // https://cloud.google.com/dns/zones/#creating-forwarding-zones CidrBlocks["cidr_blocks_ipv4"] = append(CidrBlocks["cidr_blocks_ipv4"], "35.199.192.0/19") diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go index cb1ccea75fe6..1ac308f49fa9 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go +++ 
b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_netblock_ip_ranges_test.go @@ -71,60 +71,26 @@ func TestAccDataSourceGoogleNetblockIpRanges_basic(t *testing.T) { Config: testAccNetblockIpRangesConfig_restricted, Check: resource.ComposeTestCheckFunc( // Private Google Access Restricted VIP - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.#", "2"), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.#", "1"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv4.#", "1"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv4.0", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv6.#", "1"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", - "cidr_blocks_ipv6.0", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), - ), - }, - { - Config: testAccNetblockIpRangesConfig_restricted_with_directconnectivity, - Check: resource.ComposeTestCheckFunc( - // Private Google Access Restricted VIP - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks.#", "4"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", - "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv4.#", "2"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", - "cidr_blocks_ipv4.1", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), - 
resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv6.#", "2"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.restricted", - "cidr_blocks_ipv6.1", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.restricted", "cidr_blocks_ipv6.#", "0"), ), }, { Config: testAccNetblockIpRangesConfig_private, Check: resource.ComposeTestCheckFunc( // Private Google Access Unrestricted VIP - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.#", "2"), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.#", "1"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv4.#", "1"), resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv4.0", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv6.#", "1"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", - "cidr_blocks_ipv6.0", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), - ), - }, - { - Config: testAccNetblockIpRangesConfig_private_with_directconnectivity, - Check: resource.ComposeTestCheckFunc( - // Private Google Access Unrestricted VIP - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks.#", "4"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", - "cidr_blocks.0", regexp.MustCompile("^(?:[0-9a-fA-F./:]{1,4}){1,2}.*/[0-9]{1,3}$")), - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv4.#", "2"), - 
resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", - "cidr_blocks_ipv4.1", regexp.MustCompile("^(?:[0-9]{1,3}.){3}[0-9]{1,3}/[0-9]{1,2}$")), - resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv6.#", "2"), - resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.private", - "cidr_blocks_ipv6.1", regexp.MustCompile("^(?:[0-9a-fA-F]{1,4}:){1,2}.*/[0-9]{1,3}$")), + resource.TestCheckResourceAttr("data.google_netblock_ip_ranges.private", "cidr_blocks_ipv6.#", "0"), ), }, { @@ -205,24 +171,12 @@ data "google_netblock_ip_ranges" "restricted" { } ` -const testAccNetblockIpRangesConfig_restricted_with_directconnectivity = ` -data "google_netblock_ip_ranges" "restricted" { - range_type = "restricted-googleapis-with-directconnectivity" -} -` - const testAccNetblockIpRangesConfig_private = ` data "google_netblock_ip_ranges" "private" { range_type = "private-googleapis" } ` -const testAccNetblockIpRangesConfig_private_with_directconnectivity = ` -data "google_netblock_ip_ranges" "private" { - range_type = "private-googleapis-with-directconnectivity" -} -` - const testAccNetblockIpRangesConfig_dns = ` data "google_netblock_ip_ranges" "dns" { range_type = "dns-forwarders" diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go index 5ba84edbf486..8dc84a628a65 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_projects.go @@ -75,8 +75,7 @@ func datasourceGoogleProjectsRead(d *schema.ResourceData, meta interface{}) erro for { params["filter"] = d.Get("filter").(string) - domain := transport_tpg.GetUniverseDomainFromMeta(meta) - url := fmt.Sprintf("https://cloudresourcemanager.%s/v1/projects", domain) + url := "https://cloudresourcemanager.googleapis.com/v1/projects" url, 
err := transport_tpg.AddQueryParams(url, params) if err != nil { diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go index e83244935d5e..95b47a88cc79 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go @@ -29,6 +29,10 @@ func DataSourceGoogleServiceAccountKey() *schema.Resource { Optional: true, ValidateFunc: validation.StringInSlice([]string{"TYPE_NONE", "TYPE_X509_PEM_FILE", "TYPE_RAW_PUBLIC_KEY"}, false), }, + "project": { + Type: schema.TypeString, + Optional: true, + }, "key_algorithm": { Type: schema.TypeString, Computed: true, diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go index c1a472ca7a34..37b89229e998 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder.go @@ -78,17 +78,6 @@ func ResourceGoogleFolder() *schema.Resource { Elem: &schema.Schema{Type: schema.TypeString}, Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored when empty. This field is only set at create time and modifying this field after creation will trigger recreation. 
To apply tags to an existing resource, see the google_tags_tag_value resource.`, }, - "configured_capabilities": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `A list of capabilities that are configured for this folder.`, - }, - "management_project": { - Type: schema.TypeString, - Computed: true, - Description: `The Management Project associated with the folder's configured capabilities.`, - }, }, UseJSONNumber: true, } @@ -190,12 +179,6 @@ func resourceGoogleFolderRead(d *schema.ResourceData, meta interface{}) error { if err := d.Set("create_time", folder.CreateTime); err != nil { return fmt.Errorf("Error setting create_time: %s", err) } - if err := d.Set("configured_capabilities", folder.ConfiguredCapabilities); err != nil { - return fmt.Errorf("Error setting configured_capabilities: %s", err) - } - if err := d.Set("management_project", folder.ManagementProject); err != nil { - return fmt.Errorf("Error setting management_project: %s", err) - } return nil } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go index 6e17b8d7603b..18ec055b54d1 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go @@ -47,9 +47,9 @@ func resourceFolderOrgPolicyImporter(d *schema.ResourceData, meta interface{}) ( config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^folders/(?P[^/]+)/constraints/(?P[^/]+)$", - "^folders/(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$"}, + "folders/(?P[^/]+)/constraints/(?P[^/]+)", + "folders/(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)"}, d, config); err != nil { return nil, err } diff --git 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go index ce89b683ef14..d87e0d8034c1 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_test.go @@ -93,11 +93,11 @@ func TestAccFolder_moveParent(t *testing.T) { func TestAccFolder_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "crm-folder-tagkey", nil) + tagKey := acctest.BootstrapSharedTestTagKey(t, "crm-folder-tagkey") context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "crm-folder-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestTagValue(t, "crm-folder-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go index b87981203290..44bbc733b22b 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go @@ -774,7 +774,7 @@ func doEnableServicesRequest(services []string, project, billingProject, userAge // Handle errors that are retryable at call time for serviceusage // Specifically, errors in https://cloud.google.com/service-usage/docs/reference/rest/v1/services/batchEnable#response-body // Errors in operations are handled separately. 
-// TODO: This should probably be turned into a retry predicate +// NOTE(rileykarson): This should probably be turned into a retry predicate func handleServiceUsageRetryablePreconditionError(err error) error { if err == nil { return nil @@ -810,13 +810,16 @@ func ListCurrentlyEnabledServices(project, billingProject, userAgent string, con // services are returned as "projects/{{project}}/services/{{name}}" name := tpgresource.GetResourceNameFromSelfLink(v.Name) - apiServices[name] = struct{}{} + // if name not in ignoredProjectServicesSet + if _, ok := ignoredProjectServicesSet[name]; !ok { + apiServices[name] = struct{}{} - // if a service has been renamed, set both. We'll deal - // with setting the right values later. - if v, ok := renamedServicesByOldAndNewServiceNames[name]; ok { - log.Printf("[DEBUG] Adding service alias for %s to enabled services: %s", name, v) - apiServices[v] = struct{}{} + // if a service has been renamed, set both. We'll deal + // with setting the right values later. 
+ if v, ok := renamedServicesByOldAndNewServiceNames[name]; ok { + log.Printf("[DEBUG] Adding service alias for %s to enabled services: %s", name, v) + apiServices[v] = struct{}{} + } } } return nil diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go rename to mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go.tmpl index 31b935ca707a..85711b8f8989 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_binding_test.go.tmpl @@ -2,9 +2,9 @@ package resourcemanager_test import ( "fmt" + "regexp" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -266,7 +266,7 @@ func TestAccProjectIamBinding_invalidMembers(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccProjectAssociateBindingBasic(pid, org, role, "admin@hashicorptest.com"), + Config: testAccProjectAssociateBindingBasic(pid, org, role, "admin@hashicorptest.com"), ExpectError: regexp.MustCompile("invalid value \"admin@hashicorptest.com\" for members\\.0 \\(IAM members must have one of the values outlined here: https://cloud.google.com/billing/docs/reference/rest/v1/Policy#Binding\\)"), }, { diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go index 
9b4828db0ab8..ef27f2aacb9a 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go @@ -238,9 +238,9 @@ func resourceGoogleProjectIamCustomRoleDelete(d *schema.ResourceData, meta inter func resourceGoogleProjectIamCustomRoleImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/roles/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$", + "projects/(?P[^/]+)/roles/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go rename to mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go.tmpl index 443f1f6febc6..248f9ab19bd3 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_member_test.go.tmpl @@ -2,9 +2,9 @@ package resourcemanager_test import ( "fmt" + "regexp" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" - "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -182,7 +182,7 @@ func TestAccProjectIamMember_invalidMembers(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccProjectAssociateMemberBasic(pid, org, role, "admin@hashicorptest.com"), + 
Config: testAccProjectAssociateMemberBasic(pid, org, role, "admin@hashicorptest.com"), ExpectError: regexp.MustCompile("invalid value \"admin@hashicorptest.com\" for member \\(IAM members must have one of the values outlined here: https://cloud.google.com/billing/docs/reference/rest/v1/Policy#Binding\\)"), }, { diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go rename to mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go.tmpl index fadec307ccd0..89593089fb69 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_policy_test.go.tmpl @@ -55,7 +55,7 @@ func TestAccProjectIamPolicy_emptyMembers(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -72,7 +72,7 @@ func TestAccProjectIamPolicy_expanded(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -92,7 +92,7 @@ func TestAccProjectIamPolicy_basicAuditConfig(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, 
resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ // Create a new project @@ -122,7 +122,7 @@ func TestAccProjectIamPolicy_expandedAuditConfig(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -141,7 +141,7 @@ func TestAccProjectIamPolicy_withCondition(t *testing.T) { org := envvar.GetTestOrgFromEnv(t) pid := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ // Create a new project @@ -176,7 +176,7 @@ func TestAccProjectIamPolicy_invalidMembers(t *testing.T) { ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccProjectAssociatePolicyBasic(pid, org, "admin@hashicorptest.com"), + Config: testAccProjectAssociatePolicyBasic(pid, org, "admin@hashicorptest.com"), ExpectError: regexp.MustCompile("invalid value \"admin@hashicorptest.com\" for bindings\\.1\\.members\\.0 \\(IAM members must have one of the values outlined here: https://cloud.google.com/billing/docs/reference/rest/v1/Policy#Binding\\)"), }, { diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go index 88ae902d0783..503117edf5b8 100644 --- 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go @@ -47,9 +47,9 @@ func resourceProjectOrgPolicyImporter(d *schema.ResourceData, meta interface{}) config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+):constraints/(?P[^/]+)$", - "^(?P[^/]+):constraints/(?P[^/]+)$", - "^(?P[^/]+):(?P[^/]+)$"}, + "projects/(?P[^/]+):constraints/(?P[^/]+)", + "(?P[^/]+):constraints/(?P[^/]+)", + "(?P[^/]+):(?P[^/]+)"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl index b496cd7f1643..ff1fd2a8fb7e 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl @@ -21,6 +21,11 @@ import ( "google.golang.org/api/serviceusage/v1" ) +// These services can only be enabled as a side-effect of enabling other services, +// so don't bother storing them in the config or using them for diffing. +var ignoredProjectServices = []string{"dataproc-control.googleapis.com", "source.googleapis.com", "stackdriverprovisioning.googleapis.com"} +var ignoredProjectServicesSet = tpgresource.GolangSetFromStringSlice(ignoredProjectServices) + // Services that can't be user-specified but are otherwise valid. Renamed // services should be added to this set during major releases. 
var bannedProjectServices = []string{"bigquery-json.googleapis.com"} @@ -60,7 +65,7 @@ var renamedServicesByOldAndNewServiceNames = tpgresource.MergeStringMaps(Renamed const maxServiceUsageBatchSize = 20 func validateProjectServiceService(val interface{}, key string) (warns []string, errs []error) { - bannedServicesFunc := verify.StringNotInSlice(bannedProjectServices, false) + bannedServicesFunc := verify.StringNotInSlice(append(ignoredProjectServices, bannedProjectServices...), false) warns, errs = bannedServicesFunc(val, key) if len(errs) > 0 { return @@ -119,11 +124,13 @@ func ResourceGoogleProjectService() *schema.Resource { "disable_on_destroy": { Type: schema.TypeBool, Optional: true, + Default: true, }, {{- if ne $.TargetVersionName "ga" }} "check_if_service_has_usage_on_destroy": { Type: schema.TypeBool, Optional: true, + Default: false, }, {{- end }} }, diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go index 72db52c3aa8c..3e6a67969342 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_internal_test.go @@ -9,6 +9,10 @@ func TestProjectServiceServiceValidateFunc(t *testing.T) { val interface{} ExpectValidationError bool }{ + "ignoredProjectService": { + val: "dataproc-control.googleapis.com", + ExpectValidationError: true, + }, "bannedProjectService": { val: "bigquery-json.googleapis.com", ExpectValidationError: true, diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl index 8fcb603673f0..c5fdaded7489 100644 --- 
a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl @@ -282,16 +282,14 @@ resource "google_project" "acceptance" { } resource "google_project_service" "test" { - project = google_project.acceptance.project_id - service = "%s" - disable_on_destroy = true + project = google_project.acceptance.project_id + service = "%s" } resource "google_project_service" "test2" { project = google_project.acceptance.project_id service = "%s" disable_dependent_services = %s - disable_on_destroy = true } `, pid, pid, org, billing, services[0], services[1], disableDependentServices) } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go index 2c73b9988f42..4ce40fbc7dc0 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_test.go @@ -159,6 +159,8 @@ func TestAccProject_labels(t *testing.T) { Config: testAccProject_labels(pid, org, "label", "label-value"), Check: resource.ComposeTestCheckFunc( testAccCheckGoogleProjectHasLabels(t, "google_project.acceptance", pid, map[string]string{"label": "label-value"}), + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject_labels(pid, org, "label", "label-value")), ), }, }, @@ -194,6 +196,10 @@ func TestAccProject_parentFolder(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccProject_parentFolder(pid, folderDisplayName, org), + Check: resource.ComposeTestCheckFunc( + acctest.GetTestMetadataForTgc("resourcemanager", "google_project.acceptance", + testAccProject_parentFolder(pid, folderDisplayName, org)), + ), }, }, }) @@ -245,12 +251,12 @@ func TestAccProject_tags(t *testing.T) { t.Parallel() pid := 
fmt.Sprintf("%s-%d", TestPrefix, acctest.RandInt(t)) - tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "crm-projects-tagkey", nil) + tagKey := acctest.BootstrapSharedTestTagKey(t, "crm-projects-tagkey") context := map[string]interface{}{ "pid": pid, "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "crm-projects-tagvalue", tagKey), + "tagValue": acctest.BootstrapSharedTestTagValue(t, "crm-projects-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -454,7 +460,8 @@ func TestAccProject_abandon(t *testing.T) { Config: testAccProject_abandon(pid, org), Destroy: true, Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleProjectExists("google_project.acceptance", pid)), + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + ), }, }, }) @@ -624,8 +631,8 @@ resource "google_project" "acceptance" { } func testAccProject_tagsAllowDestroy(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "acceptance" { + return acctest.Nprintf( + `resource "google_project" "acceptance" { project_id = "%{pid}" name = "%{pid}" org_id = "%{org}" diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index 21e829a6374a..901b1d0ca975 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -122,61 +122,54 @@ func resourceGoogleServiceAccountCreate(d *schema.ResourceData, meta interface{} ServiceAccount: sa, } - iamClient := config.NewIamClient(userAgent) - sa, err = iamClient.Projects.ServiceAccounts.Create("projects/"+project, r).Do() + sa, err = config.NewIamClient(userAgent).Projects.ServiceAccounts.Create("projects/"+project, 
r).Do() if err != nil { gerr, ok := err.(*googleapi.Error) alreadyExists := ok && gerr.Code == 409 && d.Get("create_ignore_already_exists").(bool) if alreadyExists { - fullServiceAccountName := fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project) - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: func() (operr error) { - sa, saerr := iamClient.Projects.ServiceAccounts.Get(fullServiceAccountName).Do() - - if saerr != nil { - return saerr - } - - d.SetId(sa.Name) - return populateResourceData(d, sa) - }, - Timeout: d.Timeout(schema.TimeoutCreate), - ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ - transport_tpg.IsNotFoundRetryableError("service account creation"), - }, - }) - - return nil + sa = &iam.ServiceAccount{ + Name: fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", project, aid, project), + } } else { return fmt.Errorf("Error creating service account: %s", err) } } d.SetId(sa.Name) - populateResourceData(d, sa) + + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, saerr := config.NewIamClient(userAgent).Projects.ServiceAccounts.Get(d.Id()).Do() + return saerr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{ + transport_tpg.IsNotFoundRetryableError("service account creation"), + transport_tpg.IsForbiddenIamServiceAccountRetryableError("service account creation"), + }, + }) + + if err != nil { + return fmt.Errorf("Error reading service account after creation: %s", err) + } // We poll until the resource is found due to eventual consistency issue - // on part of the api https://cloud.google.com/iam/docs/overview#consistency. - // Wait for at least 3 successful responses in a row to ensure result is consistent. 
+ // on part of the api https://cloud.google.com/iam/docs/overview#consistency // IAM API returns 403 when the queried SA is not found, so we must ignore both 404 & 403 errors - transport_tpg.PollingWaitTime( - resourceServiceAccountPollRead(d, meta), - transport_tpg.PollCheckForExistence, - "Creating Service Account", - d.Timeout(schema.TimeoutCreate), - 3, // Number of consecutive occurences. - ) + err = transport_tpg.PollingWaitTime(resourceServiceAccountPollRead(d, meta), transport_tpg.PollCheckForExistenceWith403, "Creating Service Account", d.Timeout(schema.TimeoutCreate), 1) + + if err != nil { + return err + } // We can't guarantee complete consistency even after polling, // so sleep for some additional time to reduce the likelihood of // eventual consistency failures. time.Sleep(10 * time.Second) - return nil + return resourceGoogleServiceAccountRead(d, meta) } -// PollReadFunc for checking Service Account existence. -// If resourceData is not nil, it will be updated with the response. 
func resourceServiceAccountPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { return func() (map[string]interface{}, error) { config := meta.(*transport_tpg.Config) @@ -208,10 +201,6 @@ func resourceGoogleServiceAccountRead(d *schema.ResourceData, meta interface{}) return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Service Account %q", d.Id())) } - return populateResourceData(d, sa) -} - -func populateResourceData(d *schema.ResourceData, sa *iam.ServiceAccount) error { if err := d.Set("email", sa.Email); err != nil { return fmt.Errorf("Error setting email: %s", err) } @@ -321,9 +310,9 @@ func resourceGoogleServiceAccountUpdate(d *schema.ResourceData, meta interface{} func resourceGoogleServiceAccountImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$"}, d, config); err != nil { + "projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl deleted file mode 100644 index 35dbd2c77259..000000000000 --- a/mmv1/third_party/terraform/services/resourcemanager3/resource_resource_manager_capability_test.go.tmpl +++ /dev/null @@ -1,76 +0,0 @@ -package resourcemanager3_test -{{- if ne $.TargetVersionName "ga" }} - -import ( - "testing" - "regexp" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccResourceManagerCapability_resourceManagerCapabilityExample_basic(t *testing.T) { - 
t.Parallel() - - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - folderTFResourceName := "google_folder.folder" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccResourceManagerCapability_resourceManagerCapabilityExample_basic(context), - }, - { - ResourceName: folderTFResourceName, - ImportState: true, - ImportStateVerify: false, - Check: resource.ComposeTestCheckFunc( - // Checks are now performed on the state *after* the import/refresh. - resource.TestCheckResourceAttr(folderTFResourceName, "configured_capabilities.#", "1"), - resource.TestMatchResourceAttr(folderTFResourceName, "management_project", regexp.MustCompile(".+")), - ), - }, - { - ResourceName: "google_resource_manager_capability.capability", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"capability_name", "parent"}, - }, - }, - }) -} - -func testAccResourceManagerCapability_resourceManagerCapabilityExample_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_folder" "folder" { - provider = google-beta - display_name = "my-folder%{random_suffix}" - parent = "organizations/%{org_id}" - deletion_protection = false -} -resource "time_sleep" "wait_60s" { - depends_on = [google_folder.folder] - create_duration = "60s" -} -resource "google_resource_manager_capability" "capability" { - provider = google-beta - value = true - parent = "${google_folder.folder.name}" - capability_name = "app-management" - depends_on = [time_sleep.wait_60s] -} -`, context) -} -{{- else }} -// Capability is only in beta version. 
-{{- end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl index 52227b380ecb..78fb3b12229f 100644 --- a/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl +++ b/mmv1/third_party/terraform/services/runtimeconfig/resource_runtimeconfig_variable.go.tmpl @@ -204,7 +204,7 @@ func newRuntimeconfigVariableFromResourceData(d *schema.ResourceData, project st text := d.Get("text") value := d.Get("value") - // TODO here we assume it's a simple name, not a full name. Should probably support full name as well + // TODO(selmanj) here we assume it's a simple name, not a full name. Should probably support full name as well parent = d.Get("parent").(string) name := d.Get("name").(string) diff --git a/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl deleted file mode 100644 index 1d9c30460336..000000000000 --- a/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_saas_test.go.tmpl +++ /dev/null @@ -1,99 +0,0 @@ -package saasruntime_test - -{{ if ne $.TargetVersionName `ga` -}} - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccSaasRuntimeSaas_update(t *testing.T) { - t.Parallel() - acctest.BootstrapIamMembers(t, []acctest.IamMember{ - { - Member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com", - Role: "roles/saasservicemgmt.serviceAgent", - }, - }) - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccSaasRuntimeSaas_basic(context), - }, - { - ResourceName: "google_saas_runtime_saas.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "saas_id", "terraform_labels"}, - }, - { - Config: testAccSaasRuntimeSaas_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_saas_runtime_saas.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_saas_runtime_saas.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "saas_id", "terraform_labels"}, - }, - }, - }) -} - -func testAccSaasRuntimeSaas_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_saas_runtime_saas" "example" { - provider = google-beta - saas_id = "tf-test-test-saas%{random_suffix}" - location = "global" - - locations { - name = "us-central1" - } - locations { - name = "europe-west1" - } -} -`, context) -} - -func testAccSaasRuntimeSaas_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_saas_runtime_saas" "example" { - provider = google-beta - saas_id = "tf-test-test-saas%{random_suffix}" - location = "global" - locations { - name = "us-central1" - } - locations { - name = "europe-west1" - } - locations { - name = "us-east1" - } - labels = { - "label-one": "foo" - } - annotations = { - "annotation-one": "bar" - } -} -`, context) -} -{{- end }} diff --git a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go index 12b21bf5ac70..d656eae35b07 100644 --- 
a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go +++ b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version.go @@ -57,11 +57,6 @@ func DataSourceSecretManagerSecretVersion() *schema.Resource { Optional: true, Default: false, }, - "fetch_secret_data": { - Type: schema.TypeBool, - Optional: true, - Default: true, - }, }, } } @@ -142,32 +137,16 @@ func dataSourceSecretManagerSecretVersionRead(d *schema.ResourceData, meta inter return fmt.Errorf("error setting version: %s", err) } - if d.Get("fetch_secret_data").(bool) { - url = fmt.Sprintf("%s:access", url) - resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return fmt.Errorf("error retrieving available secret manager secret version access: %s", err.Error()) - } - data := resp["payload"].(map[string]interface{}) - var secretData string - if d.Get("is_secret_data_base64").(bool) { - secretData = data["data"].(string) - } else { - payloadData, err := base64.StdEncoding.DecodeString(data["data"].(string)) - if err != nil { - return fmt.Errorf("error decoding secret manager secret version data: %s", err.Error()) - } - secretData = string(payloadData) - } - if err := d.Set("secret_data", secretData); err != nil { - return fmt.Errorf("error setting secret_data: %s", err) - } + url = fmt.Sprintf("%s:access", url) + resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return fmt.Errorf("error retrieving available secret manager secret version access: %s", err.Error()) } if err := d.Set("create_time", version["createTime"].(string)); err != nil { @@ -185,6 +164,21 @@ func dataSourceSecretManagerSecretVersionRead(d *schema.ResourceData, meta inter return 
fmt.Errorf("error setting enabled: %s", err) } + data := resp["payload"].(map[string]interface{}) + var secretData string + if d.Get("is_secret_data_base64").(bool) { + secretData = data["data"].(string) + } else { + payloadData, err := base64.StdEncoding.DecodeString(data["data"].(string)) + if err != nil { + return fmt.Errorf("error decoding secret manager secret version data: %s", err.Error()) + } + secretData = string(payloadData) + } + if err := d.Set("secret_data", secretData); err != nil { + return fmt.Errorf("error setting secret_data: %s", err) + } + d.SetId(nameValue.(string)) return nil } diff --git a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go index 47427d1e4397..68890cf857d2 100644 --- a/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret_version_test.go @@ -32,27 +32,6 @@ func TestAccDatasourceSecretManagerSecretVersion_basic(t *testing.T) { }) } -func TestAccDatasourceSecretManagerSecretVersion_fetchSecretDataFalse(t *testing.T) { - t.Parallel() - - randomString := acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSecretManagerSecretVersionDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccDatasourceSecretManagerSecretVersion_fetchSecretDataFalse(randomString), - Check: resource.ComposeTestCheckFunc( - testAccCheckDatasourceSecretManagerSecretVersion("data.google_secret_manager_secret_version.basic", "1"), - resource.TestCheckNoResourceAttr("data.google_secret_manager_secret_version.basic", "secret_data"), - ), - }, - }, - }) -} - func 
TestAccDatasourceSecretManagerSecretVersion_latest(t *testing.T) { t.Parallel() @@ -210,31 +189,6 @@ data "google_secret_manager_secret_version" "basic" { `, randomString, randomString) } -func testAccDatasourceSecretManagerSecretVersion_fetchSecretDataFalse(randomString string) string { - return fmt.Sprintf(` -resource "google_secret_manager_secret" "secret-basic" { - secret_id = "tf-test-secret-version-%s" - labels = { - label = "my-label" - } - replication { - auto {} - } -} - -resource "google_secret_manager_secret_version" "secret-version-basic" { - secret = google_secret_manager_secret.secret-basic.name - secret_data = "my-tf-test-secret-%s" -} - -data "google_secret_manager_secret_version" "basic" { - secret = google_secret_manager_secret_version.secret-version-basic.secret - version = 1 - fetch_secret_data = false -} -`, randomString, randomString) -} - func testAccDatasourceSecretManagerSecretVersion_withBase64SecretData(randomString, data string) string { return fmt.Sprintf(` resource "google_secret_manager_secret" "secret-basic-base64" { diff --git a/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go b/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go.tmpl similarity index 98% rename from mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go rename to mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go.tmpl index 0eb78e092355..6e291254097d 100644 --- a/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/iam_secret_manager_secret_test.go.tmpl @@ -15,7 +15,7 @@ func TestAccSecretManagerSecretIam_iamMemberConditionUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "role": "roles/secretmanager.secretAccessor", + "role": "roles/secretmanager.secretAccessor", } acctest.VcrTest(t, 
resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl similarity index 92% rename from mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go rename to mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl index 2e5db070d291..510427e2dc4b 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_test.go.tmpl @@ -464,42 +464,15 @@ func TestAccSecretManagerSecret_updateBetweenTtlAndExpireTime(t *testing.T) { }) } -func TestAccSecretManagerSecret_DeletionProtection(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccSecretManagerSecret_deletionprotectionTrue(context), - }, - { - ResourceName: "google_secret_manager_secret.secret-deletionprotection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "deletion_protection"}, - }, - { - Config: testAccSecretManagerSecret_deletionprotectionFalse(context), - }, - }, - }) -} - func TestAccSecretManagerSecret_tags(t *testing.T) { t.Parallel() - tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "secret_manager_secret-tagkey", map[string]interface{}{}) + tagKey := acctest.BootstrapSharedTestTagKey(t, "secret_manager_secret-tagkey") context := map[string]interface{}{ "org": envvar.GetTestOrgFromEnv(t), "tagKey": tagKey, - "tagValue": acctest.BootstrapSharedTestOrganizationTagValue(t, "secret_manager_secret-tagvalue", 
tagKey), + "tagValue": acctest.BootstrapSharedTestTagValue(t, "secret_manager_secret-tagvalue", tagKey), "random_suffix": acctest.RandString(t, 10), } @@ -509,16 +482,13 @@ func TestAccSecretManagerSecret_tags(t *testing.T) { CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccSecretManagerSecretTags(context), + Config: testAccSecretManagerSecret_tags(context), }, { ResourceName: "google_secret_manager_secret.secret-tags", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "deletion_protection", "tags"}, - }, - { - Config: testAccSecretManagerSecretTagsDeletionProtection(context), + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "tags"}, }, }, }) @@ -1281,92 +1251,15 @@ resource "google_secret_manager_secret" "secret-basic" { `, context) } -func testAccSecretManagerSecret_deletionprotectionTrue(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_secret" "secret-deletionprotection" { - secret_id = "tf-test-secret-%{random_suffix}" - - labels = { - label = "my-label" - } - - replication { - user_managed { - replicas { - location = "us-central1" - } - replicas { - location = "us-east1" - } - } - } - - ttl = "3600s" - - deletion_protection = true -} -`, context) -} - -func testAccSecretManagerSecret_deletionprotectionFalse(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_secret" "secret-deletionprotection" { - secret_id = "tf-test-secret-%{random_suffix}" - - labels = { - label = "my-label" - } - - replication { - user_managed { - replicas { - location = "us-central1" - } - replicas { - location = "us-east1" - } - } - } - - ttl = "3600s" - - deletion_protection = false -} -`, context) -} - -func testAccSecretManagerSecretTags(context map[string]interface{}) string { +func testAccSecretManagerSecret_tags(context 
map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_secret" "secret-tags" { secret_id = "tf-test-secret-%{random_suffix}" - labels = { - label = "my-label" - } - replication { - user_managed { - replicas { - location = "us-central1" - } - replicas { - location = "us-east1" - } - } - } - ttl = "3600s" - tags = { - "%{org}/%{tagKey}" = "%{tagValue}" - } -} -`, context) -} -func testAccSecretManagerSecretTagsDeletionProtection(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_secret" "secret-tags" { - secret_id = "tf-test-secret-%{random_suffix}" labels = { label = "my-label" } + replication { user_managed { replicas { @@ -1377,11 +1270,11 @@ resource "google_secret_manager_secret" "secret-tags" { } } } + ttl = "3600s" tags = { "%{org}/%{tagKey}" = "%{tagValue}" } - deletion_protection = false } `, context) } diff --git a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go.tmpl similarity index 94% rename from mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go rename to mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go.tmpl index 761ccde659bb..a955b4f6d748 100644 --- a/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go +++ b/mmv1/third_party/terraform/services/secretmanager/resource_secret_manager_secret_version_test.go.tmpl @@ -3,8 +3,8 @@ package secretmanager_test import ( "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccSecretManagerSecretVersion_update(t *testing.T) { @@ -23,18 +23,18 @@ func 
TestAccSecretManagerSecretVersion_update(t *testing.T) { Config: testAccSecretManagerSecretVersion_basic(context), }, { - ResourceName: "google_secret_manager_secret_version.secret-version-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"secret_data", "secret_data_wo_version"}, }, { Config: testAccSecretManagerSecretVersion_disable(context), }, { - ResourceName: "google_secret_manager_secret_version.secret-version-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, // at this point the secret data is disabled and so reading the data on import will // give an empty string ImportStateVerifyIgnore: []string{"secret_data", "secret_data_wo_version"}, @@ -43,9 +43,9 @@ func TestAccSecretManagerSecretVersion_update(t *testing.T) { Config: testAccSecretManagerSecretVersion_basic(context), }, { - ResourceName: "google_secret_manager_secret_version.secret-version-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"secret_data", "secret_data_wo_version"}, }, }, diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go b/mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go rename to mmv1/third_party/terraform/services/secretmanagerregional/iam_secret_manager_regional_secret_test.go.tmpl diff --git 
a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go.tmpl similarity index 87% rename from mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go rename to mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go.tmpl index bba70b014d66..0324798304b8 100644 --- a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go +++ b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_test.go.tmpl @@ -1,11 +1,9 @@ package secretmanagerregional_test import ( - "regexp" "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -26,9 +24,9 @@ func TestAccSecretManagerRegionalRegionalSecret_import(t *testing.T) { Config: testAccSecretManagerRegionalSecret_basic(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-basic", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-basic", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, }, @@ -51,36 +49,36 @@ func TestAccSecretManagerRegionalRegionalSecret_labelsUpdate(t *testing.T) { Config: testAccSecretManagerRegionalSecret_withoutLabels(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, 
ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_labelsUpdate(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_labelsUpdateOther(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_withoutLabels(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-labels", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, }, @@ -103,36 +101,36 @@ func TestAccSecretManagerRegionalRegionalSecret_annotationsUpdate(t *testing.T) Config: testAccSecretManagerRegionalSecret_withoutAnnotations(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", 
"terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_annotationsUpdate(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_annotationsUpdateOther(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, { Config: testAccSecretManagerRegionalSecret_withoutAnnotations(context), }, { - ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_secret_manager_regional_secret.regional-secret-with-annotations", + ImportState: true, + ImportStateVerify: true, ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels"}, }, }, @@ -143,9 +141,9 @@ func TestAccSecretManagerRegionalRegionalSecret_cmekUpdate(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "kms_key_name": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key3").CryptoKey.Name, - "kms_key_name_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key4").CryptoKey.Name, - "random_suffix": acctest.RandString(t, 10), + "kms_key_name": 
acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key3").CryptoKey.Name, + "kms_key_name_other": acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key4").CryptoKey.Name, + "random_suffix": acctest.RandString(t, 10), } acctest.VcrTest(t, resource.TestCase{ @@ -553,70 +551,6 @@ func TestAccSecretManagerRegionalRegionalSecret_versionAliasesUpdate(t *testing. }) } -func TestAccSecretManagerRegionalRegionalSecret_deletionprotection(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSecretManagerRegionalRegionalSecretDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSecretManagerRegionalSecretDeletionProtectionL1(context), - }, - { - ResourceName: "google_secret_manager_regional_secret.regional-secret-deletion-protection", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels", "deletion_protection"}, - }, - { - Config: testAccSecretManagerRegionalSecretDeletionProtectionL2(context), - ExpectError: regexp.MustCompile("deletion_protection"), - }, - { - Config: testAccSecretManagerRegionalSecretDeletionProtectionFalse(context), - }, - }, - }) -} - -func TestAccSecretManagerRegionalRegionalSecret_tags(t *testing.T) { - t.Parallel() - - tagKey := acctest.BootstrapSharedTestOrganizationTagKey(t, "secretmanager_regional_regionalsecret-tagkey", map[string]interface{}{}) - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org": envvar.GetTestOrgFromEnv(t), - "tagKey": tagKey, - "tagValue": 
acctest.BootstrapSharedTestOrganizationTagValue(t, "secretmanager_regional_regionalsecret-tagvalue", tagKey), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSecretManagerRegionalRegionalSecretDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSecretManagerRegionalSecretTags(context), - }, - { - ResourceName: "google_secret_manager_regional_secret.regional-secret-basic", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "secret_id", "terraform_labels", "deletion_protection", "tags"}, - }, - { - Config: testAccSecretManagerRegionalSecretTagsDeletionProtection(context), - }, - }, - }) -} - func testAccSecretManagerRegionalSecret_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_secret_manager_regional_secret" "regional-secret-basic" { @@ -1373,58 +1307,3 @@ resource "google_secret_manager_regional_secret_version" "reg-secret-version-4" } `, context) } - -func testAccSecretManagerRegionalSecretDeletionProtectionL1(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_regional_secret" "regional-secret-deletion-protection" { - secret_id = "tf-test-reg-secret%{random_suffix}" - location = "us-central1" - deletion_protection = true -} -`, context) -} - -func testAccSecretManagerRegionalSecretDeletionProtectionL2(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_regional_secret" "regional-secret-deletion-protection" { - secret_id = "tf-test-reg-secret%{random_suffix}" - location = "us-west2" - deletion_protection = true -} -`, context) -} - -func testAccSecretManagerRegionalSecretDeletionProtectionFalse(context map[string]interface{}) string { - return acctest.Nprintf(` -resource 
"google_secret_manager_regional_secret" "regional-secret-deletion-protection" { - secret_id = "tf-test-reg-secret%{random_suffix}" - location = "us-central1" - deletion_protection = false -} -`, context) -} - -func testAccSecretManagerRegionalSecretTags(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_regional_secret" "regional-secret-basic" { - secret_id = "tf-test-reg-secret-%{random_suffix}" - location = "us-central1" - tags = { - "%{org}/%{tagKey}" = "%{tagValue}" - } -} -`, context) -} - -func testAccSecretManagerRegionalSecretTagsDeletionProtection(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secret_manager_regional_secret" "regional-secret-basic" { - secret_id = "tf-test-reg-secret-%{random_suffix}" - location = "us-central1" - tags = { - "%{org}/%{tagKey}" = "%{tagValue}" - } - deletion_protection = false -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go b/mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go rename to mmv1/third_party/terraform/services/secretmanagerregional/resource_secret_manager_regional_secret_version_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go index ed1397c981a5..3e219e5659b7 100644 --- a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go @@ -11,7 +11,7 
@@ func TestAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField t.Parallel() context := map[string]interface{}{ - "deletion_policy": "DELETE", + "prevent_destroy": false, "random_suffix": acctest.RandString(t, 10), } @@ -46,18 +46,20 @@ func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "tf-test-my-initial-instance%{random_suffix}" - # Prevent accidental deletions. - deletion_policy = "%{deletion_policy}" + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } } resource "google_secure_source_manager_repository" "repository" { repository_id = "tf-test-my-initial-repository%{random_suffix}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location - # Prevent accidental deletions. - deletion_policy = "%{deletion_policy}" + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } } resource "google_secure_source_manager_branch_rule" "default" { @@ -81,18 +83,20 @@ func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "tf-test-my-initial-instance%{random_suffix}" - - # Prevent accidental deletions. - deletion_policy = "%{deletion_policy}" + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } } resource "google_secure_source_manager_repository" "repository" { repository_id = "tf-test-my-initial-repository%{random_suffix}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location - # Prevent accidental deletions. 
- deletion_policy = "%{deletion_policy}" + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } } resource "google_secure_source_manager_branch_rule" "default" { diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go deleted file mode 100644 index a035b2094130..000000000000 --- a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package securesourcemanager_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "deletion_policy": "DELETE", - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_basic(context), - }, - { - ResourceName: "google_secure_source_manager_repository.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id", "deletion_policy"}, - }, - { - Config: testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_secure_source_manager_repository.default", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: 
"google_secure_source_manager_repository.default", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id", "deletion_policy"}, - }, - }, - }) -} - -func testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secure_source_manager_instance" "instance" { - location = "us-central1" - instance_id = "tf-test-my-instance%{random_suffix}" - - # Prevent accidental deletions. - deletion_policy = "%{deletion_policy}" -} - -resource "google_secure_source_manager_repository" "default" { - location = "us-central1" - repository_id = "tf-test-my-repository%{random_suffix}" - instance = google_secure_source_manager_instance.instance.name - - # Prevent accidental deletions. - deletion_policy = "%{deletion_policy}" -} -`, context) -} - -func testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_secure_source_manager_instance" "instance" { - location = "us-central1" - instance_id = "tf-test-my-instance%{random_suffix}" - - # Prevent accidental deletions. - deletion_policy = "%{deletion_policy}" -} - -resource "google_secure_source_manager_repository" "default" { - location = "us-central1" - repository_id = "tf-test-my-repository%{random_suffix}" - instance = google_secure_source_manager_instance.instance.name - - description = "new description" - - # Prevent accidental deletions. 
- deletion_policy = "%{deletion_policy}" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go b/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go index 534c4be65432..9c93f55eb655 100644 --- a/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go +++ b/mmv1/third_party/terraform/services/servicenetworking/resource_google_service_networking_peered_dns_domain.go @@ -240,7 +240,7 @@ func resourceGoogleServiceNetworkingPeeredDNSDomainDelete(d *schema.ResourceData return nil } -// TODO: An out of band aspect of this API is that it uses a unique formatting of network +// NOTE(deviavir): An out of band aspect of this API is that it uses a unique formatting of network // different from the standard self_link URI. It requires a call to the resource manager to get the project // number for the current project. 
func getProjectNumber(d *schema.ResourceData, config *transport_tpg.Config, project, userAgent string) (string, error) { diff --git a/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go b/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go index dfac78f3392a..d3e5b76c0b7d 100644 --- a/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go +++ b/mmv1/third_party/terraform/services/servicenetworking/resource_service_networking_connection.go @@ -41,7 +41,7 @@ func ResourceServiceNetworkingConnection() *schema.Resource { DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `Name of VPC network connected with service producers using VPC peering.`, }, - // TODO: This field is weird, it's required to make the Insert/List calls as a parameter + // NOTE(craigatgoogle): This field is weird, it's required to make the Insert/List calls as a parameter // named "parent", however it's also defined in the response as an output field called "peering", which // uses "-" as a delimiter instead of ".". To alleviate user confusion I've opted to model the gcloud // CLI's approach, calling the field "service" and accepting the same format as the CLI with the "." @@ -341,7 +341,7 @@ func resourceServiceNetworkingConnectionImportState(d *schema.ResourceData, meta return []*schema.ResourceData{d}, nil } -// TODO: The Connection resource in this API doesn't have an Id field, so inorder +// NOTE(craigatgoogle): The Connection resource in this API doesn't have an Id field, so inorder // to support the Read method, we create an Id using the tuple(Network, Service). 
type connectionId struct { Network string @@ -379,7 +379,7 @@ func parseConnectionId(id string) (*connectionId, error) { }, nil } -// TODO: An out of band aspect of this API is that it uses a unique formatting of network +// NOTE(craigatgoogle): An out of band aspect of this API is that it uses a unique formatting of network // different from the standard self_link URI. It requires a call to the resource manager to get the project // number for the current project. func RetrieveServiceNetworkingNetworkName(d *schema.ResourceData, config *transport_tpg.Config, network, userAgent string) (string, error) { @@ -422,7 +422,7 @@ func RetrieveServiceNetworkingNetworkName(d *schema.ResourceData, config *transp const parentServicePattern = "^services/.+$" -// TODO: An out of band aspect of this API is that it requires the service name to be +// NOTE(craigatgoogle): An out of band aspect of this API is that it requires the service name to be // formatted as "services/" func formatParentService(service string) string { r := regexp.MustCompile(parentServicePattern) diff --git a/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go b/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go index 494ba4868d44..bd11ab324023 100644 --- a/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go +++ b/mmv1/third_party/terraform/services/siteverification/resource_site_verification_web_resource_test.go @@ -15,7 +15,7 @@ import ( func TestAccSiteVerificationWebResource_siteVerificationDomain(t *testing.T) { // This test requires manual project configuration. - t.Skip() + acctest.SkipIfVcr(t) // This test needs to be able to create DNS records that are publicly // resolvable. 
To run, you'll need a registered domain with a GCP managed zone diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl similarity index 86% rename from mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go rename to mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl index e3ee32694a0c..5d438850a80a 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_test.go.tmpl @@ -388,108 +388,6 @@ resource "google_spanner_database" "basic" { `, instanceName, instanceName, databaseName, databaseName, databaseName) } -func TestAccSpannerDatabase_defaultTimeZone(t *testing.T) { - t.Parallel() - - rnd := acctest.RandString(t, 10) - instanceName := fmt.Sprintf("tf-test-%s", rnd) - databaseName := fmt.Sprintf("tfgen_%s", rnd) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t), - Steps: []resource.TestStep{ - { - // Test creating a database with `default_time_zone` set - Config: testAccSpannerDatabase_defaultTimeZone(instanceName, databaseName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), - resource.TestCheckResourceAttr("google_spanner_database.basic", "default_time_zone", "UTC"), - ), - }, - { - // Test removing `default_time_zone` and setting default time zone to a new value with a DDL statement in `ddl` - Config: testAccSpannerDatabase_defaultTimeZoneUpdate1(instanceName, databaseName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), - 
resource.TestCheckResourceAttr("google_spanner_database.basic", "default_time_zone", "UTC"), - ), - }, - { - // Test that adding `default_time_zone`, regardless of any previous statements in `ddl` - Config: testAccSpannerDatabase_defaultTimeZoneUpdate2(instanceName, databaseName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), - resource.TestCheckResourceAttr("google_spanner_database.basic", "default_time_zone", "Australia/Sydney"), - ), - }, - }, - }) -} - -func testAccSpannerDatabase_defaultTimeZone(instanceName, databaseName string) string { - return fmt.Sprintf(` -resource "google_spanner_instance" "basic" { - name = "%s" - config = "regional-us-central1" - display_name = "%s-display" - num_nodes = 1 -} - -resource "google_spanner_database" "basic" { - instance = google_spanner_instance.basic.name - name = "%s" - default_time_zone = "UTC" - ddl = [ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - ] - deletion_protection = false -} -`, instanceName, instanceName, databaseName) -} - -func testAccSpannerDatabase_defaultTimeZoneUpdate1(instanceName, databaseName string) string { - return fmt.Sprintf(` -resource "google_spanner_instance" "basic" { - name = "%s" - config = "regional-us-central1" - display_name = "%s-display" - num_nodes = 1 -} - -resource "google_spanner_database" "basic" { - instance = google_spanner_instance.basic.name - name = "%s" - default_time_zone = "UTC" - // Change : remove the table. 
- ddl = [] - deletion_protection = false -} -`, instanceName, instanceName, databaseName) -} - -func testAccSpannerDatabase_defaultTimeZoneUpdate2(instanceName, databaseName string) string { - return fmt.Sprintf(` -resource "google_spanner_instance" "basic" { - name = "%s" - config = "regional-us-central1" - display_name = "%s-display" - num_nodes = 1 -} - -resource "google_spanner_database" "basic" { - instance = google_spanner_instance.basic.name - name = "%s" - default_time_zone = "Australia/Sydney" // Change : updated default_time_zone argument - ddl = [ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - ] - deletion_protection = false -} -`, instanceName, instanceName, databaseName) -} - func TestAccSpannerDatabase_enableDropProtection(t *testing.T) { t.Parallel() @@ -625,7 +523,7 @@ func TestAccSpannerDatabase_cmek(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@gcp-sa-spanner.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go index dac13ad9f4e6..854b5ca764fb 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_instance_test.go @@ -74,8 +74,6 @@ func TestAccSpannerInstance_basicUpdateWithProviderDefaultLabels(t *testing.T) { } func TestAccSpannerInstance_noNodeCountSpecified(t *testing.T) { - // Cannot be run in VCR because no API calls are made - acctest.SkipIfVcr(t) t.Parallel() idName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) @@ -86,7 +84,7 @@ func TestAccSpannerInstance_noNodeCountSpecified(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccSpannerInstance_noNodeCountSpecified(idName), - 
ExpectError: regexp.MustCompile(".*one of\n`autoscaling_config,instance_type,num_nodes,processing_units` must be\nspecified.*"), + ExpectError: regexp.MustCompile(".*one of `autoscaling_config,num_nodes,processing_units`\nmust be specified.*"), }, }, }) @@ -499,41 +497,6 @@ func TestAccSpannerInstance_spannerInstanceWithAutoscaling(t *testing.T) { }) } -func TestAccSpannerInstance_freeInstanceBasicUpdate(t *testing.T) { - displayName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSpannerInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSpannerInstance_freeInstanceBasic(displayName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_spanner_instance.main", "state"), - ), - }, - { - ResourceName: "google_spanner_instance.main", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, - }, - { - Config: testAccSpannerInstance_freeInstanceBasicUpdate(displayName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrSet("google_spanner_instance.main", "state"), - ), - }, - { - ResourceName: "google_spanner_instance.main", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, - }, - }, - }) -} - func testAccSpannerInstance_basic(name string) string { return fmt.Sprintf(` resource "google_spanner_instance" "basic" { @@ -830,27 +793,3 @@ resource "google_spanner_instance" "example" { } `, context) } - -func testAccSpannerInstance_freeInstanceBasic(name string) string { - return fmt.Sprintf(` -resource "google_spanner_instance" "main" { - name = "%s" - config = "regional-europe-west1" - display_name = "%s" - instance_type = "FREE_INSTANCE" -} -`, name, name) -} - -func 
testAccSpannerInstance_freeInstanceBasicUpdate(name string) string { - return fmt.Sprintf(` -resource "google_spanner_instance" "main" { - name = "%s" - config = "nam-eur-asia3" - display_name = "%s" - edition = "ENTERPRISE_PLUS" - instance_type = "PROVISIONED" - num_nodes = 1 -} -`, name, name) -} diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go index 92f2c0b77e58..96310a48fc57 100644 --- a/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_schedule_backup_test.go @@ -104,76 +104,6 @@ func TestAccSpannerBackupSchedule_CMEKFullBackup(t *testing.T) { }) } -func TestAccSpannerBackupSchedule_MRCMEKIncrementalBackup(t *testing.T) { - t.Parallel() - suffix := acctest.RandString(t, 10) - - kms1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-mr-cmek-test-key-us-central1") - kms2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-mr-cmek-test-key-us-east1") - kms3 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east4", "tf-mr-cmek-test-key-us-east4") - - context := map[string]interface{}{ - "random_suffix": suffix, - "key_ring1": kms1.KeyRing.Name, - "key_name1": kms1.CryptoKey.Name, - "key_ring2": kms2.KeyRing.Name, - "key_name2": kms2.CryptoKey.Name, - "key_ring3": kms3.KeyRing.Name, - "key_name3": kms3.CryptoKey.Name, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSpannerBackupScheduleDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSpannerBackupSchedule_MRCMEKIncremental(context), - }, - { - ResourceName: 
"google_spanner_backup_schedule.backup_schedule", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccSpannerBackupSchedule_MRCMEKFullBackup(t *testing.T) { - t.Parallel() - suffix := acctest.RandString(t, 10) - - kms1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-mr-cmek-test-key-us-central1") - kms2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-mr-cmek-test-key-us-east1") - kms3 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east4", "tf-mr-cmek-test-key-us-east4") - - context := map[string]interface{}{ - "random_suffix": suffix, - "key_ring1": kms1.KeyRing.Name, - "key_name1": kms1.CryptoKey.Name, - "key_ring2": kms2.KeyRing.Name, - "key_name2": kms2.CryptoKey.Name, - "key_ring3": kms3.KeyRing.Name, - "key_name3": kms3.CryptoKey.Name, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckSpannerBackupScheduleDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSpannerBackupSchedule_MRCMEKFull(context), - }, - { - ResourceName: "google_spanner_backup_schedule.backup_schedule", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - func testAccSpannerBackupSchedule_basic(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_spanner_instance" "instance" { @@ -334,106 +264,3 @@ resource "google_spanner_backup_schedule" "backup_schedule" { } `, context) } - -func testAccSpannerBackupSchedule_MRCMEKIncremental(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_spanner_instance" "instance" { - name = "my-instance-%{random_suffix}" - config = "nam3" - display_name = "My Instance" - num_nodes = 1 - edition = "ENTERPRISE_PLUS" -} - -resource "google_spanner_database" "database" { - 
instance = google_spanner_instance.instance.name - name = "my-database-%{random_suffix}" - ddl = [ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - ] - deletion_protection = false - - encryption_config { - kms_key_names = [ - "%{key_name1}", - "%{key_name2}", - "%{key_name3}", - ] - } -} - -resource "google_spanner_backup_schedule" "backup_schedule" { - instance = google_spanner_instance.instance.name - database = google_spanner_database.database.name - name = "my-backup-schedule-%{random_suffix}" - - retention_duration = "172800s" - - spec { - cron_spec { - text = "0 12 * * *" - } - } - - incremental_backup_spec {} - - encryption_config { - encryption_type = "GOOGLE_DEFAULT_ENCRYPTION" - } -} -`, context) -} - -func testAccSpannerBackupSchedule_MRCMEKFull(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_spanner_instance" "instance" { - name = "my-instance-%{random_suffix}" - config = "nam3" - display_name = "My Instance" - num_nodes = 1 - edition = "ENTERPRISE_PLUS" -} - -resource "google_spanner_database" "database" { - instance = google_spanner_instance.instance.name - name = "my-database-%{random_suffix}" - ddl = [ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - ] - deletion_protection = false - - encryption_config { - kms_key_names = [ - "%{key_name1}", - "%{key_name2}", - "%{key_name3}", - ] - } -} - -resource "google_spanner_backup_schedule" "backup_schedule" { - instance = google_spanner_instance.instance.name - database = google_spanner_database.database.name - name = "my-backup-schedule-%{random_suffix}" - - retention_duration = "172800s" - - spec { - cron_spec { - text = "0 12 * * *" - } - } - - full_backup_spec {} - - encryption_config { - encryption_type = "CUSTOMER_MANAGED_ENCRYPTION" - kms_key_names = [ - "%{key_name1}", - "%{key_name2}", - "%{key_name3}", - ] - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go 
b/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go index 55b03d879137..8f48b4a5df94 100644 --- a/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go +++ b/mmv1/third_party/terraform/services/sql/data_source_sql_database_instances.go @@ -146,7 +146,7 @@ func flattenDatasourceGoogleDatabaseInstancesList(fetchedInstances []*sqladmin.D instance["available_maintenance_versions"] = rawInstance.AvailableMaintenanceVersions instance["instance_type"] = rawInstance.InstanceType instance["service_account_email_address"] = rawInstance.ServiceAccountEmailAddress - instance["settings"] = flattenSettings(rawInstance.Settings, rawInstance.InstanceType, d) + instance["settings"] = flattenSettings(rawInstance.Settings, d) if rawInstance.DiskEncryptionConfiguration != nil { instance["encryption_key_name"] = rawInstance.DiskEncryptionConfiguration.KmsKeyName diff --git a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go deleted file mode 100644 index 5e536900be96..000000000000 --- a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go +++ /dev/null @@ -1,507 +0,0 @@ -package sql - -import ( - "context" - "fmt" - "strings" - "time" - - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/tfsdk" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-log/tflog" - - "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - 
"github.com/hashicorp/terraform-provider-google/google/fwmodels" - "github.com/hashicorp/terraform-provider-google/google/fwresource" - "github.com/hashicorp/terraform-provider-google/google/fwtransport" - "github.com/hashicorp/terraform-provider-google/google/transport" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - sqladmin "google.golang.org/api/sqladmin/v1beta4" -) - -var ( - _ resource.Resource = &SQLUserFWResource{} - _ resource.ResourceWithConfigure = &SQLUserFWResource{} -) - -func NewSQLUserFWResource() resource.Resource { - return &SQLUserFWResource{} -} - -type SQLUserFWResource struct { - client *sqladmin.Service - providerConfig *transport_tpg.Config -} - -type SQLUserModel struct { - Id types.String `tfsdk:"id"` - Project types.String `tfsdk:"project"` - Name types.String `tfsdk:"name"` - Host types.String `tfsdk:"host"` - Instance types.String `tfsdk:"instance"` - Password types.String `tfsdk:"password"` - // PasswordWO types.String `tfsdk:"password_wo"` - // PasswordWOVersion types.String `tfsdk:"password_wo_version"` - Type types.String `tfsdk:"type"` - // SqlServerUserDetails types.List `tfsdk:"sql_server_user_details"` - // PasswordPolicy types.List `tfsdk:"password_policy"` - // DeletionPolicy types.String `tfsdk:"deletion_policy"` - Timeouts timeouts.Value `tfsdk:"timeouts"` -} - -// Metadata returns the resource type name. -func (d *SQLUserFWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_fw_sql_user" -} - -func (r *SQLUserFWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. 
- if req.ProviderData == nil { - return - } - - p, ok := req.ProviderData.(*transport_tpg.Config) - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - return - } - - r.client = p.NewSqlAdminClient(p.UserAgent) - if resp.Diagnostics.HasError() { - return - } - r.providerConfig = p -} - -func (d *SQLUserFWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "A resource to represent a SQL User object.", - - Attributes: map[string]schema.Attribute{ - "project": schema.StringAttribute{ - Optional: true, - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - }, - "host": schema.StringAttribute{ - Optional: true, - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - stringplanmodifier.UseStateForUnknown(), - }, - }, - "instance": schema.StringAttribute{ - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "name": schema.StringAttribute{ - Description: `The name of the user. Changing this forces a new resource to be created.`, - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - SQLUserNameIAMPlanModifier(), - }, - }, - "password": schema.StringAttribute{ - Optional: true, - Sensitive: true, - }, - "type": schema.StringAttribute{ - Optional: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - // TODO DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("BUILT_IN"), - }, - }, - // This is included for backwards compatibility with the original, SDK-implemented resource. 
- "id": schema.StringAttribute{ - Description: "Project identifier", - MarkdownDescription: "Project identifier", - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, - }, - Blocks: map[string]schema.Block{ - "timeouts": timeouts.Block(ctx, timeouts.Opts{ - Create: true, - }), - }, - } -} - -func (r *SQLUserFWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data SQLUserModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform plan data into the model - resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - project := fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - nameData, diags := data.Name.ToStringValue(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - instanceData, diags := data.Instance.ToStringValue(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - hostData, diags := data.Host.ToStringValue(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - typeData, diags := data.Type.ToStringValue(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - passwordData, diags := data.Password.ToStringValue(ctx) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - user := &sqladmin.User{ - Name: nameData.ValueString(), - Instance: instanceData.ValueString(), - Password: passwordData.ValueString(), - Host: hostData.ValueString(), - Type: typeData.ValueString(), - } - - transport_tpg.MutexStore.Lock(instanceMutexKey(project.ValueString(), instanceData.ValueString())) - defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project.ValueString(), instanceData.ValueString())) - - r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) - - // TODO host check logic - - var op *sqladmin.Operation - var err error - insertFunc := func() error { - op, err = r.client.Users.Insert(project.ValueString(), instanceData.ValueString(), - user).Do() - return err - } - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: insertFunc, - Timeout: createTimeout, - }) - - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("Error, failed to insert "+ - "user %s into instance %s", nameData.ValueString(), instanceData.ValueString()), err.Error()) - return - } - - err = SqlAdminOperationWaitTime(r.providerConfig, op, project.ValueString(), "Insert User", r.client.UserAgent, createTimeout) - - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("Error, failure waiting to insert "+ - "user %s into instance %s", nameData.ValueString(), instanceData.ValueString()), err.Error()) - return - } - - tflog.Trace(ctx, "created sql user resource") - - // This will include a double-slash (//) for postgres instances, - // for which user.Host is an empty string. That's okay. - data.Id = types.StringValue(fmt.Sprintf("%s/%s/%s", user.Name, user.Host, user.Instance)) - data.Project = project - - // read back sql user - r.SQLUserRefresh(ctx, &data, &resp.State, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
-} - -func (r *SQLUserFWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data SQLUserModel - var metaData *fwmodels.ProviderMetaModel - - // Read Provider meta into the meta model - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - // Read Terraform configuration data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - // Use provider_meta to set User-Agent - r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) - - tflog.Trace(ctx, "read sql user resource") - - // read back sql user - r.SQLUserRefresh(ctx, &data, &resp.State, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - // Save data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SQLUserFWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var old, new SQLUserModel - var metaData *fwmodels.ProviderMetaModel - - resp.Diagnostics.Append(req.State.Get(ctx, &old)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(req.Plan.Get(ctx, &new)...) - if resp.Diagnostics.HasError() { - return - } - - // Use provider_meta to set User-Agent - r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) - - if !old.Password.Equal(new.Password) { - project := new.Project.ValueString() - instance := new.Instance.ValueString() - name := new.Name.ValueString() - host := new.Host.ValueString() - password := new.Password.ValueString() - - updateTimeout, diags := new.Timeouts.Update(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - user := &sqladmin.User{ - Name: name, - Instance: instance, - Password: password, - } - transport_tpg.MutexStore.Lock(instanceMutexKey(project, instance)) - defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance)) - var op *sqladmin.Operation - var err error - updateFunc := func() error { - op, err = r.client.Users.Update(project, instance, user).Host(host).Name(name).Do() - return err - } - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: updateFunc, - Timeout: updateTimeout, - }) - - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("failed to update"+ - "user %s in instance %s", name, instance), err.Error()) - return - } - - err = SqlAdminOperationWaitTime(r.providerConfig, op, project, "Update User", r.client.UserAgent, updateTimeout) - - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("failure waiting for update"+ - "user %s in instance %s", name, instance), err.Error()) - return - } - - // read back sql user - r.SQLUserRefresh(ctx, &new, &resp.State, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - } - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &new)...) -} - -func (r *SQLUserFWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data SQLUserModel - - // Read Terraform prior state data into the model - resp.Diagnostics.Append(req.State.Get(ctx, &data)...) - if resp.Diagnostics.HasError() { - return - } - - project := data.Project.ValueString() - instance := data.Instance.ValueString() - name := data.Name.ValueString() - host := data.Host.ValueString() - - deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - transport_tpg.MutexStore.Lock(instanceMutexKey(project, instance)) - defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance)) - var op *sqladmin.Operation - var err error - deleteFunc := func() error { - op, err = r.client.Users.Delete(project, instance).Host(host).Name(name).Do() - return err - } - err = transport_tpg.Retry(transport_tpg.RetryOptions{ - RetryFunc: deleteFunc, - Timeout: deleteTimeout, - }) - - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("failed to delete"+ - "user %s in instance %s", name, instance), err.Error()) - return - } - - err = SqlAdminOperationWaitTime(r.providerConfig, op, project, "Delete User", r.client.UserAgent, deleteTimeout) - - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("Error, failure waiting to delete "+ - "user %s", name), err.Error()) - return - } -} - -func (r *SQLUserFWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - idParts := strings.Split(req.ID, "/") - - // TODO recreate all import cases - if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" { - resp.Diagnostics.AddError( - "Unexpected Import Identifier", - fmt.Sprintf("Expected import identifier with format: project/instance/host/name. Got: %q", req.ID), - ) - return - } - - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project"), idParts[0])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance"), idParts[1])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("host"), idParts[2])...) - resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), idParts[3])...) 
-} - -func (r *SQLUserFWResource) SQLUserRefresh(ctx context.Context, data *SQLUserModel, state *tfsdk.State, diag *diag.Diagnostics) { - userReadResp, err := r.client.Users.Get(data.Project.ValueString(), data.Instance.ValueString(), data.Name.ValueString()).Host(data.Host.ValueString()).Do() - if err != nil { - // Treat HTTP 404 Not Found status as a signal to recreate resource - // and return early - if userReadResp != nil && transport.IsGoogleApiErrorWithCode(err, userReadResp.HTTPStatusCode) { - tflog.Trace(ctx, "sql user resource not found, removing from state") - state.RemoveResource(ctx) - return - } - diag.AddError(fmt.Sprintf("Error, failure waiting to read "+ - "user %s", data.Name.ValueString()), err.Error()) - return - } - - id := fmt.Sprintf("projects/%s/global/networks/%s", userReadResp.Project, userReadResp.Name) - data.Id = types.StringValue(id) - data.Project = types.StringValue(userReadResp.Project) - data.Instance = types.StringValue(userReadResp.Instance) - if userReadResp.Host != "" { - data.Host = types.StringValue(userReadResp.Host) - } - if userReadResp.Type != "" { - data.Type = types.StringValue(userReadResp.Type) - } -} - -// Plan Modifiers -func SQLUserNameIAMPlanModifier() planmodifier.String { - return &sqlUserNameIAMPlanModifier{} -} - -type sqlUserNameIAMPlanModifier struct { -} - -func (d *sqlUserNameIAMPlanModifier) Description(ctx context.Context) string { - return "Suppresses name diffs for IAM user types." -} -func (d *sqlUserNameIAMPlanModifier) MarkdownDescription(ctx context.Context) string { - return d.Description(ctx) -} - -// Plan modifier to emulate the SDK diffSuppressIamUserName -func (d *sqlUserNameIAMPlanModifier) PlanModifyString(ctx context.Context, req planmodifier.StringRequest, resp *planmodifier.StringResponse) { - // Retrieve relevant fields - var oldName types.String - diags := req.State.GetAttribute(ctx, path.Root("name"), &oldName) - resp.Diagnostics.Append(diags...) 
- if resp.Diagnostics.HasError() { - return - } - - var newName types.String - diags = req.Plan.GetAttribute(ctx, path.Root("name"), &newName) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - var userType types.String - diags = req.Plan.GetAttribute(ctx, path.Root("type"), &userType) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - // Old diff suppress logic - strippedNewName := strings.Split(newName.ValueString(), "@")[0] - - if oldName.ValueString() == strippedNewName && strings.Contains(userType.ValueString(), "IAM") { - // Suppress the diff by setting the planned value to the old value - resp.PlanValue = oldName - } -} diff --git a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go deleted file mode 100644 index 80e78c4a0316..000000000000 --- a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package sql_test - -import ( - "fmt" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccSqlUserFW_mysql(t *testing.T) { - // Multiple fine-grained resources - acctest.SkipIfVcr(t) - t.Parallel() - - instance := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlUserDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleSqlUserFW_mysql(instance, "password"), - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user1"), - testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user2"), - ), - }, - { - // Update password - Config: 
testGoogleSqlUserFW_mysql(instance, "new_password"), - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user1"), - testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user2"), - testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user3"), - ), - }, - { - ResourceName: "google_fw_sql_user.user2", - ImportStateId: fmt.Sprintf("%s/%s/gmail.com/admin", envvar.GetTestProjectFromEnv(), instance), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - { - ResourceName: "google_fw_sql_user.user3", - ImportStateId: fmt.Sprintf("%s/%s/10.0.0.0/24/admin", envvar.GetTestProjectFromEnv(), instance), - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password"}, - }, - }, - }) -} - -func testGoogleSqlUserFW_mysql(instance, password string) string { - return fmt.Sprintf(` -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - database_version = "MYSQL_5_7" - deletion_protection = false - settings { - tier = "db-f1-micro" - } -} - -resource "google_fw_sql_user" "user1" { - name = "admin" - instance = google_sql_database_instance.instance.name - host = "google.com" - password = "%s" -} - -resource "google_fw_sql_user" "user2" { - name = "admin" - instance = google_sql_database_instance.instance.name - host = "gmail.com" - password = "hunter2" -} - -resource "google_fw_sql_user" "user3" { - name = "admin" - instance = google_sql_database_instance.instance.name - host = "10.0.0.0/24" - password = "hunter3" -} -`, instance, password) -} diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 6abe0b4d3d4e..0ee23a7ab748 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl 
@@ -73,11 +73,6 @@ var ( "settings.0.backup_configuration.0.transaction_log_retention_days", } - connectionPoolConfigKeys = []string{ - "settings.0.connection_pool_config.0.connection_pooling_enabled", - "settings.0.connection_pool_config.0.flags", - } - ipConfigurationKeys = []string{ "settings.0.ip_configuration.0.authorized_networks", "settings.0.ip_configuration.0.ipv4_enabled", @@ -314,16 +309,7 @@ func ResourceSqlDatabaseInstance() *schema.Resource { settings.backup_configuration.enabled is set to true. For MySQL instances, ensure that settings.backup_configuration.binary_log_enabled is set to true. For Postgres instances, ensure that settings.backup_configuration.point_in_time_recovery_enabled -is set to true. Defaults to ZONAL. -For read pool instances, this field is read-only. The availability type is changed by specifying -the number of nodes (node_count).`, - }, - "effective_availability_type": { - Type: schema.TypeString, - Computed: true, - Description: `The availability type of the Cloud SQL instance, high availability -(REGIONAL) or single zone (ZONAL). This field always contains the value that is reported by the -API (for read pools, effective_availability_type may differ from availability_type).`, +is set to true. Defaults to ZONAL.`, }, "backup_configuration": { Type: schema.TypeList, @@ -433,6 +419,7 @@ API (for read pools, effective_availability_type may differ from availability_ty "disk_size": { Type: schema.TypeInt, Optional: true, + // Default is likely 10gb, but it is undocumented and may change. Computed: true, Description: `The size of data disk, in GB. Size of a running instance cannot be reduced but can be increased. 
The minimum value is 10GB for PD_SSD, PD_HDD and 20GB for HYPERDISK_BALANCED.`, }, @@ -442,7 +429,7 @@ API (for read pools, effective_availability_type may differ from availability_ty Computed: true, ForceNew: true, DiffSuppressFunc: caseDiffDashSuppress, - Description: `The type of supported data disk is tier dependent and can be PD_SSD or PD_HDD or HYPERDISK_BALANCED.`, + Description: `The type of supported data disk is tier dependent and can be PD_SSD or PD_HDD or HyperDisk_Balanced `, }, {{- if ne $.TargetVersionName "ga" }} "data_disk_provisioned_iops": { @@ -458,28 +445,6 @@ API (for read pools, effective_availability_type may differ from availability_ty Description: `Provisioned throughput measured in MiB per second for the data disk. This field is only used for HYPERDISK_BALANCED disk types.`, }, {{- end }} - "connection_pool_config": { - Type: schema.TypeSet, - Optional: true, - Computed: true, - Description: `The managed connection pool setting for a Cloud SQL instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "connection_pooling_enabled": { - Type: schema.TypeBool, - Optional: true, - Description: `Whether Managed Connection Pool is enabled for this instance.`, - }, - "flags": { - Type: schema.TypeSet, - Optional: true, - Set: schema.HashResource(sqlDatabaseFlagSchemaElem), - Elem: sqlDatabaseFlagSchemaElem, - Description: `List of connection pool configuration flags`, - }, - }, - }, - }, "ip_configuration": { Type: schema.TypeList, Optional: true, @@ -541,11 +506,6 @@ API (for read pools, effective_availability_type may differ from availability_ty Set: schema.HashString, Description: `List of consumer projects that are allow-listed for PSC connections to this instance. This instance can be connected to with PSC from any network in these projects. 
Each consumer project in this list may be represented by a project number (numeric) or by a project id (alphanumeric).`, }, - "network_attachment_uri": { - Type: schema.TypeString, - Optional: true, - Description: `Name of network attachment resource used to authorize a producer service to connect a PSC interface to the consumer's VPC. For example: "projects/myProject/regions/myRegion/networkAttachments/myNetworkAttachment". This is required to enable outbound connection on a PSC instance.`, - }, "psc_auto_connections": { Type: schema.TypeList, Optional: true, @@ -561,21 +521,6 @@ API (for read pools, effective_availability_type may differ from availability_ty Required: true, Description: `The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. The consumer host project of this network might be different from the consumer service project.`, }, - "consumer_network_status": { - Type: schema.TypeString, - Computed: true, - Description: `The connection policy status of the consumer network.`, - }, - "ip_address": { - Type: schema.TypeString, - Computed: true, - Description: `The IP address of the consumer endpoint.`, - }, - "status": { - Type: schema.TypeString, - Computed: true, - Description: `The connection status of the consumer endpoint.`, - }, }, }, Description: `A comma-separated list of networks or a comma-separated list of network-project pairs. Each project in this list is represented by a project number (numeric) or by a project ID (alphanumeric). This allows Private Service Connect connections to be created automatically for the specified networks.`, @@ -903,14 +848,7 @@ API (for read pools, effective_availability_type may differ from availability_ty Type: schema.TypeString, Computed: true, Optional: true, - Description: `The type of the instance. 
See https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1/instances#SqlInstanceType for supported values.`, - }, - - "node_count": { - Type: schema.TypeInt, - Computed: true, - Optional: true, - Description: `For a read pool instance, the number of nodes in the read pool.`, + Description: `The type of the instance. The valid values are:- 'SQL_INSTANCE_TYPE_UNSPECIFIED', 'CLOUD_SQL_INSTANCE', 'ON_PREMISES_INSTANCE' and 'READ_REPLICA_INSTANCE'.`, }, "replica_configuration": { @@ -1026,11 +964,6 @@ API (for read pools, effective_availability_type may differ from availability_ty MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "psa_write_endpoint": { - Type: schema.TypeString, - Optional: true, - Description: fmt.Sprintf(`If set, this field indicates this instance has a private service access (PSA) DNS endpoint that is pointing to the primary instance of the cluster. If this instance is the primary, then the DNS endpoint points to this instance. After a switchover or replica failover operation, this DNS endpoint points to the promoted instance. This is a read-only field, returned to the user as information. 
This field can exist even if a standalone instance doesn't have a DR replica yet or the DR replica is deleted.`), - }, "failover_dr_replica_name": { Type: schema.TypeString, Optional: true, @@ -1146,11 +1079,6 @@ API (for read pools, effective_availability_type may differ from availability_ty }, }, }, - "backupdr_backup": { - Type: schema.TypeString, - Optional: true, - Description: `The name of the BackupDR backup to restore from.`, - }, "clone": { Type: schema.TypeList, Optional: true, @@ -1299,10 +1227,6 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) instance.InstanceType = d.Get("instance_type").(string) } - if _, ok := d.GetOk("node_count"); ok { - instance.NodeCount = int64(d.Get("node_count").(int)) - } - instance.RootPassword = d.Get("root_password").(string) // Modifying a replica during Create can cause problems if the master is @@ -1457,14 +1381,7 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) // Perform a backup restore if the backup context exists if r, ok := d.GetOk("restore_backup_context"); ok { - log.Printf("[DEBUG] Restoring instance %s from backup context: %v", name, r) - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, r, "") - if err != nil { - return err - } - } else if b, ok := d.GetOk("backupdr_backup"); ok && b.(string) != "" { - log.Printf("[DEBUG] Restoring instance %s from BackupDR backup: %s", name, b.(string)) - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, nil, b) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, r) if err != nil { return err } @@ -1515,7 +1432,6 @@ func expandSqlDatabaseInstanceSettings(configured []interface{}, databaseVersion MaintenanceWindow: expandMaintenanceWindow(_settings["maintenance_window"].([]interface{})), InsightsConfig: expandInsightsConfig(_settings["insights_config"].([]interface{})), PasswordValidationPolicy: 
expandPasswordValidationPolicy(_settings["password_validation_policy"].([]interface{})), - ConnectionPoolConfig: expandConnectionPoolConfig(_settings["connection_pool_config"].(*schema.Set).List()), } resize := _settings["disk_autoresize"].(bool) @@ -1653,7 +1569,6 @@ func expandPscConfig(configured []interface{}) *sqladmin.PscConfig { return &sqladmin.PscConfig{ PscEnabled: _entry["psc_enabled"].(bool), AllowedConsumerProjects: tpgresource.ConvertStringArr(_entry["allowed_consumer_projects"].(*schema.Set).List()), - NetworkAttachmentUri: _entry["network_attachment_uri"].(string), PscAutoConnections: expandPscAutoConnectionConfig(_entry["psc_auto_connections"].([]interface{})), } } @@ -1661,35 +1576,6 @@ func expandPscConfig(configured []interface{}) *sqladmin.PscConfig { return nil } -func expandFlags(configured []interface{}) []*sqladmin.ConnectionPoolFlags { - connectionPoolFlags := make([]*sqladmin.ConnectionPoolFlags, 0, len(configured)) - for _, _flag := range configured { - if _flag == nil { - continue - } - _entry := _flag.(map[string]interface{}) - - connectionPoolFlags = append(connectionPoolFlags, &sqladmin.ConnectionPoolFlags{ - Name: _entry["name"].(string), - Value: _entry["value"].(string), - }) - } - return connectionPoolFlags -} - -func expandConnectionPoolConfig(configured []interface{}) *sqladmin.ConnectionPoolConfig { - if len(configured) == 0 || configured[0] == nil { - return nil - } - - _connectionPoolConfig := configured[0].(map[string]interface{}) - - return &sqladmin.ConnectionPoolConfig{ - ConnectionPoolingEnabled: _connectionPoolConfig["connection_pooling_enabled"].(bool), - Flags: expandFlags(_connectionPoolConfig["flags"].(*schema.Set).List()), - } -} - func expandAuthorizedNetworks(configured []interface{}) []*sqladmin.AclEntry { an := make([]*sqladmin.AclEntry, 0, len(configured)) for _, _acl := range configured { @@ -1898,12 +1784,10 @@ func resourceSqlDatabaseInstanceRead(d *schema.ResourceData, meta interface{}) e if err := 
d.Set("instance_type", instance.InstanceType); err != nil { return fmt.Errorf("Error setting instance_type: %s", err) } - if err := d.Set("node_count", instance.NodeCount); err != nil { - return fmt.Errorf("Error setting node_count: %s", err) - } - if err := d.Set("settings", flattenSettings(instance.Settings, instance.InstanceType, d)); err != nil { + if err := d.Set("settings", flattenSettings(instance.Settings, d)); err != nil { log.Printf("[WARN] Failed to set SQL Database Instance Settings") } + if instance.DiskEncryptionConfiguration != nil { if err := d.Set("encryption_key_name", instance.DiskEncryptionConfiguration.KmsKeyName); err != nil { return fmt.Errorf("Error setting encryption_key_name: %s", err) @@ -2251,13 +2135,9 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) instance.InstanceType = d.Get("instance_type").(string) } - if _, ok := d.GetOk("node_count"); ok { - instance.NodeCount = int64(d.Get("node_count").(int)) - } - // Database Version is required for all calls with Google ML integration enabled or it will be rejected by the API. 
- if d.Get("settings.0.enable_google_ml_integration").(bool) || len(_settings["connection_pool_config"].(*schema.Set).List()) > 0 { - instance.DatabaseVersion = databaseVersion + if d.Get("settings.0.enable_google_ml_integration").(bool) { + instance.DatabaseVersion = databaseVersion } failoverDrReplicaName := d.Get("replication_cluster.0.failover_dr_replica_name").(string) @@ -2287,14 +2167,7 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) // Perform a backup restore if the backup context exists and has changed if r, ok := d.GetOk("restore_backup_context"); ok { if d.HasChange("restore_backup_context") { - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), r, "") - if err != nil { - return err - } - } - } else if b, ok := d.GetOk("backupdr_backup"); ok && b.(string) != "" { - if d.HasChange("backupdr_backup") { - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), nil, b) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), r) if err != nil { return err } @@ -2401,9 +2274,9 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/instances/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)$", - "^(?P[^/]+)$"}, d, config); err != nil { + "projects/(?P[^/]+)/instances/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)"}, d, config); err != nil { return nil, err } @@ -2421,14 +2294,13 @@ func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) return []*schema.ResourceData{d}, nil } -func flattenSettings(settings *sqladmin.Settings, iType string, d *schema.ResourceData) []map[string]interface{} { +func flattenSettings(settings 
*sqladmin.Settings, d *schema.ResourceData) []map[string]interface{} { data := map[string]interface{}{ "version": settings.SettingsVersion, "tier": settings.Tier, "edition": flattenEdition(settings.Edition), "activation_policy": settings.ActivationPolicy, - "availability_type": d.Get("settings.0.availability_type"), - "effective_availability_type": settings.AvailabilityType, + "availability_type": settings.AvailabilityType, "collation": settings.Collation, "connector_enforcement": settings.ConnectorEnforcement, "disk_type": settings.DataDiskType, @@ -2445,18 +2317,6 @@ func flattenSettings(settings *sqladmin.Settings, iType string, d *schema.Resour "retain_backups_on_delete": settings.RetainBackupsOnDelete, } - if data["availability_type"] == "" { - data["availability_type"] = "ZONAL" - } - // For read pools, availability type is server managed. Above, we - // pull it from the old TF resource so that it never shows a - // diff. Now, here, for non-pool instances, we overwrite it with the - // value obtained from the API (which would be the typical way to - // populate the field). - if iType != "READ_POOL_INSTANCE" { - data["availability_type"] = settings.AvailabilityType - } - if settings.ActiveDirectoryConfig != nil { data["active_directory_config"] = flattenActiveDirectoryConfig(settings.ActiveDirectoryConfig) } @@ -2477,10 +2337,6 @@ func flattenSettings(settings *sqladmin.Settings, iType string, d *schema.Resour data["database_flags"] = flattenDatabaseFlags(settings.DatabaseFlags) } - if settings.ConnectionPoolConfig != nil { - data["connection_pool_config"] = flattenConnectionPoolConfig(settings.ConnectionPoolConfig) - } - if settings.IpConfiguration != nil { data["ip_configuration"] = flattenIpConfiguration(settings.IpConfiguration, d) } @@ -2640,10 +2496,6 @@ func flattenDatabaseFlags(databaseFlags []*sqladmin.DatabaseFlags) []map[string] // is nil since replication_cluster is computed+optional. 
func flattenReplicationCluster(replicationCluster *sqladmin.ReplicationCluster, d *schema.ResourceData) []map[string]interface{} { data := make(map[string]interface{}) - data["psa_write_endpoint"] = "" - if replicationCluster != nil && replicationCluster.PsaWriteEndpoint != "" { - data["psa_write_endpoint"] = replicationCluster.PsaWriteEndpoint - } data["failover_dr_replica_name"] = "" if replicationCluster != nil && replicationCluster.FailoverDrReplicaName != "" { data["failover_dr_replica_name"] = replicationCluster.FailoverDrReplicaName @@ -2655,38 +2507,6 @@ func flattenReplicationCluster(replicationCluster *sqladmin.ReplicationCluster, return []map[string]interface{}{data} } -func flattenConnectionPoolFlags(connectionPoolFlags []*sqladmin.ConnectionPoolFlags) []interface{} { - if len(connectionPoolFlags) == 0 { // Handles nil or empty slice - return make([]interface{}, 0) // Explicitly return empty slice - } - - mcpflags := make([]interface{}, len(connectionPoolFlags)) // Pre-allocate for efficiency - for i, mcpflag := range connectionPoolFlags { - data := map[string]interface{}{ - "name": mcpflag.Name, - "value": mcpflag.Value, - } - mcpflags[i] = data - } - return mcpflags -} - -func flattenConnectionPoolConfig(connectionPoolConfig *sqladmin.ConnectionPoolConfig) []interface{}{ - if connectionPoolConfig == nil { - return []interface{}{ - map[string]interface{}{ - "connection_pooling_enabled": false, - "flags": make([]interface{}, 0), // Default to empty flags - }, - } - } - data := map[string]interface{}{ - "connection_pooling_enabled": connectionPoolConfig.ConnectionPoolingEnabled, // Corrected key - "flags": flattenConnectionPoolFlags(connectionPoolConfig.Flags), // Corrected key - } - return []interface{}{data} -} - func flattenIpConfiguration(ipConfiguration *sqladmin.IpConfiguration, d *schema.ResourceData) interface{} { data := map[string]interface{}{ "ipv4_enabled": ipConfiguration.Ipv4Enabled, @@ -2716,10 +2536,7 @@ func 
flattenPscAutoConnections(pscAutoConnections []*sqladmin.PscAutoConnectionC for _, flag := range pscAutoConnections { data := map[string]interface{}{ "consumer_network": flag.ConsumerNetwork, - "consumer_network_status": flag.ConsumerNetworkStatus, "consumer_service_project_id": flag.ConsumerProject, - "ip_address": flag.IpAddress, - "status": flag.Status, } flags = append(flags, data) @@ -2732,7 +2549,6 @@ func flattenPscConfigs(pscConfig *sqladmin.PscConfig) interface{} { data := map[string]interface{}{ "psc_enabled": pscConfig.PscEnabled, "allowed_consumer_projects": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(pscConfig.AllowedConsumerProjects)), - "network_attachment_uri": pscConfig.NetworkAttachmentUri, "psc_auto_connections": flattenPscAutoConnections(pscConfig.PscAutoConnections), } @@ -2942,16 +2758,12 @@ func expandRestoreBackupContext(configured []interface{}) *sqladmin.RestoreBacku } } -func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transport_tpg.Config, userAgent, project, instanceId string, r interface{}, backupdrBackup interface{}) error { +func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transport_tpg.Config, userAgent, project, instanceId string, r interface{}) error { log.Printf("[DEBUG] Initiating SQL database instance backup restore") + restoreContext := r.([]interface{}) - backupRequest := &sqladmin.InstancesRestoreBackupRequest{} - - if r != nil { - restoreContext := r.([]interface{}) - backupRequest.RestoreBackupContext = expandRestoreBackupContext(restoreContext) - } else if backupdrBackup != nil && backupdrBackup.(string) != "" { - backupRequest.BackupdrBackup = backupdrBackup.(string) + backupRequest := &sqladmin.InstancesRestoreBackupRequest{ + RestoreBackupContext: expandRestoreBackupContext(restoreContext), } var op *sqladmin.Operation diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml index 18549ce5cb8b..003e20c08d5f 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml @@ -5,7 +5,6 @@ api_version: 'v1beta4' api_resource_type_kind: 'DatabaseInstance' fields: - field: 'available_maintenance_versions' - - field: 'backupdr_backup' - field: 'clone.allocated_ip_range' - field: 'clone.database_names' - field: 'clone.point_in_time' @@ -80,8 +79,6 @@ fields: - field: 'settings.disk_autoresize' - field: 'settings.disk_autoresize_limit' - field: 'settings.disk_size' - - field: 'settings.data_disk_provisioned_iops' - - field: 'settings.data_disk_provisioned_throughput' - field: 'settings.disk_type' - field: 'settings.edition' - field: 'settings.enable_dataplex_integration' @@ -100,10 +97,7 @@ fields: - field: 'settings.ip_configuration.private_network' - field: 'settings.ip_configuration.psc_config.allowed_consumer_projects' - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_network' - - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_network_status' - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_service_project_id' - - field: 'settings.ip_configuration.psc_config.psc_auto_connections.ip_address' - - field: 'settings.ip_configuration.psc_config.psc_auto_connections.status' - field: 'settings.ip_configuration.psc_config.psc_enabled' - field: 'settings.ip_configuration.server_ca_mode' - field: 'settings.ip_configuration.server_ca_pool' diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 5bf0eeeeace0..38118d6cb415 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -4,7 +4,6 @@ import ( "fmt" "regexp" "strconv" - "strings" "testing" "time" @@ -852,84 +851,6 @@ func TestAccSqlDatabaseInstance_withPrivateNetwork_withoutAllocatedIpRange(t *te }) } -func TestAccSqlDatabaseInstance_withMCPEnabled(t *testing.T) { - t.Parallel() - - instanceName := "tf-test-" + acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withMCPEnabled(instanceName), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccSqlDatabaseInstance_withoutMCPEnabled(t *testing.T) { - t.Parallel() - - instanceName := "tf-test-" + acctest.RandString(t, 10) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccSqlDatabaseInstance_updateMCPEnabled(t *testing.T) { - t.Parallel() - - instanceName := "tf-test-" + acctest.RandString(t, 10) - resourceName := "google_sql_database_instance.instance" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: 
testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName), - }, - { - Config: testAccSqlDatabaseInstance_withMCPEnabled(instanceName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "settings.0.connection_pool_config.0.connection_pooling_enabled", "true"), - resource.TestCheckResourceAttr(resourceName, "settings.0.connection_pool_config.0.flags.#", "1"), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - - func TestAccSqlDatabaseInstance_withPSCEnabled_withoutAllowedConsumerProjects(t *testing.T) { t.Parallel() @@ -1184,86 +1105,6 @@ func TestAccSqlDatabaseInstance_withPSCEnabled_withIpV4Enabled(t *testing.T) { }) } -func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUri_thenRemoveNetworkAttachment(t *testing.T) { - t.Parallel() - - random_suffix := acctest.RandString(t, 10) - instanceName := "tf-test-" + random_suffix - projectId := envvar.GetTestProjectFromEnv() - region := "us-central1" - networkNameStr := "tf-test-cloud-sql-network-" + random_suffix - subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix - networkAttachmentNameStr := "tf-test-cloud-sql-update-na-" + random_suffix - networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) - subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) - networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) - networkAttachmentUri := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) - - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - 
Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), - Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), - Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, networkAttachmentUri)), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName), - Check: resource.ComposeTestCheckFunc(verifyPscNetorkAttachmentOperation("google_sql_database_instance.instance", true, true, "")), - }, - }, - }) -} - -func TestAccSqlDatabaseInstance_withPscEnabled_withNetworkAttachmentUriOnCreate(t *testing.T) { - t.Parallel() - - random_suffix := acctest.RandString(t, 10) - instanceName := "tf-test-" + random_suffix - projectId := envvar.GetTestProjectFromEnv() - region := "us-central1" - networkNameStr := "tf-test-cloud-sql-network-" + random_suffix - subnetworkNameStr := "tf-test-cloud-sql-subnetwork-" + random_suffix - networkAttachmentNameStr := "tf-test-cloud-sql-update-na-" + random_suffix - networkName := acctest.BootstrapSharedTestNetwork(t, networkNameStr) - subnetworkName := acctest.BootstrapSubnet(t, subnetworkNameStr, networkName) - networkAttachmentName := acctest.BootstrapNetworkAttachment(t, networkAttachmentNameStr, subnetworkName) - networkAttachmentUri 
:= fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", projectId, region, networkAttachmentName) - - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName, networkAttachmentUri), - ExpectError: regexp.MustCompile(`.*Network attachment used for Private Service Connect interfaces can not be assigned with instance creation.*`), - }, - }, - }) -} - func TestAccSqlDatabaseInstance_withPrivateNetwork_withAllocatedIpRange(t *testing.T) { t.Parallel() @@ -1400,43 +1241,6 @@ func TestAccSqlDatabaseInstance_createFromBackup(t *testing.T) { }) } -func TestAccSqlDatabaseInstance_createFromBackupDR(t *testing.T) { - t.Parallel() - - // Bootstrap the BackupDR vault - backupVaultID := "bv-test" - location := "us-central1" - project := envvar.GetTestProjectFromEnv() - backupvault := acctest.BootstrapBackupDRVault(t, backupVaultID, location) - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project": project, - "backup_vault_id": backupVaultID, - "backup_vault": backupvault, - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_createFromBackupDR(context), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "backupdr_backup"}, - }, - }, - }) -} - func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { // Sqladmin 
client acctest.SkipIfVcr(t) @@ -1444,7 +1248,6 @@ func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), - "db_version": "POSTGRES_11", "original_db_name": acctest.BootstrapSharedSQLInstanceBackupRun(t), } @@ -1475,53 +1278,6 @@ func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { }) } -func TestAccSqlDatabaseInstance_BackupDRUpdate(t *testing.T) { - t.Parallel() - - // Bootstrap the BackupDR vault - backupVaultID := "bv-test" - location := "us-central1" - project := envvar.GetTestProjectFromEnv() - backupvault := acctest.BootstrapBackupDRVault(t, backupVaultID, location) - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "project": project, - "backup_vault_id": backupVaultID, - "backup_vault": backupvault, - "db_version": "MYSQL_8_0_41", - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccSqlDatabaseInstance_beforeBackup(context), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testAccSqlDatabaseInstance_updateFromBackupDR(context), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "backupdr_backup"}, - }, - }, - }) -} - func TestAccSqlDatabaseInstance_basicClone(t *testing.T) { // Sqladmin client acctest.SkipIfVcr(t) @@ -2905,60 +2661,6 @@ func TestAccSqlDatabaseInstance_SwitchoverSuccess(t *testing.T) { }) } -func TestAccSqlDatabaseInstance_MysqlEplusWithPrivateNetwork(t 
*testing.T) { - t.Parallel() - - instanceName := "tf-test-" + acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "endpoint") - projectId := envvar.GetTestProjectFromEnv() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleSqlDatabaseInstanceConfig_eplusOnPrivateNetwork(projectId, networkName, instanceName, "MYSQL_8_0"), - Check: resource.ComposeTestCheckFunc(verifyCreateOperationOnEplusWithPrivateNetwork("google_sql_database_instance.instance")), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccSqlDatabaseInstance_PostgresEplusWithPrivateNetwork(t *testing.T) { - t.Parallel() - - instanceName := "tf-test-" + acctest.RandString(t, 10) - networkName := acctest.BootstrapSharedServiceNetworkingConnection(t, "endpoint") - projectId := envvar.GetTestProjectFromEnv() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleSqlDatabaseInstanceConfig_eplusOnPrivateNetwork(projectId, networkName, instanceName, "POSTGRES_12"), - Check: resource.ComposeTestCheckFunc(verifyCreateOperationOnEplusWithPrivateNetwork("google_sql_database_instance.instance")), - }, - { - ResourceName: "google_sql_database_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateIdPrefix: fmt.Sprintf("%s/", projectId), - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) 
-} - // Switchover for MySQL. func TestAccSqlDatabaseInstance_MysqlSwitchoverSuccess(t *testing.T) { t.Parallel() @@ -3115,238 +2817,55 @@ func TestAccSqlDatabaseInstance_PostgresSwitchoverSuccess(t *testing.T) { }) } -// Read pool for Postgres. Scale out (change node count) -func TestAccSqlDatabaseInstance_PostgresReadPoolScaleOutSuccess(t *testing.T) { +func TestAccSqlDatabaseInstance_updateSslOptionsForPostgreSQL(t *testing.T) { t.Parallel() - primaryName := "tf-test-pg-readpool-primary-" + acctest.RandString(t, 10) - readPoolName := "tf-test-pg-readpool-" + acctest.RandString(t, 10) - project := envvar.GetTestProjectFromEnv() + + databaseName := "tf-test-" + acctest.RandString(t, 10) + databaseVersion := "POSTGRES_14" + resourceName := "google_sql_database_instance.instance" + acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + + // We don't do ImportStateVerify for the ssl_mode because of the implementation. The ssl_mode is expected to be discarded if the local state doesn't have it. 
Steps: []resource.TestStep{ { - Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "POSTGRES_15", - ReplicaName: readPoolName, - NodeCount: 1, - }), + Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "ALLOW_UNENCRYPTED_AND_ENCRYPTED"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.ssl_mode", "ALLOW_UNENCRYPTED_AND_ENCRYPTED"), + ), }, { - ResourceName: "google_sql_database_instance.original-primary", + ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "settings.0.ip_configuration.0.ssl_mode"}, + }, + { + Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "ENCRYPTED_ONLY"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.ssl_mode", "ENCRYPTED_ONLY"), + ), }, { - ResourceName: "google_sql_database_instance.original-read-pool", + ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "settings.0.ip_configuration.0.ssl_mode"}, }, { - Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "POSTGRES_15", - ReplicaName: readPoolName, - NodeCount: 2, - }), + Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "TRUSTED_CLIENT_CERTIFICATE_REQUIRED"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.ssl_mode", "TRUSTED_CLIENT_CERTIFICATE_REQUIRED"), + ), }, { - ResourceName: "google_sql_database_instance.original-primary", + ResourceName: 
resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - ResourceName: "google_sql_database_instance.original-read-pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -// Read pool for Postgres. Scale up (change machine type) -func TestAccSqlDatabaseInstance_PostgresReadPoolScaleUpSuccess(t *testing.T) { - t.Parallel() - primaryName := "tf-test-pg-readpool-mtc-primary-" + acctest.RandString(t, 10) - readPoolName := "tf-test-pg-readpool-mtc-" + acctest.RandString(t, 10) - project := envvar.GetTestProjectFromEnv() - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "POSTGRES_15", - ReplicaName: readPoolName, - NodeCount: 1, - ReplicaMachineType: "db-perf-optimized-N-2", - }), - }, - { - ResourceName: "google_sql_database_instance.original-primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - ResourceName: "google_sql_database_instance.original-read-pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "POSTGRES_15", - ReplicaName: readPoolName, - NodeCount: 1, - ReplicaMachineType: "db-perf-optimized-N-4", - }), - }, - { - ResourceName: "google_sql_database_instance.original-primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - ResourceName: 
"google_sql_database_instance.original-read-pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -// Read pool for MySQL. Enable and disable read pool -func TestAccSqlDatabaseInstance_MysqlReadPoolEnableDisableSuccess(t *testing.T) { - t.Parallel() - primaryName := "tf-test-mysql-readpool-primary-" + acctest.RandString(t, 10) - readPoolName := "tf-test-mysql-readpool-" + acctest.RandString(t, 10) - project := envvar.GetTestProjectFromEnv() - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "MYSQL_8_0", - ReplicaName: readPoolName, - InstanceType: "READ_REPLICA_INSTANCE", - }), - }, - { - ResourceName: "google_sql_database_instance.original-primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - ResourceName: "google_sql_database_instance.original-read-pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - // Enable read pool - { - Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "MYSQL_8_0", - ReplicaName: readPoolName, - InstanceType: "READ_POOL_INSTANCE", - NodeCount: 1, - }), - }, - { - ResourceName: "google_sql_database_instance.original-primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - ResourceName: "google_sql_database_instance.original-read-pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - // Disable read pool - { 
- Config: testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName, ReadPoolConfig{ - DatabaseType: "MYSQL_8_0", - ReplicaName: readPoolName, - InstanceType: "READ_REPLICA_INSTANCE", - }), - }, - { - ResourceName: "google_sql_database_instance.original-primary", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - { - ResourceName: "google_sql_database_instance.original-read-pool", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection"}, - }, - }, - }) -} - -func TestAccSqlDatabaseInstance_updateSslOptionsForPostgreSQL(t *testing.T) { - t.Parallel() - - databaseName := "tf-test-" + acctest.RandString(t, 10) - databaseVersion := "POSTGRES_14" - resourceName := "google_sql_database_instance.instance" - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), - - // We don't do ImportStateVerify for the ssl_mode because of the implementation. The ssl_mode is expected to be discarded if the local state doesn't have it. 
- Steps: []resource.TestStep{ - { - Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "ALLOW_UNENCRYPTED_AND_ENCRYPTED"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.ssl_mode", "ALLOW_UNENCRYPTED_AND_ENCRYPTED"), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "settings.0.ip_configuration.0.ssl_mode"}, - }, - { - Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "ENCRYPTED_ONLY"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.ssl_mode", "ENCRYPTED_ONLY"), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "settings.0.ip_configuration.0.ssl_mode"}, - }, - { - Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "TRUSTED_CLIENT_CERTIFICATE_REQUIRED"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.ssl_mode", "TRUSTED_CLIENT_CERTIFICATE_REQUIRED"), - ), - }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"deletion_protection", "settings.0.ip_configuration.0.ssl_mode"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "settings.0.ip_configuration.0.ssl_mode"}, }, { Config: testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName, databaseVersion, "ALLOW_UNENCRYPTED_AND_ENCRYPTED"), @@ -4440,35 +3959,6 @@ resource "google_sql_database_instance" "original-replica" { `, replicaName) } -func testGoogleSqlDatabaseInstanceConfig_eplusOnPrivateNetwork(project, networkName, instanceName, databaseVersion string) string { - return 
fmt.Sprintf(` -data "google_compute_network" "servicenet" { - name = "%s" -} - -resource "google_sql_database_instance" "instance" { - project = "%s" - name = "%s" - region = "us-east1" - database_version = "%s" - instance_type = "CLOUD_SQL_INSTANCE" - deletion_protection = false - - settings { - tier = "db-perf-optimized-N-2" - edition = "ENTERPRISE_PLUS" - ip_configuration { - ipv4_enabled = "false" - private_network = data.google_compute_network.servicenet.self_link - } - backup_configuration { - enabled = true - } - } -} -`, networkName, project, instanceName, databaseVersion) -} - func testGoogleSqlDatabaseInstanceConfig_mysqlEplusWithReplica(project, primaryName, replicaName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "original-primary" { @@ -5026,81 +4516,6 @@ resource "google_sql_database_instance" "original-replica" { `, project, replicaName) } -type ReadPoolConfig struct { - DatabaseType string - ReplicaName string - // InstanceType specifies the instance type of the replica, - // defaulting to READ_POOL_INSTANCE. - // - // Despite the naming of this struct, you can also set it to - // READ_REPLICA_INSTANCE to create an ordinary read replica in order - // to test enable/disable pool scenarios. - InstanceType string - NodeCount int64 - // ReplicaMachineType gives the machine type of the read pool nodes - // or read replica. It defaults to db-perf-optimized-N-2. 
- ReplicaMachineType string -} - -func testGoogleSqlDatabaseInstanceConfig_eplusWithReadPool(project, primaryName string, rpconfig ReadPoolConfig) string { - nodeCountStr := "" - if rpconfig.NodeCount > 0 { - nodeCountStr = fmt.Sprintf(` node_count = %d -`, rpconfig.NodeCount) - } - - if rpconfig.InstanceType == "" { - rpconfig.InstanceType = "READ_POOL_INSTANCE" - } - - if rpconfig.ReplicaMachineType == "" { - rpconfig.ReplicaMachineType = "db-perf-optimized-N-2" - } - - primaryTxnLogs := "" - if strings.HasPrefix(rpconfig.DatabaseType, "MYSQL") { - primaryTxnLogs = "binary_log_enabled = true\n" - } else if strings.HasPrefix(rpconfig.DatabaseType, "POSTGRES") { - primaryTxnLogs = "point_in_time_recovery_enabled = true\n" - } - - return fmt.Sprintf(` -resource "google_sql_database_instance" "original-primary" { - project = "%s" - name = "%s" - region = "us-east1" - database_version = "%s" - instance_type = "CLOUD_SQL_INSTANCE" - deletion_protection = false - - settings { - tier = "db-perf-optimized-N-2" - edition = "ENTERPRISE_PLUS" - backup_configuration { - enabled = true -%s - } - } -} - -resource "google_sql_database_instance" "original-read-pool" { - project = "%s" - name = "%s" - region = "us-east1" - database_version = "%s" - instance_type = "%s" -%s - master_instance_name = google_sql_database_instance.original-primary.name - deletion_protection = false - - settings { - tier = "%s" - edition = "ENTERPRISE_PLUS" - } -} -`, project, primaryName, rpconfig.DatabaseType, primaryTxnLogs, project, rpconfig.ReplicaName, rpconfig.DatabaseType, rpconfig.InstanceType, nodeCountStr, rpconfig.ReplicaMachineType) -} - func testAccSqlDatabaseInstance_basicInstanceForPsc(instanceName string, projectId string, orgId string, billingAccount string) string { return fmt.Sprintf(` resource "google_project" "testproject" { @@ -5303,29 +4718,6 @@ func verifyPscOperation(resourceName string, isPscConfigExpected bool, expectedP } } -func 
verifyCreateOperationOnEplusWithPrivateNetwork(resourceName string) func(*terraform.State) error { - return func(s *terraform.State) error { - resource, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Can't find %s in state", resourceName) - } - - resourceAttributes := resource.Primary.Attributes - _, ok = resourceAttributes["replication_cluster.#"] - if !ok { - return fmt.Errorf("replication_cluster.# block is not present in state for %s", resourceName) - } - - _, ok = resourceAttributes["replication_cluster.0.psa_write_endpoint"] - if !ok { - return fmt.Errorf("replication_cluster.psa_write_endpoint is not present in state for %s", resourceName) - } - - return nil - } -} - - func verifyPscAutoConnectionsOperation(resourceName string, isPscConfigExpected bool, expectedPscEnabled bool, isPscAutoConnectionConfigExpected bool, expectedConsumerNetwork string, expectedConsumerProject string) func(*terraform.State) error { return func(s *terraform.State) error { resource, ok := s.RootModule().Resources[resourceName] @@ -5373,84 +4765,6 @@ func verifyPscAutoConnectionsOperation(resourceName string, isPscConfigExpected } } -func verifyPscNetorkAttachmentOperation(resourceName string, isPscConfigExpected bool, expectedPscEnabled bool, expectedNetworkAttachmentUri string ) func(*terraform.State) error { - return func(s *terraform.State) error { - resource, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Can't find %s in state", resourceName) - } - - resourceAttributes := resource.Primary.Attributes - _, ok = resourceAttributes["settings.0.ip_configuration.#"] - if !ok { - return fmt.Errorf("settings.0.ip_configuration.# block is not present in state for %s", resourceName) - } - - if isPscConfigExpected { - _, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.#"] - if !ok { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config property is not present or set in state of %s", resourceName) - } - 
- pscEnabledStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.psc_enabled"] - pscEnabled, err := strconv.ParseBool(pscEnabledStr) - if err != nil || pscEnabled != expectedPscEnabled { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.psc_enabled property value is not set as expected in state of %s, expected %v, actual %v", resourceName, expectedPscEnabled, pscEnabled) - } - - networkAttachmentUriStr, ok := resourceAttributes["settings.0.ip_configuration.0.psc_config.0.network_attachment_uri"] - if !ok { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not present in state for %s", resourceName) - } - - if networkAttachmentUriStr != expectedNetworkAttachmentUri && len(networkAttachmentUriStr) == 0 { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block is not set in state for %s", resourceName) - } - - if networkAttachmentUriStr != expectedNetworkAttachmentUri { - return fmt.Errorf("settings.0.ip_configuration.0.psc_config.0.network_attachment_uri block does not match the expected value for %s", resourceName) - } - } - - return nil - } -} - -func testAccSqlDatabaseInstance_withoutMCPEnabled(instanceName string) string { - return fmt.Sprintf(` -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - database_version = "POSTGRES_16" - deletion_protection = false - settings { - tier = "db-perf-optimized-N-2" - } -} -`, instanceName) -} - -func testAccSqlDatabaseInstance_withMCPEnabled(instanceName string) string { - return fmt.Sprintf(` -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - database_version = "POSTGRES_16" - deletion_protection = false - settings { - tier = "db-perf-optimized-N-2" - connection_pool_config { - connection_pooling_enabled = true - flags { - name = "max_client_connections" - value = "1980" - } - } - } -} -`, instanceName) -} - func 
testAccSqlDatabaseInstance_withPSCEnabled_withoutPscAutoConnections(instanceName string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "instance" { @@ -5476,32 +4790,6 @@ resource "google_sql_database_instance" "instance" { `, instanceName) } -func testAccSqlDatabaseInstance_withPSCEnabled_withoutPscOutbound(instanceName string) string { - return fmt.Sprintf(` -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - database_version = "MYSQL_8_0" - deletion_protection = false - settings { - tier = "db-g1-small" - ip_configuration { - psc_config { - psc_enabled = true - network_attachment_uri = "" - } - ipv4_enabled = false - } - backup_configuration { - enabled = true - binary_log_enabled = true - } - availability_type = "REGIONAL" - } -} -`, instanceName) -} - func testAccSqlDatabaseInstance_withPSCEnabled_withPscAutoConnections(instanceName string, projectId string, networkName string) string { return fmt.Sprintf(` data "google_compute_network" "testnetwork" { @@ -5535,32 +4823,6 @@ resource "google_sql_database_instance" "instance" { `, networkName, instanceName, projectId, networkName, projectId) } -func testAccSqlDatabaseInstance_withPSCEnabled_withNetworkAttachmentUri(instanceName string, networkAttachmentUri string) string { - return fmt.Sprintf(` - -resource "google_sql_database_instance" "instance" { - name = "%s" - region = "us-central1" - database_version = "MYSQL_8_0" - deletion_protection = false - settings { - tier = "db-g1-small" - ip_configuration { - psc_config { - psc_enabled = true - network_attachment_uri = "%s" - } - ipv4_enabled = false - } - backup_configuration { - enabled = true - binary_log_enabled = true - } - availability_type = "REGIONAL" - } -}`, instanceName, networkAttachmentUri) -} - func testAccSqlDatabaseInstance_withPrivateNetwork_withoutAllocatedIpRange(databaseName, networkName string, specifyPrivatePathOption bool, enablePrivatePath bool) string { privatePathOption 
:= "" if specifyPrivatePathOption { @@ -6436,7 +5698,7 @@ func testAccSqlDatabaseInstance_beforeBackup(context map[string]interface{}) str return acctest.Nprintf(` resource "google_sql_database_instance" "instance" { name = "tf-test-%{random_suffix}" - database_version = "%{db_version}" + database_version = "POSTGRES_11" region = "us-central1" settings { @@ -6485,203 +5747,6 @@ data "google_sql_backup_run" "backup" { `, context) } -func testAccSqlDatabaseInstance_createFromBackupDR(context map[string]interface{}) string { - return acctest.Nprintf(` -// Create service account -resource "google_service_account" "bkdr_sa" { - account_id = "tf-test-bkdr-sa-%{random_suffix}" - display_name = "Backup DR Service Account" -} - -// Create a backup plan -resource "google_backup_dr_backup_plan" "plan" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-%{random_suffix}" - resource_type = "sqladmin.googleapis.com/Instance" - backup_vault = "%{backup_vault}" - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 7 - - standard_schedule { - recurrence_type = "DAILY" - hourly_frequency = 6 - time_zone = "UTC" - - backup_window { - start_hour_of_day = 0 - end_hour_of_day = 23 - } - } - } -} - -// Create source SQL instance to backup -resource "google_sql_database_instance" "source" { - name = "tf-test-source-%{random_suffix}" - database_version = "MYSQL_8_0_41" - region = "us-central1" - project = "%{project}" - settings { - tier = "db-f1-micro" - backup_configuration { - enabled = true - } - } - lifecycle { - ignore_changes = [ - settings[0].backup_configuration[0].enabled, - ] - } - deletion_protection = false -} - -// Associate backup plan with SQL instance -resource "google_backup_dr_backup_plan_association" "association" { - location = "us-central1" - backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" - resource = "projects/${google_sql_database_instance.source.project}/instances/${google_sql_database_instance.source.name}" - 
resource_type = "sqladmin.googleapis.com/Instance" - backup_plan = google_backup_dr_backup_plan.plan.name -} - -// Wait for the first backup to be created -resource "time_sleep" "wait_10_mins" { - depends_on = [google_backup_dr_backup_plan_association.association] - - create_duration = "600s" -} - -data "google_backup_dr_backup" "sql_backups" { - project = "%{project}" - location = "us-central1" - backup_vault_id = "%{backup_vault_id}" - data_source_id = element(split("/", google_backup_dr_backup_plan_association.association.data_source), length(split("/", google_backup_dr_backup_plan_association.association.data_source)) - 1) - - depends_on = [time_sleep.wait_10_mins] -} - -resource "google_sql_database_instance" "instance" { - name = "tf-test-%{random_suffix}" - database_version = "MYSQL_8_0_41" - region = "us-central1" - - settings { - tier = "db-g1-small" - backup_configuration { - enabled = true - } - } - - backupdr_backup = data.google_backup_dr_backup.sql_backups.backups[0].name - - deletion_protection = false -} -`, context) -} - -func testAccSqlDatabaseInstance_updateFromBackupDR(context map[string]interface{}) string { - return acctest.Nprintf(` -// Create service account -resource "google_service_account" "bkdr_sa" { - account_id = "tf-test-bkdr-sa-%{random_suffix}" - display_name = "Backup DR Service Account" -} - -// Create a backup plan -resource "google_backup_dr_backup_plan" "plan" { - location = "us-central1" - backup_plan_id = "tf-test-bp-test-%{random_suffix}" - resource_type = "sqladmin.googleapis.com/Instance" - backup_vault = "%{backup_vault}" - - backup_rules { - rule_id = "rule-1" - backup_retention_days = 7 - - standard_schedule { - recurrence_type = "DAILY" - hourly_frequency = 6 - time_zone = "UTC" - - backup_window { - start_hour_of_day = 0 - end_hour_of_day = 23 - } - } - } -} - -// Create source SQL instance to backup -resource "google_sql_database_instance" "source" { - name = "tf-test-source-%{random_suffix}" - database_version = 
"MYSQL_8_0_41" - region = "us-central1" - project = "%{project}" - settings { - tier = "db-f1-micro" - backup_configuration { - enabled = true - } - } - lifecycle { - ignore_changes = [ - settings[0].backup_configuration[0].enabled, - ] - } - deletion_protection = false -} - -// Associate backup plan with SQL instance -resource "google_backup_dr_backup_plan_association" "association" { - location = "us-central1" - backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" - resource = "projects/${google_sql_database_instance.source.project}/instances/${google_sql_database_instance.source.name}" - resource_type = "sqladmin.googleapis.com/Instance" - backup_plan = google_backup_dr_backup_plan.plan.name -} - -// Wait for the first backup to be created -resource "time_sleep" "wait_10_mins" { - depends_on = [google_backup_dr_backup_plan_association.association] - - create_duration = "600s" -} - -data "google_backup_dr_backup" "sql_backups" { - project = "%{project}" - location = "us-central1" - backup_vault_id = "%{backup_vault_id}" - data_source_id = element(split("/", google_backup_dr_backup_plan_association.association.data_source), length(split("/", google_backup_dr_backup_plan_association.association.data_source)) - 1) - - depends_on = [time_sleep.wait_10_mins] -} - -resource "google_sql_database_instance" "instance" { - name = "tf-test-%{random_suffix}" - database_version = "MYSQL_8_0_41" - region = "us-central1" - - settings { - tier = "db-g1-small" - backup_configuration { - enabled = "false" - } - } - lifecycle { - ignore_changes = [ - settings[0].backup_configuration[0].enabled, - ] - } - - backupdr_backup = data.google_backup_dr_backup.sql_backups.backups[0].name - - deletion_protection = false -} -`, context) -} - func testAccSqlDatabaseInstance_basicClone(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_sql_database_instance" "instance" { diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user.go 
b/mmv1/third_party/terraform/services/sql/resource_sql_user.go index 7273955d0a40..ccac2a5f082d 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user.go @@ -62,6 +62,10 @@ func ResourceSqlUser() *schema.Resource { tpgresource.DefaultProviderProject, ), + ValidateRawResourceConfigFuncs: []schema.ValidateRawResourceConfigFunc{ + validation.PreferWriteOnlyAttribute(cty.GetAttrPath("password"), cty.GetAttrPath("password_wo")), + }, + SchemaVersion: 1, MigrateState: resourceSqlUserMigrateState, @@ -103,7 +107,6 @@ func ResourceSqlUser() *schema.Resource { Optional: true, WriteOnly: true, ConflictsWith: []string{"password"}, - RequiredWith: []string{"password_wo_version"}, Description: `The password for the user. Can be updated. For Postgres instances this is a Required field, unless type is set to either CLOUD_IAM_USER or CLOUD_IAM_SERVICE_ACCOUNT.`, }, diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go b/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go index c4e5cc4404b7..b392d8ffd6b7 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go @@ -439,7 +439,6 @@ resource "google_sql_user" "user1" { instance = google_sql_database_instance.instance.name host = "gmail.com" password_wo = "%s" - password_wo_version = 1 } `, instance, password) } @@ -461,7 +460,7 @@ resource "google_sql_user" "user1" { instance = google_sql_database_instance.instance.name host = "gmail.com" password_wo = "%s" - password_wo_version = 2 + password_wo_version = 1 } `, instance, password) } diff --git a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go index 6662217a3e8a..0596fcb3637b 100644 --- 
a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go +++ b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content.go @@ -1,9 +1,7 @@ package storage import ( - "crypto/sha512" "encoding/base64" - "encoding/hex" "fmt" "io/ioutil" "net/http" @@ -32,22 +30,6 @@ func DataSourceGoogleStorageBucketObjectContent() *schema.Resource { Required: false, } - dsSchema["content_hexsha512"] = &schema.Schema{ - Type: schema.TypeString, - Description: "Hex encoded SHA512 checksum of object content.", - Computed: true, - Optional: false, - Required: false, - } - - dsSchema["content_base64sha512"] = &schema.Schema{ - Type: schema.TypeString, - Description: "Base64 encoded SHA512 checksum of object content.", - Computed: true, - Optional: false, - Required: false, - } - return &schema.Resource{ Read: dataSourceGoogleStorageBucketObjectContentRead, Schema: dsSchema, @@ -91,15 +73,6 @@ func dataSourceGoogleStorageBucketObjectContentRead(d *schema.ResourceData, meta return fmt.Errorf("Error setting content_base64: %s", err) } - sha512Sum := sha512.Sum512(objectBytes) - if err := d.Set("content_hexsha512", hex.EncodeToString(sha512Sum[:])); err != nil { - return fmt.Errorf("Error setting content_hexsha512: %s", err) - } - - if err := d.Set("content_base64sha512", base64.StdEncoding.EncodeToString(sha512Sum[:])); err != nil { - return fmt.Errorf("Error setting content_base64sha512: %s", err) - } - d.SetId(bucket + "-" + name) return nil } diff --git a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go index 07dd90d84dad..e6774cf5fcbf 100644 --- a/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go +++ b/mmv1/third_party/terraform/services/storage/data_source_storage_bucket_object_content_test.go @@ -68,8 +68,6 @@ func 
TestAccDataSourceStorageBucketObjectContent_FileContentBase64(t *testing.T) Config: testAccDataSourceStorageBucketObjectContent_FileContentBase64(bucket, folderName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.google_storage_bucket_object_content.this", "content_base64"), - resource.TestCheckResourceAttrSet("data.google_storage_bucket_object_content.this", "content_hexsha512"), - resource.TestCheckResourceAttrSet("data.google_storage_bucket_object_content.this", "content_base64sha512"), verifyValidZip(), ), }, diff --git a/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go b/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go deleted file mode 100644 index e8f5fe15be0f..000000000000 --- a/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go +++ /dev/null @@ -1,325 +0,0 @@ -package storage - -import ( - "context" - "fmt" - "strings" - - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/mapplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-log/tflog" - "google.golang.org/api/googleapi" - "google.golang.org/api/storage/v1" - - "github.com/hashicorp/terraform-provider-google/google/fwmodels" - 
"github.com/hashicorp/terraform-provider-google/google/fwresource" - "github.com/hashicorp/terraform-provider-google/google/fwtransport" - "github.com/hashicorp/terraform-provider-google/google/fwvalidators" - "github.com/hashicorp/terraform-provider-google/google/services/pubsub" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -var ( - _ resource.Resource = &storageNotificationResource{} - _ resource.ResourceWithConfigure = &storageNotificationResource{} - _ resource.ResourceWithImportState = &storageNotificationResource{} - _ resource.ResourceWithUpgradeState = &storageNotificationResource{} -) - -func NewStorageNotificationResource() resource.Resource { - return &storageNotificationResource{} -} - -type storageNotificationResource struct { - config *transport_tpg.Config -} - -type storageNotificationModel struct { - Bucket types.String `tfsdk:"bucket"` - PayloadFormat types.String `tfsdk:"payload_format"` - Topic types.String `tfsdk:"topic"` - CustomAttributes types.Map `tfsdk:"custom_attributes"` - EventTypes types.Set `tfsdk:"event_types"` - ObjectNamePrefix types.String `tfsdk:"object_name_prefix"` - NotificationID types.String `tfsdk:"notification_id"` - SelfLink types.String `tfsdk:"self_link"` - Id types.String `tfsdk:"id"` -} - -func (r *storageNotificationResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_storage_notification" -} - -func (r *storageNotificationResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - if req.ProviderData == nil { - return - } - - config, ok := req.ProviderData.(*transport_tpg.Config) - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *transport_tpg.Config, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), - ) - return - } - r.config = config -} - -func (r *storageNotificationResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - Description: "Creates a new notification configuration on a specified bucket, establishing a flow of event notifications from GCS to a Cloud Pub/Sub topic.", - Version: 1, - Attributes: map[string]schema.Attribute{ - "bucket": schema.StringAttribute{ - Required: true, - Description: "The name of the bucket.", - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "payload_format": schema.StringAttribute{ - Required: true, - Description: `The desired content of the Payload. One of "JSON_API_V1" or "NONE".`, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - Validators: []validator.String{ - stringvalidator.OneOf("JSON_API_V1", "NONE"), - }, - }, - "topic": schema.StringAttribute{ - Required: true, - Description: "The Cloud Pub/Sub topic to which this subscription publishes.", - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - Validators: []validator.String{ - fwvalidators.NewTopicPrefixValidator(), - }, - }, - "custom_attributes": schema.MapAttribute{ - ElementType: types.StringType, - Optional: true, - Description: "A set of key/value attribute pairs to attach to each Cloud Pub/Sub message published for this notification subscription.", - PlanModifiers: []planmodifier.Map{ - mapplanmodifier.RequiresReplace(), - }, - }, - "event_types": schema.SetAttribute{ - ElementType: types.StringType, - Optional: true, - Description: `List of event type filters for this notification config. If not specified, Cloud Storage will send notifications for all event types. 
The valid types are: "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"`, - PlanModifiers: []planmodifier.Set{ - setplanmodifier.RequiresReplace(), - }, - Validators: []validator.Set{ - fwvalidators.StringValuesInSet( - "OBJECT_FINALIZE", - "OBJECT_METADATA_UPDATE", - "OBJECT_DELETE", - "OBJECT_ARCHIVE", - ), - }, - }, - "object_name_prefix": schema.StringAttribute{ - Optional: true, - Description: "Specifies a prefix path filter for this notification config. Cloud Storage will only send notifications for objects in this bucket whose names begin with the specified prefix.", - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, - }, - "notification_id": schema.StringAttribute{ - Computed: true, - Description: "The ID of the created notification.", - }, - "self_link": schema.StringAttribute{ - Computed: true, - Description: "The URI of the created resource.", - }, - "id": schema.StringAttribute{ - Computed: true, - }, - }, - } -} - -func (r *storageNotificationResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var plan storageNotificationModel - var metaData *fwmodels.ProviderMetaModel - resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - computedTopicName := pubsub.GetComputedTopicName("", plan.Topic.ValueString()) - - var customAttrs map[string]string - if !plan.CustomAttributes.IsNull() && !plan.CustomAttributes.IsUnknown() { - resp.Diagnostics.Append(plan.CustomAttributes.ElementsAs(ctx, &customAttrs, false)...) - if resp.Diagnostics.HasError() { - return - } - } - - var eventTypes []string - if !plan.EventTypes.IsNull() && !plan.EventTypes.IsUnknown() { - resp.Diagnostics.Append(plan.EventTypes.ElementsAs(ctx, &eventTypes, false)...) 
- if resp.Diagnostics.HasError() { - return - } - } - - storageNotification := &storage.Notification{ - CustomAttributes: customAttrs, - EventTypes: eventTypes, - ObjectNamePrefix: plan.ObjectNamePrefix.ValueString(), - PayloadFormat: plan.PayloadFormat.ValueString(), - Topic: computedTopicName, - } - - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) - bucket := plan.Bucket.ValueString() - - res, err := r.config.NewStorageClient(userAgent).Notifications.Insert(bucket, storageNotification).Do() - if err != nil { - resp.Diagnostics.AddError(fmt.Sprintf("Error creating notification config for bucket %s", bucket), err.Error()) - return - } - - plan.Id = types.StringValue(fmt.Sprintf("%s/notificationConfigs/%s", bucket, res.Id)) - tflog.Info(ctx, "Created Storage Notification", map[string]interface{}{"id": plan.Id.ValueString()}) - - found := r.refresh(ctx, &plan, metaData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - if !found { - resp.Diagnostics.AddError("Newly created resource not found", "The Storage Notification was not found immediately after creation.") - return - } - - resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) -} - -func (r *storageNotificationResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var state storageNotificationModel - var metaData *fwmodels.ProviderMetaModel - resp.Diagnostics.Append(req.State.Get(ctx, &state)...) - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - found := r.refresh(ctx, &state, metaData, &resp.Diagnostics) - if resp.Diagnostics.HasError() { - return - } - - if !found { - tflog.Warn(ctx, "Storage Notification not found, removing from state.", map[string]interface{}{"id": state.Id.ValueString()}) - resp.State.RemoveResource(ctx) - return - } - - resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) 
-} - -// Update is not supported for this resource. -func (r *storageNotificationResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - // This resource is immutable and all configurable attributes are marked with `RequiresReplace`. - // This function should not get called. -} - -func (r *storageNotificationResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var state storageNotificationModel - var metaData *fwmodels.ProviderMetaModel - resp.Diagnostics.Append(req.State.Get(ctx, &state)...) - resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) - if resp.Diagnostics.HasError() { - return - } - - bucket, notificationID, err := ParseStorageNotificationID(state.Id.ValueString()) - if err != nil { - resp.Diagnostics.AddError("Invalid resource ID", err.Error()) - return - } - - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) - - err = r.config.NewStorageClient(userAgent).Notifications.Delete(bucket, notificationID).Do() - if err != nil { - if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { - // Resource is gone. This is a successful deletion. 
- return - } - resp.Diagnostics.AddError(fmt.Sprintf("Error deleting notification configuration %s for bucket %s", notificationID, bucket), err.Error()) - return - } -} - -func (r *storageNotificationResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) -} - -func (r *storageNotificationResource) refresh(ctx context.Context, model *storageNotificationModel, metaData *fwmodels.ProviderMetaModel, diags *diag.Diagnostics) bool { - bucket, notificationID, err := ParseStorageNotificationID(model.Id.ValueString()) - if err != nil { - diags.AddError("Invalid resource ID", err.Error()) - return false - } - - userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) - - res, err := r.config.NewStorageClient(userAgent).Notifications.Get(bucket, notificationID).Do() - if err != nil { - if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { - return false - } - diags.AddError("Error reading Storage Notification", err.Error()) - return false - } - - model.Bucket = types.StringValue(bucket) - model.NotificationID = types.StringValue(notificationID) - model.SelfLink = types.StringValue(res.SelfLink) - model.PayloadFormat = types.StringValue(res.PayloadFormat) - - configuredObjectNamePrefix := model.ObjectNamePrefix - apiObjectNamePrefix := res.ObjectNamePrefix - model.ObjectNamePrefix = fwresource.FlattenStringEmptyToNull(configuredObjectNamePrefix, apiObjectNamePrefix) - - // trim the fully qualified prefix - apiValue := res.Topic - model.Topic = types.StringValue(strings.TrimPrefix(apiValue, "//pubsub.googleapis.com/")) - - var eventTypesDiags diag.Diagnostics - model.EventTypes, eventTypesDiags = types.SetValueFrom(ctx, types.StringType, res.EventTypes) - diags.Append(eventTypesDiags...) 
- - var customAttrsDiags diag.Diagnostics - model.CustomAttributes, customAttrsDiags = types.MapValueFrom(ctx, types.StringType, res.CustomAttributes) - diags.Append(customAttrsDiags...) - - return !diags.HasError() -} - -// ParseStorageNotificationID replicates the logic from the SDKv2 helper. -func ParseStorageNotificationID(id string) (bucket string, notificationID string, err error) { - parts := strings.Split(id, "/") - if len(parts) != 3 || parts[1] != "notificationConfigs" { - return "", "", fmt.Errorf("invalid storage notification ID format, expected '{bucket}/notificationConfigs/{notification_id}', got '%s'", id) - } - return parts[0], parts[2], nil -} diff --git a/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go b/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go deleted file mode 100644 index d492237f7116..000000000000 --- a/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go +++ /dev/null @@ -1,100 +0,0 @@ -package storage - -import ( - "context" - "strings" - - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" -) - -// Represents the schema of the SDKv2 state -type storageNotificationModelV0 struct { - Bucket types.String `tfsdk:"bucket"` - PayloadFormat types.String `tfsdk:"payload_format"` - Topic types.String `tfsdk:"topic"` - CustomAttributes types.Map `tfsdk:"custom_attributes"` - EventTypes types.Set `tfsdk:"event_types"` - ObjectNamePrefix types.String `tfsdk:"object_name_prefix"` - NotificationID types.String `tfsdk:"notification_id"` - SelfLink types.String `tfsdk:"self_link"` - Id types.String `tfsdk:"id"` -} - -func (r *storageNotificationResource) UpgradeState(ctx context.Context) map[int64]resource.StateUpgrader { - return map[int64]resource.StateUpgrader{ - 0: { - PriorSchema: &schema.Schema{ - 
Attributes: map[string]schema.Attribute{ - "bucket": schema.StringAttribute{ - Required: true, - }, - "payload_format": schema.StringAttribute{ - Required: true, - }, - "topic": schema.StringAttribute{ - Required: true, - }, - "custom_attributes": schema.MapAttribute{ - ElementType: types.StringType, - Optional: true, - }, - "event_types": schema.SetAttribute{ - ElementType: types.StringType, - Optional: true, - }, - "object_name_prefix": schema.StringAttribute{ - Optional: true, - }, - "notification_id": schema.StringAttribute{ - Computed: true, - }, - "self_link": schema.StringAttribute{ - Computed: true, - }, - "id": schema.StringAttribute{ - Computed: true, - }, - }, - }, - StateUpgrader: func(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) { - var priorStateData storageNotificationModelV0 - - resp.Diagnostics.Append(req.State.Get(ctx, &priorStateData)...) - if resp.Diagnostics.HasError() { - return - } - - upgradedStateData := storageNotificationModel{ - Bucket: priorStateData.Bucket, - PayloadFormat: priorStateData.PayloadFormat, - CustomAttributes: priorStateData.CustomAttributes, - EventTypes: priorStateData.EventTypes, - ObjectNamePrefix: priorStateData.ObjectNamePrefix, - NotificationID: priorStateData.NotificationID, - SelfLink: priorStateData.SelfLink, - Id: priorStateData.Id, - } - - // topic - trim the fully qualified prefix - if !priorStateData.Topic.IsNull() && !priorStateData.Topic.IsUnknown() { - apiTopic := priorStateData.Topic.ValueString() - transformedTopic := strings.TrimPrefix(apiTopic, "//pubsub.googleapis.com/") - upgradedStateData.Topic = types.StringValue(transformedTopic) - } else { - upgradedStateData.Topic = priorStateData.Topic - } - - // ObjectNamePrefix - normalize "" to Null - if !priorStateData.ObjectNamePrefix.IsNull() && !priorStateData.ObjectNamePrefix.IsUnknown() && priorStateData.ObjectNamePrefix.ValueString() == "" { - upgradedStateData.ObjectNamePrefix = types.StringNull() - } 
else { - upgradedStateData.ObjectNamePrefix = priorStateData.ObjectNamePrefix - } - - resp.Diagnostics.Append(resp.State.Set(ctx, upgradedStateData)...) - }, - }, - } -} diff --git a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go index 9b72d3e2f99e..26299b8cc6c0 100644 --- a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go @@ -311,7 +311,7 @@ func TestAccStorageBucket_iamPolicyGeneratedWithCondition(t *testing.T) { { Config: testAccStorageBucket_withConditionIamPolicy(context), Check: resource.ComposeAggregateTestCheckFunc( - // TODO - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged + // TODO(SarahFrench) - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged // resource.TestCheckResourceAttr("data.google_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttr("google_storage_bucket_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttrWith("data.google_iam_policy.foo", "policy_data", tpgresource.CheckGoogleIamPolicy), @@ -330,16 +330,12 @@ func TestAccStorageBucket_iamPolicyGeneratedWithCondition(t *testing.T) { func TestAccStorageBucketIamPolicy_destroy(t *testing.T) { t.Parallel() - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccStorageBucketIamPolicy_destroy(context), + Config: testAccStorageBucketIamPolicy_destroy(), }, }, }) @@ -605,14 +601,14 @@ resource "google_storage_bucket_iam_policy" "foo" { `, context) } -func testAccStorageBucketIamPolicy_destroy(context map[string]interface{}) string { - return 
acctest.Nprintf(` +func testAccStorageBucketIamPolicy_destroy() string { + return fmt.Sprintf(` resource "google_service_account" "accessor" { account_id = "pub-sub-test-service-account" } resource "google_storage_bucket" "test_bucket" { - name = "tf-test-my-bucket%{random_suffix}" + name = "sd-pubsub-test-bucket" location = "US" storage_class = "STANDARD" @@ -662,5 +658,5 @@ resource "google_pubsub_topic_iam_policy" "topic_policy" { topic = google_pubsub_topic.topic.name policy_data = data.google_iam_policy.topic_policy_data.policy_data } -`, context) +`) } diff --git a/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go b/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go index 98f1628a9881..befb1adf2177 100644 --- a/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go +++ b/mmv1/third_party/terraform/services/storage/iam_storage_managed_folder_test.go @@ -309,7 +309,7 @@ func TestAccStorageManagedFolderIamPolicyGenerated_withCondition(t *testing.T) { { Config: testAccStorageManagedFolderIamPolicy_withConditionGenerated(context), Check: resource.ComposeAggregateTestCheckFunc( - // TODO - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged + // TODO(SarahFrench) - uncomment once https://github.com/GoogleCloudPlatform/magic-modules/pull/6466 merged // resource.TestCheckResourceAttr("data.google_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttr("google_storage_managed_folder_iam_policy.foo", "policy_data", expectedPolicyData), resource.TestCheckResourceAttrWith("data.google_iam_policy.foo", "policy_data", tpgresource.CheckGoogleIamPolicy), diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index b7cb4ec0857f..c05147dc01ab 100644 --- 
a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -6,6 +6,7 @@ import ( "errors" "fmt" "log" + "math" "regexp" "runtime" "strconv" @@ -14,6 +15,7 @@ import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" "github.com/gammazero/workerpool" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -45,7 +47,7 @@ func ResourceStorageBucket() *schema.Resource { Read: schema.DefaultTimeout(4 * time.Minute), }, - SchemaVersion: 4, + SchemaVersion: 3, StateUpgraders: []schema.StateUpgrader{ { Type: resourceStorageBucketV0().CoreConfigSchema().ImpliedType(), @@ -62,11 +64,6 @@ func ResourceStorageBucket() *schema.Resource { Upgrade: ResourceStorageBucketStateUpgradeV2, Version: 2, }, - { - Type: resourceStorageBucketV3().CoreConfigSchema().ImpliedType(), - Upgrade: ResourceStorageBucketStateUpgradeV3, - Version: 3, - }, }, Schema: map[string]*schema.Schema{ @@ -75,6 +72,7 @@ func ResourceStorageBucket() *schema.Resource { Required: true, ForceNew: true, Description: `The name of the bucket.`, + ValidateFunc: verify.ValidateGCSName, }, "encryption": { @@ -411,8 +409,9 @@ func ResourceStorageBucket() *schema.Resource { Description: `If set to true, the bucket will be locked and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action.`, }, "retention_period": { - Type: schema.TypeString, + Type: schema.TypeInt, Required: true, + ValidateFunc: validation.IntBetween(1, math.MaxInt32), Description: `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. 
The value must be less than 3,155,760,000 seconds.`, }, }, @@ -577,94 +576,6 @@ func ResourceStorageBucket() *schema.Resource { Computed: true, Description: `The time at which the bucket's metadata or IAM policy was last updated, in RFC 3339 format.`, }, - "ip_filter": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Description: `The bucket IP filtering configuration.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "mode": { - Type: schema.TypeString, - Required: true, - Description: `The mode of the IP filter. Valid values are 'Enabled' and 'Disabled'.`, - ValidateFunc: validation.StringInSlice([]string{"Enabled", "Disabled"}, false), - }, - "public_network_source": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Description: `The public network IP address ranges that can access the bucket and its data.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "allowed_ip_cidr_ranges": { - Type: schema.TypeList, - Required: true, - Description: "The list of public IPv4, IPv6 cidr ranges that are allowed to access the bucket.", - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.IsCIDR, - }, - }, - }, - }, - }, - "vpc_network_sources": { - Type: schema.TypeList, - Optional: true, - Description: `The list of VPC networks that can access the bucket.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "allowed_ip_cidr_ranges": { - Type: schema.TypeList, - Required: true, - Description: "The list of public or private IPv4 and IPv6 CIDR ranges that can access the bucket.", - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.IsCIDR, - }, - }, - "network": { - Type: schema.TypeString, - Required: true, - Description: "Name of the network. 
Format: projects/{PROJECT_ID}/global/networks/{NETWORK_NAME}", - }, - }, - }, - }, - "allow_cross_org_vpcs" : { - Type: schema.TypeBool, - Optional: true, - Description: `Whether to allow cross-org VPCs in the bucket's IP filter configuration.`, - RequiredWith: []string{"ip_filter.0.vpc_network_sources"}, - }, - "allow_all_service_agent_access" : { - Type: schema.TypeBool, - Optional: true, - Description: `Whether to allow all service agents to access the bucket regardless of the IP filter configuration.`, - }, - }, - }, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if k == "ip_filter.#" { - o, _ := d.GetChange("ip_filter") - l := o.([]interface{}) - if len(l) == 0 { - return false - } - - if contents, ok := l[0].(map[string]interface{}); !ok { - return false - } else if mode, ok := contents["mode"].(string); ok && mode == "Disabled" { - return true - } - return false - } else if k == "ip_filter.0.mode" { - return old == "Disabled" && new == "" - } - return false - }, - }, }, UseJSONNumber: true, } @@ -722,63 +633,71 @@ func getAnywhereCacheListResult(d *schema.ResourceData, config *transport_tpg.Co } func deleteAnywhereCacheIfAny(d *schema.ResourceData, config *transport_tpg.Config) error { - for { - // Get the list of Anywhere Caches - cacheList, err := getAnywhereCacheListResult(d, config) - if err != nil { - return err - } + // Get the initial list of Anywhere Caches + cacheList, err := getAnywhereCacheListResult(d, config) + if err != nil { + return err + } - // Check if the cache list is empty - if len(cacheList) == 0 { - break + // If no cache exists initially, return early + if len(cacheList) == 0 { + return nil + } + + // Iterate over each object in the resource list + for _, item := range cacheList { + // Ensure the item is a map + obj, ok := item.(map[string]interface{}) + if !ok { + return fmt.Errorf("unexpected type for resource list item: %T", item) } - // Iterate over each object in the resource list - for _, item := 
range cacheList { - // Ensure the item is a map - obj, ok := item.(map[string]interface{}) - if !ok { - return fmt.Errorf("unexpected type for resource list item: %T", item) - } + // Check the state of the object + state, ok := obj["state"].(string) + if !ok { + continue // If state is not a string, skip this item + } + if !strings.EqualFold(state, "running") && !strings.EqualFold(state, "paused") { + continue + } - // Check the state of the object - state, ok := obj["state"].(string) - if !ok { - continue // If state is not a string, skip this item - } - if !strings.EqualFold(state, "running") && !strings.EqualFold(state, "paused") { - continue - } + // Disable the cache if state is running or paused + anywhereCacheId, ok := obj["anywhereCacheId"].(string) + if !ok { + return fmt.Errorf("missing or invalid anywhereCacheId: %v", obj) + } + anywhereCacheUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{StorageBasePath}}b/{{name}}/anywhereCaches/"}}") + if err != nil { + return err + } + disableUrl := anywhereCacheUrl + fmt.Sprintf("%s/disable", anywhereCacheId) - // Disable the cache if state is running or paused - anywhereCacheId, ok := obj["anywhereCacheId"].(string) - if !ok { - return fmt.Errorf("missing or invalid anywhereCacheId: %v", obj) - } - anywhereCacheUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{StorageBasePath}}b/{{name}}/anywhereCaches/"}}") - if err != nil { - return err - } - disableUrl := anywhereCacheUrl + fmt.Sprintf("%s/disable", anywhereCacheId) - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: config.Project, - RawURL: disableUrl, - UserAgent: config.UserAgent, - }) - if err != nil { - return err - } + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: config.Project, + RawURL: disableUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + return err } + } + time.Sleep(80 * time.Minute) // It 
takes around 70 minutes of time for cache to finally delete post it disable time. - // Sleep for 1 minute - time.Sleep(1 * time.Minute) + // Post this time, we check again! + // Get the list of Anywhere Caches after the sleep + cacheList, err = getAnywhereCacheListResult(d, config) + if err != nil { + return err } - return nil + // Check if the cache list is now empty + if len(cacheList) == 0 { + return nil + } + + return fmt.Errorf("Error while deleting the cache: caches still exists post 80mins of their disable time") } func resourceDataplexLabelDiffSuppress(k, old, new string, d *schema.ResourceData) bool { @@ -865,11 +784,7 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error retentionPolicy := retention_policies[0].(map[string]interface{}) if v, ok := retentionPolicy["retention_period"]; ok { - value, err := strconv.ParseInt(v.(string), 10, 64) - if err != nil { - return err - } - sb.RetentionPolicy.RetentionPeriod = value + sb.RetentionPolicy.RetentionPeriod = int64(v.(int)) } } } @@ -912,10 +827,6 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error sb.HierarchicalNamespace = expandBucketHierachicalNamespace(v.([]interface{})) } - if v, ok := d.GetOk("ip_filter"); ok { - sb.IpFilter = expandBucketIpFilter(v.([]interface{})) - } - var res *storage.Bucket err = transport_tpg.Retry(transport_tpg.RetryOptions{ @@ -1100,12 +1011,6 @@ func resourceStorageBucketUpdate(d *schema.ResourceData, meta interface{}) error } } - if d.HasChange("ip_filter") { - if v, ok := d.GetOk("ip_filter"); ok { - sb.IpFilter = expandBucketIpFilter(v.([]interface{})) - } - } - res, err := config.NewStorageClient(userAgent).Buckets.Patch(d.Get("name").(string), sb).Do() if err != nil { return err @@ -1187,7 +1092,7 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error // Get the bucket bucket := d.Get("name").(string) - var listError, deleteObjectError, deleteCacheError error + var listError, 
deleteObjectError error for deleteObjectError == nil { res, err := config.NewStorageClient(userAgent).Objects.List(bucket).Versions(true).Do() if err != nil { @@ -1247,7 +1152,7 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error wp.Submit(func() { err = deleteAnywhereCacheIfAny(d, config) if err != nil { - deleteCacheError = fmt.Errorf("error deleting the caches on the bucket %s : %w", bucket, err) + deleteObjectError = fmt.Errorf("error deleting the caches on the bucket %s : %w", bucket, err) } }) @@ -1287,9 +1192,6 @@ func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Message, "not empty") && deleteObjectError != nil { return fmt.Errorf("could not delete non-empty bucket due to error when deleting contents: %v", deleteObjectError) } - if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Message, "Anywhere Caches") && deleteCacheError != nil { - return fmt.Errorf("could not delete bucket due to error when deleting anywhere caches on it: %v", deleteCacheError) - } if err != nil { log.Printf("Error deleting bucket %s: %v", bucket, err) return err @@ -1477,14 +1379,9 @@ func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentio } retentionPolicy := retentionPolicies[0].(map[string]interface{}) - var retentionPeriod int64 - if v, ok := retentionPolicy["retention_period"]; ok { - retentionPeriod, _ = strconv.ParseInt(v.(string), 10, 64) - } - bucketRetentionPolicy := &storage.BucketRetentionPolicy{ IsLocked: retentionPolicy["is_locked"].(bool), - RetentionPeriod: retentionPeriod, + RetentionPeriod: int64(retentionPolicy["retention_period"].(int)), } return bucketRetentionPolicy @@ -1499,7 +1396,7 @@ func flattenBucketRetentionPolicy(bucketRetentionPolicy *storage.BucketRetention retentionPolicy := map[string]interface{}{ "is_locked": bucketRetentionPolicy.IsLocked, - 
"retention_period": fmt.Sprintf("%d", bucketRetentionPolicy.RetentionPeriod), + "retention_period": bucketRetentionPolicy.RetentionPeriod, } bucketRetentionPolicies = append(bucketRetentionPolicies, retentionPolicy) @@ -2048,111 +1945,6 @@ func lockRetentionPolicy(bucketsService *storage.BucketsService, bucketName stri return nil } -func flattenBucketIpFilter(ipFilter *storage.BucketIpFilter) []map[string]interface{} { - ipFilterList := make([]map[string]interface{}, 0, 1) - - if ipFilter == nil { - return ipFilterList - } - - filterItem := map[string]interface{}{ - "mode": ipFilter.Mode, - "allow_cross_org_vpcs": ipFilter.AllowCrossOrgVpcs, - "allow_all_service_agent_access": ipFilter.AllowAllServiceAgentAccess, - } - - if publicSrc := flattenBucketIpFilterPublicNetworkSource(ipFilter.PublicNetworkSource); publicSrc != nil { - filterItem["public_network_source"] = publicSrc - } - if vpcSrc := flattenBucketIpFilterVpcNetworkSources(ipFilter.VpcNetworkSources); vpcSrc != nil { - filterItem["vpc_network_sources"] = vpcSrc - } - - return append(ipFilterList, filterItem) -} - -func flattenBucketIpFilterPublicNetworkSource(publicNetworkSource *storage.BucketIpFilterPublicNetworkSource) []map[string]interface{} { - if publicNetworkSource == nil || len(publicNetworkSource.AllowedIpCidrRanges) == 0 { - return nil - } - - return []map[string]interface{}{ - { - "allowed_ip_cidr_ranges": publicNetworkSource.AllowedIpCidrRanges, - }, - } -} - -func flattenBucketIpFilterVpcNetworkSources(vpnNetworkSource []*storage.BucketIpFilterVpcNetworkSources) []map[string]interface{} { - if len(vpnNetworkSource) == 0 { - return nil - } - - srcs := make([]map[string]interface{}, 0, len(vpnNetworkSource)) - - for i := range vpnNetworkSource { - srcs = append(srcs, map[string]interface{}{ - "allowed_ip_cidr_ranges": vpnNetworkSource[i].AllowedIpCidrRanges, - "network": vpnNetworkSource[i].Network, - }) - } - - return srcs -} - -func expandBucketIpFilter(v interface{}) 
(*storage.BucketIpFilter) { - ipFilterList := v.([]interface{}) - if len(ipFilterList) == 0 || ipFilterList[0] == nil { - return nil - } - ipFilter := ipFilterList[0].(map[string]interface{}) - return &storage.BucketIpFilter{ - Mode: ipFilter["mode"].(string), - PublicNetworkSource: expandBucketIpFilterPublicNetworkSource(ipFilter["public_network_source"]), - VpcNetworkSources: expandBucketIpFilterVpcNetworkSources(ipFilter["vpc_network_sources"]), - AllowCrossOrgVpcs: ipFilter["allow_cross_org_vpcs"].(bool), - AllowAllServiceAgentAccess: ipFilter["allow_all_service_agent_access"].(bool), - ForceSendFields: []string{"PublicNetworkSource", "VpcNetworkSources", "AllowCrossOrgVpcs", "AllowAllServiceAgentAccess"}, - } -} - -func expandBucketIpFilterPublicNetworkSource(v interface{}) (*storage.BucketIpFilterPublicNetworkSource) { - e := &storage.BucketIpFilterPublicNetworkSource{ - ForceSendFields: []string{"AllowedIpCidrRanges"}, - } - - publicNetworkSources := v.([]interface{}) - if len(publicNetworkSources) == 0 || publicNetworkSources[0] == nil { - return e - } - publicNetworkSource := publicNetworkSources[0].(map[string]interface{}) - cidrs := publicNetworkSource["allowed_ip_cidr_ranges"].([]interface{}) - if len(cidrs) == 0 { - return e - } - - e.AllowedIpCidrRanges = tpgresource.ConvertStringArr(cidrs) - return e -} - -func expandBucketIpFilterVpcNetworkSources(v interface{}) ([]*storage.BucketIpFilterVpcNetworkSources) { - vpcNetworkSources := v.([]interface{}) - if len(vpcNetworkSources) == 0 || vpcNetworkSources[0] == nil { - return nil - } - - transformedvpcNetworkSources := make([]*storage.BucketIpFilterVpcNetworkSources, 0, len(vpcNetworkSources)) - for i := range vpcNetworkSources { - transformedvpcNetworkSource := vpcNetworkSources[i].(map[string]interface{}) - transformedvpcNetworkSources = append(transformedvpcNetworkSources, &storage.BucketIpFilterVpcNetworkSources{ - AllowedIpCidrRanges: 
tpgresource.ConvertStringArr(transformedvpcNetworkSource["allowed_ip_cidr_ranges"].([]interface{})), - Network: transformedvpcNetworkSource["network"].(string), - }) - } - - return transformedvpcNetworkSources -} - // d.HasChange("lifecycle_rule") always returns true, giving false positives. This function detects changes // to the list size or the actions/conditions of rules directly. func detectLifecycleChange(d *schema.ResourceData) bool { @@ -2303,10 +2095,6 @@ func setStorageBucket(d *schema.ResourceData, config *transport_tpg.Config, res } } - if err := d.Set("ip_filter", flattenBucketIpFilter(res.IpFilter)); err != nil { - return fmt.Errorf("Error setting ip_filter: %s", err) - } - d.SetId(res.Id) return nil } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index 22553d7bbbf3..39ff367d6f56 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -2,14 +2,14 @@ package storage import ( "context" - "encoding/json" - "fmt" "log" "math" "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + + "github.com/hashicorp/terraform-provider-google/google/verify" ) func resourceStorageBucketV1() *schema.Resource { @@ -24,10 +24,11 @@ func resourceStorageBucketV1() *schema.Resource { Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, + ValidateFunc: verify.ValidateGCSName, }, "encryption": { @@ -543,10 +544,11 @@ func resourceStorageBucketV2() *schema.Resource { }, Schema: map[string]*schema.Schema{ "name": { - Type: 
schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, + ValidateFunc: verify.ValidateGCSName, }, "encryption": { @@ -1052,537 +1054,3 @@ func ResourceStorageBucketStateUpgradeV2(_ context.Context, rawState map[string] log.Printf("[DEBUG] Attributes after migration: %#v", rawState) return rawState, nil } - -func resourceStorageBucketV3() *schema.Resource { - return &schema.Resource{ - StateUpgraders: []schema.StateUpgrader{ - { - Type: resourceStorageBucketV0().CoreConfigSchema().ImpliedType(), - Upgrade: ResourceStorageBucketStateUpgradeV0, - Version: 0, - }, - { - Type: resourceStorageBucketV1().CoreConfigSchema().ImpliedType(), - Upgrade: ResourceStorageBucketStateUpgradeV1, - Version: 1, - }, - { - Type: resourceStorageBucketV2().CoreConfigSchema().ImpliedType(), - Upgrade: ResourceStorageBucketStateUpgradeV1, - Version: 2, - }, - }, - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - }, - - "encryption": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "default_kms_key_name": { - Type: schema.TypeString, - Required: true, - Description: `A Cloud KMS key that will be used to encrypt objects inserted into this bucket, if no encryption method is specified. You must pay attention to whether the crypto key is available in the location that this bucket is created in. 
See the docs for more details.`, - }, - }, - }, - Description: `The bucket's encryption configuration.`, - }, - - "requester_pays": { - Type: schema.TypeBool, - Optional: true, - Description: `Enables Requester Pays on a storage bucket.`, - }, - - "force_destroy": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: `When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run.`, - }, - - "labels": { - Type: schema.TypeMap, - ValidateFunc: labelKeyValidator, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `A set of key/value label pairs to assign to the bucket.`, - }, - - "terraform_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - - "effective_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - - "location": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - StateFunc: func(s interface{}) string { - return strings.ToUpper(s.(string)) - }, - Description: `The Google Cloud Storage location`, - }, - - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Description: `The ID of the project in which the resource belongs. 
If it is not provided, the provider project is used.`, - }, - - "project_number": { - Type: schema.TypeInt, - Computed: true, - Description: `The project number of the project in which the resource belongs.`, - }, - - "self_link": { - Type: schema.TypeString, - Computed: true, - Description: `The URI of the created resource.`, - }, - - "url": { - Type: schema.TypeString, - Computed: true, - Description: `The base URL of the bucket, in the format gs://.`, - }, - - "storage_class": { - Type: schema.TypeString, - Optional: true, - Default: "STANDARD", - Description: `The Storage Class of the new bucket. Supported values include: STANDARD, MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`, - }, - - "lifecycle_rule": { - Type: schema.TypeList, - Optional: true, - MaxItems: 100, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "action": { - Type: schema.TypeSet, - Required: true, - MinItems: 1, - MaxItems: 1, - Set: resourceGCSBucketLifecycleRuleActionHash, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "type": { - Type: schema.TypeString, - Required: true, - Description: `The type of the action of this Lifecycle Rule. Supported values include: Delete, SetStorageClass and AbortIncompleteMultipartUpload.`, - }, - "storage_class": { - Type: schema.TypeString, - Optional: true, - Description: `The target Storage Class of objects affected by this Lifecycle Rule. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`, - }, - }, - }, - Description: `The Lifecycle Rule's action configuration. 
A single block of this type is supported.`, - }, - "condition": { - Type: schema.TypeSet, - Required: true, - MinItems: 1, - MaxItems: 1, - Set: resourceGCSBucketLifecycleRuleConditionHash, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "age": { - Type: schema.TypeInt, - Optional: true, - Description: `Minimum age of an object in days to satisfy this condition.`, - }, - "created_before": { - Type: schema.TypeString, - Optional: true, - Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, - }, - "custom_time_before": { - Type: schema.TypeString, - Optional: true, - Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, - }, - "days_since_custom_time": { - Type: schema.TypeInt, - Optional: true, - Description: `Number of days elapsed since the user-specified timestamp set on an object.`, - }, - "days_since_noncurrent_time": { - Type: schema.TypeInt, - Optional: true, - Description: `Number of days elapsed since the noncurrent timestamp of an object. This - condition is relevant only for versioned objects.`, - }, - "noncurrent_time_before": { - Type: schema.TypeString, - Optional: true, - Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, - }, - "no_age": { - Type: schema.TypeBool, - Deprecated: "`no_age` is deprecated and will be removed in a future major release. Use `send_age_if_zero` instead.", - Optional: true, - Description: `While set true, age value will be omitted.Required to set true when age is unset in the config file.`, - }, - "with_state": { - Type: schema.TypeString, - Computed: true, - Optional: true, - ValidateFunc: validation.StringInSlice([]string{"LIVE", "ARCHIVED", "ANY", ""}, false), - Description: `Match to live and/or archived objects. Unversioned buckets have only live objects. 
Supported values include: "LIVE", "ARCHIVED", "ANY".`, - }, - "matches_storage_class": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `Storage Class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE, STANDARD, DURABLE_REDUCED_AVAILABILITY.`, - }, - "num_newer_versions": { - Type: schema.TypeInt, - Optional: true, - Description: `Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition.`, - }, - "matches_prefix": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `One or more matching name prefixes to satisfy this condition.`, - }, - "matches_suffix": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `One or more matching name suffixes to satisfy this condition.`, - }, - "send_age_if_zero": { - Type: schema.TypeBool, - Optional: true, - Default: true, - Description: `While set true, age value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the age field. It can be used alone or together with age.`, - }, - "send_days_since_noncurrent_time_if_zero": { - Type: schema.TypeBool, - Optional: true, - Description: `While set true, days_since_noncurrent_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_noncurrent_time field. It can be used alone or together with days_since_noncurrent_time.`, - }, - "send_days_since_custom_time_if_zero": { - Type: schema.TypeBool, - Optional: true, - Description: `While set true, days_since_custom_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_custom_time field. 
It can be used alone or together with days_since_custom_time.`, - }, - "send_num_newer_versions_if_zero": { - Type: schema.TypeBool, - Optional: true, - Description: `While set true, num_newer_versions value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the num_newer_versions field. It can be used alone or together with num_newer_versions.`, - }, - }, - }, - Description: `The Lifecycle Rule's condition configuration.`, - }, - }, - }, - Description: `The bucket's Lifecycle Rules configuration.`, - }, - - "enable_object_retention": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Description: `Enables each object in the bucket to have its own retention policy, which prevents deletion until stored for a specific length of time.`, - }, - - "versioning": { - Type: schema.TypeList, - Optional: true, - Computed: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enabled": { - Type: schema.TypeBool, - Required: true, - Description: `While set to true, versioning is fully enabled for this bucket.`, - }, - }, - }, - Description: `The bucket's Versioning configuration.`, - }, - - "autoclass": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enabled": { - Type: schema.TypeBool, - Required: true, - Description: `While set to true, autoclass automatically transitions objects in your bucket to appropriate storage classes based on each object's access pattern.`, - }, - "terminal_storage_class": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The storage class that objects in the bucket eventually transition to if they are not read for a certain length of time. 
Supported values include: NEARLINE, ARCHIVE.`, - }, - }, - }, - Description: `The bucket's autoclass configuration.`, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - _, n := d.GetChange(strings.TrimSuffix(k, ".#")) - if !strings.HasSuffix(k, ".#") { - return false - } - var l []interface{} - if new == "1" && old == "0" { - l = n.([]interface{}) - contents, ok := l[0].(map[string]interface{}) - if !ok { - return false - } - if contents["enabled"] == false { - return true - } - } - if new == "0" && old == "1" { - n := d.Get(strings.TrimSuffix(k, ".#")) - l = n.([]interface{}) - contents := l[0].(map[string]interface{}) - if contents["enabled"] == false { - return true - } - } - return false - }, - }, - "website": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "main_page_suffix": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: []string{"website.0.not_found_page", "website.0.main_page_suffix"}, - Description: `Behaves as the bucket's directory index where missing objects are treated as potential directories.`, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return old != "" && new == "" - }, - }, - "not_found_page": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: []string{"website.0.main_page_suffix", "website.0.not_found_page"}, - Description: `The custom object to return when a requested resource is not found.`, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return old != "" && new == "" - }, - }, - }, - }, - Description: `Configuration if the bucket acts as a website.`, - }, - - "retention_policy": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "is_locked": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: `If set to true, the bucket will be locked 
and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action.`, - }, - "retention_period": { - Type: schema.TypeInt, - Required: true, - ValidateFunc: validation.IntBetween(1, math.MaxInt32), - Description: `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds.`, - }, - }, - }, - Description: `Configuration of the bucket's data retention policy for how long objects in the bucket should be retained.`, - }, - - "cors": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "origin": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: `The list of Origins eligible to receive CORS response headers. Note: "*" is permitted in the list of origins, and means "any Origin".`, - }, - "method": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: `The list of HTTP methods on which to include CORS response headers, (GET, OPTIONS, POST, etc) Note: "*" is permitted in the list of methods, and means "any method".`, - }, - "response_header": { - Type: schema.TypeList, - Optional: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: `The list of HTTP headers other than the simple response headers to give permission for the user-agent to share across domains.`, - }, - "max_age_seconds": { - Type: schema.TypeInt, - Optional: true, - Description: `The value, in seconds, to return in the Access-Control-Max-Age header used in preflight responses.`, - }, - }, - }, - Description: `The bucket's Cross-Origin Resource Sharing (CORS) configuration.`, - }, - - "default_event_based_hold": { - Type: schema.TypeBool, - Optional: true, - Description: `Whether or not to automatically apply an eventBasedHold to new objects 
added to the bucket.`, - }, - - "logging": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "log_bucket": { - Type: schema.TypeString, - Required: true, - Description: `The bucket that will receive log objects.`, - }, - "log_object_prefix": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The object prefix for log objects. If it's not provided, by default Google Cloud Storage sets this to this bucket's name.`, - }, - }, - }, - Description: `The bucket's Access & Storage Logs configuration.`, - }, - "uniform_bucket_level_access": { - Type: schema.TypeBool, - Optional: true, - Computed: true, - Description: `Enables uniform bucket-level access on a bucket.`, - }, - "custom_placement_config": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "data_locations": { - Type: schema.TypeSet, - Required: true, - ForceNew: true, - MaxItems: 2, - MinItems: 2, - Elem: &schema.Schema{ - Type: schema.TypeString, - StateFunc: func(s interface{}) string { - return strings.ToUpper(s.(string)) - }, - }, - Description: `The list of individual regions that comprise a dual-region bucket. See the docs for a list of acceptable regions. Note: If any of the data_locations changes, it will recreate the bucket.`, - }, - }, - }, - Description: `The bucket's custom location configuration, which specifies the individual regions that comprise a dual-region bucket. If the bucket is designated a single or multi-region, the parameters are empty.`, - }, - "rpo": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Specifies the RPO setting of bucket. If set 'ASYNC_TURBO', The Turbo Replication will be enabled for the dual-region bucket. Value 'DEFAULT' will set RPO setting to default. 
Turbo Replication is only for buckets in dual-regions.See the docs for more details.`, - }, - "public_access_prevention": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Prevents public access to a bucket.`, - }, - "soft_delete_policy": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Computed: true, - Description: `The bucket's soft delete policy, which defines the period of time that soft-deleted objects will be retained, and cannot be permanently deleted. If it is not provided, by default Google Cloud Storage sets this to default soft delete policy`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "retention_duration_seconds": { - Type: schema.TypeInt, - Default: 604800, - Optional: true, - Description: `The duration in seconds that soft-deleted objects in the bucket will be retained and cannot be permanently deleted. Default value is 604800.`, - }, - "effective_time": { - Type: schema.TypeString, - Computed: true, - Description: `Server-determined value that indicates the time from which the policy, or one with a greater retention, was effective. 
This value is in RFC 3339 format.`, - }, - }, - }, - }, - }, - UseJSONNumber: true, - } -} - -func ResourceStorageBucketStateUpgradeV3(_ context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { - log.Printf("[DEBUG] Attributes before migration: %#v", rawState) - if rawState["retention_policy"] != nil { - retentionPolicies := rawState["retention_policy"].([]interface{}) - if len(retentionPolicies) > 0 { - retentionPolicy := retentionPolicies[0].(map[string]interface{}) - // nil check - if v, ok := retentionPolicy["retention_period"]; ok && v != nil { - // number conversion check to error rather than crash - if num, ok := v.(json.Number); ok { - retentionPolicy["retention_period"] = num.String() - } else { - return rawState, fmt.Errorf("retention_period in state has unexpected type %T", v) - } - } - } - } - log.Printf("[DEBUG] Attributes after migration: %#v", rawState) - return rawState, nil -} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go index 015fba07671a..249d5bf7fcc9 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go @@ -10,12 +10,10 @@ import ( "os" "time" - "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "crypto/sha256" "encoding/base64" @@ -84,20 +82,11 @@ func ResourceStorageBucketObject() *schema.Resource { }, "content_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - ConflictsWith: []string{"force_empty_content_type"}, - Description: `Content-Type of the object data. 
Defaults to "application/octet-stream" or "text/plain; charset=utf-8".`, - }, - - "force_empty_content_type": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - ConflictsWith: []string{"content_type"}, - Description: `Flag to set empty Content-Type.`, + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Computed: true, + Description: `Content-Type of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".`, }, "content": { @@ -143,12 +132,6 @@ func ResourceStorageBucketObject() *schema.Resource { Description: `A path to the data you want to upload. Must be defined if content is not.`, }, - "source_md5hash": { - Type: schema.TypeString, - Optional: true, - Description: `User-provided md5hash, Base 64 MD5 hash of the object data.`, - }, - // Detect changes to local file or changes made outside of Terraform to the file stored on the server. "detect_md5hash": { Type: schema.TypeString, @@ -164,12 +147,6 @@ func ResourceStorageBucketObject() *schema.Resource { // 3. Don't suppress the diff iff they don't match DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { localMd5Hash := "" - if d.GetRawConfig().GetAttr("source_md5hash") == cty.UnknownVal(cty.String) { - return true - } - if v, ok := d.GetOk("source_md5hash"); ok && v != "" { - return true - } if source, ok := d.GetOkExists("source"); ok { localMd5Hash = tpgresource.GetFileMd5Hash(source.(string)) } @@ -307,13 +284,6 @@ func ResourceStorageBucketObject() *schema.Resource { Computed: true, Description: `A url reference to download this object.`, }, - - "deletion_policy": { - Type: schema.TypeString, - Optional: true, - Description: `The deletion policy for the object. 
Setting ABANDON allows the resource to be abandoned rather than deleted when removed from your Terraform configuration.`, - ValidateFunc: validation.StringInSlice([]string{"ABANDON"}, false), - }, }, UseJSONNumber: true, } @@ -395,11 +365,7 @@ func resourceStorageBucketObjectCreate(d *schema.ResourceData, meta interface{}) insertCall := objectsService.Insert(bucket, object) insertCall.Name(name) - if v, ok := d.GetOk("force_empty_content_type"); ok && v.(bool) { - insertCall.Media(media, googleapi.ContentType("")) - } else { - insertCall.Media(media) - } + insertCall.Media(media) // This is done late as we need to add headers to enable customer encryption if v, ok := d.GetOk("customer_encryption"); ok { @@ -426,7 +392,7 @@ func resourceStorageBucketObjectUpdate(d *schema.ResourceData, meta interface{}) bucket := d.Get("bucket").(string) name := d.Get("name").(string) - if d.HasChange("content") || d.HasChange("source_md5hash") || d.HasChange("detect_md5hash") { + if d.HasChange("content") || d.HasChange("detect_md5hash") { // The KMS key name are not able to be set on create : // or you get error: Error uploading object test-maarc: googleapi: Error 400: Malformed Cloud KMS crypto key: projects/myproject/locations/myregion/keyRings/mykeyring/cryptoKeys/mykeyname/cryptoKeyVersions/1, invalid d.Set("kms_key_name", nil) @@ -514,9 +480,6 @@ func resourceStorageBucketObjectRead(d *schema.ResourceData, meta interface{}) e if err := d.Set("detect_md5hash", res.Md5Hash); err != nil { return fmt.Errorf("Error setting detect_md5hash: %s", err) } - if err := d.Set("source_md5hash", d.Get("source_md5hash")); err != nil { - return fmt.Errorf("Error setting source_md5hash: %s", err) - } if err := d.Set("generation", res.Generation); err != nil { return fmt.Errorf("Error setting generation: %s", err) } @@ -578,12 +541,6 @@ func resourceStorageBucketObjectDelete(d *schema.ResourceData, meta interface{}) return err } - if deletionPolicy := d.Get("deletion_policy"); deletionPolicy == 
"ABANDON" { - log.Printf("[WARN] Object %q deletion_policy is set to 'ABANDON', object deletion has been abandoned", d.Id()) - d.SetId("") - return nil - } - bucket := d.Get("bucket").(string) name := d.Get("name").(string) @@ -669,11 +626,6 @@ func flattenObjectRetention(objectRetention *storage.ObjectRetention) []map[stri func resourceStorageBucketObjectCustomizeDiff(ctx context.Context, d *schema.ResourceDiff, meta interface{}) error { localMd5Hash := "" - - if (d.GetRawConfig().GetAttr("source_md5hash") == cty.UnknownVal(cty.String)) || d.HasChange("source_md5hash") { - return showDiff(d) - } - if source, ok := d.GetOkExists("source"); ok { localMd5Hash = tpgresource.GetFileMd5Hash(source.(string)) } @@ -688,10 +640,7 @@ func resourceStorageBucketObjectCustomizeDiff(ctx context.Context, d *schema.Res if ok && oldMd5Hash == localMd5Hash { return nil } - return showDiff(d) -} -func showDiff(d *schema.ResourceDiff) error { err := d.SetNewComputed("md5hash") if err != nil { return fmt.Errorf("Error re-setting md5hash: %s", err) @@ -704,6 +653,5 @@ func showDiff(d *schema.ResourceDiff) error { if err != nil { return fmt.Errorf("Error re-setting generation: %s", err) } - return nil } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go index 7f9fb67cbc0c..92fc58b2b487 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object_test.go @@ -129,26 +129,6 @@ func TestAccStorageObject_content(t *testing.T) { "google_storage_bucket_object.object", "storage_class", "STANDARD"), ), }, - { - Config: testGoogleStorageBucketsObjectEmptyContentType(bucketName), - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleStorageObject(t, bucketName, objectName, dataMd5), - resource.TestCheckResourceAttr( - 
"google_storage_bucket_object.object", "content_type", ""), - resource.TestCheckResourceAttr( - "google_storage_bucket_object.object", "storage_class", "STANDARD"), - ), - }, - { - Config: testGoogleStorageBucketsObjectContent(bucketName), - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleStorageObject(t, bucketName, objectName, dataMd5), - resource.TestCheckResourceAttr( - "google_storage_bucket_object.object", "content_type", "text/plain; charset=utf-8"), - resource.TestCheckResourceAttr( - "google_storage_bucket_object.object", "storage_class", "STANDARD"), - ), - }, }, }) } @@ -529,116 +509,6 @@ func TestResourceStorageBucketObjectUpdate_ContentChange(t *testing.T) { }) } -func TestAccStorageObject_sourceMd5Hash(t *testing.T) { - t.Parallel() - - bucketName := acctest.TestBucketName(t) - - data := []byte("data data data") - - writeMd5 := func(data []byte) string { - h := md5.New() - if _, err := h.Write(data); err != nil { - t.Errorf("error calculating md5: %v", err) - } - dataMd5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) - return dataMd5 - } - - dataMd5 := writeMd5(data) - - updatedata := []byte("datum") - updatedDataMd5 := writeMd5(updatedata) - - testFile := getNewTmpTestFile(t, "tf-test") - if err := ioutil.WriteFile(testFile.Name(), data, 0644); err != nil { - t.Errorf("error writing file: %v", err) - } - - updateMd5 := []byte("sample") - newMd5 := writeMd5(updateMd5) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccStorageObjectDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()), - Check: testAccCheckGoogleStorageObject(t, bucketName, objectName, dataMd5), - }, - { - PreConfig: func() { - if err := ioutil.WriteFile(testFile.Name(), updatedata, 0644); err != nil { - t.Errorf("error writing file: %v", err) - } - }, - Config: 
testGoogleStorageBucketsObjectFileMd5(bucketName, testFile.Name(), updatedDataMd5), - Check: testAccCheckGoogleStorageObject(t, bucketName, objectName, updatedDataMd5), - }, - { - Config: testGoogleStorageBucketsObjectFileMd5(bucketName, testFile.Name(), newMd5), - Check: testAccCheckGoogleStorageObject(t, bucketName, objectName, updatedDataMd5), - }, - }, - }) -} - -func TestAccStorageObject_knownAfterApply(t *testing.T) { - t.Parallel() - - bucketName := acctest.TestBucketName(t) - destinationFilePath := getNewTmpTestFile(t, "tf-test-apply-") - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccStorageObjectDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "local": resource.ExternalProvider{ - VersionConstraint: "> 2.5.0", - }, - }, - Steps: []resource.TestStep{ - { - Config: testGoogleStorageBucketObject(bucketName, "first", destinationFilePath.Name()), - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleStorageValidOutput(t), - ), - }, - { - Config: testGoogleStorageBucketObjectKnownAfterApply(bucketName, "second", destinationFilePath.Name()), - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleStorageValidOutput(t), - ), - }, - }, - }) -} - -func TestAccStorageObject_objectDeletionPolicy(t *testing.T) { - t.Parallel() - - bucketName := acctest.TestBucketName(t) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccStorageObjectDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testGoogleStorageBucketsObjectDeletionPolicy(bucketName, "samplecontent"), - }, - { - Config: testGoogleStorageBucketsObjectAbandon(bucketName), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageObjectExists(t, bucketName), - ), - }, - }, - }) -} - func 
testAccCheckGoogleStorageObject(t *testing.T, bucket, object, md5 string) resource.TestCheckFunc { return testAccCheckGoogleStorageObjectWithEncryption(t, bucket, object, md5, "") } @@ -751,23 +621,6 @@ resource "google_storage_bucket_object" "object" { `, bucketName, objectName, content) } -func testGoogleStorageBucketsObjectEmptyContentType(bucketName string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" - force_destroy = true -} - -resource "google_storage_bucket_object" "object" { - name = "%s" - bucket = google_storage_bucket.bucket.name - content = "%s" - force_empty_content_type = true -} -`, bucketName, objectName, content) -} - func testGoogleStorageBucketsFolder(bucketName, folderName string) string { return fmt.Sprintf(` resource "google_storage_bucket" "bucket" { @@ -994,151 +847,3 @@ func getNewTmpTestFile(t *testing.T, prefix string) *os.File { } return testFile } - -func testGoogleStorageBucketsObjectFileMd5(bucketName, sourceFilename, md5hash string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" -} - -resource "google_storage_bucket_object" "bo_1861894" { - name = "%s" - source_md5hash = "%s" - bucket = google_storage_bucket.bucket.name - source = "%s" -} -`, bucketName, objectName, md5hash, sourceFilename) -} - -func testAccCheckGoogleStorageValidOutput(t *testing.T) resource.TestCheckFunc { - return func(s *terraform.State) error { - var root = s.Modules[0] - var outputs, ok = root.Outputs["valid"] - - if !ok { - return fmt.Errorf("Error: `valid` output missing") - } - - if outputs == nil { - return fmt.Errorf("Terraform output `valid` does not exists") - } - - if outputs.Value == false { - return fmt.Errorf("File content is not valid") - } - return nil - } -} - -func testGoogleStorageBucketObject(bucketName, content, filename string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" 
- location = "US" -} - -resource "google_storage_bucket_object" "changing" { - bucket = google_storage_bucket.bucket.name - name = "dynamic" - content = "%s" -} - -resource "local_file" "test" { - content = jsonencode(google_storage_bucket_object.changing.content) - filename = "%s" -} - -resource "google_storage_bucket_object" "bo" { - source = local_file.test.filename - bucket = google_storage_bucket.bucket.name - name = "test-file-bucket" -} - -data "google_storage_bucket_object_content" "bo" { - bucket = google_storage_bucket_object.bo.bucket - name = google_storage_bucket_object.bo.name - depends_on = [google_storage_bucket_object.bo] -} - -output "valid" { - value = nonsensitive(local_file.test.content) == data.google_storage_bucket_object_content.bo.content -} -`, bucketName, content, filename) -} - -func testGoogleStorageBucketObjectKnownAfterApply(bucketName, content, filename string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" -} - -resource "google_storage_bucket_object" "changing" { - bucket = google_storage_bucket.bucket.name - name = "dynamic" - content = "%s" -} - -resource "local_file" "test" { - content = jsonencode(google_storage_bucket_object.changing.content) - filename = "%s" -} - -resource "google_storage_bucket_object" "bo" { - source = local_file.test.filename - source_md5hash = local_file.test.content_md5 - bucket = google_storage_bucket.bucket.name - name = "test-file-bucket" -} - -data "google_storage_bucket_object_content" "bo" { - bucket = google_storage_bucket_object.bo.bucket - name = google_storage_bucket_object.bo.name - depends_on = [google_storage_bucket_object.bo] -} - -output "valid" { - value = nonsensitive(local_file.test.content) == data.google_storage_bucket_object_content.bo.content -} -`, bucketName, content, filename) -} - -func testGoogleStorageBucketsObjectDeletionPolicy(bucketName string, customContent string) string { - return fmt.Sprintf(` -resource 
"google_storage_bucket" "bucket" { - name = "%s" - location = "US" -} - -resource "google_storage_bucket_object" "object" { - name = "%s" - bucket = google_storage_bucket.bucket.name - content = "%s" - deletion_policy = "ABANDON" -} -`, bucketName, objectName, customContent) -} - -func testGoogleStorageBucketsObjectAbandon(bucketName string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "US" - force_destroy = true -} -`, bucketName) -} - -func testAccCheckStorageObjectExists(t *testing.T, bucketName string) resource.TestCheckFunc { - return func(s *terraform.State) error { - - config := acctest.GoogleProviderConfig(t) - - _, err := config.NewStorageClient(config.UserAgent).Objects.Get(bucketName, objectName).Do() - if err != nil { - return err - } - return nil - } -} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.tmpl similarity index 87% rename from mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go rename to mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.tmpl index 40aec7f963d9..7176ab4ad65b 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.tmpl @@ -140,7 +140,7 @@ func TestAccStorageBucket_AutoclassDiffSuppress(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_basicWithAutoclass(bucketName, false), + Config: testAccStorageBucket_basicWithAutoclass(bucketName,false), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -153,7 +153,7 @@ func TestAccStorageBucket_AutoclassDiffSuppress(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: 
testAccStorageBucket_basicWithAutoclass(bucketName, true), + Config: testAccStorageBucket_basicWithAutoclass(bucketName,true), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -353,7 +353,7 @@ func TestAccStorageBucket_dualLocation_rpo(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_dualLocation_rpo(bucketName, "ASYNC_TURBO"), + Config: testAccStorageBucket_dualLocation_rpo(bucketName,"ASYNC_TURBO"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_storage_bucket.bucket", "rpo", "ASYNC_TURBO"), @@ -366,7 +366,7 @@ func TestAccStorageBucket_dualLocation_rpo(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_dualLocation_rpo(bucketName, "DEFAULT"), + Config: testAccStorageBucket_dualLocation_rpo(bucketName,"DEFAULT"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "google_storage_bucket.bucket", "rpo", "DEFAULT"), @@ -427,7 +427,7 @@ func TestAccStorageBucket_lifecycleRulesMultiple(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero", "lifecycle_rule.2.condition.0.send_age_if_zero", "lifecycle_rule.3.condition.0.send_age_if_zero", "lifecycle_rule.4.condition.0.send_age_if_zero", "lifecycle_rule.5.condition.0.send_age_if_zero", "lifecycle_rule.6.condition.0.send_age_if_zero", "lifecycle_rule.7.condition.0.send_age_if_zero", "lifecycle_rule.8.condition.0.send_age_if_zero", "lifecycle_rule.9.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: 
[]string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero","lifecycle_rule.2.condition.0.send_age_if_zero","lifecycle_rule.3.condition.0.send_age_if_zero","lifecycle_rule.4.condition.0.send_age_if_zero","lifecycle_rule.5.condition.0.send_age_if_zero","lifecycle_rule.6.condition.0.send_age_if_zero","lifecycle_rule.7.condition.0.send_age_if_zero","lifecycle_rule.8.condition.0.send_age_if_zero","lifecycle_rule.9.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRulesMultiple_update(bucketName), @@ -436,7 +436,7 @@ func TestAccStorageBucket_lifecycleRulesMultiple(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero", "lifecycle_rule.2.condition.0.send_age_if_zero", "lifecycle_rule.3.condition.0.send_age_if_zero", "lifecycle_rule.4.condition.0.send_age_if_zero", "lifecycle_rule.5.condition.0.send_age_if_zero", "lifecycle_rule.6.condition.0.send_age_if_zero", "lifecycle_rule.7.condition.0.send_age_if_zero", "lifecycle_rule.8.condition.0.send_age_if_zero", "lifecycle_rule.9.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero","lifecycle_rule.2.condition.0.send_age_if_zero","lifecycle_rule.3.condition.0.send_age_if_zero","lifecycle_rule.4.condition.0.send_age_if_zero","lifecycle_rule.5.condition.0.send_age_if_zero","lifecycle_rule.6.condition.0.send_age_if_zero","lifecycle_rule.7.condition.0.send_age_if_zero","lifecycle_rule.8.condition.0.send_age_if_zero","lifecycle_rule.9.condition.0.send_age_if_zero"}, }, }, }) @@ -465,7 +465,7 @@ func TestAccStorageBucket_lifecycleRuleStateLive(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero"}, }, }, }) @@ -494,7 +494,7 @@ func TestAccStorageBucket_lifecycleRuleStateArchived(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), @@ -508,7 +508,7 @@ func TestAccStorageBucket_lifecycleRuleStateArchived(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, }, }) @@ -537,7 +537,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateLive(bucketName), @@ -551,7 +551,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: 
[]string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateAny(bucketName), @@ -565,7 +565,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), @@ -579,7 +579,7 @@ func TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, }, }) @@ -695,6 +695,21 @@ func TestAccStorageBucket_storageClass(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"force_destroy"}, }, + { + Config: testAccStorageBucket_storageClass(bucketName, "REGIONAL", "US-CENTRAL1"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + // Location change causes recreate + testAccCheckStorageBucketWasRecreated(&updated, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, }, }) } @@ -783,7 +798,7 @@ func TestAccStorageBucket_update(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: 
[]string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_customAttributes_withLifecycle2(bucketName), @@ -799,7 +814,7 @@ func TestAccStorageBucket_update(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero", "lifecycle_rule.1.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero","lifecycle_rule.1.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_customAttributes_withLifecycle1Update(bucketName), @@ -815,7 +830,7 @@ func TestAccStorageBucket_update(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy", "lifecycle_rule.0.condition.0.send_age_if_zero"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.send_age_if_zero"}, }, { Config: testAccStorageBucket_customAttributes(bucketName), @@ -1447,7 +1462,7 @@ func TestAccStorageBucket_SoftDeletePolicy(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_SoftDeletePolicy(bucketName, 7776000), + Config: testAccStorageBucket_SoftDeletePolicy(bucketName,7776000), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -1462,7 +1477,7 @@ func TestAccStorageBucket_SoftDeletePolicy(t *testing.T) { ImportStateVerifyIgnore: []string{"force_destroy"}, }, { - Config: testAccStorageBucket_SoftDeletePolicy(bucketName, 0), + Config: testAccStorageBucket_SoftDeletePolicy(bucketName,0), Check: resource.ComposeTestCheckFunc( testAccCheckStorageBucketExists( t, "google_storage_bucket.bucket", bucketName, &bucket), @@ -1551,97 +1566,6 @@ func TestAccStorageBucket_hns_force_destroy(t 
*testing.T) { }) } -func TestAccStorageBucket_IPFilter(t *testing.T) { - t.Parallel() - - var bucket storage.Bucket - var disabled storage.Bucket - var noIPfilter storage.Bucket - bucketName := fmt.Sprintf("tf-test-ip-filter-bucket-%d", acctest.RandInt(t)) - nwSuffix := acctest.RandString(t, 8) - project := envvar.GetTestProjectFromEnv() - serviceAccount := envvar.GetTestServiceAccountFromEnv(t) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccStorageBucketDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccStorageBucket_withoutIPFilter(bucketName), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageBucketExists( - t, "google_storage_bucket.bucket", bucketName, &noIPfilter), - ), - }, - { - ResourceName: "google_storage_bucket.bucket", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy"}, - }, - { - Config: testAccStorageBucket_IPFilter( - bucketName, nwSuffix, project, serviceAccount, - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageBucketExists( - t, "google_storage_bucket.bucket", bucketName, &bucket), - ), - }, - { - ResourceName: "google_storage_bucket.bucket", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy"}, - }, - { - Config: testAccStorageBucket_IPFilter_update( - bucketName, nwSuffix, project, serviceAccount, - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageBucketExists( - t, "google_storage_bucket.bucket", bucketName, &bucket), - ), - }, - { - ResourceName: "google_storage_bucket.bucket", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy"}, - }, - { - Config: testAccStorageBucket_IPFilter_disable(bucketName, nwSuffix, project, serviceAccount), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageBucketExists( 
- t, "google_storage_bucket.bucket", bucketName, &disabled), - testAccCheckStorageBucketWasUpdated(&disabled, &bucket), - ), - }, - { - ResourceName: "google_storage_bucket.bucket", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy"}, - }, - { - Config: testAccStorageBucket_withoutIPFilter(bucketName), - Check: resource.ComposeTestCheckFunc( - testAccCheckStorageBucketExists( - t, "google_storage_bucket.bucket", bucketName, &noIPfilter), - testAccCheckStorageBucketWasUpdated(&noIPfilter, &disabled), - ), - }, - { - ResourceName: "google_storage_bucket.bucket", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy"}, - }, - }, - }) -} - func testAccCheckStorageBucketPutFolderItem(t *testing.T, bucketName string) resource.TestCheckFunc { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -1970,7 +1894,7 @@ resource "google_storage_bucket" "bucket" { `, bucketName) } -func testAccStorageBucket_dualLocation_rpo(bucketName string, rpo string) string { +func testAccStorageBucket_dualLocation_rpo(bucketName string,rpo string) string { return fmt.Sprintf(` resource "google_storage_bucket" "bucket" { name = "%s" @@ -1981,7 +1905,7 @@ resource "google_storage_bucket" "bucket" { } rpo = "%s" } -`, bucketName, rpo) +`, bucketName,rpo) } func testAccStorageBucket_customAttributes(bucketName string) string { @@ -2680,7 +2604,7 @@ resource "google_storage_bucket" "bucket" { force_destroy = true retention_policy { - retention_period = "10" + retention_period = 10 } } `, bucketName) @@ -2695,14 +2619,14 @@ resource "google_storage_bucket" "bucket" { retention_policy { is_locked = true - retention_period = "10" + retention_period = 10 } } `, bucketName) } func testAccStorageBucket_SoftDeletePolicy(bucketName string, duration int) string { - return fmt.Sprintf(` + return fmt.Sprintf(` resource "google_storage_bucket" "bucket" { name = "%s" location = "US" @@ 
-2788,173 +2712,8 @@ resource "google_storage_bucket" "bucket" { force_destroy = true retention_policy { - retention_period = "3600" - } -} -`, bucketName) -} - -func testAccStorageBucket_IPFilter(bucketName string, nwSuffix string, project string, serviceAccount string) string { - return fmt.Sprintf(` -resource "google_compute_network" "vpc_gcs_ipfilter1" { - name = "tf-test-storage-ipfilter1-%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "ipfilter_1" { - name = "tf-test-storage-ipfilter1-%s" - ip_cidr_range = "10.201.0.0/16" - region = "us-central1" - network = google_compute_network.vpc_gcs_ipfilter1.id -} - -resource "google_project_iam_custom_role" "ipfilter_exempt_role" { - role_id = "_%s" - title = "IP Filter Exempt Role" - description = "A custom role to bypass IP Filtering on GCS bucket." - permissions = ["storage.buckets.exemptFromIpFilter"] -} - -resource "google_project_iam_member" "primary" { - project = "%s" - role = "projects/%s/roles/${google_project_iam_custom_role.ipfilter_exempt_role.role_id}" - member = "serviceAccount:%s" -} - -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "us-central1" - uniform_bucket_level_access = true - force_destroy = true - ip_filter { - mode = "Enabled" - public_network_source { - allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] - } - vpc_network_sources { - network = google_compute_network.vpc_gcs_ipfilter1.id - allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] - } - allow_all_service_agent_access = true - } -} -`, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) -} - -func testAccStorageBucket_IPFilter_update(bucketName string, nwSuffix string, project string, serviceAccount string) string { - return fmt.Sprintf(` -resource "google_compute_network" "vpc_gcs_ipfilter1" { - name = "tf-test-storage-ipfilter1-%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "ipfilter_1" { - name = "tf-test-storage-ipfilter1-%s" 
- ip_cidr_range = "10.201.0.0/16" - region = "us-central1" - network = google_compute_network.vpc_gcs_ipfilter1.id -} - -resource "google_project_iam_custom_role" "ipfilter_exempt_role" { - role_id = "_%s" - title = "IP Filter Exempt Role" - description = "A custom role to bypass IP Filtering on GCS bucket." - permissions = ["storage.buckets.exemptFromIpFilter"] -} - -resource "google_project_iam_member" "primary" { - project = "%s" - role = "projects/%s/roles/${google_project_iam_custom_role.ipfilter_exempt_role.role_id}" - member = "serviceAccount:%s" -} - -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "us-central1" - uniform_bucket_level_access = true - force_destroy = true - ip_filter { - mode = "Enabled" - vpc_network_sources { - network = google_compute_network.vpc_gcs_ipfilter1.id - allowed_ip_cidr_ranges = ["0.0.0.0/0"] - } - allow_cross_org_vpcs = false - allow_all_service_agent_access = false + retention_period = 3600 } } -`, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) -} - -func testAccStorageBucket_IPFilter_disable(bucketName string, nwSuffix string, project string, serviceAccount string) string { - return fmt.Sprintf(` -resource "google_compute_network" "vpc_gcs_ipfilter1" { - name = "tf-test-storage-ipfilter1-%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "ipfilter_1" { - name = "tf-test-storage-ipfilter1-%s" - ip_cidr_range = "10.201.0.0/16" - region = "us-central1" - network = google_compute_network.vpc_gcs_ipfilter1.id -} - -resource "google_compute_network" "vpc_gcs_ipfilter2" { - name = "tf-test-storage-ipfilter2-%s" - auto_create_subnetworks = false -} - -resource "google_compute_subnetwork" "ipfilter_2" { - name = "tf-test-storage-ipfilter2-%s" - ip_cidr_range = "10.202.0.0/16" - region = "us-central1" - network = google_compute_network.vpc_gcs_ipfilter2.id -} - -resource "google_project_iam_custom_role" "ipfilter_exempt_role" { - role_id = "_%s" - title 
= "IP Filter Exempt Role" - description = "A custom role to bypass IP Filtering on GCS bucket." - permissions = ["storage.buckets.exemptFromIpFilter"] -} - -resource "google_project_iam_member" "primary" { - project = "%s" - role = "projects/%s/roles/${google_project_iam_custom_role.ipfilter_exempt_role.role_id}" - member = "serviceAccount:%s" -} - -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "us-central1" - uniform_bucket_level_access = true - force_destroy = true - ip_filter { - mode = "Disabled" - public_network_source { - allowed_ip_cidr_ranges = ["192.0.2.0/24", "2001:db8::/32"] - } - vpc_network_sources { - network = google_compute_network.vpc_gcs_ipfilter1.id - allowed_ip_cidr_ranges = ["0.0.0.0/0", "::/0"] - } - vpc_network_sources { - network = google_compute_network.vpc_gcs_ipfilter2.id - allowed_ip_cidr_ranges = ["10.201.0.0/16", "10.202.0.0/16"] - } - } -} -`, nwSuffix, nwSuffix, nwSuffix, nwSuffix, nwSuffix, project, project, serviceAccount, bucketName) -} - -func testAccStorageBucket_withoutIPFilter(bucketName string) string { - return fmt.Sprintf(` -resource "google_storage_bucket" "bucket" { - name = "%s" - location = "us-central1" - uniform_bucket_level_access = true - force_destroy = true -} `, bucketName) } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_notification.go b/mmv1/third_party/terraform/services/storage/resource_storage_notification.go new file mode 100644 index 000000000000..1bd4d46c84e5 --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/resource_storage_notification.go @@ -0,0 +1,196 @@ +package storage + +import ( + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/storage/v1" +) + +func ResourceStorageNotification() *schema.Resource { + return &schema.Resource{ + Create: resourceStorageNotificationCreate, + Read: resourceStorageNotificationRead, + Delete: resourceStorageNotificationDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "bucket": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, + }, + + "payload_format": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"JSON_API_V1", "NONE"}, false), + Description: `The desired content of the Payload. One of "JSON_API_V1" or "NONE".`, + }, + + "topic": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + Description: `The Cloud Pub/Sub topic to which this subscription publishes. Expects either the topic name, assumed to belong to the default GCP provider project, or the project-level name, i.e. projects/my-gcp-project/topics/my-topic or my-topic. If the project is not set in the provider, you will need to use the project-level name.`, + }, + + "custom_attributes": { + Type: schema.TypeMap, + Optional: true, + ForceNew: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: ` A set of key/value attribute pairs to attach to each Cloud Pub/Sub message published for this notification subscription`, + }, + + "event_types": { + Type: schema.TypeSet, + Optional: true, + ForceNew: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringInSlice([]string{ + "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"}, + false), + }, + Description: `List of event type filters for this notification config. 
If not specified, Cloud Storage will send notifications for all event types. The valid types are: "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"`, + }, + + "object_name_prefix": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `Specifies a prefix path filter for this notification config. Cloud Storage will only send notifications for objects in this bucket whose names begin with the specified prefix.`, + }, + + "notification_id": { + Type: schema.TypeString, + Computed: true, + Description: `The ID of the created notification.`, + }, + + "self_link": { + Type: schema.TypeString, + Computed: true, + Description: `The URI of the created resource.`, + }, + }, + UseJSONNumber: true, + } +} + +func resourceStorageNotificationCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + bucket := d.Get("bucket").(string) + + topicName := d.Get("topic").(string) + computedTopicName := pubsub.GetComputedTopicName("", topicName) + if computedTopicName != topicName { + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + computedTopicName = pubsub.GetComputedTopicName(project, topicName) + } + + storageNotification := &storage.Notification{ + CustomAttributes: tpgresource.ExpandStringMap(d, "custom_attributes"), + EventTypes: tpgresource.ConvertStringSet(d.Get("event_types").(*schema.Set)), + ObjectNamePrefix: d.Get("object_name_prefix").(string), + PayloadFormat: d.Get("payload_format").(string), + Topic: computedTopicName, + } + + res, err := config.NewStorageClient(userAgent).Notifications.Insert(bucket, storageNotification).Do() + if err != nil { + return fmt.Errorf("Error creating notification config for bucket %s: %v", bucket, err) + } + + d.SetId(fmt.Sprintf("%s/notificationConfigs/%s", bucket, res.Id)) + + return 
resourceStorageNotificationRead(d, meta) +} + +func resourceStorageNotificationRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + bucket, notificationID := ResourceStorageNotificationParseID(d.Id()) + + res, err := config.NewStorageClient(userAgent).Notifications.Get(bucket, notificationID).Do() + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Notification configuration %s for bucket %s", notificationID, bucket)) + } + + if err := d.Set("bucket", bucket); err != nil { + return fmt.Errorf("Error setting bucket: %s", err) + } + if err := d.Set("payload_format", res.PayloadFormat); err != nil { + return fmt.Errorf("Error setting payload_format: %s", err) + } + if err := d.Set("topic", res.Topic); err != nil { + return fmt.Errorf("Error setting topic: %s", err) + } + if err := d.Set("object_name_prefix", res.ObjectNamePrefix); err != nil { + return fmt.Errorf("Error setting object_name_prefix: %s", err) + } + if err := d.Set("event_types", res.EventTypes); err != nil { + return fmt.Errorf("Error setting event_types: %s", err) + } + if err := d.Set("notification_id", notificationID); err != nil { + return fmt.Errorf("Error setting notification_id: %s", err) + } + if err := d.Set("self_link", res.SelfLink); err != nil { + return fmt.Errorf("Error setting self_link: %s", err) + } + if err := d.Set("custom_attributes", res.CustomAttributes); err != nil { + return fmt.Errorf("Error setting custom_attributes: %s", err) + } + + return nil +} + +func resourceStorageNotificationDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + bucket, notificationID := ResourceStorageNotificationParseID(d.Id()) + + err = 
config.NewStorageClient(userAgent).Notifications.Delete(bucket, notificationID).Do() + if err != nil { + return fmt.Errorf("Error deleting notification configuration %s for bucket %s: %v", notificationID, bucket, err) + } + + return nil +} + +func ResourceStorageNotificationParseID(id string) (string, string) { + //bucket, NotificationID + parts := strings.Split(id, "/") + + return parts[0], parts[2] +} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go index 1e8ffccc5b90..a3650a829875 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go @@ -27,7 +27,7 @@ func TestAccStorageNotification_basic(t *testing.T) { var notification storage.Notification bucketName := acctest.TestBucketName(t) topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt(t)) - topic := fmt.Sprintf("projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -71,7 +71,7 @@ func TestAccStorageNotification_withEventsAndAttributes(t *testing.T) { var notification storage.Notification bucketName := acctest.TestBucketName(t) topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt(t)) - topic := fmt.Sprintf("projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) eventType1 := "OBJECT_FINALIZE" eventType2 := "OBJECT_ARCHIVE" @@ -115,12 +115,9 @@ func testAccStorageNotificationDestroyProducer(t *testing.T) func(s *terraform.S continue } - bucket, notificationID, err := 
tpgstorage.ParseStorageNotificationID(rs.Primary.ID) - if err != nil { - return err - } + bucket, notificationID := tpgstorage.ResourceStorageNotificationParseID(rs.Primary.ID) - _, err = config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() + _, err := config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() if err == nil { return fmt.Errorf("Notification configuration still exists") } @@ -143,10 +140,7 @@ func testAccCheckStorageNotificationExists(t *testing.T, resource string, notifi config := acctest.GoogleProviderConfig(t) - bucket, notificationID, err := tpgstorage.ParseStorageNotificationID(rs.Primary.ID) - if err != nil { - return err - } + bucket, notificationID := tpgstorage.ResourceStorageNotificationParseID(rs.Primary.ID) found, err := config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() if err != nil { diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go index 851ea03fba5e..e23f5903bb0b 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_object_acl_test.go @@ -337,7 +337,7 @@ func TestAccStorageObjectAcl_noOwner(t *testing.T) { t.Errorf("error writing file: %v", err) } - // TODO we can leave this one using the SDK provider as we need to overwrite the configure function, + // TODO (mbang) we can leave this one using the SDK provider as we need to overwrite the configure function, // which we can't do in the plugin-framework version of the provider. 
When this resource does get updated to // use plugin-framework, best I can guess we'll want to do something similar to NewFrameworkTestProvider where // we have a nested production version of the provider, we re-write configure to call the production version and diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go new file mode 100644 index 000000000000..c87ba75ecbbf --- /dev/null +++ b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_folder_intelligence_config.go @@ -0,0 +1,40 @@ +package storagecontrol + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleStorageControlFolderIntelligenceConfig() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageControlFolderIntelligenceConfig().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + + return &schema.Resource{ + Read: dataSourceGoogleStorageControlFolderIntelligenceConfigRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleStorageControlFolderIntelligenceConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "folders/{{name}}/locations/global/intelligenceConfig") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceStorageControlFolderIntelligenceConfigRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git 
a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go new file mode 100644 index 000000000000..9730d488ded1 --- /dev/null +++ b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_organization_intelligence_config.go @@ -0,0 +1,40 @@ +package storagecontrol + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleStorageControlOrganizationIntelligenceConfig() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageControlOrganizationIntelligenceConfig().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + + return &schema.Resource{ + Read: dataSourceGoogleStorageControlOrganizationIntelligenceConfigRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleStorageControlOrganizationIntelligenceConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "organizations/{{name}}/locations/global/intelligenceConfig") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceStorageControlOrganizationIntelligenceConfigRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go new file mode 100644 index 000000000000..f74663d70839 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config.go @@ -0,0 +1,40 @@ +package storagecontrol + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleStorageControlProjectIntelligenceConfig() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceStorageControlProjectIntelligenceConfig().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + + return &schema.Resource{ + Read: dataSourceGoogleStorageControlProjectIntelligenceConfigRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleStorageControlProjectIntelligenceConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{name}}/locations/global/intelligenceConfig") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceStorageControlProjectIntelligenceConfigRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go index 8766331ab785..a9b65169adc0 100644 --- a/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go +++ b/mmv1/third_party/terraform/services/storagecontrol/data_source_storage_control_project_intelligence_config_test.go @@ -13,7 +13,7 @@ func TestAccDataSourceGoogleStorageControlProjectIntelligenceConfig_basic(t *tes context := map[string]interface{}{ 
"random_suffix": acctest.RandString(t, 10), - "project": acctest.BootstrapProject(t, "tf-boot-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, + "project": acctest.BootstrapProject(t, "tf-test-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, } acctest.VcrTest(t, resource.TestCase{ diff --git a/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go b/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go index 848702b5c08b..b4535d9c40df 100644 --- a/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go +++ b/mmv1/third_party/terraform/services/storagecontrol/resource_storage_control_project_intelligence_config_test.go @@ -14,7 +14,7 @@ func TestAccStorageControlProjectIntelligenceConfig_update(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "project": acctest.BootstrapProject(t, "tf-boot-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, + "project": acctest.BootstrapProject(t, "tf-test-stor-int-", envvar.GetTestBillingAccountFromEnv(t), []string{"storage.googleapis.com"}).ProjectId, "random_suffix": acctest.RandString(t, 10), } diff --git a/mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go b/mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go deleted file mode 100644 index 48194cdc1d38..000000000000 --- a/mmv1/third_party/terraform/services/storageinsights/resource_storage_insights_dataset_config_test.go +++ /dev/null @@ -1,331 +0,0 @@ -package storageinsights_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - 
"github.com/hashicorp/terraform-provider-google/google/acctest" - - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccStorageInsightsDatasetConfigExample_update_scope(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccStorageInsightsDatasetConfigExample_update_project(context), - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_org(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_folder(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_org(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", 
plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_project(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_folder(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_project(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - }, - }) -} - -func TestAccStorageInsightsDatasetConfigExample_update_filters(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccStorageInsightsDatasetConfigExample_full_filters(context), - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_filters(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location"}, - }, - }, - }) -} - -func TestAccStorageInsightsDatasetConfigExample_update_link(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - "org_id": envvar.GetTestOrgFromEnv(t), - "project_id": envvar.GetTestProjectFromEnv(), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccStorageInsightsDatasetConfigExample_full_link(context), - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location", "link_dataset"}, - }, - { - Config: testAccStorageInsightsDatasetConfigExample_update_unlink(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_storage_insights_dataset_config.config", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_storage_insights_dataset_config.config", - ImportState: 
true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"dataset_config_id", "location", "link_dataset"}, - }, - }, - }) -} - -func testAccStorageInsightsDatasetConfigExample_update_project(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - source_projects { - project_numbers = ["123", "456"] - } - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } -} -`, context) -} - -func testAccStorageInsightsDatasetConfigExample_update_folder(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - source_folders { - folder_numbers = ["123", "456"] - } - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } -} -`, context) -} - -func testAccStorageInsightsDatasetConfigExample_update_org(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - organization_scope = true - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } -} -`, context) -} - -func testAccStorageInsightsDatasetConfigExample_full_link(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - organization_scope = true - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } - link_dataset = true - organization_number = "%{org_id}" - project = "%{project_id}" -} -`, context) -} - -func testAccStorageInsightsDatasetConfigExample_update_unlink(context 
map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - organization_scope = true - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } - link_dataset = false - project = "%{project_id}" -} -`, context) -} - -func testAccStorageInsightsDatasetConfigExample_full_filters(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - organization_scope = true - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } - description = "A sample description for dataset" - include_newly_created_buckets = true - include_cloud_storage_locations { - locations = ["us-east1", "europe-west2"] - } - exclude_cloud_storage_buckets { - cloud_storage_buckets { - bucket_name = "gs://sample-bucket1/" - } - cloud_storage_buckets { - bucket_prefix_regex = "gs://sample*/" - } - } -} -`, context) -} - -func testAccStorageInsightsDatasetConfigExample_update_filters(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_storage_insights_dataset_config" "config" { - location = "us-central1" - dataset_config_id = "tf_test_my_config%{random_suffix}" - retention_period_days = 1 - organization_scope = true - identity { - type = "IDENTITY_TYPE_PER_CONFIG" - } - include_newly_created_buckets = false - exclude_cloud_storage_locations { - locations = ["us-east1", "europe-west2"] - } - include_cloud_storage_buckets { - cloud_storage_buckets { - bucket_name = "gs://sample-bucket1/" - } - cloud_storage_buckets { - bucket_prefix_regex = "gs://sample*/" - } - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go 
b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl similarity index 90% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl index 41bd024723ce..5ba48321dbdf 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl @@ -4,7 +4,6 @@ import ( "fmt" "log" "reflect" - "regexp" "strings" "time" @@ -99,13 +98,13 @@ var ( awsS3AuthKeys = []string{ "transfer_spec.0.aws_s3_data_source.0.aws_access_key", "transfer_spec.0.aws_s3_data_source.0.role_arn", - "transfer_spec.0.aws_s3_data_source.0.credentials_secret", } + {{- if ne $.TargetVersionName "ga" }} azureOptionCredentials = []string{ "transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials", "transfer_spec.0.azure_blob_storage_data_source.0.credentials_secret", - "transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config", } + {{- end }} ) func ResourceStorageTransferJob() *schema.Resource { @@ -142,16 +141,11 @@ func ResourceStorageTransferJob() *schema.Resource { ForceNew: true, Description: `The project in which the resource belongs. If it is not provided, the provider project is used.`, }, - "service_account": { - Type: schema.TypeString, - Optional: true, - Description: `The user-managed service account to run the job. If this field is specified, the given service account is granted the necessary permissions to all applicable resources (e.g. 
GCS buckets) required by the job.`, - }, "event_stream": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - ConflictsWith: []string{"schedule"}, + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + ConflictsWith: []string{"schedule"}, DiffSuppressFunc: diffSuppressEventStream, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -180,7 +174,7 @@ func ResourceStorageTransferJob() *schema.Resource { MaxItems: 1, Optional: true, ConflictsWith: []string{"transfer_spec", "schedule"}, - ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, + ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "object_conditions": objectConditionsSchema(replicationSpecObjectConditionsKeys), @@ -206,11 +200,11 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `Replication specification.`, }, "transfer_spec": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, + Type: schema.TypeList, + Optional: true, + MaxItems: 1, ConflictsWith: []string{"replication_spec"}, - ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, + ExactlyOneOf: []string{"transfer_spec", "replication_spec"}, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "object_conditions": objectConditionsSchema(transferSpecObjectConditionsKeys), @@ -334,8 +328,8 @@ func ResourceStorageTransferJob() *schema.Resource { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "log_actions": { - Type: schema.TypeList, - Optional: true, + Type: schema.TypeList, + Optional: true, AtLeastOneOf: []string{"logging_config.0.enable_on_prem_gcs_transfer_logs", "logging_config.0.log_actions", "logging_config.0.log_action_states"}, Elem: &schema.Schema{ Type: schema.TypeString, @@ -344,8 +338,8 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `Specifies the actions to be logged. 
Not supported for transfers with PosifxFilesystem data sources; use enable_on_prem_gcs_transfer_logs instead.`, }, "log_action_states": { - Type: schema.TypeList, - Optional: true, + Type: schema.TypeList, + Optional: true, AtLeastOneOf: []string{"logging_config.0.enable_on_prem_gcs_transfer_logs", "logging_config.0.log_actions", "logging_config.0.log_action_states"}, Elem: &schema.Schema{ Type: schema.TypeString, @@ -354,10 +348,10 @@ func ResourceStorageTransferJob() *schema.Resource { Description: `States in which logActions are logged. Not supported for transfers with PosifxFilesystem data sources; use enable_on_prem_gcs_transfer_logs instead.`, }, "enable_on_prem_gcs_transfer_logs": { - Type: schema.TypeBool, - Optional: true, + Type: schema.TypeBool, + Optional: true, AtLeastOneOf: []string{"logging_config.0.enable_on_prem_gcs_transfer_logs", "logging_config.0.log_actions", "logging_config.0.log_action_states"}, - Description: `For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer.`, + Description: `For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer.`, }, }, }, @@ -700,26 +694,15 @@ func gcsDataSchema() *schema.Resource { Description: `Google Cloud Storage bucket name.`, }, "path": { - Optional: true, - Type: schema.TypeString, - Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. 
As such, it should not begin with a '/'.`, - ValidateFunc: validateGCSDataPath, + Optional: true, + Computed: true, + Type: schema.TypeString, + Description: `Google Cloud Storage path in bucket to transfer`, }, }, } } -func validateGCSDataPath(v interface{}, k string) (ws []string, errors []error) { - value := v.(string) - value = strings.TrimSpace(value) - // checks if path not started with "/" - regex, err := regexp.Compile("^/+") - if err == nil && len(value) > 0 && regex.Match([]byte(value)) { - errors = append(errors, fmt.Errorf("%q cannot start with /", k)) - } - return -} - func awsS3DataSchema() *schema.Resource { return &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -767,17 +750,6 @@ func awsS3DataSchema() *schema.Resource { Optional: true, Description: `Egress bytes over a Google-managed private network. This network is shared between other users of Storage Transfer Service.`, }, - "cloudfront_domain": { - Type: schema.TypeString, - Optional: true, - Description: `The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: https://{id}.cloudfront.net or any valid custom domain. Must begin with https://.`, - }, - "credentials_secret": { - Type: schema.TypeString, - Optional: true, - ExactlyOneOf: awsS3AuthKeys, - Description: `The Resource name of a secret in Secret Manager. AWS credentials must be stored in Secret Manager in JSON format. If credentials_secret is specified, do not specify role_arn or aws_access_key. 
Format: projects/{projectNumber}/secrets/{secret_name}.`, - }, }, } } @@ -839,8 +811,12 @@ func azureBlobStorageDataSchema() *schema.Resource { }, "azure_credentials": { Type: schema.TypeList, + {{- if ne $.TargetVersionName "ga" }} Optional: true, ExactlyOneOf: azureOptionCredentials, + {{- else }} + Required: true, + {{- end }} MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -854,35 +830,14 @@ func azureBlobStorageDataSchema() *schema.Resource { }, Description: ` Credentials used to authenticate API requests to Azure.`, }, + {{- if ne $.TargetVersionName "ga" }} "credentials_secret": { - Type: schema.TypeString, Optional: true, - ExactlyOneOf: azureOptionCredentials, + Type: schema.TypeString, Description: `The Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.`, - }, - "federated_identity_config": { - Type: schema.TypeList, - Optional: true, ExactlyOneOf: azureOptionCredentials, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "client_id": { - Type: schema.TypeString, - Required: true, - Sensitive: true, - Description: `The client (application) ID of the application with federated credentials.`, - }, - "tenant_id": { - Type: schema.TypeString, - Required: true, - Sensitive: true, - Description: `The tenant (directory) ID of the application with federated credentials.`, - }, - }, - }, - Description: ` Workload Identity Details used to authenticate API requests to Azure.`, }, + {{- end }} }, } } @@ -914,7 +869,6 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) ReplicationSpec: expandReplicationSpecs(d.Get("replication_spec").([]interface{})), LoggingConfig: expandTransferJobLoggingConfig(d.Get("logging_config").([]interface{})), NotificationConfig: 
expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})), - ServiceAccount: d.Get("service_account").(string), } var res *storagetransfer.TransferJob @@ -982,9 +936,6 @@ func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) er if err := d.Set("deletion_time", res.DeletionTime); err != nil { return fmt.Errorf("Error setting deletion_time: %s", err) } - if err := d.Set("service_account", res.ServiceAccount); err != nil { - return fmt.Errorf("Error setting service_account: %s", err) - } err = d.Set("schedule", flattenTransferSchedule(res.Schedule)) if err != nil { @@ -1094,13 +1045,6 @@ func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{}) } } - if d.HasChange("service_account") { - fieldMask = append(fieldMask, "service_account") - if v, ok := d.GetOk("service_account"); ok { - transferJob.ServiceAccount = v.(string) - } - } - if len(fieldMask) == 0 { return nil } @@ -1185,30 +1129,6 @@ func resourceStorageTransferJobStateImporter(d *schema.ResourceData, meta interf return []*schema.ResourceData{d}, nil } -func expandAzureFederatedIdentifyConfig(federatedIdentifyConfig []interface{}) *storagetransfer.FederatedIdentityConfig { - if len(federatedIdentifyConfig) == 0 || federatedIdentifyConfig[0] == nil { - return nil - } - - federatedIdentifyCfg := federatedIdentifyConfig[0].(map[string]interface{}) - return &storagetransfer.FederatedIdentityConfig{ - ClientId: federatedIdentifyCfg["client_id"].(string), - TenantId: federatedIdentifyCfg["tenant_id"].(string), - } -} - -func flattenAzureFederatedIdentifyConfig(d *schema.ResourceData) []map[string]interface{} { - if (d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.client_id") == "") || (d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.tenant_id") == "") { - return []map[string]interface{}{} - } - - data := map[string]interface{}{ - "client_id": 
d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.client_id"), - "tenant_id": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.federated_identity_config.0.tenant_id"), - } - return []map[string]interface{}{data} -} - func expandDates(dates []interface{}) *storagetransfer.Date { if len(dates) == 0 || dates[0] == nil { return nil @@ -1401,21 +1321,16 @@ func expandAwsS3Data(awsS3Datas []interface{}) *storagetransfer.AwsS3Data { awsS3Data := awsS3Datas[0].(map[string]interface{}) result := &storagetransfer.AwsS3Data{ - BucketName: awsS3Data["bucket_name"].(string), - AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), - RoleArn: awsS3Data["role_arn"].(string), - CredentialsSecret: awsS3Data["credentials_secret"].(string), - Path: awsS3Data["path"].(string), + BucketName: awsS3Data["bucket_name"].(string), + AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), + RoleArn: awsS3Data["role_arn"].(string), + Path: awsS3Data["path"].(string), } if v, ok := awsS3Data["managed_private_network"]; ok { result.ManagedPrivateNetwork = v.(bool) } - if v, ok := awsS3Data["cloudfront_domain"]; ok { - result.CloudfrontDomain = v.(string) - } - return result } @@ -1425,7 +1340,7 @@ func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceDa "path": awsS3Data.Path, "role_arn": awsS3Data.RoleArn, } - if _, exist := d.GetOk("transfer_spec.0.aws_s3_data_source.0.aws_access_key"); exist { + if _, exist := d.GetOk("transfer_spec.0.aws_s3_data_source.0.aws_access_key"); exist{ data["aws_access_key"] = flattenAwsAccessKeys(d) } @@ -1433,14 +1348,6 @@ func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceDa data["managed_private_network"] = awsS3Data.ManagedPrivateNetwork } - if awsS3Data.CloudfrontDomain != "" { - data["cloudfront_domain"] = awsS3Data.CloudfrontDomain - } - - if awsS3Data.CredentialsSecret != "" { - data["credentials_secret"] = 
awsS3Data.CredentialsSecret - } - return []map[string]interface{}{data} } @@ -1513,10 +1420,11 @@ func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.Azu } func flattenAzureCredentials(d *schema.ResourceData) []map[string]interface{} { + {{- if ne $.TargetVersionName "ga" }} if d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token") == "" { return []map[string]interface{}{} } - + {{- end }} data := map[string]interface{}{ "sas_token": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token"), } @@ -1532,23 +1440,25 @@ func expandAzureBlobStorageData(azureBlobStorageDatas []interface{}) *storagetra azureBlobStorageData := azureBlobStorageDatas[0].(map[string]interface{}) return &storagetransfer.AzureBlobStorageData{ - Container: azureBlobStorageData["container"].(string), - Path: azureBlobStorageData["path"].(string), - StorageAccount: azureBlobStorageData["storage_account"].(string), - AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})), - CredentialsSecret: azureBlobStorageData["credentials_secret"].(string), - FederatedIdentityConfig: expandAzureFederatedIdentifyConfig(azureBlobStorageData["federated_identity_config"].([]interface{})), + Container: azureBlobStorageData["container"].(string), + Path: azureBlobStorageData["path"].(string), + StorageAccount: azureBlobStorageData["storage_account"].(string), + AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})), + {{- if ne $.TargetVersionName "ga" }} + CredentialsSecret: azureBlobStorageData["credentials_secret"].(string), + {{- end }} } } func flattenAzureBlobStorageData(azureBlobStorageData *storagetransfer.AzureBlobStorageData, d *schema.ResourceData) []map[string]interface{} { data := map[string]interface{}{ - "container": azureBlobStorageData.Container, - "path": azureBlobStorageData.Path, - "storage_account": 
azureBlobStorageData.StorageAccount, - "azure_credentials": flattenAzureCredentials(d), - "federated_identity_config": flattenAzureFederatedIdentifyConfig(d), - "credentials_secret": azureBlobStorageData.CredentialsSecret, + "container": azureBlobStorageData.Container, + "path": azureBlobStorageData.Path, + "storage_account": azureBlobStorageData.StorageAccount, + "azure_credentials": flattenAzureCredentials(d), + {{- if ne $.TargetVersionName "ga" }} + "credentials_secret": azureBlobStorageData.CredentialsSecret, + {{- end }} } return []map[string]interface{}{data} diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl similarity index 92% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl index 8d14a6504710..900666301705 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml.tmpl @@ -45,16 +45,14 @@ fields: - field: 'transfer_spec.aws_s3_data_source.aws_access_key.access_key_id' - field: 'transfer_spec.aws_s3_data_source.aws_access_key.secret_access_key' - field: 'transfer_spec.aws_s3_data_source.bucket_name' - - field: 'transfer_spec.aws_s3_data_source.cloudfront_domain' - field: 'transfer_spec.aws_s3_data_source.managed_private_network' - field: 'transfer_spec.aws_s3_data_source.path' - field: 'transfer_spec.aws_s3_data_source.role_arn' - - field: 'transfer_spec.aws_s3_data_source.credentials_secret' - field: 'transfer_spec.azure_blob_storage_data_source.azure_credentials.sas_token' - field: 'transfer_spec.azure_blob_storage_data_source.container' +{{- if ne $.TargetVersionName "ga" }} - field: 
'transfer_spec.azure_blob_storage_data_source.credentials_secret' - - field: 'transfer_spec.azure_blob_storage_data_source.federated_identity_config.client_id' - - field: 'transfer_spec.azure_blob_storage_data_source.federated_identity_config.tenant_id' +{{- end }} - field: 'transfer_spec.azure_blob_storage_data_source.path' - field: 'transfer_spec.azure_blob_storage_data_source.storage_account' - field: 'transfer_spec.gcs_data_sink.bucket_name' diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 3dc2058b0e31..458bd223d620 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -573,83 +573,6 @@ func TestAccStorageTransferJob_hdfsSource(t *testing.T) { }) } -func TestAccStorageTransferJob_withServiceAccount(t *testing.T) { - t.Parallel() - - testTransferJobDescription := acctest.RandString(t, 10) - testSourceBucketName := fmt.Sprintf("tf-acc-source-%s", acctest.RandString(t, 10)) - testSinkBucketName := fmt.Sprintf("tf-acc-sink-%s", acctest.RandString(t, 10)) - testServiceAccountId := fmt.Sprintf("tf-acc-sa1-%s", acctest.RandString(t, 10)) - testUpdatedServiceAccountId := fmt.Sprintf("tf-acc-sa2-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccStorageTransferJobDestroyProducer(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "time": {}, - }, - Steps: []resource.TestStep{ - { - Config: testAccStorageTransferJob_withServiceAccount(testTransferJobDescription, testSourceBucketName, testSinkBucketName, testServiceAccountId, envvar.GetTestProjectFromEnv()), - Check: resource.ComposeAggregateTestCheckFunc( 
- resource.TestCheckResourceAttr("google_storage_transfer_job.with_sa", "service_account", fmt.Sprintf("%s@%s.iam.gserviceaccount.com", testServiceAccountId, envvar.GetTestProjectFromEnv())), - ), - }, - { - ResourceName: "google_storage_transfer_job.with_sa", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccStorageTransferJob_withServiceAccount_updated(testTransferJobDescription, testSourceBucketName, testSinkBucketName, testServiceAccountId, testUpdatedServiceAccountId, envvar.GetTestProjectFromEnv()), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("google_storage_transfer_job.with_sa", "service_account", fmt.Sprintf("%s@%s.iam.gserviceaccount.com", testUpdatedServiceAccountId, envvar.GetTestProjectFromEnv())), - ), - }, - { - ResourceName: "google_storage_transfer_job.with_sa", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccStorageTransferJob_withServiceAccount_removed(testTransferJobDescription, testSourceBucketName, testSinkBucketName, envvar.GetTestProjectFromEnv()), - }, - { - ResourceName: "google_storage_transfer_job.with_sa", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { - t.Parallel() - - testDataSourceBucketName := acctest.RandString(t, 10) - testDataSinkName := acctest.RandString(t, 10) - testTransferJobDescription := acctest.RandString(t, 10) - testTransferJobName := fmt.Sprintf("tf-test-transfer-job-%s", acctest.RandString(t, 10)) - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccStorageTransferJobDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), - }, 
- { - Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, ""), - }, - { - Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), - }, - }, - }) -} - func testAccStorageTransferJobDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -2476,335 +2399,3 @@ resource "google_storage_transfer_job" "transfer_job" { } `, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project) } - -func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string, testTransferJobName string, gcsPath string) string { - return fmt.Sprintf(` - data "google_storage_transfer_project_service_account" "default" { - project = "%s" - } - - resource "google_storage_bucket" "data_source" { - name = "%s" - project = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true - } - - resource "google_storage_bucket_iam_member" "data_source" { - bucket = google_storage_bucket.data_source.name - role = "roles/storage.admin" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" - } - - resource "google_storage_bucket" "data_sink" { - name = "%s" - project = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true - } - - resource "google_storage_bucket_iam_member" "data_sink" { - bucket = google_storage_bucket.data_sink.name - role = "roles/storage.admin" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" - } - - resource "google_storage_transfer_job" "transfer_job" { - name = "transferJobs/%s" 
- description = "%s" - project = "%s" - - transfer_spec { - gcs_data_source { - bucket_name = google_storage_bucket.data_source.name - path = "foo/" - } - gcs_data_sink { - bucket_name = google_storage_bucket.data_sink.name - path = "%s" - } - } - - schedule { - schedule_start_date { - year = 2018 - month = 10 - day = 1 - } - schedule_end_date { - year = 2019 - month = 10 - day = 1 - } - start_time_of_day { - hours = 0 - minutes = 30 - seconds = 0 - nanos = 0 - } - repeat_interval = "604800s" - } - - depends_on = [ - google_storage_bucket_iam_member.data_source, - google_storage_bucket_iam_member.data_sink, - ] - } - `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project, gcsPath) -} - -func testAccStorageTransferJob_withServiceAccount(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, project string) string { - return fmt.Sprintf(` -resource "google_service_account" "test_account" { - project = "%s" - account_id = "%s" - display_name = "Test Service Account" -} - -resource "google_storage_bucket" "source" { - project = "%s" - name = "%s" - location = "US" - force_destroy = true -} - -resource "google_storage_bucket" "sink" { - project = "%s" - name = "%s" - location = "US" - force_destroy = true -} - -resource "google_storage_bucket_iam_member" "source_iam" { - bucket = google_storage_bucket.source.name - role = "roles/storage.admin" - member = "serviceAccount:${google_service_account.test_account.email}" -} - -resource "google_storage_bucket_iam_member" "sink_iam" { - bucket = google_storage_bucket.sink.name - role = "roles/storage.admin" - member = "serviceAccount:${google_service_account.test_account.email}" -} - -data "google_storage_transfer_project_service_account" "transfer_sa" { -} - -resource "google_service_account_iam_member" "token_creator" { - service_account_id = google_service_account.test_account.name - role = "roles/iam.serviceAccountTokenCreator" - member 
= "serviceAccount:${data.google_storage_transfer_project_service_account.transfer_sa.email}" -} - -resource "time_sleep" "wait_120_seconds" { - depends_on = [ - google_service_account_iam_member.token_creator, - google_storage_bucket_iam_member.source_iam, - google_storage_bucket_iam_member.sink_iam, - ] - create_duration = "120s" -} - -resource "google_storage_transfer_job" "with_sa" { - description = "%s" - project = "%s" - service_account = google_service_account.test_account.email - - transfer_spec { - gcs_data_source { - bucket_name = google_storage_bucket.source.name - } - gcs_data_sink { - bucket_name = google_storage_bucket.sink.name - } - } - - schedule { - schedule_start_date { - year = 2023 - month = 1 - day = 15 - } - schedule_end_date { - year = 2023 - month = 1 - day = 15 - } - } - - depends_on = [ - time_sleep.wait_120_seconds, - ] -} -`, project, serviceAccountId, project, dataSourceBucketName, project, dataSinkBucketName, description, project) -} - -func testAccStorageTransferJob_withServiceAccount_updated(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, updatedServiceAccountId, project string) string { - return fmt.Sprintf(` -resource "google_service_account" "test_account" { - project = "%s" - account_id = "%s" - display_name = "Test Service Account" -} - -resource "google_service_account" "test_account_2" { - project = "%s" - account_id = "%s" - display_name = "Test Service Account 2" -} - -resource "google_storage_bucket" "source" { - project = "%s" - name = "%s" - location = "US" - force_destroy = true -} - -resource "google_storage_bucket" "sink" { - project = "%s" - name = "%s" - location = "US" - force_destroy = true -} - -resource "google_storage_bucket_iam_member" "source_iam" { - bucket = google_storage_bucket.source.name - role = "roles/storage.admin" - member = "serviceAccount:${google_service_account.test_account_2.email}" -} - -resource "google_storage_bucket_iam_member" "sink_iam" { - bucket = 
google_storage_bucket.sink.name - role = "roles/storage.admin" - member = "serviceAccount:${google_service_account.test_account_2.email}" -} - -data "google_storage_transfer_project_service_account" "transfer_sa" { -} - -resource "google_service_account_iam_member" "token_creator" { - service_account_id = google_service_account.test_account_2.name - role = "roles/iam.serviceAccountTokenCreator" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.transfer_sa.email}" -} - -resource "time_sleep" "wait_120_seconds_2" { - depends_on = [ - google_service_account_iam_member.token_creator, - google_storage_bucket_iam_member.source_iam, - google_storage_bucket_iam_member.sink_iam, - ] - create_duration = "120s" -} - -resource "google_storage_transfer_job" "with_sa" { - description = "%s" - project = "%s" - service_account = google_service_account.test_account_2.email - - transfer_spec { - gcs_data_source { - bucket_name = google_storage_bucket.source.name - } - gcs_data_sink { - bucket_name = google_storage_bucket.sink.name - } - } - - schedule { - schedule_start_date { - year = 2023 - month = 1 - day = 15 - } - schedule_end_date { - year = 2023 - month = 1 - day = 15 - } - } - - depends_on = [ - time_sleep.wait_120_seconds_2, - ] -} -`, project, serviceAccountId, project, updatedServiceAccountId, project, dataSourceBucketName, project, dataSinkBucketName, description, project) -} - -func testAccStorageTransferJob_withServiceAccount_removed(description, dataSourceBucketName, dataSinkBucketName, project string) string { - return fmt.Sprintf(` - -resource "google_storage_bucket" "source" { - project = "%s" - name = "%s" - location = "US" - force_destroy = true -} - -resource "google_storage_bucket" "sink" { - project = "%s" - name = "%s" - location = "US" - force_destroy = true -} - - -data "google_storage_transfer_project_service_account" "default" { - project = "%s" -} - -resource "google_storage_bucket_iam_member" "source_iam" { - bucket = 
google_storage_bucket.source.name - role = "roles/storage.admin" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" -} - -resource "google_storage_bucket_iam_member" "sink_iam" { - bucket = google_storage_bucket.sink.name - role = "roles/storage.admin" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" -} - -resource "time_sleep" "wait_120_seconds_3" { - depends_on = [ - google_storage_bucket_iam_member.source_iam, - google_storage_bucket_iam_member.sink_iam, - ] - create_duration = "120s" -} - -resource "google_storage_transfer_job" "with_sa" { - description = "%s" - project = "%s" - - transfer_spec { - gcs_data_source { - bucket_name = google_storage_bucket.source.name - } - gcs_data_sink { - bucket_name = google_storage_bucket.sink.name - } - } - - schedule { - schedule_start_date { - year = 2023 - month = 1 - day = 15 - } - schedule_end_date { - year = 2023 - month = 1 - day = 15 - } - } - - depends_on = [ - time_sleep.wait_120_seconds_3, - ] - -} -`, project, dataSourceBucketName, project, dataSinkBucketName, project, description, project) -} diff --git a/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl b/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl index 88e4c2f5898a..c729994f45b5 100644 --- a/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl +++ b/mmv1/third_party/terraform/services/tags/resource_tags_location_tag_binding.go.tmpl @@ -35,7 +35,6 @@ func ResourceTagsLocationTagBinding() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - DiffSuppressFunc: tpgresource.ProjectNumberDiffSuppress, Description: `The full resource name of the resource the TagValue is bound to. E.g. 
//cloudresourcemanager.googleapis.com/projects/123`, }, "tag_value": { diff --git a/mmv1/third_party/terraform/services/tags/resource_tags_test.go b/mmv1/third_party/terraform/services/tags/resource_tags_test.go index 3501e8f3a342..a81bb3a1fa96 100644 --- a/mmv1/third_party/terraform/services/tags/resource_tags_test.go +++ b/mmv1/third_party/terraform/services/tags/resource_tags_test.go @@ -873,76 +873,6 @@ resource "google_tags_location_tag_binding" "binding" { `, context) } -func TestAccTagsLocationTagBinding_locationTagBindingBasicWithProjectId(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - ExternalProviders: map[string]resource.ExternalProvider{ - "random": {}, - }, - CheckDestroy: testAccCheckTagsLocationTagBindingDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccTagsLocationTagBinding_locationTagBindingBasicExampleWithProjectId(context), - }, - { - ResourceName: "google_tags_location_tag_binding.binding", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func testAccTagsLocationTagBinding_locationTagBindingBasicExampleWithProjectId(context map[string]interface{}) string { - return acctest.Nprintf(` -data "google_project" "project" { -} - -resource "google_tags_tag_key" "key" { - parent = "organizations/${data.google_project.project.org_id}" - short_name = "keyname%{random_suffix}" - description = "For a certain set of resources." -} - -resource "google_tags_tag_value" "value" { - parent = google_tags_tag_key.key.id - short_name = "foo%{random_suffix}" - description = "For foo%{random_suffix} resources." 
-} - -resource "google_cloud_run_service" "default" { - name = "tf-test-cloudrun-srv%{random_suffix}" - location = "us-central1" - - template { - spec { - containers { - image = "us-docker.pkg.dev/cloudrun/container/hello" - } - } - } - - traffic { - percent = 100 - latest_revision = true - } -} - -resource "google_tags_location_tag_binding" "binding" { - parent = "//run.googleapis.com/projects/${data.google_project.project.project_id}/locations/${google_cloud_run_service.default.location}/services/${google_cloud_run_service.default.name}" - tag_value = google_tags_tag_value.value.id - location = "us-central1" -} -`, context) -} - func testAccCheckTagsLocationTagBindingDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for name, rs := range s.RootModule().Resources { diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go new file mode 100644 index 000000000000..f17ddc36d101 --- /dev/null +++ b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go @@ -0,0 +1,93 @@ +package tpu + +import ( + "fmt" + "log" + "sort" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceTpuTensorflowVersions() *schema.Resource { + return &schema.Resource{ + Read: dataSourceTpuTensorFlowVersionsRead, + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "zone": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "versions": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + } +} + +func dataSourceTpuTensorFlowVersionsRead(d *schema.ResourceData, meta interface{}) error { + 
config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + zone, err := tpgresource.GetZone(d, config) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{TPUBasePath}}projects/{{project}}/locations/{{zone}}/tensorflowVersions") + if err != nil { + return err + } + + versionsRaw, err := tpgresource.PaginatedListRequest(project, url, userAgent, config, flattenTpuTensorflowVersions) + if err != nil { + return fmt.Errorf("Error listing TPU Tensorflow versions: %s", err) + } + + versions := make([]string, len(versionsRaw)) + for i, ver := range versionsRaw { + versions[i] = ver.(string) + } + sort.Strings(versions) + + log.Printf("[DEBUG] Received Google TPU Tensorflow Versions: %q", versions) + + if err := d.Set("versions", versions); err != nil { + return fmt.Errorf("Error setting versions: %s", err) + } + if err := d.Set("zone", zone); err != nil { + return fmt.Errorf("Error setting zone: %s", err) + } + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + d.SetId(fmt.Sprintf("projects/%s/zones/%s", project, zone)) + + return nil +} + +func flattenTpuTensorflowVersions(resp map[string]interface{}) []interface{} { + verObjList := resp["tensorflowVersions"].([]interface{}) + versions := make([]interface{}, len(verObjList)) + for i, v := range verObjList { + verObj := v.(map[string]interface{}) + versions[i] = verObj["version"] + } + return versions +} diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go new file mode 100644 index 000000000000..78661db237e8 --- /dev/null +++ b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go @@ -0,0 
+1,68 @@ +package tpu_test + +import ( + "errors" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccTPUTensorflowVersions_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccTpuTensorFlowVersionsConfig, + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleTpuTensorflowVersions("data.google_tpu_tensorflow_versions.available"), + ), + }, + }, + }) +} + +func testAccCheckGoogleTpuTensorflowVersions(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Can't find TPU Tensorflow versions data source: %s", n) + } + + if rs.Primary.ID == "" { + return errors.New("data source id not set") + } + + count, ok := rs.Primary.Attributes["versions.#"] + if !ok { + return errors.New("can't find 'versions' attribute") + } + + cnt, err := strconv.Atoi(count) + if err != nil { + return errors.New("failed to read number of version") + } + if cnt < 2 { + return fmt.Errorf("expected at least 2 versions, received %d, this is most likely a bug", cnt) + } + + for i := 0; i < cnt; i++ { + idx := fmt.Sprintf("versions.%d", i) + _, ok := rs.Primary.Attributes[idx] + if !ok { + return fmt.Errorf("expected %q, version not found", idx) + } + } + return nil + } +} + +var testAccTpuTensorFlowVersionsConfig = ` +data "google_tpu_tensorflow_versions" "available" {} +` diff --git a/mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go b/mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go new file mode 100644 index 000000000000..1b7d5caff451 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/tpu/resource_tpu_node_test.go @@ -0,0 +1,58 @@ +package tpu_test + +import ( + "testing" + + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestAccTPUNode_tpuNodeBUpdateTensorFlowVersion(t *testing.T) { + t.Parallel() + + nodeId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckTPUNodeDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccTpuNode_tpuNodeTensorFlow(nodeId, 0), + }, + { + ResourceName: "google_tpu_node.tpu", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"zone"}, + }, + { + Config: testAccTpuNode_tpuNodeTensorFlow(nodeId, 1), + }, + { + ResourceName: "google_tpu_node.tpu", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"zone"}, + }, + }, + }) +} + +func testAccTpuNode_tpuNodeTensorFlow(nodeId string, versionIdx int) string { + return fmt.Sprintf(` +data "google_tpu_tensorflow_versions" "available" { +} + +resource "google_tpu_node" "tpu" { + name = "%s" + zone = "us-central1-b" + + accelerator_type = "v3-8" + tensorflow_version = data.google_tpu_tensorflow_versions.available.versions[%d] +} +`, nodeId, versionIdx) +} diff --git a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go deleted file mode 100644 index df07ab5c7370..000000000000 --- a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_deploy_test.go +++ /dev/null @@ -1,229 +0,0 @@ -package vertexai_test - -import ( - "fmt" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - 
"github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "strings" - "testing" -) - -func TestAccVertexAIEndpointWithModelGardenDeployment_basic(t *testing.T) { - t.Parallel() - context := map[string]interface{}{"random_suffix": acctest.RandString(t, 10)} - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVertexAIEndpointWithModelGardenDeployment_basic(context), - }, - }, - }) -} - -func testAccVertexAIEndpointWithModelGardenDeployment_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vertex_ai_endpoint_with_model_garden_deployment" "test" { - publisher_model_name = "publishers/google/models/paligemma@paligemma-224-float32" - location = "us-central1" - model_config { - accept_eula = true - } -} -`, context) -} - -func TestAccVertexAIEndpointWithModelGardenDeployment_withConfigs(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVertexAIEndpointWithModelGardenDeployment_withConfigs(context), - }, - }, - }) -} - -func testAccVertexAIEndpointWithModelGardenDeployment_withConfigs(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vertex_ai_endpoint_with_model_garden_deployment" "test_with_configs" { - publisher_model_name = 
"publishers/google/models/paligemma@paligemma-224-float32" - location = "us-central1" - model_config { - accept_eula = true - } - deploy_config { - dedicated_resources { - machine_spec { - machine_type = "g2-standard-16" - accelerator_type = "NVIDIA_L4" - accelerator_count = 1 - } - min_replica_count = 1 - } - } -} -`, context) -} - -func TestAccVertexAIEndpointWithModelGardenDeployment_huggingfaceModel(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVertexAIEndpointWithModelGardenDeployment_huggingfaceModel(context), - }, - }, - }) -} - -func testAccVertexAIEndpointWithModelGardenDeployment_huggingfaceModel(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy" { - hugging_face_model_id = "Qwen/Qwen3-0.6B" - location = "us-central1" - model_config { - accept_eula = true - } -} -`, context) -} - -func TestAccVertexAIEndpointWithModelGardenDeployment_multipleModelsInSequence(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVertexAIEndpointWithModelGardenDeployment_multipleModelsInSequence(context), - }, - }, - }) -} - -func testAccVertexAIEndpointWithModelGardenDeployment_multipleModelsInSequence(context map[string]interface{}) string { - return 
acctest.Nprintf(` -resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy-gemma-1_1-2b-it" { - publisher_model_name = "publishers/google/models/gemma@gemma-1.1-2b-it" - location = "us-central1" - model_config { - accept_eula = true - } - deploy_config { - dedicated_resources { - machine_spec { - machine_type = "g2-standard-12" - accelerator_type = "NVIDIA_L4" - accelerator_count = 1 - } - min_replica_count = 1 - } - } -} - -resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy-qwen3-0_6b" { - hugging_face_model_id = "Qwen/Qwen3-0.6B" - location = "us-central1" - model_config { - accept_eula = true - } - deploy_config { - dedicated_resources { - machine_spec { - machine_type = "g2-standard-12" - accelerator_type = "NVIDIA_L4" - accelerator_count = 1 - } - min_replica_count = 1 - } - } - depends_on = [ google_vertex_ai_endpoint_with_model_garden_deployment.deploy-gemma-1_1-2b-it ] -} - -resource "google_vertex_ai_endpoint_with_model_garden_deployment" "deploy-llama-3_2-1b" { - publisher_model_name = "publishers/meta/models/llama3-2@llama-3.2-1b" - location = "us-central1" - model_config { - accept_eula = true - } - deploy_config { - dedicated_resources { - machine_spec { - machine_type = "g2-standard-12" - accelerator_type = "NVIDIA_L4" - accelerator_count = 1 - } - min_replica_count = 1 - } - } - depends_on = [ google_vertex_ai_endpoint_with_model_garden_deployment.deploy-qwen3-0_6b ] -} -`, context) -} - -func testAccCheckVertexAIEndpointWithModelGardenDeploymentDestroyProducer(t *testing.T) func(s *terraform.State) error { - return func(s *terraform.State) error { - for name, rs := range s.RootModule().Resources { - if rs.Type != "google_vertex_ai_endpoint_with_model_garden_deployment" { - continue - } - if strings.HasPrefix(name, "data.") { - continue - } - - config := acctest.GoogleProviderConfig(t) - - url, err := tpgresource.ReplaceVarsForTest(config, rs, 
"{{VertexAIBasePath}}projects/{{project}}/locations/{{location}}/endpoints/{{endpoint}}") - if err != nil { - return err - } - - billingProject := "" - - if config.BillingProject != "" { - billingProject = config.BillingProject - } - - _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: config.UserAgent, - }) - if err == nil { - return fmt.Errorf("VertexAIEndpointWithModelGardenDeployment still exists at %s", url) - } - } - - return nil - } -} diff --git a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go deleted file mode 100644 index 20f5f798ddc9..000000000000 --- a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_rag_engine_config_test.go +++ /dev/null @@ -1,51 +0,0 @@ -package vertexai_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccVertexAIRagEngineConfig_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "project": envvar.GetTestProjectFromEnv(), - } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccVertexAIRagEngineConfig_basic(context), - }, - { - Config: testAccVertexAIRagEngineConfig_unprovisioned(context), - }, - }, - }) -} - -func testAccVertexAIRagEngineConfig_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vertex_ai_rag_engine_config" "test" { - region = "us-central1" - rag_managed_db_config { - basic {} - } -} -`, context) -} - -func 
testAccVertexAIRagEngineConfig_unprovisioned(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vertex_ai_rag_engine_config" "test" { - region = "us-central1" - rag_managed_db_config { - unprovisioned {} - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go b/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go index 544d7c21a4e2..c7ce3ac9ca7c 100644 --- a/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go +++ b/mmv1/third_party/terraform/services/vpcaccess/resource_vpc_access_connector_test.go @@ -1,12 +1,10 @@ package vpcaccess_test import ( - "fmt" "regexp" "testing" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" ) @@ -34,130 +32,6 @@ func TestAccVPCAccessConnector_vpcAccessConnectorThroughput(t *testing.T) { }) } -func TestAccVPCAccessConnector_vpcAccessConnectorUpdateAllMutableFieldsCauseUpdateInPlace(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVPCAccessConnectorDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVPCAccessConnector_vpcAccessConnectorThroughput(context), - }, - { - ResourceName: "google_vpc_access_connector.connector", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccVPCAccessConnector_vpcAccessConnectorUpdateAllMutableFields(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_vpc_access_connector.connector", plancheck.ResourceActionUpdate), - }, - }, - Check: 
resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "machine_type", "f1-micro"), - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "min_instances", "3"), - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "max_instances", "5"), - ), - }, - { - ResourceName: "google_vpc_access_connector.connector", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccVPCAccessConnector_vpcAccessConnectorUpdateOnlyMinInstancesCauseUpdateInPlace(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVPCAccessConnectorDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVPCAccessConnector_vpcAccessConnectorThroughput(context), - }, - { - ResourceName: "google_vpc_access_connector.connector", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccVPCAccessConnector_vpcAccessConnectorUpdateOnlyMinInstances(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_vpc_access_connector.connector", plancheck.ResourceActionUpdate), - }, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "machine_type", "e2-standard-4"), - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "min_instances", "3"), - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "max_instances", "4"), - ), - }, - { - ResourceName: "google_vpc_access_connector.connector", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func TestAccVPCAccessConnector_vpcAccessConnectorUpdateImmutableFieldCauseReplace(t *testing.T) { 
- t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckVPCAccessConnectorDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccVPCAccessConnector_vpcAccessConnectorThroughput(context), - }, - { - ResourceName: "google_vpc_access_connector.connector", - ImportState: true, - ImportStateVerify: true, - }, - { - Config: testAccVPCAccessConnector_vpcAccessConnectorUpdateImmutableField(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_vpc_access_connector.connector", plancheck.ResourceActionReplace), - }, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("google_vpc_access_connector.connector", "name", fmt.Sprintf("immutable%s", context["random_suffix"])), - ), - }, - { - ResourceName: "google_vpc_access_connector.connector", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - func TestAccVPCAccessConnector_vpcAccessConnectorThroughput_combiningThroughputAndInstancesFields_conflict(t *testing.T) { // Need to skip this test as the expected failure happens before the provider interacts with APIs // In VCR mode this test fails due to lack of cassettes @@ -242,87 +116,6 @@ resource "google_vpc_access_connector" "connector" { } machine_type = "e2-standard-4" min_instances = 2 - max_instances = 4 - region = "us-central1" -} - -resource "google_compute_subnetwork" "custom_test" { - name = "tf-test-vpc-con%{random_suffix}" - ip_cidr_range = "10.2.0.0/28" - region = "us-central1" - network = google_compute_network.custom_test.id -} - -resource "google_compute_network" "custom_test" { - name = "tf-test-vpc-con%{random_suffix}" - auto_create_subnetworks = false -} -`, context) -} - -func 
testAccVPCAccessConnector_vpcAccessConnectorUpdateOnlyMinInstances(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vpc_access_connector" "connector" { - name = "tf-test-vpc-con%{random_suffix}" - subnet { - name = google_compute_subnetwork.custom_test.name - } - machine_type = "e2-standard-4" - min_instances = 3 - max_instances = 4 - region = "us-central1" -} - -resource "google_compute_subnetwork" "custom_test" { - name = "tf-test-vpc-con%{random_suffix}" - ip_cidr_range = "10.2.0.0/28" - region = "us-central1" - network = google_compute_network.custom_test.id -} - -resource "google_compute_network" "custom_test" { - name = "tf-test-vpc-con%{random_suffix}" - auto_create_subnetworks = false -} -`, context) -} - -func testAccVPCAccessConnector_vpcAccessConnectorUpdateAllMutableFields(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vpc_access_connector" "connector" { - name = "tf-test-vpc-con%{random_suffix}" - subnet { - name = google_compute_subnetwork.custom_test.name - } - machine_type = "f1-micro" - min_instances = 3 - max_instances = 5 - region = "us-central1" -} - -resource "google_compute_subnetwork" "custom_test" { - name = "tf-test-vpc-con%{random_suffix}" - ip_cidr_range = "10.2.0.0/28" - region = "us-central1" - network = google_compute_network.custom_test.id -} - -resource "google_compute_network" "custom_test" { - name = "tf-test-vpc-con%{random_suffix}" - auto_create_subnetworks = false -} -`, context) -} - -func testAccVPCAccessConnector_vpcAccessConnectorUpdateImmutableField(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_vpc_access_connector" "connector" { - name = "immutable%{random_suffix}" - subnet { - name = google_compute_subnetwork.custom_test.name - } - machine_type = "e2-standard-4" - min_instances = 2 max_instances = 3 region = "us-central1" } diff --git 
a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go deleted file mode 100644 index e72f63d05d14..000000000000 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go +++ /dev/null @@ -1,228 +0,0 @@ -package workbench_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccWorkbenchInstance_shielded_config_false(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - }, - 
}) -} - -func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_none(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - }, - }) -} - -func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccWorkbenchInstance_shielded_config_none(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", 
"ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_none(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_false(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_false(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: 
resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_shielded_config_true(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - }, - }) -} - -func testAccWorkbenchInstance_shielded_config_true(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_workbench_instance" "instance" { - name = "tf-test-workbench-instance%{random_suffix}" - location = "us-central1-a" - - gce_setup { - shielded_instance_config { - enable_secure_boot = true - enable_vtpm = true - enable_integrity_monitoring = true - } - } -} -`, context) -} - -func testAccWorkbenchInstance_shielded_config_false(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_workbench_instance" "instance" { - name = "tf-test-workbench-instance%{random_suffix}" - location = "us-central1-a" - - gce_setup { - shielded_instance_config { - enable_secure_boot = false - enable_vtpm = false - enable_integrity_monitoring = false - } - } - -} -`, context) -} - -func testAccWorkbenchInstance_shielded_config_none(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_workbench_instance" "instance" { - name = 
"tf-test-workbench-instance%{random_suffix}" - location = "us-central1-a" -} -`, context) -} diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl new file mode 100644 index 000000000000..61487dfa4716 --- /dev/null +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_shielded_config_test.go.tmpl @@ -0,0 +1,228 @@ +package workbench_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", 
"update_time"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: 
"google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_shielded_config_true(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + shielded_instance_config { + enable_secure_boot = true + enable_vtpm = true + enable_integrity_monitoring = true + } + } +} +`, context) +} + +func testAccWorkbenchInstance_shielded_config_false(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + shielded_instance_config { + enable_secure_boot = false + enable_vtpm = false + enable_integrity_monitoring = false + } + } + +} +`, context) +} + +func testAccWorkbenchInstance_shielded_config_none(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl similarity index 79% rename from 
mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go rename to mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl index 1ad111258b41..e63d65e7acad 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go.tmpl @@ -22,28 +22,28 @@ func TestAccWorkbenchInstance_update(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, - { + { Config: testAccWorkbenchInstance_update(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, }, }) @@ -87,8 +87,7 @@ resource "google_workbench_instance" "instance" 
{ } metadata = { - terraform = "true", - "serial-port-logging-enable" = "false", + terraform = "true" } } @@ -115,28 +114,28 @@ func TestAccWorkbenchInstance_updateGpu(t *testing.T) { { Config: testAccWorkbenchInstance_basicGpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, - { + { Config: testAccWorkbenchInstance_updateGpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, }, }) @@ -203,28 +202,28 @@ func TestAccWorkbenchInstance_removeGpu(t *testing.T) { { Config: testAccWorkbenchInstance_Gpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", 
"ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, - { + { Config: testAccWorkbenchInstance_removeGpu(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, }, }) @@ -276,41 +275,41 @@ func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", 
"update_time"}, }, - { + { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, }, { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time"}, }, }, }) @@ -330,54 +329,41 @@ func TestAccWorkbenchInstance_updateMetadataKey(t *testing.T) { { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: 
true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time", "health_info", "health_state"}, }, - { + { Config: testAccWorkbenchInstance_updateMetadataKey(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, - }, - { - Config: testAccWorkbenchInstance_update(context), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), - }, - { - ResourceName: "google_workbench_instance.instance", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time", "health_info", "health_state"}, }, { Config: testAccWorkbenchInstance_updateMetadata(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: 
"google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "update_time", "health_info", "health_state"}, }, }, }) @@ -393,7 +379,6 @@ resource "google_workbench_instance" "instance" { metadata = { terraform = "true" "resource-url" = "new-fake-value", - "serial-port-logging-enable" = "true", } } @@ -416,7 +401,6 @@ resource "google_workbench_instance" "instance" { terraform = "true", "idle-timeout-seconds" = "10800", "image-url" = "fake-value", - "container-custom-params" = "test-params", } } @@ -442,41 +426,41 @@ func TestAccWorkbenchInstance_updateState(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time"}, }, - { + { Config: testAccWorkbenchInstance_updateState(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "STOPPED"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "STOPPED"), + ), }, { 
ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time"}, }, - { + { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time"}, }, }, }) @@ -508,41 +492,41 @@ func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", 
"request_id", "labels", "terraform_labels"}, }, - { + { Config: testAccWorkbenchInstance_empty_accelerator(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, { Config: testAccWorkbenchInstance_empty_accelerator(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, }, }, }) @@ -576,28 +560,28 @@ func TestAccWorkbenchInstance_updateBootDisk(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: 
true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, - { + { Config: testAccWorkbenchInstance_updateBootDisk(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, }, }) @@ -617,28 +601,28 @@ func TestAccWorkbenchInstance_updateDataDisk(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, - { + { Config: 
testAccWorkbenchInstance_updateDataDisk(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), - ), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, }, }) @@ -658,28 +642,28 @@ func TestAccWorkbenchInstance_updateBothDisks(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, - { + { Config: testAccWorkbenchInstance_updateBothDisks(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, }, }) @@ -745,8 +729,8 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { { Config: testAccWorkbenchInstance_label(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -758,8 +742,8 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { { Config: testAccWorkbenchInstance_basic(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -771,8 +755,8 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { { Config: testAccWorkbenchInstance_label(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { @@ -797,6 +781,7 @@ resource "google_workbench_instance" "instance" { `, context) } + func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { t.Parallel() @@ -811,28 +796,28 @@ func TestAccWorkbenchInstance_updateCustomContainers(t *testing.T) { { Config: testAccWorkbenchInstance_customcontainer(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { ResourceName: 
"google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, { Config: testAccWorkbenchInstance_updatedcustomcontainer(context), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr( - "google_workbench_instance.instance", "state", "ACTIVE"), + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), ), }, { ResourceName: "google_workbench_instance.instance", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state", "update_time", "health_info", "health_state"}, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels", "desired_state"}, }, }, }) diff --git a/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go b/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go.tmpl similarity index 99% rename from mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go rename to mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go.tmpl index 4ac91af4ceec..ead97f101f23 100644 --- a/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go +++ b/mmv1/third_party/terraform/services/workflows/resource_workflows_workflow_test.go.tmpl @@ -141,7 +141,7 @@ func TestAccWorkflowsWorkflow_UpdateDeletionProtectionFalseToTrue(t *testing.T) { Config: testAccWorkflowsWorkflow_Basic_DeletionProtectionTrue(workflowName), }, - { + { 
Config: testAccWorkflowsWorkflow_Basic_DeletionProtectionFalse(workflowName), }, }, @@ -297,7 +297,7 @@ func TestAccWorkflowsWorkflow_CMEK(t *testing.T) { acctest.BootstrapIamMembers(t, []acctest.IamMember{ { Member: "serviceAccount:service-{project_number}@gcp-sa-workflows.iam.gserviceaccount.com", - Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", + Role: "roles/cloudkms.cryptoKeyEncrypterDecrypter", }, }) diff --git a/mmv1/third_party/terraform/terraform-registry-manifest.json b/mmv1/third_party/terraform/terraform-registry-manifest.json.tmpl similarity index 100% rename from mmv1/third_party/terraform/terraform-registry-manifest.json rename to mmv1/third_party/terraform/terraform-registry-manifest.json.tmpl diff --git a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.tmpl similarity index 88% rename from mmv1/third_party/terraform/tpgresource/common_diff_suppress.go rename to mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.tmpl index a0ba73cd1a4e..3016bcb7681d 100644 --- a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go +++ b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.tmpl @@ -101,19 +101,6 @@ func ProjectNumberDiffSuppress(_, old, new string, _ *schema.ResourceData) bool return a2 == b2 } -// Suppresses diffs where `routing_mode` is unset (empty string) vs. explicitly set -// to "EXPLICIT_ROUTING_MODE". Since null/empty is treated as the default -// EXPLICIT_ROUTING_MODE, both values collapse into the same state. This ensures -// Terraform does not show unnecessary differences unless the value is explicitly -// changed to "NEXT_HOP_ROUTING_MODE". 
-func SuppressRoutingModeDefault(_, old, new string, _ *schema.ResourceData) bool { - if old == new { - return true - } - return (old == "" && new == "EXPLICIT_ROUTING_MODE") || - (old == "EXPLICIT_ROUTING_MODE" && new == "") -} - // Suppress diffs when the value read from api // has the project ID instead of the project number func ProjectIDDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { @@ -156,4 +143,4 @@ func Base64DiffSuppress(_, old, new string, _ *schema.ResourceData) bool { normalizedOld := r.Replace(old) normalizedNew := r.Replace(new) return normalizedOld == normalizedNew -} +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/tpgresource/field_helpers.go b/mmv1/third_party/terraform/tpgresource/field_helpers.go index 2fa1add6b666..4a3c34aa864b 100644 --- a/mmv1/third_party/terraform/tpgresource/field_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/field_helpers.go @@ -17,8 +17,6 @@ const ( RegionalLinkTemplate = "projects/%s/regions/%s/%s/%s" RegionalLinkBasePattern = "projects/(.+)/regions/(.+)/%s/(.+)" RegionalPartialLinkBasePattern = "regions/(.+)/%s/(.+)" - LocalLinkBasePattern = "projects/(.+)/locations/(.+)/%s/(.+)" - LocalPartialLinkBasePattern = "locations/(.+)/%s/(.+)" ProjectLinkTemplate = "projects/%s/%s/%s" ProjectBasePattern = "projects/(.+)/%s/(.+)" OrganizationLinkTemplate = "organizations/%s/%s/%s" @@ -416,16 +414,6 @@ func ParseRegionalFieldValue(resourceType, fieldValue, projectSchemaField, regio }, nil } - r = regexp.MustCompile(fmt.Sprintf(LocalLinkBasePattern, resourceType)) - if parts := r.FindStringSubmatch(fieldValue); parts != nil { - return &RegionalFieldValue{ - Project: parts[1], - Region: parts[2], - Name: parts[3], - resourceType: resourceType, - }, nil - } - project, err := GetProjectFromSchema(projectSchemaField, d, config) if err != nil { return nil, err @@ -441,16 +429,6 @@ func ParseRegionalFieldValue(resourceType, fieldValue, projectSchemaField, regio }, nil } - r = 
regexp.MustCompile(fmt.Sprintf(LocalPartialLinkBasePattern, resourceType)) - if parts := r.FindStringSubmatch(fieldValue); parts != nil { - return &RegionalFieldValue{ - Project: project, - Region: parts[1], - Name: parts[2], - resourceType: resourceType, - }, nil - } - region, err := GetRegionFromSchema(regionSchemaField, zoneSchemaField, d, config) if err != nil { return nil, err diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 61a8243cba8d..2e1089fb36aa 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -1,10 +1,8 @@ package tpgresource import ( - "bytes" "errors" "fmt" - "log" "net/url" "regexp" "strings" @@ -73,40 +71,6 @@ func CompareSelfLinkOrResourceName(_, old, new string, _ *schema.ResourceData) b return CompareSelfLinkRelativePaths("", old, new, nil) } -// canonicalizeSelfLink normalizes Compute API self-links by removing the version prefix (v1/beta), -// ensuring a leading "/", collapsing duplicate slashes, trimming any trailing "/", -// and lowercasing the result so logically identical links compare equal. 
-func CompareSelfLinkCanonicalPaths(_, old, new string, _ *schema.ResourceData) bool { - return canonicalizeSelfLink(old) == canonicalizeSelfLink(new) -} - -var ( - rePrefix = regexp.MustCompile(`(?i)^https?://[a-z0-9.-]*/compute/(v1|beta)/`) - reDuplicateSlashes = regexp.MustCompile(`/+`) -) - -func canonicalizeSelfLink(link string) string { - if link == "" { - return "" - } - - // Remove "https://…/compute/v1/" or "https://…/compute/beta/" - path := rePrefix.ReplaceAllString(link, "/") - - // Ensure leading "/" - if !strings.HasPrefix(path, "/") { - path = "/" + path - } - - // Collapse "//" - path = reDuplicateSlashes.ReplaceAllString(path, "/") - - // Remove trailing "/" - path = strings.TrimSuffix(path, "/") - - return strings.ToLower(path) -} - // Hash the relative path of a self link. func SelfLinkRelativePathHash(selfLink interface{}) int { path, _ := GetRelativePath(selfLink.(string)) @@ -128,34 +92,6 @@ func SelfLinkNameHash(selfLink interface{}) int { return Hashcode(name) } -// Hash based on relative url for a nested object containing a URL field. 
-func NestedUrlSetHashFunc(v interface{}) int { - if v == nil { - return 0 - } - - var buf bytes.Buffer - m := v.(map[string]interface{}) - log.Printf("[DEBUG] hashing %v", m) - - if v, ok := m["url"]; ok { - if v == nil { - v = "" - } else { - if relUrl, err := GetRelativePath(v.(string)); err != nil { - log.Printf("[WARN] Error on retrieving relative path of network url: %s", err) - } else { - v = relUrl - } - } - - buf.WriteString(fmt.Sprintf("%v-", v)) - } - - log.Printf("[DEBUG] computed hash value of %v from %v", Hashcode(buf.String()), buf.String()) - return Hashcode(buf.String()) -} - func ConvertSelfLinkToV1(link string) string { reg := regexp.MustCompile("/compute/[a-zA-Z0-9]*/projects/") return reg.ReplaceAllString(link, "/compute/v1/projects/") @@ -166,6 +102,14 @@ func GetResourceNameFromSelfLink(link string) string { return parts[len(parts)-1] } +func NameFromSelfLinkStateFunc(v interface{}) string { + return GetResourceNameFromSelfLink(v.(string)) +} + +func StoreResourceName(resourceLink interface{}) string { + return GetResourceNameFromSelfLink(resourceLink.(string)) +} + type LocationType int const ( @@ -245,7 +189,7 @@ func GetRegionFromRegionalSelfLink(selfLink string) string { } func GetProjectFromRegionalSelfLink(selfLink string) string { - re := regexp.MustCompile("projects/([a-zA-Z0-9-:.]*)/(?:locations|regions)/[a-zA-Z0-9-:]*") + re := regexp.MustCompile("projects/([a-zA-Z0-9-:]*)/(?:locations|regions)/[a-zA-Z0-9-:]*") switch { case re.MatchString(selfLink): if res := re.FindStringSubmatch(selfLink); len(res) == 2 && res[1] != "" { diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go index df4cedb81bf9..4dc0c84e0381 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers_test.go @@ -189,10 +189,9 @@ func TestGetRegionFromRegionalSelfLink(t *testing.T) { func 
TestGetProjectFromRegionalSelfLink(t *testing.T) { cases := map[string]string{ - "projects/foo/locations/europe-north1/datasets/bar/operations/foobar": "foo", - "projects/REDACTED/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDACTED", - "projects/REDA:CT-ED09/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDA:CT-ED09", - "projects/REDA.com:CT-ED09/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDA.com:CT-ED09", + "projects/foo/locations/europe-north1/datasets/bar/operations/foobar": "foo", + "projects/REDACTED/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDACTED", + "projects/REDA:CT-ED09/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "REDA:CT-ED09", } for input, expected := range cases { if result := GetProjectFromRegionalSelfLink(input); result != expected { diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index 15deef0d53fb..ed7294954e55 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -14,9 +14,6 @@ import ( "time" "os" - "cloud.google.com/go/auth/credentials" - "cloud.google.com/go/auth/credentials/impersonate" - "cloud.google.com/go/auth/oauth2adapt" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/schema/validator" @@ -87,7 +84,6 @@ import ( "google.golang.org/api/sourcerepo/v1" "google.golang.org/api/spanner/v1" sqladmin "google.golang.org/api/sqladmin/v1beta4" - backupdr "google.golang.org/api/backupdr/v1" "google.golang.org/api/storage/v1" "google.golang.org/api/storagetransfer/v1" "google.golang.org/api/transport" @@ -235,6 +231,7 @@ func ExpandExternalCredentialsConfig(v interface{}) (*ExternalCredentials, error // Config is the configuration structure used to instantiate the Google // provider. 
type Config struct { + DCLConfig AccessToken string Credentials string ExternalCredentials *ExternalCredentials @@ -278,14 +275,9 @@ type Config struct { BigtableAdminBasePath string TagsLocationBasePath string - // DCL + // dcl ContainerAwsBasePath string ContainerAzureBasePath string - ApikeysBasePath string - AssuredWorkloadsBasePath string - CloudResourceManagerBasePath string - FirebaserulesBasePath string - RecaptchaEnterpriseBasePath string RequestBatcherServiceUsage *RequestBatcher RequestBatcherIam *RequestBatcher @@ -321,19 +313,9 @@ var DefaultBasePaths = map[string]string{ IamCredentialsBasePathKey : "https://iamcredentials.googleapis.com/v1/", ResourceManagerV3BasePathKey : "https://cloudresourcemanager.googleapis.com/v3/", BigtableAdminBasePathKey : "https://bigtableadmin.googleapis.com/v2/", - TagsLocationBasePathKey: "https://{{"{{"}}location{{"}}"}}-cloudresourcemanager.googleapis.com/v3/", - // DCL - ContainerAwsBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", + ContainerAwsBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", ContainerAzureBasePathKey: "https://{{"{{"}}location{{"}}"}}-gkemulticloud.googleapis.com/v1/", - ApikeysEndpointEntryKey: "https://apikeys.googleapis.com/v2/", -{{- if eq $.TargetVersionName "ga" }} - AssuredWorkloadsEndpointEntryKey: "https://{{"{{"}}location{{"}}"}}-assuredworkloads.googleapis.com/v1beta1/", -{{- else }} - AssuredWorkloadsEndpointEntryKey: "https://{{"{{"}}location{{"}}"}}-assuredworkloads.googleapis.com/v1/", -{{- end }} - CloudResourceManagerEndpointEntryKey: "https://cloudresourcemanager.googleapis.com/", - FirebaserulesEndpointEntryKey: "https://firebaserules.googleapis.com/v1/", - RecaptchaEnterpriseEndpointEntryKey: "https://recaptchaenterprise.googleapis.com/v1/", + TagsLocationBasePathKey: "https://{{"{{"}}location{{"}}"}}-cloudresourcemanager.googleapis.com/v3/", } var DefaultClientScopes = []string{ @@ -478,9 +460,6 @@ func 
SetEndpointDefaults(d *schema.ResourceData) error { }, DefaultBasePaths[TagsLocationBasePathKey])) } - // DCL endpoints - these are hardcoded as a workaround for the DCL not providing a way to - // determine base paths at generation time. - if d.Get(ContainerAwsCustomEndpointEntryKey) == "" { d.Set(ContainerAwsCustomEndpointEntryKey, MultiEnvDefault([]string{ "GOOGLE_CONTAINERAWS_CUSTOM_ENDPOINT", @@ -492,31 +471,6 @@ func SetEndpointDefaults(d *schema.ResourceData) error { "GOOGLE_CONTAINERAZURE_CUSTOM_ENDPOINT", }, DefaultBasePaths[ContainerAzureBasePathKey])) } - if d.Get(ApikeysEndpointEntryKey) == "" { - d.Set(ApikeysEndpointEntryKey, MultiEnvDefault([]string{ - "GOOGLE_APIKEYS_CUSTOM_ENDPOINT", - }, DefaultBasePaths[ApikeysEndpointEntryKey])) - } - if d.Get(AssuredWorkloadsEndpointEntryKey) == "" { - d.Set(AssuredWorkloadsEndpointEntryKey, MultiEnvDefault([]string{ - "GOOGLE_ASSURED_WORKLOADS_CUSTOM_ENDPOINT", - }, DefaultBasePaths[AssuredWorkloadsEndpointEntryKey])) - } - if d.Get(CloudResourceManagerEndpointEntryKey) == "" { - d.Set(CloudResourceManagerEndpointEntryKey, MultiEnvDefault([]string{ - "GOOGLE_CLOUD_RESOURCE_MANAGER_CUSTOM_ENDPOINT", - }, DefaultBasePaths[CloudResourceManagerEndpointEntryKey])) - } - if d.Get(FirebaserulesEndpointEntryKey) == "" { - d.Set(FirebaserulesEndpointEntryKey, MultiEnvDefault([]string{ - "GOOGLE_FIREBASERULES_CUSTOM_ENDPOINT", - }, DefaultBasePaths[FirebaserulesEndpointEntryKey])) - } - if d.Get(RecaptchaEnterpriseEndpointEntryKey) == "" { - d.Set(RecaptchaEnterpriseEndpointEntryKey, MultiEnvDefault([]string{ - "GOOGLE_RECAPTCHA_ENTERPRISE_CUSTOM_ENDPOINT", - }, DefaultBasePaths[RecaptchaEnterpriseEndpointEntryKey])) - } return nil } @@ -605,9 +559,10 @@ func (c *Config) LoadAndValidate(ctx context.Context) error { TimestampFormat: "2006/01/02 15:04:05", LogFormat: "%time% [%lvl%] %msg% \n", }) - logger.SetOutput(log.Writer()) alwaysLoggingDeciderClient := func(ctx context.Context, fullMethodName string) bool { return 
true } + grpc_logrus.ReplaceGrpcLogger(logrus.NewEntry(logger)) + c.gRPCLoggingOptions = append( c.gRPCLoggingOptions, option.WithGRPCDialOption(grpc.WithUnaryInterceptor( grpc_logrus.PayloadUnaryClientInterceptor(logrus.NewEntry(logger), alwaysLoggingDeciderClient))), @@ -904,20 +859,6 @@ func (c *Config) NewSqlAdminClient(userAgent string) *sqladmin.Service { return clientSqlAdmin } -func (c *Config) NewBackupDRClient(userAgent string) *backupdr.Service { - backupdrClientBasePath := RemoveBasePathVersion(RemoveBasePathVersion(c.BackupDRBasePath)) - log.Printf("[INFO] Instantiating Google SqlAdmin client for path %s", backupdrClientBasePath) - clientBackupdrAdmin, err := backupdr.NewService(c.Context, option.WithHTTPClient(c.Client)) - if err != nil { - log.Printf("[WARN] Error creating client storage: %s", err) - return nil - } - clientBackupdrAdmin.UserAgent = userAgent - clientBackupdrAdmin.BasePath = backupdrClientBasePath - - return clientBackupdrAdmin -} - func (c *Config) NewPubsubClient(userAgent string) *pubsub.Service { pubsubClientBasePath := RemoveBasePathVersion(c.PubsubBasePath) log.Printf("[INFO] Instantiating Google Pubsub client for path %s", pubsubClientBasePath) @@ -1358,33 +1299,10 @@ func (c *Config) GetCredentials(clientScopes []string, initialCredentialsOnly bo } if c.ImpersonateServiceAccount != "" && !initialCredentialsOnly { - jsonCreds, err := credentials.DetectDefault(&credentials.DetectOptions{ - Scopes: clientScopes, - CredentialsJSON: []byte(contents), - }) - if err != nil { - return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) - } - - impersonateOpts := &impersonate.CredentialsOptions{ - TargetPrincipal: c.ImpersonateServiceAccount, - Scopes: clientScopes, - Delegates: c.ImpersonateServiceAccountDelegates, - Credentials: jsonCreds, - } - - if c.UniverseDomain != "" && c.UniverseDomain != "googleapis.com" { - impersonateOpts.UniverseDomain = c.UniverseDomain - } - - authCred, err := 
impersonate.NewCredentials(impersonateOpts) - if err != nil { - return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) - } - - creds := oauth2adapt.Oauth2CredentialsFromAuthCredentials(authCred) + opts := []option.ClientOption{option.WithCredentialsJSON([]byte(contents)), option.ImpersonateCredentials(c.ImpersonateServiceAccount, c.ImpersonateServiceAccountDelegates...), option.WithScopes(clientScopes...)} + creds, err := transport.Creds(context.TODO(), opts...) if err != nil { - return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) + return googleoauth.Credentials{}, err } return *creds, nil } @@ -1412,34 +1330,11 @@ func (c *Config) GetCredentials(clientScopes []string, initialCredentialsOnly bo var creds *googleoauth.Credentials var err error if c.ImpersonateServiceAccount != "" && !initialCredentialsOnly { - defaultCreds, err := credentials.DetectDefault(&credentials.DetectOptions{ - Scopes: clientScopes, - }) + opts := option.ImpersonateCredentials(c.ImpersonateServiceAccount, c.ImpersonateServiceAccountDelegates...) 
+ creds, err = transport.Creds(context.TODO(), opts, option.WithScopes(clientScopes...)) if err != nil { - return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) + return googleoauth.Credentials{}, err } - - impersonateOpts := &impersonate.CredentialsOptions{ - TargetPrincipal: c.ImpersonateServiceAccount, - Scopes: clientScopes, - Delegates: c.ImpersonateServiceAccountDelegates, - Credentials: defaultCreds, - } - - if c.UniverseDomain != "" && c.UniverseDomain != "googleapis.com" { - impersonateOpts.UniverseDomain = c.UniverseDomain - } - - authCred, err := impersonate.NewCredentials(impersonateOpts) - if err != nil { - return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) - } - - creds := oauth2adapt.Oauth2CredentialsFromAuthCredentials(authCred) - if err != nil { - return googleoauth.Credentials{}, fmt.Errorf("error loading credentials: %s", err) - } - return *creds, nil } else { log.Printf("[INFO] Authenticating using DefaultClient...") log.Printf("[INFO] -- Scopes: %s", clientScopes) @@ -1508,15 +1403,6 @@ func ConfigureBasePaths(c *Config) { c.BigQueryBasePath = DefaultBasePaths[BigQueryBasePathKey] c.BigtableAdminBasePath = DefaultBasePaths[BigtableAdminBasePathKey] c.TagsLocationBasePath = DefaultBasePaths[TagsLocationBasePathKey] - - // DCL - c.ContainerAwsBasePath = DefaultBasePaths[ContainerAwsBasePathKey] - c.ContainerAzureBasePath = DefaultBasePaths[ContainerAzureBasePathKey] - c.ApikeysBasePath = DefaultBasePaths[ApikeysEndpointEntryKey] - c.AssuredWorkloadsBasePath = DefaultBasePaths[AssuredWorkloadsEndpointEntryKey] - c.CloudResourceManagerBasePath = DefaultBasePaths[CloudResourceManagerEndpointEntryKey] - c.FirebaserulesBasePath = DefaultBasePaths[FirebaserulesEndpointEntryKey] - c.RecaptchaEnterpriseBasePath = DefaultBasePaths[RecaptchaEnterpriseEndpointEntryKey] } func GetCurrentUserEmail(config *Config, userAgent string) (string, error) { diff --git 
a/mmv1/third_party/terraform/transport/error_retry_predicates.go b/mmv1/third_party/terraform/transport/error_retry_predicates.go index 842ec063c799..9969b71645ce 100644 --- a/mmv1/third_party/terraform/transport/error_retry_predicates.go +++ b/mmv1/third_party/terraform/transport/error_retry_predicates.go @@ -49,12 +49,6 @@ var defaultErrorRetryPredicates = []RetryErrorPredicateFunc{ // GCE returns the wrong error code, as this should be a 429, which we retry // already. is403QuotaExceededPerMinuteError, - - // GCE Networks are considered unready for a brief period when certain - // operations are performed on them, and the scope is likely too broad to - // apply a mutex. If we attempt an operation w/ an unready network, retry - // it. - isNetworkUnreadyError, } /** END GLOBAL ERROR RETRY PREDICATES HERE **/ @@ -149,19 +143,6 @@ func isSubnetworkUnreadyError(err error) (bool, string) { return false, "" } -func isNetworkUnreadyError(err error) (bool, string) { - gerr, ok := err.(*googleapi.Error) - if !ok { - return false, "" - } - - if gerr.Code == 400 && strings.Contains(gerr.Body, "resourceNotReady") && strings.Contains(gerr.Body, "networks") { - log.Printf("[DEBUG] Dismissed an error as retryable based on error code 400 and error reason 'resourceNotReady' w/ 'networks': %s", err) - return true, "Network not ready" - } - return false, "" -} - // GCE (and possibly other APIs) incorrectly return a 403 rather than a 429 on // rate limits. func is403QuotaExceededPerMinuteError(err error) (bool, string) { @@ -483,7 +464,7 @@ func PubsubTopicProjectNotReady(err error) (bool, string) { } // Retry on comon googleapi error codes for retryable errors. -// TODO: #5609 This may not need to be applied globally - figure out +// TODO(#5609): This may not need to be applied globally - figure out // what retryable error codes apply to which API. 
func isCommonRetryableErrorCode(err error) (bool, string) { gerr, ok := err.(*googleapi.Error) diff --git a/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl b/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl index 8ea1083a96a0..f42ba038e969 100644 --- a/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl +++ b/mmv1/third_party/terraform/transport/provider_handwritten_endpoint.go.tmpl @@ -104,14 +104,6 @@ var PrivatecaCertificateTemplateCustomEndpointEntry = &schema.Schema{ }, DefaultBasePaths[PrivatecaBasePathKey]), } -var TagsLocationCustomEndpointEntryKey = "tags_location_custom_endpoint" -var TagsLocationCustomEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, - ValidateFunc: ValidateCustomEndpoint, -} - -// DCL var ContainerAwsCustomEndpointEntryKey = "container_aws_custom_endpoint" var ContainerAwsCustomEndpointEntry = &schema.Schema{ Type: schema.TypeString, @@ -125,34 +117,12 @@ var ContainerAzureCustomEndpointEntry = &schema.Schema{ Optional: true, ValidateFunc: ValidateCustomEndpoint, } -var ApikeysEndpointEntryKey = "apikeys_custom_endpoint" -var ApikeysEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, -} - -var AssuredWorkloadsEndpointEntryKey = "assured_workloads_custom_endpoint" -var AssuredWorkloadsEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, -} -var CloudResourceManagerEndpointEntryKey = "cloud_resource_manager_custom_endpoint" -var CloudResourceManagerEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, -} - -var FirebaserulesEndpointEntryKey = "firebaserules_custom_endpoint" -var FirebaserulesEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, -} - -var RecaptchaEnterpriseEndpointEntryKey = "recaptcha_enterprise_custom_endpoint" -var RecaptchaEnterpriseEndpointEntry = &schema.Schema{ - Type: schema.TypeString, - Optional: true, +var 
TagsLocationCustomEndpointEntryKey = "tags_location_custom_endpoint" +var TagsLocationCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, } func ValidateCustomEndpoint(v interface{}, k string) (ws []string, errors []error) { diff --git a/mmv1/third_party/terraform/transport/transport.go b/mmv1/third_party/terraform/transport/transport.go index 54de21865aee..351ce30f8a88 100644 --- a/mmv1/third_party/terraform/transport/transport.go +++ b/mmv1/third_party/terraform/transport/transport.go @@ -30,10 +30,6 @@ type SendRequestOptions struct { } func SendRequest(opt SendRequestOptions) (map[string]interface{}, error) { - if opt.Config == nil || opt.Config.Client == nil { - return nil, fmt.Errorf("client is nil for request to %s", opt.RawURL) - } - reqHeaders := opt.Headers if reqHeaders == nil { reqHeaders = make(http.Header) diff --git a/mmv1/third_party/terraform/verify/validation.go b/mmv1/third_party/terraform/verify/validation.go index 5f4393137f69..25c0d280d4c9 100644 --- a/mmv1/third_party/terraform/verify/validation.go +++ b/mmv1/third_party/terraform/verify/validation.go @@ -74,6 +74,16 @@ var ( Rfc6996Asn32BitMin = int64(4200000000) Rfc6996Asn32BitMax = int64(4294967294) GcpRouterPartnerAsn = int64(16550) + + // Format of GCS Bucket Name + // https://cloud.google.com/storage/docs/naming-buckets + GCSNameValidChars = "^[a-z0-9_.-]*$" + GCSNameStartEndChars = "^[a-z|0-9].*[a-z|0-9]$" + GCSNameLength = "^.{3,222}" + GCSNameLengthSplit = "^.{1,63}$" + GCSNameCidr = "^[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$" + GCSNameGoogPrefix = "^goog.*$" + GCSNameContainsGoogle = "^.*google.*$" ) var Rfc1918Networks = []string{ @@ -89,6 +99,44 @@ func ValidateGCEName(v interface{}, k string) (ws []string, errors []error) { return ValidateRegexp(re)(v, k) } +// validateGCSName ensures the name of a gcs bucket matches the requirements for GCS Buckets +// 
https://cloud.google.com/storage/docs/naming-buckets +func ValidateGCSName(v interface{}, k string) (ws []string, errors []error) { + value := v.(string) + + if !regexp.MustCompile(GCSNameValidChars).MatchString(value) { + errors = append(errors, fmt.Errorf("%q name value can only contain lowercase letters, numeric characters, dashes (-), underscores (_), and dots (.)", value)) + } + + if !regexp.MustCompile(GCSNameStartEndChars).MatchString(value) { + errors = append(errors, fmt.Errorf("%q name value must start and end with a number or letter", value)) + } + + if !regexp.MustCompile(GCSNameLength).MatchString(value) { + errors = append(errors, fmt.Errorf("%q name value must contain 3-63 characters. Names containing dots can contain up to 222 characters, but each dot-separated component can be no longer than 63 characters", value)) + } + + for _, str := range strings.Split(value, ".") { + if !regexp.MustCompile(GCSNameLengthSplit).MatchString(str) { + errors = append(errors, fmt.Errorf("%q name value must contain 3-63 characters. 
Names containing dots can contain up to 222 characters, but each dot-separated component can be no longer than 63 characters", value)) + } + } + + if regexp.MustCompile(GCSNameCidr).MatchString(value) { + errors = append(errors, fmt.Errorf("%q name value cannot be represented as an IP address in dotted-decimal notation (for example, 192.168.5.4)", value)) + } + + if regexp.MustCompile(GCSNameGoogPrefix).MatchString(value) { + errors = append(errors, fmt.Errorf("%q name value cannot begin with the \"goog\" prefix", value)) + } + + if regexp.MustCompile(GCSNameContainsGoogle).MatchString(strings.ReplaceAll(value, "0", "o")) { + errors = append(errors, fmt.Errorf("%q name value cannot contain \"google\" or close misspellings, such as \"g00gle\"", value)) + } + + return +} + // Ensure that the BGP ASN value of Cloud Router is a valid value as per RFC6996 or a value of 16550 func ValidateRFC6996Asn(v interface{}, k string) (ws []string, errors []error) { value := int64(v.(int)) diff --git a/mmv1/third_party/terraform/verify/validation_test.go b/mmv1/third_party/terraform/verify/validation_test.go index 3de72257ea74..19555861fb64 100644 --- a/mmv1/third_party/terraform/verify/validation_test.go +++ b/mmv1/third_party/terraform/verify/validation_test.go @@ -323,3 +323,43 @@ func TestValidateIAMCustomRoleIDRegex(t *testing.T) { t.Errorf("Failed to validate IAMCustomRole IDs: %v", es) } } + +func TestValidateGCSName(t *testing.T) { + x := []StringValidationTestCase{ + // No errors + {TestName: "basic", Value: "foobar"}, + {TestName: "has number", Value: "foobar1"}, + {TestName: "all numbers", Value: "12345"}, + {TestName: "all _", Value: "foo_bar_baz"}, + {TestName: "all -", Value: "foo-bar-baz"}, + {TestName: "begins with number", Value: "1foo-bar_baz"}, + {TestName: "ends with number", Value: "foo-bar_baz1"}, + {TestName: "almost an ip", Value: "192.168.5.foo"}, + {TestName: "has _", Value: "foo-bar_baz"}, + {TestName: "--", Value: "foo--bar"}, + {TestName: "__", Value: 
"foo__bar"}, + {TestName: "-goog", Value: "foo-goog"}, + {TestName: ".goog", Value: "foo.goog"}, + + // With errors + {TestName: "invalid char $", Value: "foo$bar", ExpectError: true}, + {TestName: "has uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "begins with -", Value: "-foobar", ExpectError: true}, + {TestName: "ends with -", Value: "foobar-", ExpectError: true}, + {TestName: "begins with _", Value: "_foobar", ExpectError: true}, + {TestName: "ends with _", Value: "foobar_", ExpectError: true}, + {TestName: "less than 3 chars", Value: "fo", ExpectError: true}, + {TestName: "..", Value: "foo..bar", ExpectError: true}, + {TestName: "greater than 63 chars with no .", Value: "my-really-long-bucket-name-with-invalid-that-does-not-contain-a-period", ExpectError: true}, + {TestName: "greater than 63 chars between .", Value: "my.really-long-bucket-name-with-invalid-that-does-contain-a-period-but.is-too-long", ExpectError: true}, + {TestName: "has goog prefix", Value: "goog-foobar", ExpectError: true}, + {TestName: "almost an ip", Value: "192.168.5.1", ExpectError: true}, + {TestName: "contains google", Value: "foobar-google", ExpectError: true}, + {TestName: "contains close misspelling of google", Value: "foo-go0gle-bar", ExpectError: true}, + } + + es := TestStringValidationCases(x, ValidateGCSName) + if len(es) > 0 { + t.Errorf("Failed to validate GCS names: %v", es) + } +} diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown deleted file mode 100644 index df1bd121b74e..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_images.html.markdown +++ /dev/null @@ -1,56 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about Docker images within a Google Artifact Registry repository. 
---- - -# google_artifact_registry_docker_images - -Get information about Artifact Registry Docker images. -See [the official documentation](https://cloud.google.com/artifact-registry/docs/docker) -and [API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.dockerImages/list). - -## Example Usage - -```hcl -data "google_artifact_registry_docker_images" "my_images" { - location = "us-central1" - repository_id = "example-repo" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` - (Required) The location of the Artifact Registry repository. - -* `repository_id` - (Required) The last part of the repository name to fetch from. - -* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -The following attributes are exported: - -* `docker_images` - A list of all retrieved Artifact Registry Docker images. Structure is [defined below](#nested_docker_images). - -The `docker_images` block supports: - -* `name` - The fully qualified name of the fetched image. This name has the form: `projects/{{project}}/locations/{{location}}/repository/{{repository_id}}/dockerImages/{{docker_image}}`. For example, `projects/test-project/locations/us-west4/repositories/test-repo/dockerImages/nginx@sha256:e9954c1fc875017be1c3e36eca16be2d9e9bccc4bf072163515467d6a823c7cf` - -* `image_name` - Extracted short name of the image (last part of `name`, without tag or digest). For example, from `.../nginx@sha256:...` → `nginx`. - -* `self_link` - The URI to access the image. For example, `us-west4-docker.pkg.dev/test-project/test-repo/nginx@sha256:e9954c1fc875017be1c3e36eca16be2d9e9bccc4bf072163515467d6a823c7cf` - -* `tags` - A list of all tags associated with the image. - -* `image_size_bytes` - Calculated size of the image in bytes. - -* `media_type` - Media type of this image, e.g. 
`application/vnd.docker.distribution.manifest.v2+json`. - -* `upload_time` - The time, as a RFC 3339 string, the image was uploaded. For example, `2014-10-02T15:01:23.045123456Z`. - -* `build_time` - The time, as a RFC 3339 string, this image was built. - -* `update_time` - The time, as a RFC 3339 string, this image was updated. diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown deleted file mode 100644 index b6e23d7d2829..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown +++ /dev/null @@ -1,65 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about an NPM package within a Google Artifact Registry Repository. ---- - -# google_artifact_registry_npm_package - -This data source fetches information from a provided Artifact Registry repository, based on a the latest version of the package and optional version. - -## Example Usage - -```hcl -resource "google_artifact_registry_repository" "npm_repo" { - location = "us-central1" - repository_id = "my-npm-repo" - format = "NPM" -} - -data "google_artifact_registry_npm_package" "latest" { - location = google_artifact_registry_repository.npm_repo.location - repository_id = google_artifact_registry_repository.npm_repo.repository_id - package_name = "example-pkg" -} - -data "google_artifact_registry_npm_package" "with_version" { - location = google_artifact_registry_repository.npm_repo.location - repository_id = google_artifact_registry_repository.npm_repo.repository_id - package_name = "example-pkg:1.0.0" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` – (Required) The location of the Artifact Registry repository. - -* `repository_id` – (Required) The ID of the repository containing the NPM package. - -* `package_name` – (Required) The name of the package to fetch. 
Can optionally include a specific version (e.g., `my_pkg:1.2.3`). If no version is provided, the latest version is used. - -* `project` – (Optional) The ID of the project that owns the repository. If not provided, the provider-level project is used. - -## Attributes Reference - -The following computed attributes are exported: - -* `id` – The fully qualified name of the fetched package. Format: - ``` - projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/{{package}}:{{version}} - ``` - -* `name` – The fully qualified name of the fetched package. Format: - ``` - projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/{{package}}:{{version}} - ``` - -* `version` – The version of the NPM package. - -* `tags` - A list of all Tags attached to this package. - -* `create_time` – The time the package was created. - -* `update_time` – The time the package was last updated. diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown deleted file mode 100644 index fa984c363fb9..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_package.html.markdown +++ /dev/null @@ -1,41 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about a package within a Google Artifact Registry repository. ---- - -# google_artifact_registry_package -This data source fetches information of a package from a provided Artifact Registry repository. - -## Example Usage - -```hcl -resource "google_artifact_registry_package" "my_package" { - location = "us-west1" - repository_id = "my-repository" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` - (Required) The location of the artifact registry. - -* `repository_id` - (Required) The last part of the repository name to fetch from. - -* `name` - (Required) The name of the package. 
- -* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -The following computed attributes are exported: - -* `display_name` - The display name of the package. - -* `create_time` - The time, as a RFC 3339 string, this package was created. - -* `update_time` - The time, as a RFC 3339 string, this package was last updated. This includes publishing a new version of the package. - -* `annotations` - Client specified annotations. diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown deleted file mode 100644 index ead542ec12ff..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_repositories.markdown +++ /dev/null @@ -1,48 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about Artifact Registry repositories. ---- - -# google_artifact_registry_repositories - -Get information about Artifact Registry repositories. -See [the official documentation](https://cloud.google.com/artifact-registry/docs) -and [API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories/list). - -```hcl -data "google_artifact_registry_repositories" "example" { - location = "us-central1" - project = "my-project" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` - (Optional) The location of the artifact registry repositories. eg `us-central1`. - -* `name_filter` - (Optional) Optional. An expression for filtering the results by name. You can also use wildcards `*`. I.e. `my-repo`, `*-repo`, `my-*`, `*-re*`. For further information reach out to the [API docs](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories/list). - -* `project` - (Optional) The ID of the project. 
If it is not provided, the provider project is used. - -## Attributes Reference - -The following attributes are exported: - -* `repositories` - A list of all retrieved Artifact Registry repositories. Structure is [defined below](#nested_repositories). - -The `repositories` block supports: - -* `id` - An identifier for the resource with format `projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}` - -* `repository_id` - The last part of the repository name, for example: `"repo1"` - -* `format` - The format of packages that are stored in the repository. Supported formats can be found [here](https://cloud.google.com/artifact-registry/docs/supported-formats). - -* `description` - The user-provided description of the repository. - -* `create_time` - The time when the repository was created. - -* `update_time` - The time when the repository was last updated. diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown deleted file mode 100644 index 7898109ebd10..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_tag.html.markdown +++ /dev/null @@ -1,41 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about a tag within a Google Artifact Registry repository. ---- - -# google_artifact_registry_tag -This data source fetches information of a tag from a provided Artifact Registry repository. - -## Example Usage - -```hcl -data "google_artifact_registry_tags" "my_tags" { - location = "us-central1" - repository_id = "example-repo" - package_name = "example-package" - tag_name = "latest" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` - (Required) The location of the artifact registry. - -* `repository_id` - (Required) The last part of the repository name to fetch from. - -* `package_name` - (Required) The name of the package. 
- -* `tag_name` - (Required) The name of the tag. - -* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -The following computed attributes are exported: - -* `name` - The name of the tag, for example: `projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/tags/tag1`. If the package part contains slashes, the slashes are escaped. - -* `version` - The version of the tag. diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown deleted file mode 100644 index 3345eeb9c98e..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_tags.html.markdown +++ /dev/null @@ -1,47 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about tags within a Google Artifact Registry package. ---- - -# google_artifact_registry_tags - -Get information about Artifact Registry tags. -See [the official documentation](https://cloud.google.com/artifact-registry/docs/overview) -and [API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.packages.tags/list). - -## Example Usage - -```hcl -data "google_artifact_registry_tags" "my_tags" { - location = "us-central1" - repository_id = "example-repo" - package_name = "example-package" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` - (Required) The location of the Artifact Registry repository. - -* `repository_id` - (Required) The last part of the repository name to fetch from. - -* `package_name` - (Required) The name of the package. - -* `filter` - (Optional) An expression for filtering the results of the request. Filter rules are case insensitive. The fields eligible for filtering are `name` and `version`. 
Further information can be found in the [REST API](https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.packages.tags/list#query-parameters). - -* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -The following attributes are exported: - -* `tags` - A list of all retrieved Artifact Registry tags. Structure is [defined below](#nested_tags). - -The `tags` block supports: - -* `name` - The name of the tag, for example: `projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/tags/tag1`. If the package part contains slashes, the slashes are escaped. - -* `version` - The version of the tag. diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown deleted file mode 100644 index ddcd65975c4b..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_version.html.markdown +++ /dev/null @@ -1,51 +0,0 @@ ---- -subcategory: "Artifact Registry" -description: |- - Get information about a version within a Google Artifact Registry repository. ---- - -# google_artifact_registry_version -This data source fetches information of a version from a provided Artifact Registry repository. - -## Example Usage - -```hcl -data "google_artifact_registry_versions" "my_versions" { - location = "us-central1" - repository_id = "example-repo" - package_name = "example-package" - version_name = "latest" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `location` - (Required) The location of the artifact registry. - -* `repository_id` - (Required) The last part of the repository name to fetch from. - -* `package_name` - (Required) The name of the package. - -* `version_name` - (Required) The name of the version. 
- -* `view` - (Optional) The view, which determines what version information is returned in a response. Possible values are `"BASIC"` and `"FULL"`. Defaults to `"BASIC"`. - -* `project` - (Optional) The project ID in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -The following computed attributes are exported: - -* `name` - The name of the version, for example: `projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/version1`. If the package part contains slashes, the slashes are escaped. - -* `description` - Description of the version, as specified in its metadata. - -* `related_tags` - A list of related tags. Will contain up to 100 tags that reference this version. - -* `create_time` - The time, as a RFC 3339 string, this package was created. - -* `update_time` - The time, as a RFC 3339 string, this package was last updated. This includes publishing a new version of the package. - -* `annotations` - Client specified annotations. diff --git a/mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown b/mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown deleted file mode 100644 index e9fbbea7b69a..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/beyondcorp_security_gateway.html.markdown +++ /dev/null @@ -1,32 +0,0 @@ ---- -subcategory: "BeyondCorp" -description: |- - Get information about a Google BeyondCorp Security Gateway. ---- - -# google_beyondcorp_security_gateway - -Get information about a Google BeyondCorp Security Gateway. - -## Example Usage - -```hcl -data "google_beyondcorp_security_gateway" "my-beyondcorp-security-gateway" { - security_gateway_id = "my-beyondcorp-security-gateway" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `security_gateway_id` - (Required) The name of the Security Gateway resource. 
- -- - - - -* `project` - (Optional) The project in which the resource belongs. If it - is not provided, the provider project is used. - -## Attributes Reference - -See [google_beyondcorp_security_gateway](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/beyondcorp_security_gateway) resource for details of the available attributes. diff --git a/mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown b/mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown deleted file mode 100644 index 0079be5641bc..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/bigquery_datasets.html.markdown +++ /dev/null @@ -1,39 +0,0 @@ ---- -subcategory: "BigQuery" -description: |- - A datasource to retrieve a list of datasets in a project. ---- - -# `google_bigquery_datasets` - -Get a list of datasets in a GCP project. For more information see -the [official documentation](https://cloud.google.com/bigquery/docs) -and [API](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list). - -## Example Usage - -```hcl -data "google_bigquery_datasets" "datasets" { - project = "my-project" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `project` - (Optional) The ID of the project in which the resource belongs. - If it is not provided, the provider project is used. - -## Attributes Reference - -The following attributes are exported: - -* `datasets` - A list of all retrieved BigQuery datasets. Structure is [defined below](#nested_datasets). - -The `datasets` block supports: - -* `labels` - User-provided dataset labels, in key/value pairs. -* `friendly_name` - The friendly name of the dataset. -* `dataset_id` - The id of the dataset. -* `location` - The geographic location of the dataset. 
diff --git a/mmv1/third_party/terraform/website/docs/d/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/d/bigquery_table.html.markdown deleted file mode 100644 index 891ad627d379..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/bigquery_table.html.markdown +++ /dev/null @@ -1,36 +0,0 @@ ---- -subcategory: "BigQuery" -description: |- - A datasource to retrieve a specific table in a dataset. ---- - -# `google_bigquery_table` - -Get a specific table in a BigQuery dataset. For more information see -the [official documentation](https://cloud.google.com/bigquery/docs) -and [API](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/get). - -## Example Usage - -```hcl -data "google_bigquery_table" "table" { - project = "my-project" - dataset_id = "my-bq-dataset" - table_id = "my-table" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `dataset_id` - (Required) The dataset ID. - -* `table_id` - (Required) The table ID. - -* `project` - (Optional) The ID of the project in which the resource belongs. - If it is not provided, the provider project is used. - -## Attributes Reference - -See [google_bigquery_table](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/bigquery_table#attributes-reference) resource for details of the available attributes. diff --git a/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown b/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown index bfec2bb723b4..e44bd2b3e6f4 100644 --- a/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown @@ -12,8 +12,8 @@ Retrieves the current IAM policy data for a Bigtable Table. 
```hcl data "google_bigtable_table_iam_policy" "policy" { - instance_name = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name + instance = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name } ``` diff --git a/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown b/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown index 7cc208247d8f..f4d303d27147 100644 --- a/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown @@ -45,4 +45,3 @@ The following additional attributes are exported: * `name` - The resource name of the billing account in the form `billingAccounts/{billing_account_id}`. * `project_ids` - The IDs of any projects associated with the billing account. `lookup_projects` must not be false for this to be populated. -* `currency_code` - The currency code of the billing account, e.g. `USD`. diff --git a/mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown b/mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown deleted file mode 100644 index a0d2b8ecfe2e..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/certificate_manager_dns_authorization.html.markdown +++ /dev/null @@ -1,42 +0,0 @@ ---- -subcategory: "Certificate Manager" -description: |- - Fetches the details of a Certificate Manager DNS Authorization. ---- - -# google_certificate_manager_dns_authorization - -Use this data source to get information about a Certificate Manager DNS Authorization. For more details, see the [API documentation](https://cloud.google.com/certificate-manager/docs/reference/certificate-manager/rest/v1/projects.locations.dnsAuthorizations). 
- -## Example Usage - -```hcl -data "google_certificate_manager_dns_authorization" "default" { - name = "my-dns-auth" - location = "global" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `name` - - (Required) - The name of the DNS Authorization. - -* `domain` - - (Required) - The name of the DNS Authorization. - -* `location` - - (Optional) - The Certificate Manager location. If not specified, "global" is used. - -* `project` - - (Optional) - The ID of the project in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -See [google_certificate_manager_dns_authorization](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/certificate_manager_dns_authorization) resource for details of all the available attributes. diff --git a/mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown deleted file mode 100644 index 166d4ff9ad1a..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/cloud_run_v2_worker_pool.html.markdown +++ /dev/null @@ -1,37 +0,0 @@ ---- -subcategory: "Cloud Run (v2 API)" -description: |- - Get information about a Google Cloud Run v2 Worker Pool. ---- - -# google_cloud_run_v2_worker_pool - -Get information about a Google Cloud Run v2 Worker Pool. For more information see -the [official documentation](https://cloud.google.com/run/docs/) -and [API](https://cloud.google.com/run/docs/apis). - -## Example Usage - -```hcl -data "google_cloud_run_v2_worker_pool" "my_worker_pool" { - name = "my-worker-pool" - location = "us-central1" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `name` - (Required) The name of the Cloud Run v2 Worker Pool. - -* `location` - (Required) The location of the instance. eg us-central1 - -- - - - -* `project` - (Optional) The project in which the resource belongs. 
If it - is not provided, the provider project is used. - -## Attributes Reference - -See [google_cloud_run_v2_worker_pool](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/cloud_run_v2_worker_pool#argument-reference) resource for details of the available attributes. diff --git a/mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown deleted file mode 100644 index 61dd7aafed0a..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/compute_network_attachment.html.markdown +++ /dev/null @@ -1,38 +0,0 @@ ---- -subcategory: "Compute Engine" -description: |- - A data source to retrieve a network attachment ---- - -# `google_compute_network_attachment` - -Get a specific network attachment within a region. For more information see -the [official documentation](https://cloud.google.com/vpc/docs/about-network-attachments) -and [API](https://cloud.google.com/compute/docs/reference/rest/v1/networkAttachments/get). - -## Example Usage - -```hcl -data "google_compute_network_attachment" "default" { - project = "my-project" - name = "my-network-attachment" - region = "europe-west1" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `name` - (Required) The name of the network attachment to retrieve. - The name must be unique within the region. - -* `region` - (Required) The region in which the network attachment resides. - For example, `europe-west1`. - -* `project` - (Optional) The ID of the project in which the resource belongs. - If it is not provided, the provider project is used. - -## Attributes Reference - -See [google_compute_network_attachment](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/bigquery_table#attributes-reference) resource for details of the available attributes. 
\ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown index ca4749a31078..f62f951c366f 100644 --- a/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/compute_subnetworks.html.markdown @@ -42,7 +42,6 @@ The following arguments are supported: * `ip_cidr_range` - The IP address range represented as a CIDR block. * `name` - The name of the subnetwork. * `network` - The self link of the parent network. -* `network_self_link` - (Deprecated) The name of the parent network computed from `network` attribute. (deprecated and will be removed in a future major release. Use `network_name` instead.) * `network_name` - The name of the parent network computed from `network` attribute. * `private_ip_google_access` - Whether the VMs in the subnet can access Google services without assigned external IP addresses. * `self_link` - The self link of the subnetwork. diff --git a/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown b/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown index e7e6f8e4b952..60d564479d86 100644 --- a/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/container_engine_versions.html.markdown @@ -72,4 +72,3 @@ The following attributes are exported: * `default_cluster_version` - Version of Kubernetes the service deploys by default. * `release_channel_default_version` - A map from a release channel name to the channel's default version. See the docs on [available release channel names](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters#Cluster.Channel_1) for more details. * `release_channel_latest_version` - A map from a release channel name to the channel's latest version. 
See the docs on [available release channel names](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters#Cluster.Channel_1) for more details. -* `release_channel_upgrade_target_version` - A map from a release channel name to the channel's auto upgrade target version. See the docs on [available release channel names](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters#Cluster.Channel_1) for more details. diff --git a/mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown b/mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown deleted file mode 100644 index 15271c4f3576..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/dataplex_data_quality_rules.html.markdown +++ /dev/null @@ -1,38 +0,0 @@ ---- -subcategory: "Dataplex" -description: |- - A datasource to retrieve the data quality rules generated based on a data profile scan. ---- - - -# `google_dataplex_data_quality_rules` -Retrieves the generated data quality rules for the creating a new data quality scan. -For more information see -the [official documentation](https://cloud.google.com/dataplex/docs) -and [API](https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.dataScans/generateDataQualityRules). - -## example - -```hcl -data "google_dataplex_data_quality_rules" "dqrs" { - project = "my-project" - location = "use-central1" - data_scan_id = "my-datascan-profile" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `project` - (Required) The ID of the project in which the datascan belongs. - -* `location` - (Required) The location where the referenced data profile scan resides. - -* `data_scan_id` - (Required) The ID of the data profile scan which the generation of quality rules will be basing on. 
- -## Attributes Reference - -The attributes are exported: - -* `rules` - (Computed) The list of generated data quality rules. For more details, please see the [datascan page](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/dataplex_datascan#nested_data_quality_spec_rules). \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown b/mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown deleted file mode 100644 index 7d0237e8f3f6..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/firestore_document.html.markdown +++ /dev/null @@ -1,43 +0,0 @@ ---- -subcategory: "Firestore" -description: |- - Read a document from a Firestore database ---- - - -# google_firestore_document - -Reads a document from a Firestore database. -See [the official documentation](https://cloud.google.com/firestore/native/docs/) -and -[API](https://cloud.google.com/firestore/docs/reference/rest/v1/projects.databases.documents/get/). - - -## Example Usage - -Retrieve a document from the Firestore database. - -```hcl -resource "google_firestore_document" "mydoc" { - project = google_firestore_database.database.project - database = google_firestore_database.database.name - collection = "somenewcollection" - document_id = "my-doc-id" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `database` - (Required) The name of the Firestore database. - -* `collection` - (Required) The name of the collection of documents. - -* `document_id` - (Required) The id of the document to get. - -* `project` - (Optional) The project in which the database resides. - -## Attributes Reference - -See [google_firestore_document](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/google_firestore_document) resource for details of the available attributes. 
diff --git a/mmv1/third_party/terraform/website/docs/d/folder.html.markdown b/mmv1/third_party/terraform/website/docs/d/folder.html.markdown index 3f61f1e64a45..1a62e8ae5079 100644 --- a/mmv1/third_party/terraform/website/docs/d/folder.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/folder.html.markdown @@ -47,5 +47,3 @@ The following attributes are exported: * `create_time` - Timestamp when the Organization was created. A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z". * `lifecycle_state` - The Folder's current lifecycle state. * `organization` - If `lookup_organization` is enable, the resource name of the Organization that the folder belongs. -* `configured_capabilities` - Optional capabilities configured for this folder. -* `management_project` - Management Project associated with this folder (if capability is enabled). diff --git a/mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown b/mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown deleted file mode 100644 index 8ae265a043c3..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/gke_hub_feature.html.markdown +++ /dev/null @@ -1,30 +0,0 @@ ---- -subcategory: "GKEHub" -description: |- - Retrieves the details of a GKE Hub Feature. ---- - -# `google_gke_hub_feature` -Retrieves the details of a specific GKE Hub Feature. Use this data source to retrieve the feature's configuration and state. - -## Example Usage - -```hcl -data "google_gke_hub_feature" "example" { - location = "global" - name = "servicemesh" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `name` - (Required) The name of the feature you want to know the status of. -* `location` - (Required) The location for the GKE Hub Feature. -* `project` - (Optional) The ID of the project in which the resource belongs. - If it is not provided, the provider project is used. 
- -## Attributes Reference - -See [google_gke_hub_feature](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/gke_hub_feature) resource for details of the available attributes. \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown b/mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown deleted file mode 100644 index 202636fa3bb9..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/gke_hub_membership.html.markdown +++ /dev/null @@ -1,35 +0,0 @@ ---- -subcategory: "GKEHub" -description: |- - Retrieves the details of a GKE Hub Membership. ---- - -# `google_gke_hub_membership` - -Retrieves the details of a specific GKE Hub Membership. Use this data source to retrieve the membership's configuration and state. - -## Example Usage - -```hcl -data "google_gke_hub_membership" "example" { - project = "my-project-id" - location = "global" - membership_id = "my-membership-id" # GKE Cluster's name -} -``` - -## Argument Reference - -The following arguments are supported: - -* `membership_id` - (Required) The GKE Hub Membership id or GKE Cluster's name. - -* `location` - (Required) The location for the GKE Hub Membership. - Currently only `global` is supported. - -* `project` - (Optional) The ID of the project in which the resource belongs. - If it is not provided, the provider project is used. - -## Attributes Reference - -See [google_gke_hub_membership](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/gke_hub_membership) resource for details of the available attributes. 
\ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown b/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown index 580e27506fbe..2654ec3016d5 100644 --- a/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/kms_autokey_config.html.markdown @@ -8,6 +8,10 @@ description: |- Provides access to Google Cloud Platform KMS AutokeyConfig. A AutokeyConfig is a Cloud KMS resource that helps you safely span the separation of duties to create new Cloud KMS keys for CMEK using Autokey. +~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. + + For more information see [the official documentation](https://cloud.google.com/kms/docs/reference/rest/v1/folders) and diff --git a/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown b/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown index 78149971ee2a..4f65356a27fe 100644 --- a/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/kms_key_handle.html.markdown @@ -8,6 +8,9 @@ description: |- Provides access to Google Cloud Platform KMS KeyHandle. A key handle is a Cloud KMS resource that helps you safely span the separation of duties to create new Cloud KMS keys for CMEK using Autokey. +~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. 
+ For more information see [the official documentation](https://cloud.google.com/kms/docs/resource-hierarchy#key_handles) and diff --git a/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown b/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown index 93da252f63bd..7cc9ac8ba4f5 100644 --- a/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/kms_key_handles.html.markdown @@ -8,6 +8,9 @@ description: |- Provides access to Google Cloud Platform KMS KeyHandle. A key handle is a Cloud KMS resource that helps you safely span the separation of duties to create new Cloud KMS keys for CMEK using Autokey. +~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. + For more information see [the official documentation](https://cloud.google.com/kms/docs/resource-hierarchy#key_handles) and diff --git a/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown b/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown deleted file mode 100644 index 95c0139243fe..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/lustre_instance.html.markdown +++ /dev/null @@ -1,32 +0,0 @@ ---- -subcategory: "Lustre" -description: |- - Fetches the details of a Lustre instance. ---- - -# google_lustre_instance - -Use this data source to get information about a Lustre instance. For more information see the [API docs](https://cloud.google.com/filestore/docs/lustre/reference/rest/v1/projects.locations.instances). 
- -## Example Usage - -```hcl -data "google_lustre_instance" "instance" { - name = "my-instance" - zone = "us-central1-a" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `instance_id` - (Required) The instance id of the Lustre instance. - -* `zone` - (Optional) The ID of the zone in which the resource belongs. If it is not provided, the provider zone is used. - -* `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -See [google_lustre_instance](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/lustre_instance) resource for details of all the available attributes. diff --git a/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown b/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown index 6f96e17b2968..a2e11867dc6b 100644 --- a/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/netblock_ip_ranges.html.markdown @@ -63,13 +63,9 @@ The following arguments are supported: * `google-netblocks` - Corresponds to IP addresses used for Google services. [More details.](https://cloud.google.com/compute/docs/faq#where_can_i_find_product_name_short_ip_ranges) - * `restricted-googleapis` - Corresponds to the IP addresses used for Private Google Access only for services that support VPC Service Controls API access. These ranges are for DNS configuration. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) + * `restricted-googleapis` - Corresponds to the IP addresses used for Private Google Access only for services that support VPC Service Controls API access. 
[More details.](https://cloud.google.com/vpc/docs/private-access-options#domain-vips) - * `restricted-googleapis-with-directconnectivity` - Corresponds to the IP addresses used for Private Google Access only for services that support VPC Service Controls API access. These ranges are for routing and firewall configurations. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) - - * `private-googleapis` - Corresponds to the IP addresses used for Private Google Access, including services that do not support VPC Service Controls. These ranges are for DNS configuration. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) - - * `private-googleapis-with-directconnectivity` - Corresponds to the IP addresses used for Private Google Access, including services that do not support VPC Service Controls. These ranges are for routing and firewall configurations. [More details.](https://cloud.google.com/vpc/docs/configure-private-google-access#config-options) + * `private-googleapis` - Corresponds to the IP addresses used for Private Google Access for services that do not support VPC Service Controls. [More details.](https://cloud.google.com/vpc/docs/private-access-options#domain-vips) * `dns-forwarders` - Corresponds to the IP addresses used to originate Cloud DNS outbound forwarding. [More details.](https://cloud.google.com/dns/zones/#creating-forwarding-zones) @@ -77,7 +73,7 @@ The following arguments are supported: * `health-checkers` - Corresponds to the IP addresses used for health checking in Cloud Load Balancing. [More details.](https://cloud.google.com/load-balancing/docs/health-checks) - * `legacy-health-checkers` - Corresponds to the IP addresses used for legacy style health checkers (used by Network Load Balancing). 
[More details.](https://cloud.google.com/load-balancing/docs/health-checks) + * `legacy-health-checkers` - Corresponds to the IP addresses used for legacy style health checkers (used by Network Load Balancing). [ More details.](https://cloud.google.com/load-balancing/docs/health-checks) ## Attributes Reference diff --git a/mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown b/mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown deleted file mode 100644 index 1364b53c8710..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/network_management_connectivity_test_run.html.markdown +++ /dev/null @@ -1,205 +0,0 @@ ---- -subcategory: "Network Management" -description: |- - A connectivity test is a static analysis of your resource configurations - that enables you to evaluate connectivity to and from Google Cloud - resources in your Virtual Private Cloud (VPC) network. ---- - -# google_network_management_connectivity_test_run - - -!> This datasource triggers side effects on the target resource. It will take a long time to refresh (i.e. `terraform plan` will take much longer than usual) and may modify the state of the parent resource or other copies of the resource copying the same parent. - -A connectivity test is a static analysis of your resource configurations -that enables you to evaluate connectivity to and from Google Cloud -resources in your Virtual Private Cloud (VPC) network. This data source allows -you to trigger a rerun operation on a connectivity test and return the results. 
- -To get more information about connectivity tests, see: - -* [API documentation](https://cloud.google.com/network-intelligence-center/docs/reference/networkmanagement/rest/v1/projects.locations.global.connectivityTests/rerun) -* How-to Guides - * [Official Documentation](https://cloud.google.com/network-intelligence-center/docs) - -## Example Usage - Network Management Connectivity Test Run Instances - -```hcl -data "google_network_management_connectivity_test_run" "instance-test-run" { - name = google_network_management_connectivity_test.instance-test.name -} - -resource "google_network_management_connectivity_test" "instance-test" { - name = "conn-test-instances" - source { - instance = google_compute_instance.source.id - } - - destination { - instance = google_compute_instance.destination.id - } - - protocol = "TCP" - labels = { - env = "test" - } -} - -resource "google_compute_instance" "source" { - name = "source-vm" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.debian_9.id - } - } - - network_interface { - network = google_compute_network.vpc.id - access_config { - } - } -} - -resource "google_compute_instance" "destination" { - name = "dest-vm" - machine_type = "e2-medium" - - boot_disk { - initialize_params { - image = data.google_compute_image.debian_9.id - } - } - - network_interface { - network = google_compute_network.vpc.id - access_config { - } - } -} - -resource "google_compute_network" "vpc" { - name = "conn-test-net" -} - -data "google_compute_image" "debian_9" { - family = "debian-11" - project = "debian-cloud" -} -``` - -## Argument Reference - -The following arguments are supported: - - -* `name` - - (Required) - Unique name for the connectivity test. - - -- - - - - -* `project` - (Optional) The ID of the project in which the resource belongs. - If it is not provided, the provider project is used. 
- - -## Attributes Reference - -In addition to the arguments listed above, the following computed attributes are exported: - -* `id` - an identifier for the resource with format `projects/{{project}}/locations/global/connectivityTests/{{name}}` - -* `reachability_details` - - Connectivity test reachability details. - Structure is [documented below](#nested_reachability_details). - - -The `reachability_details` block contains: - -* `result` - - (Output) - Status of the connectivity test: RESULT_UNSPECIFIED, REACHABLE, UNREACHABLE, AMBIGUOUS or UNDETERMINED. - -* `verify_time` - - (Output) - Time when reachability details were determined. An RFC3339 timestamp in UTC time. - This in the format of yyyy-MM-ddTHH:mm:ss.SSSZ. - -* `traces` - - (Output) - List of connectivity test traces. - Structure is [documented below](#nested_reachability_details_traces). - - -The `traces` block contains: - -* `endpoint_info` - - (Output) - Derived from the source and destination endpoints definition specified by user request, and validated by the data plane model. - Structure is [documented below](#nested_reachability_details_traces_traces_endpoint_info). - -* `steps` - - (Output) - A trace of a test contains multiple steps from the initial state to the final state (delivered, dropped, forwarded, or aborted). - Structure is [documented below](#nested_reachability_details_traces_traces_steps). - -* `forward_trace_id` - - (Output) - ID of the trace. - - -The `endpoint_info` block contains: - -* `source_ip` - - (Output) - Source IP address. - -* `destination_ip` - - (Output) - Destination IP address. - -* `protocol` - - (Output) - IP protocol in string format, for example: "TCP", "UDP", "ICMP". - -* `source_port` - - (Output) - Source port. Only valid when protocol is TCP or UDP. - -* `destination_port` - - (Output) - Destination port. Only valid when protocol is TCP or UDP. - -* `source_network_uri` - - (Output) - URI of the network where this packet originates from. 
- -* `destination_network_uri` - - (Output) - URI of the network where this packet is sent to. - -* `source_agent_uri` - - (Output) - URI of the source telemetry agent this packet originates from. - -The `steps` block contains: - -* `description` - - (Output) - Description of the connectivity test step. - -* `state` - - (Output) - State of the connectivity test step. - -* `causes_drop` - - (Output) - If this step leads to the final state Drop. - -* `project_id` - - (Output) - Project ID of the connectivity test step. - diff --git a/mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown deleted file mode 100644 index 4c6a51769664..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/redis_cluster.html.markdown +++ /dev/null @@ -1,38 +0,0 @@ ---- -subcategory: "Memorystore (Redis)" -description: |- - Fetches the details of a Redis Cluster. ---- - -# google_redis_cluster - -Use this data source to get information about a Redis Cluster. For more details, see the [API documentation](https://cloud.google.com/memorystore/docs/cluster/reference/rest/v1/projects.locations.clusters). - -## Example Usage - -```hcl -data "google_redis_cluster" "default" { - name = "my-redis-cluster" - region = "us-central1" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `name` - - (Required) - The name of the Redis cluster. - -* `region` - - (Required) - The region of the Redis cluster. - -* `project` - - (optional) - The ID of the project in which the resource belongs. If it is not provided, the provider project is used. - -## Attributes Reference - -See [google_redis_cluster](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/redis_cluster) resource for details of all the available attributes. 
diff --git a/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown b/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown index 4a509c3b1996..a12499283f5d 100644 --- a/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/secret_manager_secret_version.html.markdown @@ -28,12 +28,9 @@ The following arguments are supported: * `version` - (Optional) The version of the secret to get. If it is not provided, the latest version is retrieved. -* `is_secret_data_base64` - (Optional) If set to `true`, the secret data is +* `is_secret_data_base64` - (Optional) If set to 'true', the secret data is expected to be base64-encoded string. -* `fetch_secret_data` - (Optional) If set to `false`, the `secret_data` - will not be fetched. Default is `true`. - ## Attributes Reference The following attributes are exported: diff --git a/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown b/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown index ce97d70e1221..a45cdadd011e 100644 --- a/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/storage_bucket_object_content.html.markdown @@ -45,8 +45,3 @@ The following attributes are exported: * `content_base64` - (Computed) Base64 encoded version of the object content. Use this when dealing with binary data. - -* `content_hexsha512` - (Computed) Hex encoded SHA512 checksum of file content. - -* `content_base64sha512` - (Computed) Base64 encoded SHA512 checksum of file content. 
- diff --git a/mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown b/mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown deleted file mode 100644 index e22e8716ab69..000000000000 --- a/mmv1/third_party/terraform/website/docs/d/storage_insights_dataset_config.html.markdown +++ /dev/null @@ -1,36 +0,0 @@ ---- -subcategory: "Cloud Storage Insights" -description: |- - Represents a Storage Insights DatasetConfig. ---- - -# google_storage_insights_dataset_config - -Use this data source to get information about a Storage Insights Dataset Config resource. -See [the official documentation](https://cloud.google.com/storage/docs/insights/datasets) -and -[API](https://cloud.google.com/storage/docs/insights/reference/rest/v1/projects.locations.datasetConfigs). - - -## Example Usage - -```hcl -data "google_storage_insights_dataset_config" "sample-config" { - project = "sample_project" - location = "sample_location" - dataset_config_id = "sample_dataset_config_id" -} -``` - -## Argument Reference - -The following arguments are supported: - -* `project` - (Optional) The name of the GCP project in which dataset config exists. Can be configured through config as well. -* `location` - (Required) The location of the Dataset Config. -* `dataset_config_id` - (Required) The user-defined ID of the DatasetConfig - - -## Attributes Reference - -See [google_storage_insights_dataset_config](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/storage_insights_dataset_config#argument-reference) resource for details of the available attributes. 
diff --git a/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown b/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown index b4e66c8ab27b..4978589a5d7b 100644 --- a/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/tpu_tensorflow_versions.html.markdown @@ -3,10 +3,6 @@ subcategory: "Cloud TPU" description: |- Get available TensorFlow versions. --- -~> **Warning:** -`google_tpu_tensorflow_versions` is deprecated and will be removed in a future major release. - Use `google_tpu_v2_runtime_versions` instead. For moving from TPU Node to TPU VM architecture, see - https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. # google_tpu_tensorflow_versions diff --git a/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown b/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown index 4bd5c4c8a879..f214d75715de 100644 --- a/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown @@ -69,6 +69,7 @@ resource "google_project_service" "services" { project = var.project_id service = each.key disable_dependent_services = false + disable_on_destroy = false } # Create Workload Identity Pool (reference google_project_service to ensure APIs are enabled) diff --git a/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown b/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown index af3b45ca3c77..eaa817f0de0c 100644 --- a/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/sql_instance_switchover.html.markdown @@ -32,11 +32,11 @@ replica_configuration { } ``` -2. 
Invoke switchover on the replica - * Change `instance_type` from `READ_REPLICA_INSTANCE` to `CLOUD_SQL_INSTANCE` - * Remove `master_instance_name` - * Remove `replica_configuration` - * Add current primary's name to the replica's `replica_names` list +2. Invoke switchover on the replica \ +a. Change `instance_type` from `READ_REPLICA_INSTANCE` to `CLOUD_SQL_INSTANCE` \ +b. Remove `master_instance_name` \ +c. Remove `replica_configuration` \ +d. Add current primary's name to the replica's `replica_names` list ```diff resource "google_sql_database_instance" "original-replica" { @@ -54,13 +54,13 @@ resource "google_sql_database_instance" "original-replica" { } ``` -3. Update the old primary and run `terraform plan` - * Change `instance_type` from `CLOUD_SQL_INSTANCE` to `READ_REPLICA_INSTANCE` - * Set `master_instance_name` to the new primary (original replica) - * Set `replica_configuration` and indicate this is a `cascadable-replica` - * Remove old replica from `replica_names` - ~> **NOTE**: Do **not** delete the replica_names field, even if it has no replicas remaining. Set replica_names = [ ] to indicate it having no replicas. - * Run `terraform plan` and verify that everything is done in-place (or data will be lost) +3. Update the old primary and run `terraform plan` \ +a. Change `instance_type` from `CLOUD_SQL_INSTANCE` to `READ_REPLICA_INSTANCE` \ +b. Set `master_instance_name` to the new primary (original replica) \ +c. Set `replica_configuration` and indicate this is a `cascadable-replica` \ +d. Remove old replica from `replica_names` \ + ~> **NOTE**: Do **not** delete the replica_names field, even if it has no replicas remaining. Set replica_names = [ ] to indicate it having no replicas. \ +e. 
Run `terraform plan` and verify that everything is done in-place (or data will be lost) ```diff resource "google_sql_database_instance" "original-primary" { diff --git a/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown b/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown index c9736bd43d90..da5c7b3cd87a 100644 --- a/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/using_gke_with_terraform.html.markdown @@ -59,7 +59,7 @@ provider "kubernetes" { ) } ``` -Although the above can result in authentication errors, over time, as the token recorded in the google_client_config data resource is short lived (thus it expires) and it's stored in state. Fortunately, the [kubernetes provider can accept valid credentials from an exec-based plugin](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#exec-plugins) to fetch a new token before each Terraform operation (so long as you have the [gke-cloud-auth-plugin for kubectl installed](https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke)), like so: +Although the above can result in authentication errors, over time, as the token recorded in the google_client_cofig data resource is short lived (thus it expires) and it's stored in state. 
Fortunately, the [kubernetes provider can accept valid credentials from an exec-based plugin](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#exec-plugins) to fetch a new token before each Terraform operation (so long as you have the [gke-cloud-auth-plugin for kubectl installed](https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke)), like so: ```hcl # Retrieve an access token as the Terraform runner diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown deleted file mode 100644 index 3ee8d0e7bf63..000000000000 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ /dev/null @@ -1,315 +0,0 @@ ---- -page_title: "Terraform provider for Google Cloud 7.0.0 Upgrade Guide" -description: |- - Terraform provider for Google Cloud 7.0.0 Upgrade Guide ---- - -# Terraform Google Provider 7.0.0 Upgrade Guide - -The `7.0.0` release of the Google provider for Terraform is a major version and -includes some changes that you will need to consider when upgrading. This guide -is intended to help with that process and focuses only on the changes necessary -to upgrade from the final `6.X` series release to `7.0.0`. - -Most of the changes outlined in this guide have been previously marked as -deprecated in the Terraform `plan`/`apply` output throughout previous provider -releases, up to and including the final `6.X` series release. These changes, -such as deprecation notices, can always be found in the CHANGELOG of the -affected providers. [google](https://github.com/hashicorp/terraform-provider-google/blob/main/CHANGELOG.md) -[google-beta](https://github.com/hashicorp/terraform-provider-google-beta/blob/main/CHANGELOG.md) - -## I accidentally upgraded to 7.0.0, how do I downgrade to `6.X`? 
- -If you've inadvertently upgraded to `7.0.0`, first see the -[Provider Version Configuration Guide](#provider-version-configuration) to lock -your provider version; if you've constrained the provider to a lower version -such as shown in the previous version example in that guide, Terraform will pull -in a `6.X` series release on `terraform init`. - -If you've only ran `terraform init` or `terraform plan`, your state will not -have been modified and downgrading your provider is sufficient. - -If you've ran `terraform refresh` or `terraform apply`, Terraform may have made -state changes in the meantime. - -* If you're using a local state, or a remote state backend that does not support -versioning, `terraform refresh` with a downgraded provider is likely sufficient -to revert your state. The Google provider generally refreshes most state -information from the API, and the properties necessary to do so have been left -unchanged. - -* If you're using a remote state backend that supports versioning such as -[Google Cloud Storage](https://developer.hashicorp.com/terraform/language/settings/backends/gcs), -you can revert the Terraform state file to a previous version. If you do -so and Terraform had created resources as part of a `terraform apply` in the -meantime, you'll need to either delete them by hand or `terraform import` them -so Terraform knows to manage them. - -## Provider Version Configuration - --> Before upgrading to version 7.0.0, it is recommended to upgrade to the most -recent `6.X` series release of the provider, make the changes noted in this guide, -and ensure that your environment successfully runs -[`terraform plan`](https://developer.hashicorp.com/terraform/cli/commands/plan) -without unexpected changes or deprecation notices. - -It is recommended to use [version constraints](https://developer.hashicorp.com/terraform/language/providers/requirements#requiring-providers) -when configuring Terraform providers. 
If you are following that recommendation, -update the version constraints in your Terraform configuration and run -[`terraform init`](https://developer.hashicorp.com/terraform/cli/commands/init) to download -the new version. - -If you aren't using version constraints, you can use `terraform init -upgrade` -in order to upgrade your provider to the latest released version. - -For example, given this previous configuration: - -```hcl -terraform { - required_providers { - google = { -<<<<<<< HEAD - version = "~> 5.30.0" -======= - version = "~> 6.48.0" ->>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) - } - } -} -``` - -An updated configuration: - -```hcl -terraform { - required_providers { - google = { - version = "~> 7.0.0" - } - } -} -``` - -<<<<<<< HEAD -## Provider - -### Resource import formats have improved validation - -Throughout the provider there were many resources which erroneously gave false positives to poorly formatted import input if a subset of the provided input was valid to their configured import formats. All GCP resource IDs supplied to "terraform import" must match the documentation specified import formats exactly. - -## Datasources - -## Datasource: `google_service_account_key` - -### `project` is now removed - -`project` has been removed. It can be safely removed from your configuration. - -## Resources - -## Resource: `google_alloydb_cluster` - -### Cluster deletion now prevented by default with `deletion_protection` - -The field `deletion_protection` has been added with a default value of `true`. This field prevents -Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing clusters will have -`deletion_protection` set to `true` during the next refresh unless otherwise set in configuration. 
- -## Resource: `google_beyondcorp_application` is now removed - -`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. -Use `google_beyondcorp_security_gateway_application` instead. - -======= -## Resources - ->>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) -## Resource: `google_artifact_registry_repository` - -### `public_repository` fields have had their default values removed. - -`public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now. - -## Resource: `google_beyondcorp_application` is now removed - -`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. -Use `google_beyondcorp_security_gateway_application` instead. - -## Resource: `google_bigquery_table` - -### `view.use_legacy_sql` no longer has a default value of `True` - -The `view.use_legacy_sql` field no longer has a default value. Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For newly created views, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details. 
- -## Resource: `google_bigtable_table_iam_binding` - -### `instance` is now removed - -`instance` has been removed in favor of `instance_name`. - -## Resource: `google_bigtable_table_iam_member` - -### `instance` is now removed - -`instance` has been removed in favor of `instance_name`. - -## Resource: `google_bigtable_table_iam_policy` - -### `instance` is now removed - -`instance` has been removed in favor of `instance_name`. - -<<<<<<< HEAD -<<<<<<< HEAD -======= -## Resource: `google_billing_budget` - -### `budget_filter.credit types` and `budget_filter.subaccounts` are no longer optional+computed, only optional - -`budget_filter.credit types` and `budget_filter.subaccounts` are no longer O+C. These fields already did not export any API-default values, so no change to your configuration should be necessary. - ->>>>>>> e2e1d5150 (remove default_from_api from credit_types and subaccounts in google_billing_budget (#14938)) -## Resource: `google_compute_packet_mirroring` - -### `subnetworks` and `instances` fields have been converted to sets - -`subnetworks` and `instances` fields have been converted to sets. If you need to access values in their nested objects, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration. - -======= ->>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) -## Resource: `google_compute_subnetwork` - -### `enable_flow_logs`is now removed - -`enable_flow_logs` has been removed in favor of `log_config`. - -## Resource: `google_gke_hub_feature_membership` - -### `configmanagement.binauthz` is now removed - -Remove `configmanagement.binauthz` from your configuration after upgrade. - -## Resource: `google_gke_hub_membership` - -### `description` is now removed - -Remove `description` from your configuration after upgrade. - -<<<<<<< HEAD -## Resource: `google_colab_runtime_template` - -### `post_startup_script_config` is now removed. 
- -Remove `post_startup_script_config` from your configuration after upgrade. - -<<<<<<< HEAD -======= ->>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) -======= -## Resource: `google_monitoring_uptime_check_config` - -### Exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` must be set - -Setting exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` is now enforced in order to avoid situations where it is unclear which was being used. - ->>>>>>> 7b15bdcb5 (Standardized required_with behavior for write-only fields (#14941)) -## Resource: `google_network_services_lb_traffic_extension` - -### `load_balancing_scheme` is now required - -`load_balancing_scheme` is now a required field. This field was already required for resource functionality so no change to your configuration should be necessary. - -## Resource: `google_notebooks_location` is now removed - -This resource is not functional and can safely be removed from your configuration. - -## Resource: `google_project_service` - -### `disable_on_destroy` now defaults to `false` - -The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. - -Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. - -## Resource: `google_redis_cluster` - - `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. - -## Resource: `google_sql_user` - -### `password_wo` and `password_wo_version` must be set together - -This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. 
- -## Resource: `google_secure_source_manager_instance` - -### `deletion_policy` has had its default value changed to `PREVENT` - -`deletion_policy` has had its default value changed to `PREVENT`. This field prevents -Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing resources will have -`deletion_policy` set to `true` during the next refresh unless otherwise set in configuration. - -## Resource: `google_secure_source_manager_repository` - -### `deletion_policy` has had its default value changed to `PREVENT` - -`deletion_policy` has had its default value changed to `PREVENT`. This field prevents -Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing resources will have -`deletion_policy` set to `true` during the next refresh unless otherwise set in configuration. - -## Resource: `google_storage_transfer_job` - -### Several `path` fields have improved validation - -`transfer_spec.gcs_data_sink.path`, `transfer_spec.gcs_data_source.path`, `replication_spec.gcs_data_source.path`, and `replication_spec.gcs_data_sink.path` are now required to not start with a '/' character. - -## Resource: `google_storage_bucket` - -### `retention_period` changed to `string` data type - -`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. - -Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. - -To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. 
- -## Resource: `google_storage_notification` - -### `google_storage_notification` Migrated to the Plugin Framework - -This resource has been migrated from SDKv2 to the more modern [plugin framework resource implementation](https://developer.hashicorp.com/terraform/plugin/framework). One associated breaking change is expected with this migration; please review the details below. - -### `topic` Field Format Change - -The `topic` field for `google_storage_notification` must now be provided in the format `projects/{{project}}/topics/{{topic}}`. - -The previous SDKv2 implementation accepted both `projects/{{project}}/topics/{{topic}}` and the fully qualified Google API format `//pubsub.googleapis.com/projects/{{project}}/topics/{{topic}}` in configuration. However, it consistently stored the latter (fully qualified) format in the Terraform state. - -With this migration, only the `projects/{{project}}/topics/{{topic}}` format is allowed in configuration, aligning with the `id` format of the `google_pubsub_topic` resource. - -A state upgrader will automatically migrate the `topic` field's format in your Terraform state when you upgrade to this provider version. However, you **must ensure your Terraform configuration files are updated** to use the `projects/{{project}}/topics/{{topic}}` format to avoid validation errors. - -## Resource: `google_tpu_node` is now removed - -`google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. - -## Resource: `google_vertex_ai_endpoint` - -### `enable_secure_private_service_connect` is now removed from the GA provider - -`enable_secure_private_service_connect` has been removed from the GA provider it is not available in the GA version of the API. The field is still available when using the beta provider. 
- -<<<<<<< HEAD -Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. -<<<<<<< HEAD -======= -Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. ->>>>>>> c96e1c59a (add 7.0.0 guide to main (#14861)) -======= - -### `metadata`, and `metadata.config` are now required. - -`metadata`, and `metadata.config` are now required. These fields were already required for resource functionality, so no change is necessary to existing configurations. - - `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. ->>>>>>> eed48c10c (Breaking Change: Remove allow_fewer_zones_deployment from Memorystore and Redis Cluster (#14889)) diff --git a/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown b/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown index 1424e2a1b351..2660aaf170d6 100644 --- a/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown @@ -66,7 +66,7 @@ In addition to the arguments listed above, the following computed attributes are Optional.Type of Alias -The `certs_info` list contains: +The `certs_info` block contains: * `cert_info` - (Output) diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index 56d2430cc7d0..859215167d13 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ 
b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -133,10 +133,9 @@ The following arguments are supported: ~>**NOTE:** Because this field expects a JSON string, any changes to the string will create a diff, even if the JSON itself hasn't changed. If the API returns a different value for the same schema, e.g. it - switched the order of values or replaced a field data type (`STRUCT` with - `RECORD`, `DECIMAL` with `NUMERIC`, etc.), we currently cannot suppress - the recurring diff this causes. As a workaround, we recommend using the - schema as returned by the API. + switched the order of values or replaced `STRUCT` field type with `RECORD` + field type, we currently cannot suppress the recurring diff this causes. + As a workaround, we recommend using the schema as returned by the API. ~>**NOTE:** If you use `external_data_configuration` [documented below](#nested_external_data_configuration) and do **not** set @@ -144,11 +143,6 @@ The following arguments are supported: with `external_data_configuration.schema`. Otherwise, schemas must be specified with this top-level field. -* `ignore_schema_changes` - (Optional) A list of fields which should be ignored for each column in schema. - **NOTE:** Right now only `dataPolicies` field is supported. We might support others in the future. - -* `ignore_auto_generated_schema` - (Optional) If true, Terraform will prevent columns added by the server(e.g. hive partitioned columns) in schema from showing diff. - * `schema_foreign_type_info` - (Optional) Specifies metadata of the foreign data type definition in field schema. Structure is [documented below](#nested_schema_foreign_type_info). @@ -425,12 +419,7 @@ The following arguments are supported: * `query` - (Required) A query that BigQuery executes when the view is referenced. * `use_legacy_sql` - (Optional) Specifies whether to use BigQuery's legacy SQL for this view. - If set to `false`, the view will use BigQuery's standard SQL. 
If set to - `true`, the view will use BigQuery's legacy SQL. If unset, the API will - interpret it as a `true` and assumes the legacy SQL dialect for its query - according to the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition). - -> **Note**: Starting in provider version `7.0.0`, no default value is - provided for this field unless explicitly set in the configuration. + The default value is true. If set to false, the view will use BigQuery's standard SQL. The `materialized_view` block supports: diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown index fbe537ba6579..c9b952fdefb7 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_instance.html.markdown @@ -141,8 +141,6 @@ If no value is set, Cloud Bigtable automatically allocates nodes based on your d * `kms_key_name` - (Optional) Describes the Cloud KMS encryption key that will be used to protect the destination Bigtable cluster. The requirements for this key are: 1) The Cloud Bigtable service account associated with the project that contains this cluster must be granted the `cloudkms.cryptoKeyEncrypterDecrypter` role on the CMEK key. 2) Only regional keys can be used and the region of the CMEK key must match the region of the cluster. -* `node_scaling_factor` - (Optional) The node scaling factor for this cluster. One of `"NodeScalingFactor1X"` or `"NodeScalingFactor2X"`. Defaults to `"NodeScalingFactor1X"`. If `"NodeScalingFactor2X"` is specified, then `num_nodes`, `min_nodes`, and `max_nodes` would need to be specified in increments of 2. This value cannot be updated after the cluster is created. - -> **Note**: Removing the field entirely from the config will cause the provider to default to the backend value. 
!> **Warning:** Modifying the `storage_type`, `zone` or `kms_key_name` of an existing cluster (by diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown index ef0f71085b91..ccd76783e0d1 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_table.html.markdown @@ -100,7 +100,7 @@ to delete/recreate the entire `google_bigtable_table` resource. * `change_stream_retention` - (Optional) Duration to retain change stream data for the table. Set to 0 to disable. Must be between 1 and 7 days. -* `automated_backup_policy` - (Optional) Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, either omit the automated_backup_policy argument, or set both Retention Period and Frequency properties to "0". To disable automated backup on an _existing_ table that has automated backup enabled, set _both_ Retention Period and Frequency properties to "0". When updating an existing table, to modify the Retention Period or Frequency properties of the resource's automated backup policy, set the respective property to a non-zero value. If the automated_backup_policy argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified. +* `automated_backup_policy` - (Optional) Defines an automated backup policy for a table, specified by Retention Period and Frequency. To _create_ a table with automated backup disabled, omit this argument. To disable automated backup on an _existing_ table that has automated backup enabled, set both Retention Period and Frequency to "0". If this argument is not provided in the configuration on update, the resource's automated backup policy will _not_ be modified. 
----- diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown index b8dd500407fb..7007e82bb986 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown @@ -29,10 +29,10 @@ data "google_iam_policy" "admin" { } resource "google_bigtable_table_iam_policy" "editor" { - project = "your-project" - instance_name = "your-bigtable-instance" - table = "your-bigtable-table" - policy_data = data.google_iam_policy.admin.policy_data + project = "your-project" + instance = "your-bigtable-instance" + table = "your-bigtable-table" + policy_data = data.google_iam_policy.admin.policy_data } ``` @@ -40,10 +40,10 @@ resource "google_bigtable_table_iam_policy" "editor" { ```hcl resource "google_bigtable_table_iam_binding" "editor" { - table = "your-bigtable-table" - instance_name = "your-bigtable-instance" - role = "roles/bigtable.user" - members = [ + table = "your-bigtable-table" + instance = "your-bigtable-instance" + role = "roles/bigtable.user" + members = [ "user:jane@example.com", ] } @@ -53,10 +53,10 @@ resource "google_bigtable_table_iam_binding" "editor" { ```hcl resource "google_bigtable_table_iam_member" "editor" { - table = "your-bigtable-table" - instance_name = "your-bigtable-instance" - role = "roles/bigtable.user" - member = "user:jane@example.com" + table = "your-bigtable-table" + instance = "your-bigtable-instance" + role = "roles/bigtable.user" + member = "user:jane@example.com" } ``` @@ -64,7 +64,7 @@ resource "google_bigtable_table_iam_member" "editor" { The following arguments are supported: -* `instance_name` - (Required) The name or relative resource id of the instance that owns the table. +* `instance` - (Required) The name or relative resource id of the instance that owns the table. 
* `table` - (Required) The name or relative resource id of the table to manage IAM policies for. diff --git a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown index 50005f52291f..3cbea365b775 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown @@ -27,6 +27,7 @@ resource "google_cloudbuild_worker_pool" "pool" { ```hcl resource "google_project_service" "servicenetworking" { service = "servicenetworking.googleapis.com" + disable_on_destroy = false } resource "google_compute_network" "network" { @@ -107,10 +108,6 @@ The following arguments are supported: The `worker_config` block supports: -* `enable_nested_virtualization` - - (Optional) - Enable nested virtualization on the worker, if supported by the machine type. See [Worker pool config file](https://cloud.google.com/build/docs/private-pools/worker-pool-config-file-schema). If left blank, Cloud Build will set this to false. - * `disk_size_gb` - (Optional) Size of the disk attached to the worker, in GB. See [diskSizeGb](https://cloud.google.com/build/docs/private-pools/private-pool-config-file-schema#disksizegb). Specify a value of up to 1000. If `0` is specified, Cloud Build will use a standard disk size. diff --git a/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown index a42abdbe11da..d6afdf250da0 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown @@ -179,10 +179,6 @@ Please refer to the field 'effective_labels' for all of the labels present on th * `secret_volumes` - (Optional) Secret volumes configuration. 
Structure is [documented below](#nested_secret_volumes). -* `automatic_update_policy` - (Optional) Security patches are applied automatically to the runtime without requiring the function to be redeployed. This should be specified as an empty block and cannot be set alongside `on_deploy_update_policy`. - -* `on_deploy_update_policy` - (Optional) Security patches are only applied when a function is redeployed. This should be specified as an empty block and cannot be set alongside `automatic_update_policy`. Structure is [documented below](#nested_on_deploy_update_policy). - The `event_trigger` block supports: * `event_type` - (Required) The type of event to observe. For example: `"google.storage.object.finalize"`. @@ -216,10 +212,6 @@ which to observe events. For example, `"myBucket"` or `"projects/my-project/topi * `version` - (Required) Version of the secret (version number or the string "latest"). It is recommended to use a numeric version for secret environment variables as any updates to the secret value is not reflected until new clones start. -The `on_deploy_update_policy` block supports: - -* `runtime_version` - (Output) The runtime version which was used during latest function deployment. - The `secret_volumes` block supports: * `mount_path` - (Required) The path within the container to mount the secret volume. For example, setting the mount_path as "/etc/secrets" would mount the secret value files under the "/etc/secrets" directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount paths: "/etc/secrets" Restricted mount paths: "/cloudsql", "/dev/log", "/pod", "/proc", "/var/log". 
diff --git a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown index 62fd059fc110..53fcc32c059a 100644 --- a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown @@ -1422,8 +1422,6 @@ The following arguments are supported: note that the service account must have `roles/composer.worker` for any GCP resources created under the Cloud Composer Environment. - This field is required for newly created environments. - * `tags` - (Optional) The list of instance tags applied to all node VMs. Tags are diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown index dca0f3569ded..da78b9884698 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown @@ -85,7 +85,7 @@ resource "google_compute_instance" "confidential_instance" { boot_disk { initialize_params { - image = "ubuntu-os-cloud/ubuntu-2204-lts" + image = "ubuntu-os-cloud/ubuntu-2004-lts" labels = { my_label = "value" } @@ -293,10 +293,6 @@ is desired, you will need to modify your state file manually using `google_compute_disk`) or disk image. To create an instance from a snapshot, first create a `google_compute_disk` from a snapshot and reference it here. -* `force_attach` - (Optional) boolean field that determines whether to force attach the regional - disk even if it's currently attached to another instance. If you try to force attach a zonal - disk to an instance, you will receive an error. Setting this parameter cause VM recreation. - The `initialize_params` block supports: * `size` - (Optional) The size of the image in gigabytes. 
If not specified, it @@ -423,10 +419,6 @@ is desired, you will need to modify your state file manually using * `kms_key_service_account` - (Optional) The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used. -* `force_attach` - (Optional) boolean field that determines whether to force attach the regional - disk even if it's currently attached to another instance. If you try to force attach a zonal - disk to an instance, you will receive an error. Setting this parameter cause VM recreation. - The `network_performance_config` block supports: * `total_egress_bandwidth_tier` - (Optional) The egress bandwidth tier to enable. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown index 46577fa5b71a..004d9b05bf45 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown @@ -113,62 +113,6 @@ resource "google_compute_instance_group_manager" "igm-sr" { } ``` -## Example Usage with resource policies (`google` provider) -```hcl -data "google_compute_image" "my_image" { - family = "debian-11" - project = "debian-cloud" -} - -resource "google_compute_resource_policy" "workload_policy" { - name = "tf-test-gce-policy" - region = "us-central1" - workload_policy { - type = "HIGH_THROUGHPUT" - } -} - -resource "google_compute_instance_template" "igm-basic" { - name = "igm-instance-template" - machine_type = "a4-highgpu-8g" - can_ip_forward = false - tags = ["foo", "bar"] - - disk { - source_image = data.google_compute_image.my_image.self_link - auto_delete = true - boot = true - disk_type = "hyperdisk-balanced" - } - - network_interface { - network = "default" - } - - service_account { - scopes = ["userinfo-email", "compute-ro", 
"storage-ro"] - } -} - -resource "google_compute_instance_group_manager" "igm-workload-policy" { - description = "Terraform test instance group manager" - name = "igm-basic-workload-policy" - - version { - name = "prod" - instance_template = google_compute_instance_template.igm-basic.self_link - } - - base_instance_name = "tf-test-igm-no-tp" - zone = "us-central1-b" - target_size = 0 - - resource_policies { - workload_policy = google_compute_resource_policy.workload_policy.self_link - } -} -``` - ## Argument Reference The following arguments are supported: @@ -254,8 +198,6 @@ group. You can specify only one value. Structure is [documented below](#nested_a * `params` - (Optional [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Input only additional params for instance group manager creation. Structure is [documented below](#nested_params). For more information, see [API](https://cloud.google.com/compute/docs/reference/rest/beta/instanceGroupManagers/insert). -* `resource_policies` - (Optional) Resource policies for this managed instance group. Structure is [documented below](#nested_resource_policies). - - - - The `standby_policy` block supports: @@ -417,12 +359,6 @@ params{ * `resource_manager_tags` - (Optional) Resource manager tags to bind to the managed instance group. The tags are key-value pairs. Keys must be in the format tagKeys/123 and values in the format tagValues/456. For more information, see [Manage tags for resources](https://cloud.google.com/compute/docs/tag-resources) -- - - - -The `resource_policies` block supports: - -* `workload_policy` - (Optional) The URL of the workload policy that is specified for this managed instance group. It can be a full or partial URL. 
- ## Attributes Reference In addition to the arguments listed above, the following computed attributes are diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index f72b252840f8..ddb65e42f955 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -246,7 +246,7 @@ resource "google_compute_instance_template" "confidential_instance_template" { } disk { - source_image = "ubuntu-os-cloud/ubuntu-2204-lts" + source_image = "ubuntu-os-cloud/ubuntu-2004-lts" } network_interface { @@ -444,14 +444,13 @@ The following arguments are supported: * `disk_name` - (Optional) Name of the disk. When not provided, this defaults to the name of the instance. -* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. For more details, see the [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk) or the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks) depending on the selected disk_type. - -* `provisioned_throughput` - (Optional) Indicates how much throughput to provision for the disk, in MB/s. This sets the amount of data that can be read or written from the disk per second. Values must greater than or equal to 1. For more details, see the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks). +* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This + sets the number of I/O operations per second that the disk can handle. + Values must be between 10,000 and 120,000. 
For more details, see the + [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. -* `guest_os_features` - (optional) A list of features to enable on the guest operating system. Applicable only for bootable images. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options. - * `source_image` - (Optional) The image from which to initialize this disk. This can be one of: the image's `self_link`, `projects/{project}/global/images/{image}`, @@ -485,8 +484,6 @@ The following arguments are supported: or READ_ONLY. If you are attaching or creating a boot disk, this must read-write mode. -* `architecture` - (Optional) The architecture of the attached disk. Valid values are `ARM64` or `x86_64`. - * `source` - (Optional) The name (**not self_link**) of the disk (such as those managed by `google_compute_disk`) to attach. ~> **Note:** Either `source`, `source_image`, or `source_snapshot` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details. @@ -798,8 +795,6 @@ exported: * `creation_timestamp` - Creation timestamp in RFC3339 text format. -* `numeric_id` - numeric identifier of the resource. - * `metadata_fingerprint` - The unique fingerprint of the metadata. * `self_link` - The URI of the created resource. 
diff --git a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown index 03aabfac59f1..73a3014c7e6f 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown @@ -68,9 +68,6 @@ Whether subnet routes with public IP range are imported. The default value is fa * `stack_type` - (Optional) Which IP version(s) of traffic and routes are allowed to be imported or exported between peer networks. The default value is IPV4_ONLY. Possible values: ["IPV4_ONLY", "IPV4_IPV6"]. -* `update_strategy` - (Optional) -The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. Possible values: ["INDEPENDENT", "CONSENSUS"] - ## Attributes Reference In addition to the arguments listed above, the following computed attributes are diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index afd836162e1a..f0f1886ed5bd 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -409,14 +409,13 @@ The following arguments are supported: * `disk_name` - (Optional) Name of the disk. When not provided, this defaults to the name of the instance. -* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. 
For more details, see the [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk) or the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks) depending on the selected disk_type. - -* `provisioned_throughput` - (Optional) Indicates how much throughput to provision for the disk, in MB/s. This sets the amount of data that can be read or written from the disk per second. Values must greater than or equal to 1. For more details, see the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks). +* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This + sets the number of I/O operations per second that the disk can handle. + Values must be between 10,000 and 120,000. For more details, see the + [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. -* `guest_os_features` - (optional) A list of features to enable on the guest operating system. Applicable only for bootable images. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options. - * `source_image` - (Optional) The image from which to initialize this disk. This can be one of: the image's `self_link`, `projects/{project}/global/images/{image}`, @@ -450,8 +449,6 @@ The following arguments are supported: or READ_ONLY. If you are attaching or creating a boot disk, this must read-write mode. -* `architecture` - (Optional) The architecture of the attached disk. Valid values are `ARM64` or `x86_64`. 
- * `source` - (Optional) The name (**not self_link**) of the disk (such as those managed by `google_compute_disk`) to attach. ~> **Note:** Either `source`, `source_image`, or `source_snapshot` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details. @@ -756,8 +753,6 @@ exported: * `metadata_fingerprint` - The unique fingerprint of the metadata. -* `numeric_id` - numeric identifier of the resource. - * `self_link` - The URI of the created resource. * `tags_fingerprint` - The unique fingerprint of the tags. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown index ba850c6a7007..9b1aaa175604 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown @@ -207,8 +207,6 @@ The following arguments are supported: * `user_ip_request_headers` - (Optional) An optional list of case-insensitive request header names to use for resolving the callers client IP address. -* `request_body_inspection_size` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) The maximum request size chosen by the customer with Waf enabled. Values supported are "8KB", "16KB, "32KB", "48KB" and "64KB". Values are case insensitive. - The `json_custom_config` block supports: * `content_types` - A list of custom Content-Type header values to apply the JSON parsing. The @@ -353,7 +351,6 @@ The following arguments are supported: * `SNI`: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to `ALL` on a HTTP session. * `REGION_CODE`: The country/region from which the request originates. 
* `TLS_JA3_FINGERPRINT`: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * `TLS_JA4_FINGERPRINT`: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * `USER_IP`: The IP address of the originating client, which is resolved based on "user_ip_request_headers" configured with the securitypolicy. If there is no "user_ip_request_headers" configuration or an IP address cannot be resolved from it, the key type defaults to IP. * `enforce_on_key_name` - (Optional) Rate limit key name applicable only for the following key types: @@ -383,7 +380,6 @@ The following arguments are supported: * `SNI`: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to `ALL` on a HTTP session. * `REGION_CODE`: The country/region from which the request originates. * `TLS_JA3_FINGERPRINT`: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - * `TLS_JA4_FINGERPRINT`: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. * `USER_IP`: The IP address of the originating client, which is resolved based on "user_ip_request_headers" configured with the securitypolicy. If there is no "user_ip_request_headers" configuration or an IP address cannot be resolved from it, the key type defaults to IP. * `exceed_redirect_options` - (Optional) Parameters defining the redirect action that is used as the exceed action. Cannot be specified if the exceed action is not redirect. Structure is [documented below](#nested_exceed_redirect_options). @@ -394,7 +390,7 @@ The following arguments are supported: * `interval_sec` - (Required) Interval over which the threshold is computed. 
-The `exceed_redirect_options` block supports: +* The `exceed_redirect_options` block supports: * `type` - (Required) Type of the redirect action. diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index bd0d63961dac..557b8f7897a1 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -293,7 +293,7 @@ region are guaranteed to support the same version. [PodSecurityPolicy](https://cloud.google.com/kubernetes-engine/docs/how-to/pod-security-policies) feature. Structure is [documented below](#nested_pod_security_policy_config). -* `pod_autoscaling` - (Optional) Configuration for the +* `pod_autoscaling` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Configuration for the Structure is [documented below](#nested_pod_autoscaling). * `secret_manager_config` - (Optional) Configuration for the @@ -304,8 +304,6 @@ region are guaranteed to support the same version. [Google Groups for GKE](https://cloud.google.com/kubernetes-engine/docs/how-to/role-based-access-control#groups-setup-gsuite) feature. Structure is [documented below](#nested_authenticator_groups_config). -* `user_managed_keys_config` - (Optional) The custom keys configuration of the cluster Structure is [documented below](#nested_control_plane_endpoints_config). - * `control_plane_endpoints_config` - (Optional) Configuration for all of the cluster's control plane endpoints. Structure is [documented below](#nested_control_plane_endpoints_config). @@ -330,10 +328,6 @@ field from your config will cause Terraform to stop managing your cluster's release channel, but will not unenroll it. Instead, use the `"UNSPECIFIED"` channel. Structure is [documented below](#nested_release_channel). 
-* `gke_auto_upgrade_config` - (Optional) -Configuration options for the auto-upgrade patch type feature, which provide more control over the speed of automatic upgrades of your GKE clusters. -Structure is [documented below](#nested_gke_auto_upgrade_config). - * `remove_default_node_pool` - (Optional) If `true`, deletes the default node pool upon cluster creation. If you're using `google_container_node_pool` resources with no default node pool, this should be set to `true`, alongside @@ -393,9 +387,6 @@ subnetwork in which the cluster's instances are launched. * `datapath_provider` - (Optional) The desired datapath provider for this cluster. This is set to `LEGACY_DATAPATH` by default, which uses the IPTables-based kube-proxy implementation. Set to `ADVANCED_DATAPATH` to enable Dataplane v2. -* `in_transit_encryption_config` - (Optional) - Defines the config of in-transit encryption. Valid values are `IN_TRANSIT_ENCRYPTION_DISABLED` and `IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT`. - * `enable_cilium_clusterwide_network_policy` - (Optional) Whether CiliumClusterWideNetworkPolicy is enabled on this cluster. Defaults to false. @@ -421,13 +412,8 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle Configuration for [direct-path (via ALTS) with workload identity.](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1beta1/projects.locations.clusters#workloadaltsconfig). Structure is [documented below](#nested_workload_alts_config). * `enterprise_config` - (Optional) - (DEPRECATED) Configuration for [Enterprise edition].(https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config). Deprecated as GKE Enterprise features are now available without an Enterprise tier. See https://cloud.google.com/blog/products/containers-kubernetes/gke-gets-new-pricing-and-capabilities-on-10th-birthday for the announcement of this change. 
- -* `anonymous_authentication_config` - (Optional) - Configuration for [anonymous authentication restrictions](https://cloud.google.com/kubernetes-engine/docs/how-to/hardening-your-cluster#restrict-anon-access). Structure is [documented below](#anonymous_authentication_config). + Configuration for [Enterprise edition].(https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config). -* `rbac_binding_config` - (Optional) - RBACBindingConfig allows user to restrict ClusterRoleBindings an RoleBindings that can be created. Structure is [documented below](#nested_rbac_binding_config). The `default_snat_status` block supports @@ -515,15 +501,6 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle It is enabled by default for Autopilot clusters with version 1.29 or later; set `enabled = true` to enable it explicitly. See [Enable the Parallelstore CSI driver](https://cloud.google.com/kubernetes-engine/docs/how-to/persistent-volumes/parallelstore-csi-new-volume#enable) for more information. -* `lustre_csi_driver_config` - (Optional) The status of the Lustre CSI driver addon, - which allows the usage of a Lustre instances as volumes. - It is disabled by default for Standard clusters; set `enabled = true` to enable. - It is disabled by default for Autopilot clusters; set `enabled = true` to enable. - Lustre CSI Driver Config has optional subfield - `enable_legacy_lustre_port` which allows the Lustre CSI driver to initialize LNet (the virtual networklayer for Lustre kernel module) using port 6988. - This flag is required to workaround a port conflict with the gke-metadata-server on GKE nodes. - See [Enable Lustre CSI driver](https://cloud.google.com/kubernetes-engine/docs/how-to/persistent-volumes/lustre-csi-driver-new-volume) for more information. 
- This example `addons_config` disables two addons: ```hcl @@ -603,8 +580,6 @@ options for the [Autoscaling profile](https://cloud.google.com/kubernetes-engine feature, which lets you choose whether the cluster autoscaler should optimize for resource utilization or resource availability when deciding to remove nodes from a cluster. Can be `BALANCED` or `OPTIMIZE_UTILIZATION`. Defaults to `BALANCED`. -* `default_compute_class_enabled` - (Optional) Specifies whether default compute class behaviour is enabled. If enabled, cluster autoscaler will use Compute Class with name default for all the workloads, if not overriden. - The `resource_limits` block supports: * `resource_type` - (Required) The type of the resource. For example, `cpu` and @@ -626,7 +601,7 @@ as "Intel Haswell" or "Intel Sandy Bridge". -> `monitoring.write` is always enabled regardless of user input. `monitoring` and `logging.write` may also be enabled depending on the values for `monitoring_service` and `logging_service`. -* `service_account` - (Optional) The `email` of the Google Cloud Platform Service Account to be used by the node VMs created by GKE Autopilot or NAP. +* `service_account` - (Optional) The Google Cloud Platform Service Account to be used by the node VMs created by GKE Autopilot or NAP. * `boot_disk_kms_key` - (Optional) The Customer Managed Encryption Key used to encrypt the boot disk attached to each node in the node pool. This should be of the form projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. For more information about protecting resources with Cloud KMS Keys please see: https://cloud.google.com/compute/docs/disks/customer-managed-encryption @@ -826,22 +801,12 @@ Possible values are `IPV4` and `IPV4_IPV6`. the cluster level. Used for Autopilot clusters and Standard clusters with which control of the secondary Pod IP address assignment to node pools isn't needed. Structure is [documented below](#nested_additional_pod_ranges_config). 
-* `additional_ip_ranges_config` - (Optional) The configuration for individual additional subnetworks attached to the cluster. -Structure is [documented below](#nested_additional_ip_ranges_config). - The `additional_pod_ranges_config` block supports: * `pod_range_names` - (Required) The names of the Pod ranges to add to the cluster. -The `additional_ip_ranges_config` block supports: - -* `subnetwork` - (Required) Name of the subnetwork. This can be the full path of the subnetwork or just the name. - -* `pod_ipv4_range_names`- (Required) List of secondary ranges names within this subnetwork that can be used for pod IPs. - - The `master_auth` block supports: * `client_certificate_config` - (Required) Whether client certificate authorization is enabled for this cluster. For example: @@ -881,16 +846,13 @@ The `master_authorized_networks_config.cidr_blocks` block supports: The `node_config` block supports: -* `boot_disk` - (Optional) Configuration of the node pool boot disk. Structure is [documented below](#nested_boot_disk) - * `confidential_nodes` - (Optional) Configuration for Confidential Nodes feature. Structure is [documented below](#nested_confidential_nodes). * `disk_size_gb` - (Optional) Size of the disk attached to each node, specified - in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. This is being migrated to `boot_disk.size_gb`, and must match if specified in both places. - Prefer configuring `boot_disk`. + in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', or 'hyperdisk-balanced'). Defaults to `hyperdisk-balanced` if `hyperdisk-balanced` is supported and `pd-balanced` is not supported for the machine type; otherwise defaults to `pd-balanced`. This is being migrated to `boot_disk.disk_type`, and must match if specified in both places. Prefer configuring `boot_disk`. + (e.g. 
'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard' * `enable_confidential_storage` - (Optional) Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. @@ -974,8 +936,6 @@ gvnic { * `local_ssd_count` - (Optional) The amount of local SSD disks that will be attached to each cluster node. Defaults to 0. -* `network_performance_config` - (Optional) Network bandwidth tier configuration. Structure is [documented below](#network_performance_config). - * `machine_type` - (Optional) The name of a Google Compute Engine machine type. Defaults to `e2-medium`. To create a custom machine type, value should be set as specified [here](https://cloud.google.com/compute/docs/reference/latest/instances#machineType). @@ -1052,7 +1012,7 @@ kubelet_config { * `linux_node_config` - (Optional) Parameters that can be configured on Linux nodes. Structure is [documented below](#nested_linux_node_config). * `windows_node_config` - (Optional) -Windows node configuration, currently supporting OSVersion [attribute](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/NodeConfig#osversion). The value must be one of [OS_VERSION_UNSPECIFIED, OS_VERSION_LTSC2019, OS_VERSION_LTSC2022]. For example: +Windows node configuration, currently supporting OSVersion [attribute](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/NodeConfig#osversion). The value must be one of [OS_VERSION_UNSPECIFIED, OS_VERSION_LTSC2019, OS_VERSION_LTSC2019]. For example: ```hcl windows_node_config { @@ -1064,7 +1024,7 @@ windows_node_config { * `node_group` - (Optional) Setting this field will assign instances of this pool to run on the specified node group. This is useful for running workloads on [sole tenant nodes](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes). 
-* `sole_tenant_config` - (Optional) Allows specifying multiple [node affinities](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes#node_affinity_and_anti-affinity) useful for running workloads on [sole tenant nodes](https://cloud.google.com/kubernetes-engine/docs/how-to/sole-tenancy). Structure is [documented below](#nested_sole_tenant_config). +* `sole_tenant_config` (Optional) Allows specifying multiple [node affinities](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes#node_affinity_and_anti-affinity) useful for running workloads on [sole tenant nodes](https://cloud.google.com/kubernetes-engine/docs/how-to/sole-tenancy). `node_affinity` structure is [documented below](#nested_node_affinity). ```hcl sole_tenant_config { @@ -1079,32 +1039,12 @@ sole_tenant_config { * `advanced_machine_features` - (Optional) Specifies options for controlling advanced machine features. Structure is [documented below](#nested_advanced_machine_features). -The `boot_disk` block supports: - -* `size_gb` - (Optional) Size of the disk attached to each node, specified - in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. This is being migrated from `node_config.disk_size_gb`, and must match if specified in both places. Prefer using this field. - -* `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', or 'hyperdisk-balanced'). Defaults to `hyperdisk-balanced` if `hyperdisk-balanced` is supported and `pd-balanced` is not supported for the machine type; otherwise defaults to `pd-balanced`. This is being migrated from `node_config.disk_type`, and must match if specified in both places. Prefer using this field. - -* `provisioned_iops` - (Optional) Configure disk IOPs. This is only valid if the `disk_type` is 'hyperdisk-balanced'. See [performance limit documention](https://cloud.google.com/compute/docs/disks/hyperdisk-perf-limits) for more information about valid values. 
- -* `provisioned_throughput` - (Optional) Configure disk throughput. This is only valid if the `disk_type` is 'hyperdisk-balanced'. See [performance limit documention](https://cloud.google.com/compute/docs/disks/hyperdisk-perf-limits) for more information about valid values. The `confidential_nodes` block supports: * `enabled` (Required) - Enable Confidential GKE Nodes for this node pool, to enforce encryption of data in-use. -* `confidential_instance_type` (Optional) - Defines the type of technology used - by the confidential node. - -The `sole_tenant_config` block supports: - -* `node_affinity` (Required) - The node affinity settings for the sole tenant node pool. Structure is [documented below](#nested_node_affinity). - -* `min_node_cpus` - (Optional) Specifies the minimum number of vCPUs that each sole tenant node must have to use CPU overcommit. If not specified, the CPU overcommit feeature is disabled. The value should be greater than or equal to half of the machine type's CPU count. - The `node_affinity` block supports: * `key` (Required) - The default or custom node affinity label key name. @@ -1119,8 +1059,6 @@ sole_tenant_config { * `enable_nested_virtualization`- (Optional) Defines whether the instance should have nested virtualization enabled. Defaults to false. -* `performance_monitoring_unit` - (Optional) Defines the performance monitoring unit [PMU](https://cloud.google.com/compute/docs/pmu-overview) level. Valid values are `ARCHITECTURAL`, `STANDARD`, or `ENHANCED`. Defaults to off. - The `ephemeral_storage_config` block supports: * `local_ssd_count` (Required) - Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, it means to disable using local SSDs as ephemeral storage. @@ -1184,10 +1122,6 @@ sole_tenant_config { * `max_shared_clients_per_gpu` (Required) - The maximum number of containers that can share a GPU. 
-The `network_performance_config` block supports: - -* `total_egress_bandwidth_tier` (Required) - Specifies the total network bandwidth tier for NodePools in the cluster. - The `workload_identity_config` block supports: * `workload_pool` (Optional) - The workload pool to attach all Kubernetes service accounts to. @@ -1266,9 +1200,6 @@ notification_config { * `enabled` (Required) - Enable Confidential GKE Nodes for this cluster, to enforce encryption of data in-use. -* `confidential_instance_type` (Optional) - Defines the type of technology used - by the confidential node. - The `pod_security_policy_config` block supports: * `enabled` (Required) - Enable the PodSecurityPolicy controller for this cluster. @@ -1285,23 +1216,6 @@ notification_config { The `secret_manager_config` block supports: * `enabled` (Required) - Enable the Secret Manager add-on for this cluster. -* `rotation_config` (Optional, Beta) - config for secret manager auto rotation. Structure is [docuemented below](#rotation_config) - -The `rotation_config` block supports: - -* `enabled` (Optional) - Enable the roation in Secret Manager add-on for this cluster. -* `rotation_interval` (Optional) - The interval between two consecutive rotations. Default rotation interval is 2 minutes. - -The `user_managed_keys_config` block supports: - -* `cluster_ca` - (Optional) The Certificate Authority Service caPool to use for the cluster CA in this cluster. -* `etcd_api_ca` - (Optional) The Certificate Authority Service caPool to use for the etcd API CA in this cluster. -* `etcd_peer_ca` - (Optional) The Certificate Authority Service caPool to use for the etcd peer CA in this cluster. -* `aggregation_ca` - (Optional) The Certificate Authority Service caPool to use for the aggreation CA in this cluster. -* `service_account_signing_keys` - (Optional) The Cloud KMS cryptoKeyVersions to use for signing service account JWTs issued by this cluster. 
-* `service_account_verification_keys` - (Optional) The Cloud KMS cryptoKeyVersions to use for verifying service account JWTs issued by this cluster. -* `control_plane_disk_encryption_key` - (Optional) The Cloud KMS cryptoKey to use for Confidential Hyperdisk on the control plane nodes. -* `gkeops_etcd_backup_encryption_key` - (Optional) Resource path of the Cloud KMS cryptoKey to use for encryption of internal etcd backups. The `control_plane_endpoints_config` block supports: @@ -1391,12 +1305,6 @@ not. * STABLE: Every few months upgrade cadence; Production users who need stability above all else, and for whom frequent upgrades are too risky. * EXTENDED: GKE provides extended support for Kubernetes minor versions through the Extended channel. With this channel, you can stay on a minor version for up to 24 months. -The `gke_auto_upgrade_config` block supports: - -* `patch_mode` - (Required) The selected patch mode. - Accepted values are: - * ACCELERATED: Upgrades to the latest available patch version in a given minor and release channel. - The `cost_management_config` block supports: * `enabled` (Optional) - Whether to enable the [cost allocation](https://cloud.google.com/kubernetes-engine/docs/how-to/cost-allocations) feature. @@ -1491,45 +1399,6 @@ such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", * `allowed_unsafe_sysctls` - (Optional) Defines a comma-separated allowlist of unsafe sysctls or sysctl patterns which can be set on the Pods. The allowed sysctl groups are `kernel.shm*`, `kernel.msg*`, `kernel.sem`, `fs.mqueue.*`, and `net.*`. -* `single_process_oom_kill` - (Optional) Defines whether to enable single process OOM killer. If true, the processes in the container will be OOM killed individually instead of as a group. - -* `max_parallel_image_pulls` - (Optional) Set the maximum number of image pulls in parallel. The integer must be between 2 and 5, inclusive. 
- -* `eviction_max_pod_grace_period_seconds` - (Optional) Defines the maximum allowed grace period (in seconds) to use when terminating pods in response to a soft eviction threshold being met. The integer must be positive and not exceed 300. - -* `eviction_soft` - (Optional) Defines a map of signal names to quantities or percentage that defines soft eviction thresholds. Structure is [documented below](#nested_eviction_soft). - -* `eviction_soft_grace_period` - (Optional) Defines a map of signal names to durations that defines grace periods for soft eviction thresholds. Each soft eviction threshold must have a corresponding grace period. Structure is [documented below](#nested_eviction_soft_grace_period). - -* `eviction_minimum_reclaim` - (Optional) Defines a map of signal names to percentage that defines minimum reclaims. It describes the minimum amount of a given resource the kubelet will reclaim when performing a pod eviction. Structure is [documented below](#nested_eviction_minimum_reclaim). - -The `eviction_soft` block supports: - -* `memory_available` - (Optional) Defines quantity of soft eviction threshold for memory.available. The value must be a quantity, such as `"100Mi"`. The value must be greater than or equal to the GKE default hard eviction threshold of `"100Mi"` and less than 50% of machine memory. -* `nodefs_available` - (Optional) Defines percentage of soft eviction threshold for nodefs.available. The value must be a percentage between `10%` and `50%`, such as `"20%"`. -* `nodefs_inodes_free` - (Optional) Defines percentage of soft eviction threshold for nodefs.inodesFree. The value must be a percentage between `5%` and `50%`, such as `"20%"`. -* `imagefs_available` - (Optional) Defines percentage of soft eviction threshold for imagefs.available. The value must be a percentage between `15%` and `50%`, such as `"20%"`. -* `imagefs_inodes_free` - (Optional) Defines percentage of soft eviction threshold for imagefs.inodesFree. 
The value must be a percentage between `5%` and `50%`, such as `"20%"`. -* `pid_available` - (Optional) Defines percentage of soft eviction threshold for pid.available. The value must be a percentage between `10%` and `50%`, such as `"20%"`. - -The `eviction_soft_grace_period` block supports: - -* `memory_available` - (Optional) Defines grace period for the memory.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`, such as `"30s"`, `"1m30s"`, `"2.5m"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". -* `nodefs_available` - (Optional) Defines grace period for the nodefs.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`. -* `nodefs_inodes_free` - (Optional) Defines grace period for the nodefs.inodesFree soft eviction threshold. The value must be a positive duration string no more than `"5m"`. -* `imagefs_available` - (Optional) Defines grace period for the imagefs.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`. -* `imagefs_inodes_free` - (Optional) Defines grace period for the imagefs.inodesFree soft eviction threshold. The value must be a positive duration string no more than `"5m"`. -* `pid_available` - (Optional) Defines grace period for the pid.available soft eviction threshold. The value must be a positive duration string no more than `"5m"`. - -The `eviction_minimum_reclaim` block supports: - -* `memory_available` - (Optional) Defines percentage of minimum reclaim for memory.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. -* `nodefs_available` - (Optional) Defines percentage of minimum reclaim for nodefs.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. -* `nodefs_inodes_free` - (Optional) Defines percentage of minimum reclaim for nodefs.inodesFree. The value must be a percentage no more than `"10%"`, such as `"5%"`. 
-* `imagefs_available` - (Optional) Defines percentage of minimum reclaim for imagefs.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. -* `imagefs_inodes_free` - (Optional) Defines percentage of minimum reclaim for imagefs.inodesFree. The value must be a percentage no more than `"10%"`, such as `"5%"`. -* `pid_available` - (Optional) Defines percentage of minimum reclaim for pid.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. - The `linux_node_config` block supports: * `sysctls` - (Optional) The Linux kernel parameters to be applied to the nodes @@ -1560,22 +1429,6 @@ linux_node_config { * `hugepage_size_1g` - (Optional) Amount of 1G hugepages. -* `transparent_hugepage_enabled` - (Optional) The Linux kernel transparent hugepage setting. - Accepted values are: - * `TRANSPARENT_HUGEPAGE_ENABLED_ALWAYS`: Transparent hugepage is enabled system wide. - * `TRANSPARENT_HUGEPAGE_ENABLED_MADVISE`: Transparent hugepage is enabled inside MADV_HUGEPAGE regions. This is the default kernel configuration. - * `TRANSPARENT_HUGEPAGE_ENABLED_NEVER`: Transparent hugepage is disabled. - * `TRANSPARENT_HUGEPAGE_ENABLED_UNSPECIFIED`: Default value. GKE will not modify the kernel configuration. - -* `transparent_hugepage_defrag` - (Optional) The Linux kernel transparent hugepage defrag setting. - Accepted values are: - * `TRANSPARENT_HUGEPAGE_DEFRAG_ALWAYS`: An application requesting THP will stall on allocation failure and directly reclaim pages and compact memory in an effort to allocate a THP immediately. - * `TRANSPARENT_HUGEPAGE_DEFRAG_DEFER`: An application will wake kswapd in the background to reclaim pages and wake kcompactd to compact memory so that THP is available in the near future. It is the responsibility of khugepaged to then install the THP pages later. 
- * `TRANSPARENT_HUGEPAGE_DEFRAG_DEFER_WITH_MADVISE`: An application will enter direct reclaim and compaction like always, but only for regions that have used madvise(MADV_HUGEPAGE); all other regions will wake kswapd in the background to reclaim pages and wake kcompactd to compact memory so that THP is available in the near future. - * `TRANSPARENT_HUGEPAGE_DEFRAG_MADVISE`: An application will enter direct reclaim and compaction like always, but only for regions that have used madvise(MADV_HUGEPAGE); all other regions will wake kswapd in the background to reclaim pages and wake kcompactd to compact memory so that THP is available in the near future. - * `TRANSPARENT_HUGEPAGE_DEFRAG_NEVER`: An application will never enter direct reclaim or compaction. - * `TRANSPARENT_HUGEPAGE_DEFRAG_UNSPECIFIED`: Default value. GKE will not modify the kernel configuration. - The `containerd_config` block supports: * `private_registry_access_config` (Optional) - Configuration for private container registries. There are two fields in this config: @@ -1643,16 +1496,7 @@ linux_node_config { The `enterprise_config` block supports: -* `desired_tier` - (Optional) (DEPRECATED) Sets the tier of the cluster. Available options include `STANDARD` and `ENTERPRISE`. Deprecated as GKE Enterprise features are now available without an Enterprise tier. See https://cloud.google.com/blog/products/containers-kubernetes/gke-gets-new-pricing-and-capabilities-on-10th-birthday for the announcement of this change. - -The `anonymous_authentication_config` block supports: - -* `mode` - (Optional) Sets or removes authentication restrictions. Available options include `LIMITED` and `ENABLED`. - -The `rbac_binding_config` block supports: - -* `enable_insecure_binding_system_unauthenticated` - (Optional) Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:anonymous or system:unauthenticated. 
-* `enable_insecure_binding_system_authenticated` - (Optional) Setting this to true will allow any ClusterRoleBinding and RoleBinding with subjects system:authenticated. +* `desired_tier` - (Optional) Sets the tier of the cluster. Available options include `STANDARD` and `ENTERPRISE`. ## Attributes Reference diff --git a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown index 1e0cf928b373..9a2cc5411523 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_node_pool.html.markdown @@ -173,6 +173,9 @@ cluster. * `queued_provisioning` - (Optional) Specifies node pool-level settings of queued provisioning. Structure is [documented below](#nested_queued_provisioning). +* `reservation_affinity` (Optional) The configuration of the desired reservation which instances could take capacity from. + Structure is [documented below](#nested_reservation_affinity). + The `autoscaling` block supports (either total or per zone limits are required): * `min_node_count` - (Optional) Minimum number of nodes per zone in the NodePool. @@ -221,8 +224,6 @@ cluster. * `network_performance_config` - (Optional) Network bandwidth tier configuration. Structure is [documented below](#network_performance_config). -* `subnetwork` - (Optional) The subnetwork path for the node pool. Format: `projects/{project}/regions/{region}/subnetworks/{subnetwork}`. If the cluster is associated with multiple subnetworks, the subnetwork for the node pool is picked based on the IP utilization during node pool creation and is immutable - The `additional_node_network_configs` block supports: * `network` - Name of the VPC where the additional interface belongs. @@ -239,8 +240,7 @@ cluster. 
The `network_performance_config` block supports: -* `total_egress_bandwidth_tier` (Required) - Specifies the total network bandwidth tier for the NodePool. [Valid values](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools#NodePool.Tier) include: "TIER_1" and "TIER_UNSPECIFIED". -* ``` +* `total_egress_bandwidth_tier` (Required) - Specifies the total network bandwidth tier for the NodePool. The `pod_cidr_overprovision_config` block supports: @@ -288,7 +288,7 @@ cluster. The resource policy must be in the same project and region as the node pool. If not found, InvalidArgument error is returned. -* `tpu_topology` - (Optional) The [TPU topology](https://cloud.google.com/kubernetes-engine/docs/concepts/plan-tpus#topology) like `"2x4"` or `"2x2x2"`. +* `tpu_topology` - (Optional) The [TPU placement topology](https://cloud.google.com/tpu/docs/types-topologies#tpu_topologies) for pod slice node pool. The `queued_provisioning` block supports: diff --git a/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown index 44ac353c8167..0ae327af0d52 100644 --- a/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown @@ -93,8 +93,6 @@ Template. * `additional_experiments` - (Optional) List of experiments that should be used by the job. An example value is `["enable_stackdriver_agent_metrics"]`. -* `additional_pipeline_options` - (Optional) List of pipeline options that should be used by the job. An example value is `["numberOfWorkerHarnessThreads=20"]`. - * `autoscaling_algorithm` - (Optional) The algorithm to use for autoscaling. 
* `parameters` - **Template specific** Key/Value pairs to be forwarded to the pipeline's options; keys are diff --git a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown index e2f75aea6c68..e4e4fe15eefd 100644 --- a/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/dataproc_cluster.html.markdown @@ -45,8 +45,6 @@ resource "google_dataproc_cluster" "mycluster" { cluster_config { staging_bucket = "dataproc-staging-bucket" - cluster_tier = "CLUSTER_TIER_STANDARD" - master_config { num_instances = 1 machine_type = "e2-medium" @@ -343,8 +341,6 @@ resource "google_dataproc_cluster" "accelerated_cluster" { and jobs data, such as Spark and MapReduce history files. Note: If you don't explicitly specify a `temp_bucket` then GCP will auto create / assign one for you. -* `cluster_tier` - (Optional) The tier of the cluster. - * `gce_cluster_config` (Optional) Common config settings for resources of Google Compute Engine cluster instances, applicable to all instances in the cluster. Structure [defined below](#nested_gce_cluster_config). @@ -720,17 +716,11 @@ cluster_config { kms_key_uri = "projects/projectId/locations/locationId/keyRings/keyRingId/cryptoKeys/keyId" root_principal_password_uri = "bucketId/o/objectId" } - identity_config { - user_service_account_mapping = { - "user@company.com" = "service-account@iam.gserviceaccounts.com" - } - } } } ``` -* `kerberos_config` (Optional) Kerberos Configuration. At least one of `identity_config` - or `kerberos_config` is required. +* `kerberos_config` (Required) Kerberos Configuration * `cross_realm_trust_admin_server` - (Optional) The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship. @@ -778,12 +768,6 @@ cluster_config { * `truststore_uri` - (Optional) The Cloud Storage URI of the truststore file used for SSL encryption. 
If not provided, Dataproc will provide a self-signed certificate. -* `identity_config` (Optional) Identity Configuration. At least one of `identity_config` - or `kerberos_config` is required. - - * `user_service_account_mapping` - (Required) The end user to service account mappings - in a service account based multi-tenant cluster - - - - The `cluster_config.autoscaling_config` block supports: diff --git a/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown b/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown index 25d84e9f5523..0f3a73923593 100644 --- a/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/dns_record_set.html.markdown @@ -218,7 +218,6 @@ resource "google_dns_record_set" "a" { resource "google_dns_managed_zone" "prod" { name = "prod-zone" dns_name = "prod.mydomain.com." - visibility = "private" } resource "google_compute_forwarding_rule" "prod" { diff --git a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown index 5bbe489b5019..4477d839a04f 100644 --- a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown @@ -4,7 +4,7 @@ description: |- Contains information about a GKEHub Feature Memberships. --- -# google_gke_hub_feature_membership +# google_gkehub_feature_membership Contains information about a GKEHub Feature Memberships. Feature Memberships configure GKEHub Features that apply to specific memberships rather than the project as a whole. The google_gke_hub is the Fleet API. @@ -426,6 +426,11 @@ The following arguments are supported: (Optional) Version of Config Sync installed. +* `binauthz` - + (Optional, Deprecated) + Binauthz configuration for the cluster. Structure is [documented below](#nested_binauthz). 
+ This field will be ignored and should not be set. + * `hierarchy_controller` - (Optional) Hierarchy Controller configuration for the cluster. Structure is [documented below](#nested_hierarchy_controller). @@ -439,6 +444,13 @@ The following arguments are supported: Policy Controller configuration for the cluster. Structure is [documented below](#nested_policy_controller). Configuring Policy Controller through the configmanagement feature is no longer recommended. Use the policycontroller feature instead. + + +The `binauthz` block supports: + +* `enabled` - + (Optional) + Whether binauthz is enabled in this cluster. The `config_sync` block supports: diff --git a/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown index e6ec9fa94014..4bed99d434bd 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_folder.html.markdown @@ -69,8 +69,6 @@ exported: * `lifecycle_state` - The lifecycle state of the folder such as `ACTIVE` or `DELETE_REQUESTED`. * `create_time` - Timestamp when the Folder was created. Assigned by the server. A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: "2014-10-02T15:01:23.045123456Z". -* `configured_capabilities` - Optional capabilities configured for this folder. -* `management_project` - Management Project associated with this folder (if capability is enabled). 
## Import diff --git a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown index f5a53f913637..657f874cf3f2 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown @@ -33,6 +33,8 @@ resource "google_project_service" "project" { create = "30m" update = "40m" } + + disable_on_destroy = false } ``` @@ -47,8 +49,9 @@ is used. * `disable_on_destroy` - (Optional) If `true` or unset, disable the service when the Terraform resource is destroyed. If `false`, the service will be left enabled when -the Terraform resource is destroyed. Defaults to `false`. It should generally only -be `true` or unset in configurations that manage the `google_project` resource itself. +the Terraform resource is destroyed. Defaults to `true`. Most configurations should +set this to `false`; it should generally only be `true` or unset in configurations +that manage the `google_project` resource itself. * `disable_dependent_services` - (Optional) If `true`, services that are enabled and which depend on this service should also be disabled when this service is diff --git a/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown b/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown index 8dfb360b8a7c..a0a1fc869b0c 100644 --- a/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown @@ -64,7 +64,7 @@ The following arguments are supported: * `disabled` - (Optional) If set to True, then this sink is disabled and it does not export any log entries. -* `include_children` - (Optional) Whether or not to include child folders or projects in the sink export. 
If true, logs +* `include_children` - (Optional) Whether or not to include children organizations in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included. * `intercept_children` - (Optional) Whether or not to intercept logs from child projects. If true, matching logs will not diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index f3c424607478..9cbb8c5d91fe 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -173,25 +173,6 @@ resource "google_sql_database_instance" "main" { } ``` -### Cloud SQL Instance with Managed Connection Pooling -```hcl -resource "google_sql_database_instance" "instance" { - name: = "mcp-enabled-main-instance" - region = "us-central1" - database_version = "POSTGRES_16" - settings { - tier = "db-perf-optimized-N-2" - connection_pool_config { - connection_pooling_enabled = true - flags { - name = "max_client_connections" - value = "1980" - } - } - } -} -``` - ### Cloud SQL Instance with PSC connectivity ```hcl @@ -244,31 +225,6 @@ resource "google_sql_database_instance" "main" { } ``` -### Cloud SQL Instance with PSC outbound - -```hcl -resource "google_sql_database_instance" "main" { - name = "psc-enabled-main-instance" - database_version = "MYSQL_8_0" - settings { - tier = "db-f1-micro" - ip_configuration { - psc_config { - psc_enabled = true - allowed_consumer_projects = ["allowed-consumer-project-name"] - network_attachment_uri = "network-attachment-uri" - } - ipv4_enabled = false - } - backup_configuration { - enabled = true - binary_log_enabled = true - } - availability_type = "REGIONAL" - } -} -``` - ## Argument Reference The following arguments are supported: @@ -334,11 +290,6 @@ includes an 
up-to-date reference of supported versions. **NOTE:** Restoring from a backup is an imperative action and not recommended via Terraform. Adding or modifying this block during resource creation/update will trigger the restore action after the resource is created/updated. -* `backupdr_backup` - (optional) The backupdr_backup needed to restore the database to a backup run. This field will - cause Terraform to trigger the database to restore from the backup run indicated. The configuration is detailed below. - **NOTE:** Restoring from a backup is an imperative action and not recommended via Terraform. Adding or modifying this - block during resource creation/update will trigger the restore action after the resource is created/updated. - * `clone` - (Optional) The context needed to create this instance as a clone of another instance. When this field is set during resource creation, Terraform will attempt to clone another instance as indicated in the context. The configuration is detailed below. @@ -357,13 +308,11 @@ The `settings` block supports: active. Can be either `ALWAYS`, `NEVER` or `ON_DEMAND`. * `availability_type` - (Optional) The availability type of the Cloud SQL - instance, high availability (`REGIONAL`) or single zone (`ZONAL`). For all instances, ensure that + instance, high availability (`REGIONAL`) or single zone (`ZONAL`).' For all instances, ensure that `settings.backup_configuration.enabled` is set to `true`. For MySQL instances, ensure that `settings.backup_configuration.binary_log_enabled` is set to `true`. For Postgres and SQL Server instances, ensure that `settings.backup_configuration.point_in_time_recovery_enabled` is set to `true`. Defaults to `ZONAL`. - For read pool instances, this field is read-only. The availability type is changed by specifying - the number of nodes (`node_count`). * `collation` - (Optional) The name of server instance collation. 
@@ -379,16 +328,14 @@ The `settings` block supports: * `disk_autoresize_limit` - (Optional) The maximum size to which storage capacity can be automatically increased. The default value is 0, which specifies that there is no limit. -* `disk_size` - (Optional) The size of data disk, in GB. Size of a running instance cannot be reduced but can be increased. The minimum value is 10GB for `PD_SSD`, `PD_HDD` and 20GB for `HYPERDISK_BALANCED`. Note that this value will override the resizing from `disk_autoresize` if that feature is enabled. To avoid this, set `lifecycle.ignore_changes` on this field. +* `disk_size` - (Optional) The size of data disk, in GB. Size of a running instance cannot be reduced but can be increased. The minimum value is 10GB for PD_SSD, PD_HDD and 20GB for HYPERDISK_BALANCED. Note that this value will override the resizing from `disk_autoresize` if that feature is enabled. To avoid this, set `lifecycle.ignore_changes` on this field. -* `disk_type` - (Optional) The type of data disk: `PD_SSD`, `PD_HDD`, or `HYPERDISK_BALANCED`. Defaults to `PD_SSD`. `HYPERDISK_BALANCED` is preview. +* `disk_type` - (Optional) The type of data disk: PD_SSD, PD_HDD, or HYPERDISK_BALANCED. Defaults to `PD_SSD`. HYPERDISK_BALANCED is preview. * `data_disk_provisioned_iops` - (Optional, Beta) Provisioned number of I/O operations per second for the data disk. This field is only used for `HYPERDISK_BALANCED` disk types. * `data_disk_provisioned_throughput` - (Optional, Beta) Provisioned throughput measured in MiB per second for the data disk. This field is only used for `HYPERDISK_BALANCED` disk types. -* `node_count` - For a read pool instance, the number of nodes in the read pool. - * `pricing_plan` - (Optional) Pricing plan for this instance, can only be `PER_USE`. * `time_zone` - (Optional) The time_zone to be used by the database engine (supported only for SQL Server), in SQL Server timezone format. 
@@ -499,8 +446,6 @@ The optional `settings.ip_configuration.psc_config` sublist supports: * `consumer_network` - "The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. For example, `projects/project1/global/networks/network1`. The consumer host project of this network might be different from the consumer service project." -* `network_attachment_uri` - (Optional) Network Attachment URI in the format `projects/project1/regions/region1/networkAttachments/networkAttachment1` to enable outbound connectivity on PSC instance. - * `consumer_service_project_id` - (Optional) The project ID of consumer service project of this consumer endpoint. The optional `settings.location_preference` subblock supports: @@ -622,22 +567,10 @@ block during resource creation/update will trigger the restore action after the The optional, computed `replication_cluster` block represents a primary instance and disaster recovery replica pair. Applicable to MySQL and PostgreSQL. This field can be set only after both the primary and replica are created. This block supports: -* `psa_write_endpoint`: Read-only field which if set, indicates this instance has a private service access (PSA) DNS endpoint that is pointing to the primary instance of the cluster. If this instance is the primary, then the DNS endpoint points to this instance. After a switchover or replica failover operation, this DNS endpoint points to the promoted instance. This is a read-only field, returned to the user as information. This field can exist even if a standalone instance doesn't have a DR replica yet or the DR replica is deleted. - * `failover_dr_replica_name`: (Optional) If the instance is a primary instance, then this field identifies the disaster recovery (DR) replica. The standard format of this field is "your-project:your-instance". 
You can also set this field to "your-instance", but cloud SQL backend will convert it to the aforementioned standard format. * `dr_replica`: Read-only field that indicates whether the replica is a DR replica. -The optional `settings.connection_pool_config` subblock supports: - -* `connection_pooling_enabled`: (Optional) True if the manager connection pooling configuration is enabled. - -The optional `settings.connection_pool_config.flags` sublist supports: - -* `name` - (Required) Name of the flag. - -* `value` - (Required) Value of the flag. - ## Attributes Reference In addition to the arguments listed above, the following computed attributes are @@ -694,25 +627,13 @@ performing filtering in a Terraform config. * `psc_service_attachment_link` - the URI that points to the service attachment of the instance. -* `instance_type` - The type of the instance. See [API reference for SqlInstanceType](https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1/instances#SqlInstanceType) for supported values. +* `instance_type` - The type of the instance. The supported values are `SQL_INSTANCE_TYPE_UNSPECIFIED`, `CLOUD_SQL_INSTANCE`, `ON_PREMISES_INSTANCE` and `READ_REPLICA_INSTANCE`. ~> **NOTE:** Users can upgrade a read replica instance to a stand-alone Cloud SQL instance with the help of `instance_type`. To promote, users have to set the `instance_type` property as `CLOUD_SQL_INSTANCE` and remove/unset `master_instance_name` and `replica_configuration` from instance configuration. This operation might cause your instance to restart. -* `settings.ip_configuration.psc_config.psc_auto_connections.consumer_network_status` - (Output) The connection policy status of the consumer network. - -* `settings.ip_configuration.psc_config.psc_auto_connections.ip_address` - (Output) The IP address of the consumer endpoint. - -* `settings.ip_configuration.psc_config.psc_auto_connections.status` - (Output) The connection status of the consumer endpoint. 
- * `settings.version` - Used to make sure changes to the `settings` block are atomic. -* `settings.0.effective_availability_type` - (Computed) The availability type of - the Cloud SQL instance, high availability (REGIONAL) or single zone - (ZONAL). This field always contains the value that is reported by the API (for - read pools, `settings.0.effective_availability_type` may differ from - `settings.0.availability_type`). - * `server_ca_cert.0.cert` - The CA Certificate used to connect to the SQL Instance via SSL. * `server_ca_cert.0.common_name` - The CN valid for the CA Cert. diff --git a/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown index 054666ef4fc1..5e973aba88fa 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_user.html.markdown @@ -141,7 +141,7 @@ The following arguments are supported: * `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. -The optional `password_policy` block is only supported for creating MySQL and Postgres users. The `password_policy` block supports: +The optional `password_policy` block is only supported by Mysql. The `password_policy` block supports: * `allowed_failed_attempts` - (Optional) Number of failed attempts allowed before the user get locked. 
diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index 0b4f4b9bd5d9..9ee3665c7e2c 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -94,7 +94,7 @@ resource "google_storage_bucket" "no-age-enabled" { ## Example Usage - Enabling public access prevention ```hcl -resource "google_storage_bucket" "no-public-access" { +resource "google_storage_bucket" "auto-expire" { name = "no-public-access-bucket" location = "US" force_destroy = true @@ -106,7 +106,7 @@ resource "google_storage_bucket" "no-public-access" { ## Example Usage - Enabling hierarchical namespace ```hcl -resource "google_storage_bucket" "hns-enabled" { +resource "google_storage_bucket" "auto-expire" { name = "hns-enabled-bucket" location = "US" force_destroy = true @@ -121,7 +121,7 @@ resource "google_storage_bucket" "hns-enabled" { The following arguments are supported: -* `name` - (Required) The name of the bucket. Bucket names must be in lowercase and no more than 63 characters long. You can find the complete list of bucket naming rules [here](https://cloud.google.com/storage/docs/buckets#naming). +* `name` - (Required) The name of the bucket. * `location` - (Required) The [GCS location](https://cloud.google.com/storage/docs/bucket-locations). @@ -177,8 +177,6 @@ The following arguments are supported: * `updated` - (Computed) The time at which the bucket's metadata or IAM policy was last updated, in RFC 3339 format. -* `ip_filter` - (Optional) The bucket IP filtering configuration. Specifies the network sources that can access the bucket, as well as its underlying objects. Structure is [documented below](#nested_ip_filter). - The `lifecycle_rule` block supports: * `action` - (Required) The Lifecycle Rule's action configuration. A single block of this type is supported. 
Structure is [documented below](#nested_action). @@ -255,7 +253,7 @@ The following arguments are supported: * `is_locked` - (Optional) If set to `true`, the bucket will be [locked](https://cloud.google.com/storage/docs/using-bucket-lock#lock-bucket) and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action. -* `retention_period` - (Required) The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds. +* `retention_period` - (Required) The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 2,147,483,647 seconds. The `logging` block supports: @@ -295,27 +293,6 @@ The following arguments are supported: * `enabled` - (Required) Enables hierarchical namespace for the bucket. -The `ip_filter` block supports: - -* `mode` - (Required) The state of the IP filter configuration. Valid values are `Enabled` and `Disabled`. When set to `Enabled`, IP filtering rules are applied to a bucket and all incoming requests to the bucket are evaluated against these rules. When set to `Disabled`, IP filtering rules are not applied to a bucket. **Note**: `allow_all_service_agent_access` must be supplied when `mode` is set to `Enabled`, it can be ommited for other values. - -* `allow_cross_org_vpcs` - (Optional) While set `true`, allows cross-org VPCs in the bucket's IP filter configuration. - -* `allow_all_service_agent_access` (Optional) While set `true`, allows all service agents to access the bucket regardless of the IP filter configuration. - -* `public_network_source` - (Optional) The public network IP address ranges that can access the bucket and its data. Structure is [documented below](#nested_public_network_source). - -* `vpc_network_sources` - (Optional) The list of VPC networks that can access the bucket. 
Structure is [documented below](#nested_vpc_network_sources). - -The `public_network_source` block supports: - -* `allowed_ip_cidr_ranges` - The list of public IPv4 and IPv6 CIDR ranges that can access the bucket and its data. - -The `vpc_network_sources` block supports: - -* `network` - Name of the network. Format: `projects/PROJECT_ID/global/networks/NETWORK_NAME` - -* `allowed_ip_cidr_ranges` - The list of public or private IPv4 and IPv6 CIDR ranges that can access the bucket. ## Attributes Reference diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 961983611298..d5de00b51b51 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -81,20 +81,12 @@ One of the following is required: * `detect_md5hash` - (Optional) Detect changes to local file or changes made outside of Terraform to the file stored on the server. MD5 hash of the data, encoded using [base64](https://datatracker.ietf.org/doc/html/rfc4648#section-4). This field is not present for [composite objects](https://cloud.google.com/storage/docs/composite-objects). For more information about using the MD5 hash, see [Hashes and ETags: Best Practices](https://cloud.google.com/storage/docs/hashes-etags#json-api). - ~> **Warning:** For dynamically populated files or objects, `detect_md5hash` cannot track or detect changes and will not trigger updates to the objects in the bucket. Please use `source_md5hash` instead. - * `storage_class` - (Optional) The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object. Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. 
If not provided, this defaults to the bucket's default storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class. * `kms_key_name` - (Optional) The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object. -* `source_md5hash` - (Optional) User-provided md5hash to trigger replacement of object in storage bucket, Must be Base 64 MD5 hash of the object data. The usual way to set this is filemd5("file.zip"), where "file.zip" is the local filename - -* `force_empty_content_type` - (Optional) When set to true, it ensure the object's Content-Type is empty. - -* `deletion_policy` - (Optional) When set to ABANDON, the object won't be deleted from storage bucket. Instead, it will only be removed from terraform's state file. - --- The `customer_encryption` block supports: @@ -129,8 +121,6 @@ exported: * `media_link` - (Computed) A url reference to download this object. -* `md5hexhash` - (Computed) Hex value of md5hash` - ## Timeouts This resource provides the following diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index 65bd53d0a055..7320698b3186 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -140,8 +140,6 @@ The following arguments are supported: * `project` - (Optional) The project in which the resource belongs. If it is not provided, the provider project is used. -* `service_account` - (Optional) The user-managed service account to run the job. If this field is specified, the given service account is granted the necessary permissions to all applicable resources (e.g. GCS buckets) required by the job. - * `status` - (Optional) Status of the job. Default: `ENABLED`. 
**NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.** * `notification_config` - (Optional) Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure [documented below](#nested_notification_config). @@ -264,10 +262,6 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam * `managed_private_network` - (Optional) Egress bytes over a Google-managed private network. This network is shared between other users of Storage Transfer Service. -* `cloudfront_domain` - (Optional) The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: `https://{id}.cloudfront.net` or any valid custom domain. Must begin with `https://`. - -* `credentials_secret` - (Optional) The Resource name of a secret in Secret Manager. AWS credentials must be stored in Secret Manager in JSON format. If credentials_secret is specified, do not specify role_arn or aws_access_key. Format: `projects/{projectNumber}/secrets/{secret_name}`. - The `aws_access_key` block supports: * `access_key_id` - (Required) AWS Key ID. @@ -286,22 +280,14 @@ The `aws_access_key` block supports: * `path` - (Required) Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'. 
-* `credentials_secret` - (Optional, (https://terraform.io/docs/providers/google/guides/provider_versions.html)) Full Resource name of a secret in Secret Manager containing [SAS Credentials in JSON form](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/TransferSpec#azureblobstoragedata:~:text=begin%20with%20a%20%27/%27.-,credentialsSecret,-string). Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.`, - -* `azure_credentials` - (Optional, (https://terraform.io/docs/providers/google/guides/provider_versions.html)) Credentials used to authenticate API requests to Azure block. +* `credentials_secret` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Full Resource name of a secret in Secret Manager containing [SAS Credentials in JSON form](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/TransferSpec#azureblobstoragedata:~:text=begin%20with%20a%20%27/%27.-,credentialsSecret,-string). Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.`, -* `federated_identity_config` - (Optional) Federated identity config of a user registered Azure application. Structure [documented below](#nested_federated_identity_config). +* `azure_credentials` - (Required in GA, Optional in [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Credentials used to authenticate API requests to Azure block. The `azure_credentials` block supports: * `sas_token` - (Required) Azure shared access signature. See [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview). 
-The `federated_identity_config` block supports: - -* `client_id` - (Required) The client (application) ID of the application with federated credentials. - -* `tenant_id` - (Required) The client (directory) ID of the application with federated credentials. - The `schedule_start_date` and `schedule_end_date` blocks support: * `year` - (Required) Year of date. Must be from 1 to 9999. diff --git a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go index 6f0bb74a2372..0a77bc35bab4 100644 --- a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager.go @@ -162,6 +162,15 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return nil, fmt.Errorf("organization id not found in terraform data") } key = orgKey + case "iam.googleapis.com/Role": + // google_organization_iam_custom_role or google_project_iam_custom_role + if orgOK { + key = orgKey + } else if projectKey != "" { + key = projectKey + } else { + return []string{unknownOrg}, nil + } case "cloudresourcemanager.googleapis.com/Project", "cloudbilling.googleapis.com/ProjectBillingInfo": // for google_project and google_project_iam resources var ancestors []string @@ -196,16 +205,10 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc key = projectKey default: - switch { - case orgOK: - key = orgKey - case folderOK: - key = folderKey - case projectKey != "": - key = projectKey - default: + if projectKey == "" { return []string{unknownOrg}, nil } + key = projectKey } return m.getAncestorsWithCache(key) } diff --git a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go index fe3074ff6bf5..dc88819d6108 100644 --- a/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go +++ b/mmv1/third_party/tgc/ancestrymanager/ancestrymanager_test.go @@ -30,12 +30,10 @@ func 
TestGetAncestors(t *testing.T) { // Setup a simple test server to mock the response of resource manager. v3Responses := map[string]*crmv3.Project{ - "folders/bar": {Name: "folders/bar", Parent: "organizations/qux"}, - "organizations/qux": {Name: "organizations/qux", Parent: ""}, - "folders/bar2": {Name: "folders/bar2", Parent: "organizations/qux2"}, - "organizations/qux2": {Name: "organizations/qux2", Parent: ""}, - "organizations/12345": {Name: "organizations/12345"}, - "folders/67890": {Name: "folders/67890", Parent: "organizations/12345"}, + "folders/bar": {Name: "folders/bar", Parent: "organizations/qux"}, + "organizations/qux": {Name: "organizations/qux", Parent: ""}, + "folders/bar2": {Name: "folders/bar2", Parent: "organizations/qux2"}, + "organizations/qux2": {Name: "organizations/qux2", Parent: ""}, } v1Responses := map[string][]*crmv1.Ancestor{ ownerProject: { @@ -53,13 +51,6 @@ func TestGetAncestors(t *testing.T) { {ResourceId: &crmv1.ResourceId{Id: "bar2", Type: "folder"}}, {ResourceId: &crmv1.ResourceId{Id: "qux2", Type: "organization"}}, }, - "organizations/12345": { - {ResourceId: &crmv1.ResourceId{Id: "12345", Type: "organization"}}, - }, - "folders/67890": { - {ResourceId: &crmv1.ResourceId{Id: "67890", Type: "folder"}}, - {ResourceId: &crmv1.ResourceId{Id: "12345", Type: "organization"}}, - }, } ts := newTestServer(t, v1Responses, v3Responses) @@ -74,9 +65,7 @@ func TestGetAncestors(t *testing.T) { } entries := map[string]string{ - ownerProject: ownerAncestryPath, - "organizations/12345": "organizations/12345", - "folders/67890": "organizations/12345/folders/67890", + ownerProject: ownerAncestryPath, } p := provider.Provider() @@ -511,36 +500,6 @@ func TestGetAncestors(t *testing.T) { want: []string{"organizations/unknown"}, wantParent: "//cloudresourcemanager.googleapis.com/organizations/unknown", }, - { - name: "Org-level CuOP set with parent field", - data: tfdata.NewFakeResourceData( - "google_org_policy_custom_constraint", - 
p.ResourcesMap["google_org_policy_custom_constraint"].Schema, - map[string]interface{}{ - "parent": "organizations/12345", - }, - ), - asset: &resources.Asset{ - Type: "orgpolicy.googleapis.com/CustomConstraint", - }, - want: []string{"organizations/12345"}, - wantParent: "//cloudresourcemanager.googleapis.com/organizations/12345", - }, - { - name: "Folder-level Firewall Policy", - data: tfdata.NewFakeResourceData( - "google_compute_firewall_policy", - p.ResourcesMap["google_compute_firewall_policy"].Schema, - map[string]interface{}{ - "parent": "folders/67890", - }, - ), - asset: &resources.Asset{ - Type: "compute.googleapis.com/FirewallPolicy", - }, - want: []string{"folders/67890", "organizations/12345"}, - wantParent: "//cloudresourcemanager.googleapis.com/folders/67890", - }, } for _, c := range cases { for _, offline := range []bool{true, false} { diff --git a/mmv1/third_party/tgc/caiasset/asset.go b/mmv1/third_party/tgc/caiasset/asset.go deleted file mode 100644 index ec1f61b72572..000000000000 --- a/mmv1/third_party/tgc/caiasset/asset.go +++ /dev/null @@ -1,132 +0,0 @@ -package caiasset - -import ( - "fmt" - "strings" - "time" -) - -// Asset is the CAI representation of a resource. -type Asset struct { - // The name, in a peculiar format: `\\.googleapis.com/` - Name string `json:"name"` - // The type name in `google..` format. - Type string `json:"asset_type"` - Resource *AssetResource `json:"resource,omitempty"` - IAMPolicy *IAMPolicy `json:"iam_policy,omitempty"` - OrgPolicy []*OrgPolicy `json:"org_policy,omitempty"` - V2OrgPolicies []*V2OrgPolicies `json:"v2_org_policies,omitempty"` - Ancestors []string `json:"ancestors"` -} - -// IAMPolicy is the representation of a Cloud IAM policy set on a cloud resource. -type IAMPolicy struct { - Bindings []IAMBinding `json:"bindings"` -} - -// IAMBinding binds a role to a set of members. 
-type IAMBinding struct { - Role string `json:"role"` - Members []string `json:"members"` -} - -// AssetResource is nested within the Asset type. -type AssetResource struct { - Version string `json:"version"` - DiscoveryDocumentURI string `json:"discovery_document_uri"` - DiscoveryName string `json:"discovery_name"` - Parent string `json:"parent"` - Data map[string]interface{} `json:"data"` - Location string `json:"location,omitempty"` -} - -// OrgPolicy is for managing organization policies. -type OrgPolicy struct { - Constraint string `json:"constraint,omitempty"` - ListPolicy *ListPolicy `json:"list_policy,omitempty"` - BooleanPolicy *BooleanPolicy `json:"boolean_policy,omitempty"` - RestoreDefault *RestoreDefault `json:"restore_default,omitempty"` - UpdateTime *Timestamp `json:"update_time,omitempty"` -} - -// V2OrgPolicies is the represtation of V2OrgPolicies -type V2OrgPolicies struct { - Name string `json:"name"` - PolicySpec *PolicySpec `json:"spec,omitempty"` -} - -// Spec is the representation of Spec for Custom Org Policy -type PolicySpec struct { - Etag string `json:"etag,omitempty"` - UpdateTime *Timestamp `json:"update_time,omitempty"` - PolicyRules []*PolicyRule `json:"rules,omitempty"` - InheritFromParent bool `json:"inherit_from_parent,omitempty"` - Reset bool `json:"reset,omitempty"` -} - -type PolicyRule struct { - Values *StringValues `json:"values,omitempty"` - AllowAll bool `json:"allow_all,omitempty"` - DenyAll bool `json:"deny_all,omitempty"` - Enforce bool `json:"enforce,omitempty"` - Condition *Expr `json:"condition,omitempty"` -} - -type StringValues struct { - AllowedValues []string `json:"allowed_values,omitempty"` - DeniedValues []string `json:"denied_values,omitempty"` -} - -type Expr struct { - Expression string `json:"expression,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Location string `json:"location,omitempty"` -} - -type Timestamp struct { - Seconds int64 
`json:"seconds,omitempty"` - Nanos int64 `json:"nanos,omitempty"` -} - -func (t Timestamp) MarshalJSON() ([]byte, error) { - return []byte(`"` + time.Unix(0, t.Nanos).UTC().Format(time.RFC3339Nano) + `"`), nil -} - -func (t *Timestamp) UnmarshalJSON(b []byte) error { - p, err := time.Parse(time.RFC3339Nano, strings.Trim(string(b), `"`)) - if err != nil { - return fmt.Errorf("bad Timestamp: %v", err) - } - t.Seconds = p.Unix() - t.Nanos = p.UnixNano() - return nil -} - -// ListPolicyAllValues is used to set `Policies` that apply to all possible -// configuration values rather than specific values in `allowed_values` or -// `denied_values`. -type ListPolicyAllValues int32 - -// ListPolicy can define specific values and subtrees of Cloud Resource -// Manager resource hierarchy (`Organizations`, `Folders`, `Projects`) that -// are allowed or denied by setting the `allowed_values` and `denied_values` -// fields. -type ListPolicy struct { - AllowedValues []string `json:"allowed_values,omitempty"` - DeniedValues []string `json:"denied_values,omitempty"` - AllValues ListPolicyAllValues `json:"all_values,omitempty"` - SuggestedValue string `json:"suggested_value,omitempty"` - InheritFromParent bool `json:"inherit_from_parent,omitempty"` -} - -// BooleanPolicy If `true`, then the `Policy` is enforced. If `false`, -// then any configuration is acceptable. -type BooleanPolicy struct { - Enforced bool `json:"enforced,omitempty"` -} - -// RestoreDefault determines if the default values of the `Constraints` are active for the -// resources. 
-type RestoreDefault struct { -} diff --git a/mmv1/third_party/tgc/dcl.go b/mmv1/third_party/tgc/dcl.go new file mode 100644 index 000000000000..e4b8d77ff59a --- /dev/null +++ b/mmv1/third_party/tgc/dcl.go @@ -0,0 +1,3 @@ +package transport + +type DCLConfig struct{} diff --git a/mmv1/third_party/tgc/resource_converters.go.tmpl b/mmv1/third_party/tgc/resource_converters.go.tmpl index e3f240ae5137..6b30091d1e27 100644 --- a/mmv1/third_party/tgc/resource_converters.go.tmpl +++ b/mmv1/third_party/tgc/resource_converters.go.tmpl @@ -108,7 +108,6 @@ func ResourceConverters() map[string][]cai.ResourceConverter { "google_datastream_private_connection": {datastream.ResourceConverterDatastreamPrivateConnection()}, "google_datastream_stream": {datastream.ResourceConverterDatastreamStream()}, "google_firebase_project": {firebase.ResourceConverterFirebaseProject()}, - "google_org_policy_custom_constraint": {orgpolicy.ResourceConverterOrgPolicyCustomConstraint()}, "google_org_policy_policy": {resourcemanager.ResourceConverterOrgPolicyPolicy()}, "google_redis_instance": {redis.ResourceConverterRedisInstance()}, "google_spanner_database": {spanner.ResourceConverterSpannerDatabase()}, diff --git a/mmv1/third_party/tgc/services/storage/storage_bucket.go b/mmv1/third_party/tgc/services/storage/storage_bucket.go index b7f83e34d92f..48ecbd64aa3a 100644 --- a/mmv1/third_party/tgc/services/storage/storage_bucket.go +++ b/mmv1/third_party/tgc/services/storage/storage_bucket.go @@ -10,7 +10,6 @@ package storage import ( "fmt" - "strconv" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -214,10 +213,8 @@ func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentio } retentionPolicy := retentionPolicies[0].(map[string]interface{}) - value, _ := strconv.ParseInt(retentionPolicy["retention_period"].(string), 10, 64) - bucketRetentionPolicy := &storage.BucketRetentionPolicy{ - RetentionPeriod: value, + RetentionPeriod: 
int64(retentionPolicy["retention_period"].(int)), } return bucketRetentionPolicy diff --git a/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json b/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json index 476366431b5f..fe3b4aae1145 100644 --- a/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json +++ b/mmv1/third_party/tgc/tests/data/example_access_context_manager_access_policy.json @@ -2,12 +2,12 @@ { "name": "//accesscontextmanager.googleapis.com/accessPolicies/placeholder-BpLnfgDs", "asset_type": "accesscontextmanager.googleapis.com/AccessPolicy", - "ancestry_path": "organizations/{{.OrgID}}", + "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { "version": "v1", "discovery_document_uri": "https://www.googleapis.com/discovery/v1/apis/accesscontextmanager/v1/rest", "discovery_name": "AccessPolicy", - "parent": "//cloudresourcemanager.googleapis.com/organizations/{{.OrgID}}", + "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", "data": { "parent": "organizations/{{.OrgID}}", "scopes": [ @@ -17,4 +17,4 @@ } } } -] +] \ No newline at end of file diff --git a/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf b/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf index fbff8402bac3..b6803e2986af 100644 --- a/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf +++ b/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf @@ -22,8 +22,6 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "alloydb-cluster" } - - deletion_protection = false } resource "google_alloydb_instance" "default" { diff --git a/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json b/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json deleted file mode 100644 index 75b160720b47..000000000000 --- 
a/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.json +++ /dev/null @@ -1,20 +0,0 @@ -[ - { - "name": "//orgpolicy.googleapis.com/organizations/12345/customConstraints/custom.disableGkeAutoUpgrade", - "asset_type": "orgpolicy.googleapis.com/CustomConstraint", - "ancestry_path": "organizations/12345", - "resource": { - "version": "v2", - "discovery_document_uri": "https://www.googleapis.com/discovery/v1/apis/orgpolicy/v2/rest", - "discovery_name": "CustomConstraint", - "parent": "//cloudresourcemanager.googleapis.com/organizations/12345", - "data": { - "name": "organizations/12345/customConstraints/custom.disableGkeAutoUpgrade", - "actionType": "ALLOW", - "condition": "resource.management.autoUpgrade == false", - "methodTypes": ["CREATE", "UPDATE"], - "resourceTypes": ["container.googleapis.com/NodePool"] - } - } - } -] diff --git a/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf b/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf deleted file mode 100644 index b2e37e252a3d..000000000000 --- a/mmv1/third_party/tgc/tests/data/example_org_policy_custom_constraint.tf +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -terraform { - required_providers { - google = { - source = "hashicorp/google-beta" - version = "~> {{.Provider.version}}" - } - } -} - -provider "google" { - {{if .Provider.credentials }}credentials = "{{.Provider.credentials}}"{{end}} -} - -resource "google_org_policy_custom_constraint" "constraint" { - name = "custom.disableGkeAutoUpgrade" - parent = "organizations/12345" - - action_type = "ALLOW" - condition = "resource.management.autoUpgrade == false" - method_types = ["CREATE", "UPDATE"] - resource_types = ["container.googleapis.com/NodePool"] -} diff --git a/mmv1/third_party/tgc/tests/source/environment_test.go b/mmv1/third_party/tgc/tests/source/environment_test.go index 2db8f002ca65..16b97af0d3ea 100644 --- a/mmv1/third_party/tgc/tests/source/environment_test.go +++ b/mmv1/third_party/tgc/tests/source/environment_test.go @@ -18,7 +18,7 @@ const ( defaultOrganizationDomain = "meep.test.com" defaultOrganizationTarget = "13579" defaultProject = "foobar" - defaultProviderVersion = "6.14.0" // if dev override is enabled, the provider version is ignored in terraform execution + defaultProviderVersion = "5.5.0" // if dev override is enabled, the provider version is ignored in terraform execution defaultRegion = "us-central1" defaultServiceAccount = "meep@foobar.iam.gserviceaccount.com" ) diff --git a/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go b/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go index 1da86e017058..32d6a670e944 100644 --- a/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go +++ b/mmv1/third_party/tgc/tfdata/fake_resource_data_test.go @@ -247,7 +247,6 @@ func TestFakeResourceData_getOkTypeObject(t *testing.T) { "disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", - "force_attach": false, "kms_key_self_link": "test-kms_key_self_link", "mode": "READ_ONLY", "source": "test-source", @@ -325,7 +324,6 @@ func TestFakeResourceData_getOknsetTypeObject(t *testing.T) { 
"disk_encryption_key_sha256": "", "disk_encryption_key_rsa": "", "disk_encryption_service_account": "", - "force_attach": false, "kms_key_self_link": "", "mode": "", "source": "", diff --git a/mmv1/third_party/tgc_next/Makefile b/mmv1/third_party/tgc_next/Makefile deleted file mode 100644 index d202b15330d7..000000000000 --- a/mmv1/third_party/tgc_next/Makefile +++ /dev/null @@ -1,38 +0,0 @@ -build_dir=bin -TF_CONFIG_FILE=tf-dev-override.tfrc -TEST?=$$(go list -e ./... | grep -v github.com/GoogleCloudPlatform/terraform-google-conversion/v6/test) - -build: - GO111MODULE=on go build -o ./${build_dir}/tfplan2cai ./cmd/tfplan2cai - GO111MODULE=on go build -o ./${build_dir}/tgc ./cmd/tgc - -test: - go version - terraform --version - ./config-tf-dev-override.sh - TF_CLI_CONFIG_FILE="$${PWD}/${TF_CONFIG_FILE}" GO111MODULE=on go test $(TEST) $(TESTARGS) -timeout 30m -short - -test-integration: - go version - terraform --version - ./config-tf-dev-override.sh - TF_CLI_CONFIG_FILE="$${PWD}/${TF_CONFIG_FILE}" GO111MODULE=on go test -run=TestAcc $(TESTPATH) $(TESTARGS) -timeout 30m -v ./... - -test-go-licenses: - cd .. && go version && go install github.com/google/go-licenses@latest - $$(go env GOPATH)/bin/go-licenses check ./... 
--ignore github.com/dnaeon/go-vcr - -run-docker: - docker run -it \ - -v `pwd`:/terraform-google-conversion \ - -v ${GOOGLE_APPLICATION_CREDENTIALS}:/terraform-google-conversion/credentials.json \ - -w /terraform-google-conversion \ - --entrypoint=/bin/bash \ - --env TEST_PROJECT=${PROJECT_ID} \ - --env GOOGLE_APPLICATION_CREDENTIALS=/terraform-google-conversion/credentials.json \ - gcr.io/graphite-docker-images/go-plus; - -release: - ./release.sh ${VERSION} - -.PHONY: build test test-integration test-go-licenses run-docker release diff --git a/mmv1/third_party/tgc_next/go.mod b/mmv1/third_party/tgc_next/go.mod deleted file mode 100644 index c5490885bad5..000000000000 --- a/mmv1/third_party/tgc_next/go.mod +++ /dev/null @@ -1,125 +0,0 @@ -module github.com/GoogleCloudPlatform/terraform-google-conversion/v6 - -go 1.23.0 - -toolchain go1.23.5 - -require ( - cloud.google.com/go/storage v1.50.0 - github.com/apparentlymart/go-cidr v1.1.0 - github.com/google/go-cmp v0.7.0 - github.com/hashicorp/errwrap v1.1.0 - github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 - github.com/hashicorp/hcl v1.0.0 - github.com/hashicorp/hcl/v2 v2.23.0 - github.com/hashicorp/terraform-json v0.24.0 - github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 - github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250728173411-5cb5742bc083 - github.com/mitchellh/go-homedir v1.1.0 - github.com/pkg/errors v0.9.1 - github.com/stretchr/testify v1.10.0 - github.com/zclconf/go-cty v1.16.2 - go.uber.org/zap v1.27.0 - google.golang.org/api v0.229.0 -) - -require ( - cloud.google.com/go/bigtable v1.37.0 - github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 - github.com/hashicorp/go-cleanhttp v0.5.2 - github.com/hashicorp/terraform-plugin-framework v1.13.0 - github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 - github.com/sethvargo/go-retry v0.3.0 - github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.8.1 - golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 - 
golang.org/x/oauth2 v0.29.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e - google.golang.org/grpc v1.71.1 -) - -require ( - bitbucket.org/creachadair/stringset v0.0.11 // indirect - cel.dev/expr v0.19.2 // indirect - cloud.google.com/go v0.120.0 // indirect - cloud.google.com/go/auth v0.16.0 // indirect - cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect - cloud.google.com/go/compute/metadata v0.6.0 // indirect - cloud.google.com/go/iam v1.5.0 // indirect - cloud.google.com/go/longrunning v0.6.6 // indirect - cloud.google.com/go/monitoring v1.24.1 // indirect - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 // indirect - github.com/agext/levenshtein v1.2.3 // indirect - github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect - github.com/cenkalti/backoff v2.2.1+incompatible // indirect - github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect - github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect - github.com/fatih/color v1.16.0 // indirect - github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/gammazero/deque v0.2.1 // indirect - github.com/gammazero/workerpool v1.1.3 // indirect - github.com/go-logr/logr v1.4.2 // indirect - github.com/go-logr/stdr v1.2.2 // indirect - github.com/golang/glog v1.2.4 // indirect - github.com/golang/protobuf v1.5.4 // indirect - github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect - github.com/google/s2a-go v0.1.9 // indirect - github.com/google/uuid v1.6.0 
// indirect - github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.14.1 // indirect - github.com/hashicorp/go-hclog v1.6.3 // indirect - github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/go-uuid v1.0.3 // indirect - github.com/hashicorp/go-version v1.7.0 // indirect - github.com/hashicorp/logutils v1.0.0 // indirect - github.com/hashicorp/terraform-plugin-go v0.26.0 // indirect - github.com/hashicorp/terraform-plugin-log v0.9.0 // indirect - github.com/hashicorp/terraform-plugin-testing v1.5.1 // indirect - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/kylelemons/godebug v1.1.0 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mitchellh/copystructure v1.2.0 // indirect - github.com/mitchellh/go-testing-interface v1.14.1 // indirect - github.com/mitchellh/go-wordwrap v1.0.1 // indirect - github.com/mitchellh/hashstructure v1.1.0 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/oklog/run v1.1.0 // indirect - github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect - github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect - github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/contrib/detectors/gcp v1.34.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect - go.opentelemetry.io/otel v1.35.0 // indirect - go.opentelemetry.io/otel/metric v1.35.0 // indirect - go.opentelemetry.io/otel/sdk v1.35.0 // indirect - 
go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect - go.opentelemetry.io/otel/trace v1.35.0 // indirect - go.uber.org/multierr v1.10.0 // indirect - go4.org/netipx v0.0.0-20231129151722-fdeea329fbba // indirect - golang.org/x/crypto v0.37.0 // indirect - golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.39.0 // indirect - golang.org/x/sync v0.13.0 // indirect - golang.org/x/sys v0.32.0 // indirect - golang.org/x/text v0.24.0 // indirect - golang.org/x/time v0.11.0 // indirect - golang.org/x/tools v0.22.0 // indirect - google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e // indirect - google.golang.org/protobuf v1.36.6 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) diff --git a/mmv1/third_party/tgc_next/go.sum b/mmv1/third_party/tgc_next/go.sum deleted file mode 100644 index 594c84f65387..000000000000 --- a/mmv1/third_party/tgc_next/go.sum +++ /dev/null @@ -1,431 +0,0 @@ -bitbucket.org/creachadair/stringset v0.0.11 h1:6Sv4CCv14Wm+OipW4f3tWOb0SQVpBDLW0knnJqUnmZ8= -bitbucket.org/creachadair/stringset v0.0.11/go.mod h1:wh0BHewFe+j0HrzWz7KcGbSNpFzWwnpmgPRlB57U5jU= -cel.dev/expr v0.19.2 h1:V354PbqIXr9IQdwy4SYA4xa0HXaWq1BUPAGzugBY5V4= -cel.dev/expr v0.19.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= -cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= -cloud.google.com/go/auth v0.16.0 h1:Pd8P1s9WkcrBE2n/PhAwKsdrR35V3Sg2II9B+ndM3CU= -cloud.google.com/go/auth v0.16.0/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= -cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= -cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod 
h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= -cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= -cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= -cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= -cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= -cloud.google.com/go/iam v1.5.0 h1:QlLcVMhbLGOjRcGe6VTGGTyQib8dRLK2B/kYNV0+2xs= -cloud.google.com/go/iam v1.5.0/go.mod h1:U+DOtKQltF/LxPEtcDLoobcsZMilSRwR7mgNL7knOpo= -cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= -cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= -cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= -cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= -cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= -cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= -cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= -cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= -cloud.google.com/go/trace v1.11.3 h1:c+I4YFjxRQjvAhRmSsmjpASUKq88chOX854ied0K/pE= -cloud.google.com/go/trace v1.11.3/go.mod h1:pt7zCYiDSQjC9Y2oqCsh9jF4GStB/hmjrYLsxRR27q8= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw= 
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0/go.mod h1:obipzmGjfSjam60XLwGfqUkJsfiheAl+TUjG+4yzyPM= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 h1:5IT7xOdq17MtcdtL/vtl6mGfzhaq4m4vpollPRmlsBQ= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0/go.mod h1:ZV4VOm0/eHR06JLrXWe09068dHpr3TRpY9Uo7T+anuA= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0 h1:nNMpRpnkWDAaqcpxMJvxa/Ud98gjbYwayJY4/9bdjiU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 h1:ig/FpDD2JofP/NExKQUbn7uOSZzJAQqogfqluZK4ed4= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= -github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= -github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= -github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= -github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= -github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= -github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= -github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= -github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= -github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= -github.com/cenkalti/backoff v2.2.1+incompatible 
h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= -github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= -github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= -github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= -github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= -github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dnaeon/go-vcr v1.0.1 h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY= -github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.13.4 
h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= -github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= -github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= -github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= -github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= -github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= -github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= -github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= -github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= -github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/gammazero/deque v0.2.1 h1:qSdsbG6pgp6nL7A0+K/B7s12mcCY/5l5SIUpMOl+dC0= -github.com/gammazero/deque v0.2.1/go.mod h1:LFroj8x4cMYCukHJDbxFCkT+r9AndaJnFMuZDV34tuU= -github.com/gammazero/workerpool v1.1.3 h1:WixN4xzukFoN0XSeXF6puqEqFTl2mECI9S6W44HWy9Q= -github.com/gammazero/workerpool v1.1.3/go.mod h1:wPjyBLDbyKnUn2XwwyD3EEwo9dHutia9/fwNmSHWACc= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 
h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= -github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= -github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= -github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= -github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= -github.com/google/go-cmp 
v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o= -github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y= -github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= -github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= -github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= -github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= -github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= -github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= -github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= -github.com/hashicorp/errwrap v1.1.0/go.mod 
h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= -github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= -github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= -github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= -github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= -github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= -github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= -github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog= -github.com/hashicorp/go-plugin v1.6.2/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= -github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= -github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= -github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= -github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= -github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.9.1 h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ= -github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0= -github.com/hashicorp/hcl v1.0.0 
h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= -github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= -github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64= -github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ= -github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= -github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= -github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= -github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= -github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 h1:HOjBuMbOEzl7snOdOoUfE2Jgeto6JOjLVQ39Ls2nksc= -github.com/hashicorp/terraform-plugin-framework-validators v0.12.0/go.mod h1:jfHGE/gzjxYz6XoUwi/aYiiKrJDeutQNUtGQXkaHklg= -github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M= -github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY= -github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= -github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow= -github.com/hashicorp/terraform-plugin-mux v0.17.0 h1:/J3vv3Ps2ISkbLPiZOLspFcIZ0v5ycUXCEQScudGCCw= -github.com/hashicorp/terraform-plugin-mux v0.17.0/go.mod h1:yWuM9U1Jg8DryNfvCp+lH70WcYv6D8aooQxxxIzFDsE= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0 
h1:7/iejAPyCRBhqAg3jOx+4UcAhY0A+Sg8B+0+d/GxSfM= -github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.0/go.mod h1:TiQwXAjFrgBf5tg5rvBRz8/ubPULpU0HjSaVi5UoJf8= -github.com/hashicorp/terraform-plugin-testing v1.5.1 h1:T4aQh9JAhmWo4+t1A7x+rnxAJHCDIYW9kXyo4sVO92c= -github.com/hashicorp/terraform-plugin-testing v1.5.1/go.mod h1:dg8clO6K59rZ8w9EshBmDp1CxTIPu3yA4iaDpX1h5u0= -github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250728173411-5cb5742bc083 h1:23TrEMAu7jpigg52dSymKmxVNFPwQc4z/pPLEI7PdgA= -github.com/hashicorp/terraform-provider-google-beta v1.20.1-0.20250728173411-5cb5742bc083/go.mod h1:E6QxtUznA+Ul5ek4hxqjGU3VrSWx/NWaqIpFNdUouu4= -github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA= -github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU= -github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= -github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= -github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= -github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty 
v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= -github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= -github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= -github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= -github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= -github.com/mitchellh/go-wordwrap 
v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= -github.com/mitchellh/hashstructure v1.1.0 h1:P6P1hdjqAAknpY/M1CGipelZgp+4y9ja9kmUZPXP+H0= -github.com/mitchellh/hashstructure v1.1.0/go.mod h1:xUDAozZz0Wmdiufv0uyhnHkUTN6/6d8ulp4AwfLKrmA= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= -github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= -github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= -github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sethvargo/go-retry 
v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE= -github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= -github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= -github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= -github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= -github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= -github.com/vmihailenco/msgpack/v5 
v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= -github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= -github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70= -github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= -github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= -github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/detectors/gcp v1.34.0 h1:JRxssobiPg23otYU5SbWtQC//snGVIM3Tx6QRzlQBao= -go.opentelemetry.io/contrib/detectors/gcp v1.34.0/go.mod h1:cV4BMFcscUR/ckqLkbfQmF0PRsq8w/lMGzdbCSveBHo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= -go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= -go.opentelemetry.io/otel v1.35.0/go.mod 
h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc= -go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I= -go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= -go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= -go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= -go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= -go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= -go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= -go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= -go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= -go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= -go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= -go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= -go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= -go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -go4.org/netipx v0.0.0-20231129151722-fdeea329fbba h1:0b9z3AuHCjxk0x/opv64kcgZLBseWJUpBw5I82+2U4M= -go4.org/netipx v0.0.0-20231129151722-fdeea329fbba/go.mod 
h1:PLyyIXexvUFg3Owu6p/WfdlivPbZJsZdgWZlrGope/Y= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 h1:ESSUROHIBHg7USnszlcdmjBEwdMj9VUvU+OPk4yl2mc= -golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= -golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net 
v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= -golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= -golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= -golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 
-golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= -golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.229.0 h1:p98ymMtqeJ5i3lIBMj5MpR9kzIIgzpHHh8vQ+vgAzx8= -google.golang.org/api v0.229.0/go.mod h1:wyDfmq5g1wYJWn29O22FDWN48P7Xcz0xz+LBpptYvB0= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= -google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= -google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod 
h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= -google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e h1:UdXH7Kzbj+Vzastr5nVfccbmFsmYNygVLSPk1pEfDoY= -google.golang.org/genproto/googleapis/api v0.0.0-20250414145226-207652e42e2e/go.mod h1:085qFyf2+XaZlRdCgKNCIZ3afY2p4HHZdoIRpId8F4A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.71.1 h1:ffsFWr7ygTUscGPI0KKK6TLrGz0476KUvvsbqWK0rPI= -google.golang.org/grpc v1.71.1/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= -google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/yaml.v2 v2.2.2/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go index 22fae6b11c2d..d9a769642a7a 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/convert.go @@ -37,5 +37,7 @@ func Convert(assets []caiasset.Asset, options *Options) ([]byte, error) { t, err := models.HclWriteBlocks(allBlocks) + options.ErrorLogger.Debug(string(t)) + return t, err } diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go index 18fbc46b56a4..3d79dc9fb385 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go @@ -1,43 +1,14 @@ package converters import ( - "strings" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" ) func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { - converters, ok := ConverterMap[asset.Type] - if !ok || len(converters) == 0 { + converter, ok := ConverterMap[asset.Type] + if !ok { return nil, nil } - - var converter models.Cai2hclConverter - // Normally, one asset type has only one converter. - if len(converters) == 1 { - for _, converter = range converters { - return converter.Convert(asset) - } - } - - // Handle the tdge case that multiple Terraform resources share the same CAI asset type - if asset.Type == "compute.googleapis.com/Autoscaler" { - if strings.Contains(asset.Name, "/zones/") { - converter = ConverterMap[asset.Type]["ComputeAutoscaler"] - } else { - converter = ConverterMap[asset.Type]["ComputeRegionAutoscaler"] - } - } - - if asset.Type == "cloudasset.googleapis.com/Feed" { - if strings.Contains(asset.Name, "/organizations/") { - converter = ConverterMap[asset.Type]["CloudAssetOrganizationFeed"] - } else if strings.Contains(asset.Name, "/folders/") { - converter = ConverterMap[asset.Type]["CloudAssetFolderFeed"] - } else { - converter = ConverterMap[asset.Type]["CloudAssetProjectFeed"] - } - } return converter.Convert(asset) } diff --git a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_cai2hcl.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go similarity index 65% rename from mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_cai2hcl.go rename to mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go index c39a7f2a623d..789f32bafc45 100644 --- a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_cai2hcl.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance.go @@ -7,31 +7,36 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" compute "google.golang.org/api/compute/v0.beta" ) -// ComputeInstanceCai2hclConverter for compute instance resource. -type ComputeInstanceCai2hclConverter struct { +// ComputeInstanceAssetType is the CAI asset type name for compute instance. +const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" + +// ComputeInstanceSchemaName is the TF resource schema name for compute instance. +const ComputeInstanceSchemaName string = "google_compute_instance" + +// ComputeInstanceConverter for compute instance resource. +type ComputeInstanceConverter struct { name string schema map[string]*schema.Schema } -// NewComputeInstanceCai2hclConverter returns an HCL converter for compute instance. -func NewComputeInstanceCai2hclConverter(provider *schema.Provider) models.Cai2hclConverter { +// NewComputeInstanceConverter returns an HCL converter for compute instance. +func NewComputeInstanceConverter(provider *schema.Provider) models.Converter { schema := provider.ResourcesMap[ComputeInstanceSchemaName].Schema - return &ComputeInstanceCai2hclConverter{ + return &ComputeInstanceConverter{ name: ComputeInstanceSchemaName, schema: schema, } } // Convert converts asset to HCL resource blocks. 
-func (c *ComputeInstanceCai2hclConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { +func (c *ComputeInstanceConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { var blocks []*models.TerraformResourceBlock block, err := c.convertResourceData(asset) if err != nil { @@ -41,7 +46,7 @@ func (c *ComputeInstanceCai2hclConverter) Convert(asset caiasset.Asset) ([]*mode return blocks, nil } -func (c *ComputeInstanceCai2hclConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { +func (c *ComputeInstanceConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { if asset.Resource == nil || asset.Resource.Data == nil { return nil, fmt.Errorf("asset resource data is nil") } @@ -62,7 +67,7 @@ func (c *ComputeInstanceCai2hclConverter) convertResourceData(asset caiasset.Ass hclData["network_performance_config"] = flattenNetworkPerformanceConfig(instance.NetworkPerformanceConfig) // Set the networks - networkInterfaces, _, _, err := flattenNetworkInterfacesTgc(instance.NetworkInterfaces, project) + networkInterfaces, _, _, err := flattenNetworkInterfaces(instance.NetworkInterfaces, project) if err != nil { return nil, err } @@ -72,8 +77,8 @@ func (c *ComputeInstanceCai2hclConverter) convertResourceData(asset caiasset.Ass hclData["tags"] = tpgresource.ConvertStringArrToInterface(instance.Tags.Items) } - hclData["labels"] = tgcresource.RemoveTerraformAttributionLabel(instance.Labels) - hclData["service_account"] = flattenServiceAccountsTgc(instance.ServiceAccounts) + hclData["labels"] = utils.RemoveTerraformAttributionLabel(instance.Labels) + hclData["service_account"] = flattenServiceAccounts(instance.ServiceAccounts) hclData["resource_policies"] = instance.ResourcePolicies bootDisk, ads, scratchDisks := flattenDisks(instance.Disks, instance.Name) @@ -81,8 +86,8 @@ func (c *ComputeInstanceCai2hclConverter) convertResourceData(asset caiasset.Ass 
hclData["attached_disk"] = ads hclData["scratch_disk"] = scratchDisks - hclData["scheduling"] = flattenSchedulingTgc(instance.Scheduling) - hclData["guest_accelerator"] = flattenGuestAcceleratorsTgc(instance.GuestAccelerators) + hclData["scheduling"] = flattenScheduling(instance.Scheduling) + hclData["guest_accelerator"] = flattenGuestAccelerators(instance.GuestAccelerators) hclData["shielded_instance_config"] = flattenShieldedVmConfig(instance.ShieldedInstanceConfig) hclData["enable_display"] = flattenEnableDisplay(instance.DisplayDevice) hclData["min_cpu_platform"] = instance.MinCpuPlatform @@ -97,15 +102,8 @@ func (c *ComputeInstanceCai2hclConverter) convertResourceData(asset caiasset.Ass hclData["hostname"] = instance.Hostname hclData["confidential_instance_config"] = flattenConfidentialInstanceConfig(instance.ConfidentialInstanceConfig) hclData["advanced_machine_features"] = flattenAdvancedMachineFeatures(instance.AdvancedMachineFeatures) - hclData["reservation_affinity"] = flattenReservationAffinityTgc(instance.ReservationAffinity) - hclData["key_revocation_action_type"] = strings.TrimSuffix(instance.KeyRevocationActionType, "_ON_KEY_REVOCATION") - hclData["instance_encryption_key"] = flattenComputeInstanceEncryptionKey(instance.InstanceEncryptionKey) - - partnerMetadata, err := flattenPartnerMetadata(instance.PartnerMetadata) - if err != nil { - return nil, fmt.Errorf("Error parsing partner metadata: %s", err) - } - hclData["partner_metadata"] = partnerMetadata + hclData["reservation_affinity"] = flattenReservationAffinity(instance.ReservationAffinity) + hclData["key_revocation_action_type"] = instance.KeyRevocationActionType // TODO: convert details from the boot disk assets (separate disk assets) into initialize_params in cai2hcl? // It needs to integrate the disk assets into instance assets with the resolver. 
@@ -142,18 +140,6 @@ func flattenDisks(disks []*compute.AttachedDisk, instanceName string) ([]map[str // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 di["kms_key_self_link"] = strings.Split(disk.DiskEncryptionKey.KmsKeyName, "/cryptoKeyVersions")[0] } - - if key.RsaEncryptedKey != "" { - di["disk_encryption_key_rsa"] = key.RsaEncryptedKey - } - - if key.RawKey != "" { - di["disk_encryption_key_raw"] = key.RawKey - } - - if key.KmsKeyServiceAccount != "" { - di["disk_encryption_service_account"] = key.KmsKeyServiceAccount - } } attachedDisks = append(attachedDisks, di) } @@ -186,33 +172,21 @@ func flattenBootDisk(disk *compute.AttachedDisk, instanceName string) []map[stri } if disk.DiskEncryptionKey != nil { - // disk_encryption_key_sha256 is computed, so it is not converted. - if disk.DiskEncryptionKey.KmsKeyName != "" { // The response for crypto keys often includes the version of the key which needs to be removed // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 result["kms_key_self_link"] = strings.Split(disk.DiskEncryptionKey.KmsKeyName, "/cryptoKeyVersions")[0] } + } - if disk.DiskEncryptionKey.KmsKeyServiceAccount != "" { - // The response for crypto keys often includes the version of the key which needs to be removed - // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 - result["disk_encryption_service_account"] = disk.DiskEncryptionKey.KmsKeyServiceAccount - } - - if disk.DiskEncryptionKey.RsaEncryptedKey != "" { - result["disk_encryption_key_rsa"] = disk.DiskEncryptionKey.RsaEncryptedKey - } - - if disk.DiskEncryptionKey.RawKey != "" { - result["disk_encryption_key_raw"] = disk.DiskEncryptionKey.RawKey - } + // Don't convert the field with the default value + if disk.Interface != "SCSI" { + result["interface"] = disk.Interface } - result["interface"] = disk.Interface - // "source" field is converted and "initialize_params" is not converted as these two fields conflict with each other. 
- result["source"] = tpgresource.ConvertSelfLinkToV1(disk.Source) - result["guest_os_features"] = flattenComputeInstanceGuestOsFeatures(disk.GuestOsFeatures) + if !strings.HasSuffix(disk.Source, instanceName) { + result["source"] = tpgresource.ConvertSelfLinkToV1(disk.Source) + } if len(result) == 0 { return nil @@ -230,7 +204,10 @@ func flattenScratchDisk(disk *compute.AttachedDisk) map[string]interface{} { result["device_name"] = disk.DeviceName } - result["interface"] = disk.Interface + // Don't convert the field with the default value + if disk.Interface != "SCSI" { + result["interface"] = disk.Interface + } return result } diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go new file mode 100644 index 000000000000..2631e0c181cf --- /dev/null +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/compute/compute_instance_helpers.go @@ -0,0 +1,330 @@ +package compute + +import ( + "strconv" + "strings" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + + compute "google.golang.org/api/compute/v0.beta" +) + +func flattenAliasIpRange(ranges []*compute.AliasIpRange) []map[string]interface{} { + rangesSchema := make([]map[string]interface{}, 0, len(ranges)) + for _, ipRange := range ranges { + rangesSchema = append(rangesSchema, map[string]interface{}{ + "ip_cidr_range": ipRange.IpCidrRange, + "subnetwork_range_name": ipRange.SubnetworkRangeName, + }) + } + return rangesSchema +} + +func flattenScheduling(resp *compute.Scheduling) []map[string]interface{} { + schedulingMap := make(map[string]interface{}, 0) + + if resp.InstanceTerminationAction != "" { + schedulingMap["instance_termination_action"] = resp.InstanceTerminationAction + } + + if resp.MinNodeCpus != 0 { + 
schedulingMap["min_node_cpus"] = resp.MinNodeCpus + } + + if resp.OnHostMaintenance != "MIGRATE" { + schedulingMap["on_host_maintenance"] = resp.OnHostMaintenance + } + + if resp.AutomaticRestart != nil && !*resp.AutomaticRestart { + schedulingMap["automatic_restart"] = *resp.AutomaticRestart + } + + if resp.Preemptible { + schedulingMap["preemptible"] = resp.Preemptible + } + + if resp.NodeAffinities != nil && len(resp.NodeAffinities) > 0 { + nodeAffinities := []map[string]interface{}{} + for _, na := range resp.NodeAffinities { + nodeAffinities = append(nodeAffinities, map[string]interface{}{ + "key": na.Key, + "operator": na.Operator, + "values": tpgresource.ConvertStringArrToInterface(na.Values), + }) + } + schedulingMap["node_affinities"] = nodeAffinities + } + + if resp.ProvisioningModel != "STANDARD" { + schedulingMap["provisioning_model"] = resp.ProvisioningModel + } + + if resp.AvailabilityDomain != 0 { + schedulingMap["availability_domain"] = resp.AvailabilityDomain + } + + if resp.MaxRunDuration != nil { + schedulingMap["max_run_duration"] = flattenComputeMaxRunDuration(resp.MaxRunDuration) + } + + if resp.OnInstanceStopAction != nil { + schedulingMap["on_instance_stop_action"] = flattenOnInstanceStopAction(resp.OnInstanceStopAction) + } + + if resp.HostErrorTimeoutSeconds != 0 { + schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds + } + + if resp.MaintenanceInterval != "" { + schedulingMap["maintenance_interval"] = resp.MaintenanceInterval + } + + if resp.LocalSsdRecoveryTimeout != nil { + schedulingMap["local_ssd_recovery_timeout"] = flattenComputeLocalSsdRecoveryTimeout(resp.LocalSsdRecoveryTimeout) + } + + if len(schedulingMap) == 0 { + return nil + } + + return []map[string]interface{}{schedulingMap} +} + +func flattenComputeMaxRunDuration(v *compute.Duration) []interface{} { + if v == nil { + return nil + } + transformed := make(map[string]interface{}) + transformed["nanos"] = v.Nanos + transformed["seconds"] = v.Seconds + 
return []interface{}{transformed} +} + +func flattenOnInstanceStopAction(v *compute.SchedulingOnInstanceStopAction) []interface{} { + if v == nil { + return nil + } + transformed := make(map[string]interface{}) + transformed["discard_local_ssd"] = v.DiscardLocalSsd + return []interface{}{transformed} +} + +func flattenComputeLocalSsdRecoveryTimeout(v *compute.Duration) []interface{} { + if v == nil { + return nil + } + transformed := make(map[string]interface{}) + transformed["nanos"] = v.Nanos + transformed["seconds"] = v.Seconds + return []interface{}{transformed} +} + +func flattenAccessConfigs(accessConfigs []*compute.AccessConfig) ([]map[string]interface{}, string) { + flattened := make([]map[string]interface{}, len(accessConfigs)) + natIP := "" + for i, ac := range accessConfigs { + flattened[i] = map[string]interface{}{ + "nat_ip": ac.NatIP, + "network_tier": ac.NetworkTier, + } + if ac.SetPublicPtr { + flattened[i]["public_ptr_domain_name"] = ac.PublicPtrDomainName + } + if natIP == "" { + natIP = ac.NatIP + } + if ac.SecurityPolicy != "" { + flattened[i]["security_policy"] = ac.SecurityPolicy + } + } + return flattened, natIP +} + +func flattenIpv6AccessConfigs(ipv6AccessConfigs []*compute.AccessConfig) []map[string]interface{} { + flattened := make([]map[string]interface{}, len(ipv6AccessConfigs)) + for i, ac := range ipv6AccessConfigs { + flattened[i] = map[string]interface{}{ + "network_tier": ac.NetworkTier, + } + flattened[i]["public_ptr_domain_name"] = ac.PublicPtrDomainName + flattened[i]["external_ipv6"] = ac.ExternalIpv6 + flattened[i]["external_ipv6_prefix_length"] = strconv.FormatInt(ac.ExternalIpv6PrefixLength, 10) + flattened[i]["name"] = ac.Name + if ac.SecurityPolicy != "" { + flattened[i]["security_policy"] = ac.SecurityPolicy + } + } + return flattened +} + +func flattenNetworkInterfaces(networkInterfaces []*compute.NetworkInterface, project string) ([]map[string]interface{}, string, string, error) { + flattened := 
make([]map[string]interface{}, len(networkInterfaces)) + var internalIP, externalIP string + + for i, iface := range networkInterfaces { + var ac []map[string]interface{} + ac, externalIP = flattenAccessConfigs(iface.AccessConfigs) + + flattened[i] = map[string]interface{}{ + "network_ip": iface.NetworkIP, + "access_config": ac, + "alias_ip_range": flattenAliasIpRange(iface.AliasIpRanges), + "nic_type": iface.NicType, + "ipv6_access_config": flattenIpv6AccessConfigs(iface.Ipv6AccessConfigs), + "ipv6_address": iface.Ipv6Address, + } + + if !strings.HasSuffix(iface.Network, "/default") { + flattened[i]["network"] = tpgresource.ConvertSelfLinkToV1(iface.Network) + } + + if !strings.HasSuffix(iface.Subnetwork, "/default") { + flattened[i]["subnetwork"] = tpgresource.ConvertSelfLinkToV1(iface.Subnetwork) + } + + subnetProject := utils.ParseFieldValue(iface.Subnetwork, "projects") + if subnetProject != project { + flattened[i]["subnetwork_project"] = subnetProject + } + + if iface.StackType != "IPV4_ONLY" { + flattened[i]["stack_type"] = iface.StackType + } + + if iface.QueueCount != 0 { + flattened[i]["queue_count"] = iface.QueueCount + } + + if internalIP == "" { + internalIP = iface.NetworkIP + } + + if iface.NetworkAttachment != "" { + networkAttachment, err := tpgresource.GetRelativePath(iface.NetworkAttachment) + if err != nil { + return nil, "", "", err + } + flattened[i]["network_attachment"] = networkAttachment + } + + // the security_policy for a network_interface is found in one of its accessConfigs. 
+ if len(iface.AccessConfigs) > 0 && iface.AccessConfigs[0].SecurityPolicy != "" { + flattened[i]["security_policy"] = iface.AccessConfigs[0].SecurityPolicy + } else if len(iface.Ipv6AccessConfigs) > 0 && iface.Ipv6AccessConfigs[0].SecurityPolicy != "" { + flattened[i]["security_policy"] = iface.Ipv6AccessConfigs[0].SecurityPolicy + } + } + return flattened, internalIP, externalIP, nil +} + +func flattenServiceAccounts(serviceAccounts []*compute.ServiceAccount) []map[string]interface{} { + result := make([]map[string]interface{}, len(serviceAccounts)) + for i, serviceAccount := range serviceAccounts { + result[i] = map[string]interface{}{ + "email": serviceAccount.Email, + "scopes": serviceAccount.Scopes, + } + } + return result +} + +func flattenGuestAccelerators(accelerators []*compute.AcceleratorConfig) []map[string]interface{} { + acceleratorsSchema := make([]map[string]interface{}, len(accelerators)) + for i, accelerator := range accelerators { + acceleratorsSchema[i] = map[string]interface{}{ + "count": accelerator.AcceleratorCount, + "type": accelerator.AcceleratorType, + } + } + return acceleratorsSchema +} + +func flattenConfidentialInstanceConfig(ConfidentialInstanceConfig *compute.ConfidentialInstanceConfig) []map[string]interface{} { + if ConfidentialInstanceConfig == nil { + return nil + } + + return []map[string]interface{}{{ + "enable_confidential_compute": ConfidentialInstanceConfig.EnableConfidentialCompute, + "confidential_instance_type": ConfidentialInstanceConfig.ConfidentialInstanceType, + }} +} + +func flattenAdvancedMachineFeatures(AdvancedMachineFeatures *compute.AdvancedMachineFeatures) []map[string]interface{} { + if AdvancedMachineFeatures == nil { + return nil + } + return []map[string]interface{}{{ + "enable_nested_virtualization": AdvancedMachineFeatures.EnableNestedVirtualization, + "threads_per_core": AdvancedMachineFeatures.ThreadsPerCore, + "turbo_mode": AdvancedMachineFeatures.TurboMode, + "visible_core_count": 
AdvancedMachineFeatures.VisibleCoreCount, + "performance_monitoring_unit": AdvancedMachineFeatures.PerformanceMonitoringUnit, + "enable_uefi_networking": AdvancedMachineFeatures.EnableUefiNetworking, + }} +} + +func flattenShieldedVmConfig(shieldedVmConfig *compute.ShieldedInstanceConfig) []map[string]bool { + if shieldedVmConfig == nil { + return nil + } + + shieldedInstanceConfig := map[string]bool{} + + if shieldedVmConfig.EnableSecureBoot { + shieldedInstanceConfig["enable_secure_boot"] = shieldedVmConfig.EnableSecureBoot + } + + if !shieldedVmConfig.EnableVtpm { + shieldedInstanceConfig["enable_vtpm"] = shieldedVmConfig.EnableVtpm + } + + if !shieldedVmConfig.EnableIntegrityMonitoring { + shieldedInstanceConfig["enable_integrity_monitoring"] = shieldedVmConfig.EnableIntegrityMonitoring + } + + if len(shieldedInstanceConfig) == 0 { + return nil + } + + return []map[string]bool{shieldedInstanceConfig} +} + +func flattenEnableDisplay(displayDevice *compute.DisplayDevice) interface{} { + if displayDevice == nil { + return nil + } + + return displayDevice.EnableDisplay +} + +func flattenReservationAffinity(affinity *compute.ReservationAffinity) []map[string]interface{} { + if affinity == nil { + return nil + } + + flattened := map[string]interface{}{ + "type": affinity.ConsumeReservationType, + } + + if affinity.ConsumeReservationType == "SPECIFIC_RESERVATION" { + flattened["specific_reservation"] = []map[string]interface{}{{ + "key": affinity.Key, + "values": affinity.Values, + }} + } + + return []map[string]interface{}{flattened} +} + +func flattenNetworkPerformanceConfig(c *compute.NetworkPerformanceConfig) []map[string]interface{} { + if c == nil { + return nil + } + return []map[string]interface{}{ + { + "total_egress_bandwidth_tier": c.TotalEgressBandwidthTier, + }, + } +} diff --git a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_cai2hcl.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/resourcemanager/project.go similarity 
index 69% rename from mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_cai2hcl.go rename to mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/resourcemanager/project.go index 699326a59f42..9ca8b5769e25 100644 --- a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_cai2hcl.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/services/resourcemanager/project.go @@ -7,29 +7,34 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tgcresource" tfschema "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -// ProjectCai2hclConverter for compute project resource. -type ProjectCai2hclConverter struct { +// ProjectAssetType is the CAI asset type name for project. +const ProjectAssetType string = "cloudresourcemanager.googleapis.com/Project" + +// ProjectSchemaName is the TF resource schema name for resourcemanager project. +const ProjectSchemaName string = "google_project" + +// ProjectConverter for compute project resource. +type ProjectConverter struct { name string schema map[string]*tfschema.Schema } // NewProjectConverter returns an HCL converter for compute project. -func NewProjectCai2hclConverter(provider *tfschema.Provider) models.Cai2hclConverter { +func NewProjectConverter(provider *tfschema.Provider) models.Converter { schema := provider.ResourcesMap[ProjectSchemaName].Schema - return &ProjectCai2hclConverter{ + return &ProjectConverter{ name: ProjectSchemaName, schema: schema, } } // Convert converts asset resource data. 
-func (c *ProjectCai2hclConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { +func (c *ProjectConverter) Convert(asset caiasset.Asset) ([]*models.TerraformResourceBlock, error) { var blocks []*models.TerraformResourceBlock block, err := c.convertResourceData(asset) if err != nil { @@ -39,7 +44,7 @@ func (c *ProjectCai2hclConverter) Convert(asset caiasset.Asset) ([]*models.Terra return blocks, nil } -func (c *ProjectCai2hclConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { +func (c *ProjectConverter) convertResourceData(asset caiasset.Asset) (*models.TerraformResourceBlock, error) { if asset.Resource == nil || asset.Resource.Data == nil { return nil, fmt.Errorf("asset resource data is nil") } @@ -49,7 +54,7 @@ func (c *ProjectCai2hclConverter) convertResourceData(asset caiasset.Asset) (*mo hclData := make(map[string]interface{}) hclData["name"] = assetResourceData["name"] hclData["project_id"] = assetResourceData["projectId"] - hclData["labels"] = tgcresource.RemoveTerraformAttributionLabel(assetResourceData["labels"]) + hclData["labels"] = utils.RemoveTerraformAttributionLabel(assetResourceData["labels"]) if strings.Contains(asset.Resource.Parent, "folders/") { hclData["folder_id"] = utils.ParseFieldValue(asset.Resource.Parent, "folders") } else if strings.Contains(asset.Resource.Parent, "organizations/") { diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go index 6e74880ed6cb..4496cf8179a6 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils.go @@ -3,11 +3,11 @@ package utils import ( "encoding/json" "fmt" - "log" "strings" hashicorpcty "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + transport_tpg 
"github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "github.com/zclconf/go-cty/cty" ctyjson "github.com/zclconf/go-cty/cty/json" ) @@ -23,79 +23,23 @@ func ParseFieldValue(url string, name string) string { return "" } -/* - ParseUrlParamValuesFromAssetName uses CaiAssetNameTemplate to parse hclData from assetName, filtering out all outputFields - -template: //bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}} -assetName: //bigquery.googleapis.com/projects/my-project/datasets/my-dataset -hclData: [project:my-project dataset_id:my-dataset] - -It also handles multi-fragment fields. -template: {{cluster}}/instances/{{instance_id}} -assetName: //alloydb.googleapis.com/projects/ci-test-project/locations/us-central1/clusters/tf-test-cluster/instances/tf-test-instance -hclData: [cluster:projects/ci-test-project/locations/us-central1/clusters/tf-test-cluster instance_id:tf-test-instance] -*/ -func ParseUrlParamValuesFromAssetName(assetName, template string, outputFields map[string]struct{}, hclData map[string]any) { - templateFragments := strings.Split(template, "/") - assetFragments := strings.Split(assetName, "/") - - // Iterate through the fragments and match fields. - assetIx := 0 - for templateIx := 0; templateIx < len(templateFragments); templateIx++ { - templateFragment := templateFragments[templateIx] - - // Check if the template fragment is a field (e.g., {{project}}) - if fieldName, isField := strings.CutPrefix(templateFragment, "{{"); isField { - if fieldName, hasEnd := strings.CutSuffix(fieldName, "}}"); hasEnd { - // Find the end of this field in the template. The end is the next non-field fragment. 
- endTemplateIx := templateIx + 1 - for endTemplateIx < len(templateFragments) && strings.HasPrefix(templateFragments[endTemplateIx], "{{") { - endTemplateIx++ - } - - endAssetIx := getEndAssetIx(endTemplateIx, templateFragments, assetFragments) - - valueFragments := assetFragments[assetIx:endAssetIx] - value := strings.Join(valueFragments, "/") - - if _, isOutput := outputFields[fieldName]; !isOutput { - hclData[fieldName] = value - } - - assetIx = endAssetIx - templateIx = endTemplateIx - 1 - } else { - assetIx++ - } - } else { - // This is a literal fragment, just advance the asset index if it matches. - if assetIx < len(assetFragments) && assetFragments[assetIx] == templateFragment { - assetIx++ - } else { - log.Printf("Warning: Template literal '%s' does not match assetName at index %d.", templateFragment, assetIx) - } - } +// Remove the Terraform attribution label "goog-terraform-provisioned" from labels +func RemoveTerraformAttributionLabel(raw interface{}) interface{} { + if raw == nil { + return nil } -} -// Finds the exclusive end index of a dynamic path segment within a Google Cloud asset name -// by searching for the next literal segment from a template. -func getEndAssetIx(endTemplateIx int, templateFragments []string, assetFragments []string) int { - if endTemplateIx >= len(templateFragments) { - return len(assetFragments) + if labels, ok := raw.(map[string]string); ok { + delete(labels, "goog-terraform-provisioned") + return labels } - // Find the index of the next non-field fragment in the asset name. - nextNonFieldFragment := templateFragments[endTemplateIx] - for ix, item := range assetFragments { - if item == nextNonFieldFragment { - return ix - } + if labels, ok := raw.(map[string]interface{}); ok { + delete(labels, "goog-terraform-provisioned") + return labels } - // If the next non-field fragment is not found in the asset name, - // it means the dynamic field goes to the end of the asset name. 
- return len(assetFragments) + return nil } // DecodeJSON decodes the map object into the target struct. @@ -146,6 +90,10 @@ func hashicorpCtyTypeToZclconfCtyType(t hashicorpcty.Type) (cty.Type, error) { return ret, nil } +func NewConfig() *transport_tpg.Config { + return &transport_tpg.Config{} +} + // normalizeFlattenedObj traverses the output map recursively, removes fields which are // not a part of TF schema and converts unmarshallable "schema.Set" objects to arrays. func normalizeFlattenedObj(obj interface{}, schemaPerProp map[string]*schema.Schema) interface{} { diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go index afeebb1402ca..f16820860032 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/utils/utils_test.go @@ -1,54 +1,50 @@ -package utils_test +package utils import ( - "fmt" "testing" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" "github.com/stretchr/testify/assert" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - tpg_provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + tpg_provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" "github.com/zclconf/go-cty/cty" ) func TestSubsetOfFieldsMapsToCtyValue(t *testing.T) { - schema := createSchema("google_compute_instance") + schema := createSchema("google_compute_forwarding_rule") outputMap := map[string]interface{}{ "name": "forwarding-rule-1", } - val, err := utils.MapToCtyValWithSchema(outputMap, schema) + val, err := MapToCtyValWithSchema(outputMap, schema) 
assert.Nil(t, err) assert.Equal(t, "forwarding-rule-1", val.GetAttr("name").AsString()) } func TestWrongFieldTypeBreaksConversion(t *testing.T) { - resourceSchema := createSchema("google_compute_instance") + resourceSchema := createSchema("google_compute_backend_service") outputMap := map[string]interface{}{ "name": "fr-1", "description": []string{"unknownValue"}, // string is required, not array. } - val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.True(t, val.IsNull()) assert.Contains(t, err.Error(), "string is required") } func TestNilValue(t *testing.T) { - resourceSchema := createSchema("google_compute_instance") + resourceSchema := createSchema("google_compute_forwarding_rule") outputMap := map[string]interface{}{ "name": "fr-1", "description": nil, } - val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) assert.Equal(t, cty.Value(cty.StringVal("fr-1")), val.GetAttr("name")) @@ -56,12 +52,12 @@ func TestNilValue(t *testing.T) { } func TestNilValueInRequiredField(t *testing.T) { - resourceSchema := createSchema("google_compute_instance") + resourceSchema := createSchema("google_compute_forwarding_rule") outputMap := map[string]interface{}{ "name": nil, } - val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := MapToCtyValWithSchema(outputMap, resourceSchema) // In future we may want to fail in this case. 
assert.Nil(t, err) @@ -69,27 +65,27 @@ func TestNilValueInRequiredField(t *testing.T) { } func TestFieldsWithTypeSlice(t *testing.T) { - resourceSchema := createSchema("google_compute_instance") + resourceSchema := createSchema("google_compute_forwarding_rule") outputMap := map[string]interface{}{ - "name": "fr-1", - "resource_policies": []string{"test"}, + "name": "fr-1", + "ports": []string{"80"}, } - val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) - assert.Equal(t, []cty.Value{cty.StringVal("test")}, val.GetAttr("resource_policies").AsValueSlice()) + assert.Equal(t, []cty.Value{cty.StringVal("80")}, val.GetAttr("ports").AsValueSlice()) } func TestMissingFieldDoesNotBreakConversionConversion(t *testing.T) { - resourceSchema := createSchema("google_compute_instance") + resourceSchema := createSchema("google_compute_forwarding_rule") outputMap := map[string]interface{}{ "name": "fr-1", "unknownField": "unknownValue", } - val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) @@ -98,16 +94,16 @@ func TestMissingFieldDoesNotBreakConversionConversion(t *testing.T) { } func TestFieldWithTypeSchemaSet(t *testing.T) { - resourceSchema := createSchema("google_compute_instance") + resourceSchema := createSchema("google_compute_forwarding_rule") outputMap := map[string]interface{}{ - "name": "fr-1", - "resource_policies": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface([]string{"test"})), + "name": "fr-1", + "ports": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface([]string{"80"})), } - val, err := utils.MapToCtyValWithSchema(outputMap, resourceSchema) + val, err := MapToCtyValWithSchema(outputMap, resourceSchema) assert.Nil(t, err) - assert.Equal(t, []cty.Value{cty.StringVal("test")}, val.GetAttr("resource_policies").AsValueSlice()) 
+ assert.Equal(t, []cty.Value{cty.StringVal("80")}, val.GetAttr("ports").AsValueSlice()) } func TestFieldWithTypeSchemaListAndNestedObject(t *testing.T) { @@ -132,7 +128,7 @@ func TestFieldWithTypeSchemaListAndNestedObject(t *testing.T) { }, } - val, err := utils.MapToCtyValWithSchema(flattenedMap, resourceSchema) + val, err := MapToCtyValWithSchema(flattenedMap, resourceSchema) assert.Nil(t, err) assert.Equal(t, @@ -171,7 +167,7 @@ func TestFieldWithTypeSchemaSetAndNestedObject(t *testing.T) { }), } - val, err := utils.MapToCtyValWithSchema(flattenedMap, resourceSchema) + val, err := MapToCtyValWithSchema(flattenedMap, resourceSchema) assert.Nil(t, err) assert.Equal(t, @@ -189,75 +185,3 @@ func createSchema(name string) map[string]*schema.Schema { return provider.ResourcesMap[name].Schema } - -func TestParseUrlParamValuesFromAssetName(t *testing.T) { - compareMaps := func(m1, m2 map[string]any) error { - if diff := cmp.Diff(m1, m2, cmpopts.SortMaps(func(k1, k2 string) bool { return k1 < k2 })); diff != "" { - return fmt.Errorf("maps are not equal (-got +want):\n%s", diff) - } - return nil - } - - // Test cases for different scenarios - testCases := []struct { - name string - template string - assetName string - outputFields map[string]struct{} - want map[string]any - }{ - { - name: "ComputeUrlmap", - template: "//compute.googleapis.com/projects/{{project}}/global/urlMaps/{{name}}", - assetName: "//compute.googleapis.com/projects/my-project/global/urlMaps/urlmapibgtchooyo", - outputFields: make(map[string]struct{}), - want: map[string]any{"project": "my-project", "name": "urlmapibgtchooyo"}, - }, - { - name: "BigQueryDataset", - template: "//bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}}", - assetName: "//bigquery.googleapis.com/projects/my-project/datasets/my-dataset", - outputFields: make(map[string]struct{}), - want: map[string]any{"project": "my-project", "dataset_id": "my-dataset"}, - }, - { - name: "AlloyDBInstance", - template: 
"//alloydb.googleapis.com/{{cluster}}/instances/{{instance_id}}", - assetName: "//alloydb.googleapis.com/projects/ci-test/locations/us-central1/clusters/tf-test-cluster/instances/tf-test-instance", - outputFields: make(map[string]struct{}), - want: map[string]any{"cluster": "projects/ci-test/locations/us-central1/clusters/tf-test-cluster", "instance_id": "tf-test-instance"}, - }, - { - name: "WithOutputFieldsIgnored", - template: "//bigquery.googleapis.com/projects/{{project}}/location/{{location}}/datasets/{{dataset_id}}", - assetName: "//bigquery.googleapis.com/projects/my-project/location/abc/datasets/my-dataset", - outputFields: map[string]struct{}{"location": {}}, // 'location' should be ignored - want: map[string]any{"project": "my-project", "dataset_id": "my-dataset"}, - }, - { - name: "WithMissingSuffix", - template: "//bigquery.googleapis.com/projects/{{project/datasets/{{dataset_id}}", - assetName: "//bigquery.googleapis.com/projects/my-project/datasets/my-dataset", - outputFields: make(map[string]struct{}), - want: map[string]any{"dataset_id": "my-dataset"}, - }, - { - name: "EmptyTemplate", - template: "", - assetName: "//bigquery.googleapis.com/projects/my-project/datasets/my-dataset", - outputFields: make(map[string]struct{}), - want: map[string]any{}, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - hclData := make(map[string]any) - utils.ParseUrlParamValuesFromAssetName(tc.assetName, tc.template, tc.outputFields, hclData) - - if err := compareMaps(hclData, tc.want); err != nil { - t.Fatalf("map mismatch: %v", err) - } - }) - } -} diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go index 0951b17299df..5b6555834009 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/converter.go @@ -5,7 +5,7 @@ import ( ) // Converter interface for resources. 
-type Cai2hclConverter interface { +type Converter interface { // Convert turns asset into hcl blocks. Convert(asset caiasset.Asset) ([]*TerraformResourceBlock, error) } diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go index 1d3aa8a8ad15..96b35171ead9 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/models/hcl_block.go @@ -3,6 +3,7 @@ package models import ( "fmt" + "github.com/hashicorp/hcl/hcl/printer" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/zclconf/go-cty/cty" ) @@ -19,15 +20,12 @@ func HclWriteBlocks(blocks []*TerraformResourceBlock) ([]byte, error) { for _, resourceBlock := range blocks { hclBlock := rootBody.AppendNewBlock("resource", resourceBlock.Labels) - resourceBody := hclBlock.Body() - resourceBody.SetAttributeRaw("provider", hclwrite.TokensForIdentifier("google-beta")) - if err := hclWriteBlock(resourceBlock.Value, hclBlock.Body()); err != nil { return nil, err } } - return hclwrite.Format(f.Bytes()), nil + return printer.Format(f.Bytes()) } func hclWriteBlock(val cty.Value, body *hclwrite.Body) error { @@ -51,7 +49,7 @@ func hclWriteBlock(val cty.Value, body *hclwrite.Body) error { return err } case objValType.IsCollectionType(): - if objVal.LengthInt() == 0 && !objValType.IsSetType() { + if objVal.LengthInt() == 0 { continue } // Presumes map should not contain object type. 
diff --git a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go b/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go deleted file mode 100644 index 0de28812aadd..000000000000 --- a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance.go +++ /dev/null @@ -1,1612 +0,0 @@ -package compute - -import ( - "strings" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - - compute "google.golang.org/api/compute/v0.beta" -) - -// ComputeInstanceAssetType is the CAI asset type name for compute instance. -const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" - -// ComputeInstanceSchemaName is the TF resource schema name for compute instance. 
-const ComputeInstanceSchemaName string = "google_compute_instance" - -var ( - advancedMachineFeaturesKeys = []string{ - "advanced_machine_features.0.enable_nested_virtualization", - "advanced_machine_features.0.threads_per_core", - "advanced_machine_features.0.turbo_mode", - "advanced_machine_features.0.visible_core_count", - "advanced_machine_features.0.performance_monitoring_unit", - "advanced_machine_features.0.enable_uefi_networking", - } - - bootDiskKeys = []string{ - "boot_disk.0.guest_os_features", - "boot_disk.0.auto_delete", - "boot_disk.0.device_name", - "boot_disk.0.disk_encryption_key_raw", - "boot_disk.0.kms_key_self_link", - "boot_disk.0.disk_encryption_key_rsa", - "boot_disk.0.disk_encryption_service_account", - "boot_disk.0.initialize_params", - "boot_disk.0.mode", - "boot_disk.0.source", - } - - initializeParamsKeys = []string{ - "boot_disk.0.initialize_params.0.size", - "boot_disk.0.initialize_params.0.type", - "boot_disk.0.initialize_params.0.image", - "boot_disk.0.initialize_params.0.labels", - "boot_disk.0.initialize_params.0.resource_manager_tags", - "boot_disk.0.initialize_params.0.provisioned_iops", - "boot_disk.0.initialize_params.0.provisioned_throughput", - "boot_disk.0.initialize_params.0.enable_confidential_compute", - "boot_disk.0.initialize_params.0.source_image_encryption_key", - "boot_disk.0.initialize_params.0.snapshot", - "boot_disk.0.initialize_params.0.source_snapshot_encryption_key", - "boot_disk.0.initialize_params.0.storage_pool", - "boot_disk.0.initialize_params.0.resource_policies", - "boot_disk.0.initialize_params.0.architecture", - } - - schedulingKeys = []string{ - "scheduling.0.on_host_maintenance", - "scheduling.0.automatic_restart", - "scheduling.0.preemptible", - "scheduling.0.node_affinities", - "scheduling.0.min_node_cpus", - "scheduling.0.provisioning_model", - "scheduling.0.instance_termination_action", - "scheduling.0.termination_time", - "scheduling.0.availability_domain", - "scheduling.0.max_run_duration", - 
"scheduling.0.on_instance_stop_action", - "scheduling.0.maintenance_interval", - "scheduling.0.host_error_timeout_seconds", - "scheduling.0.graceful_shutdown", - "scheduling.0.local_ssd_recovery_timeout", - } - - shieldedInstanceConfigKeys = []string{ - "shielded_instance_config.0.enable_secure_boot", - "shielded_instance_config.0.enable_vtpm", - "shielded_instance_config.0.enable_integrity_monitoring", - } -) - -func ResourceComputeInstance() *schema.Resource { - return &schema.Resource{ - // A compute instance is more or less a superset of a compute instance - // template. Please attempt to maintain consistency with the - // resource_compute_instance_template schema when updating this one. - Schema: map[string]*schema.Schema{ - "boot_disk": { - Type: schema.TypeList, - Required: true, - ForceNew: true, - MaxItems: 1, - Description: `The boot disk for the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "auto_delete": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: bootDiskKeys, - Default: true, - Description: `Whether the disk will be auto-deleted when the instance is deleted.`, - }, - - "device_name": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - Computed: true, - ForceNew: true, - Description: `Name with which attached disk will be accessible under /dev/disk/by-id/`, - }, - - "disk_encryption_key_raw": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - ConflictsWith: []string{"boot_disk.0.kms_key_self_link", "boot_disk.0.disk_encryption_key_rsa"}, - Sensitive: true, - Description: `A 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to encrypt this disk. 
Only one of kms_key_self_link, disk_encryption_key_raw and disk_encryption_key_rsa may be set.`, - }, - - "disk_encryption_key_rsa": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - ConflictsWith: []string{"boot_disk.0.kms_key_self_link", "boot_disk.0.disk_encryption_key_raw"}, - Sensitive: true, - Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. Only one of kms_key_self_link, disk_encryption_key_raw and disk_encryption_key_rsa may be set.`, - }, - - "disk_encryption_key_sha256": { - Type: schema.TypeString, - Computed: true, - Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource.`, - }, - - "disk_encryption_service_account": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used`, - }, - - "interface": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice([]string{"SCSI", "NVME"}, false), - Description: `The disk interface used for attaching this disk. One of SCSI or NVME. (This field is shared with attached_disk and only used for specific cases, please don't specify this field without advice from Google.)`, - }, - - "kms_key_self_link": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - ConflictsWith: []string{"boot_disk.0.disk_encryption_key_raw", "boot_disk.0.disk_encryption_key_rsa"}, - DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, - Computed: true, - Description: `The self_link of the encryption key that is stored in Google Cloud KMS to encrypt this disk. 
Only one of kms_key_self_link, disk_encryption_key_raw and disk_encryption_key_rsa may be set.`, - }, - - "guest_os_features": { - Type: schema.TypeList, - Optional: true, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - Computed: true, - Description: `A list of features to enable on the guest operating system. Applicable only for bootable images.`, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - - "initialize_params": { - Type: schema.TypeList, - Optional: true, - AtLeastOneOf: bootDiskKeys, - Computed: true, - ForceNew: true, - MaxItems: 1, - Description: `Parameters with which a disk was created alongside the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "size": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - ValidateFunc: validation.IntAtLeast(1), - Description: `The size of the image in gigabytes.`, - }, - - "type": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - Description: `The Google Compute Engine disk type. Such as pd-standard, pd-ssd or pd-balanced.`, - }, - - "image": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - Description: `The image from which this disk was initialised.`, - }, - - "source_image_encryption_key": { - Type: schema.TypeList, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - MaxItems: 1, - Description: `The encryption key used to decrypt the source image.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "raw_key": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Sensitive: true, - Description: `Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, - }, - - "rsa_encrypted_key": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Sensitive: true, - Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, - }, - - "kms_key_self_link": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, - Description: `The self link of the encryption key that is stored in Google Cloud KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, - }, - - "kms_key_service_account": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used.`, - }, - - "sha256": { - Type: schema.TypeString, - Computed: true, - Description: `The SHA256 hash of the encryption key used to encrypt this disk.`, - }, - }, - }, - }, - - "snapshot": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The snapshot from which this disk was initialised.`, - }, - - "source_snapshot_encryption_key": { - Type: schema.TypeList, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - MaxItems: 1, - Description: `The encryption key used to decrypt the source snapshot.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "raw_key": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Sensitive: true, - Description: `Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, - }, - - "rsa_encrypted_key": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Sensitive: true, - Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, - }, - - "kms_key_self_link": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Computed: true, - DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, - Description: `The self link of the encryption key that is stored in Google Cloud KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key may be set.`, - }, - - "kms_key_service_account": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used.`, - }, - - "sha256": { - Type: schema.TypeString, - Computed: true, - Description: `The SHA256 hash of the encryption key used to encrypt this disk.`, - }, - }, - }, - }, - - "labels": { - Type: schema.TypeMap, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - Description: `A set of key/value label pairs assigned to the disk.`, - }, - - "resource_manager_tags": { - Type: schema.TypeMap, - Optional: true, - ForceNew: true, - AtLeastOneOf: initializeParamsKeys, - Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. 
The field is ignored (both PUT & PATCH) when empty.`, - }, - - "resource_policies": { - Type: schema.TypeList, - Elem: &schema.Schema{Type: schema.TypeString}, - Optional: true, - ForceNew: true, - Computed: true, - AtLeastOneOf: initializeParamsKeys, - DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, - MaxItems: 1, - Description: `A list of self_links of resource policies to attach to the instance's boot disk. Modifying this list will cause the instance to recreate. Currently a max of 1 resource policy is supported.`, - }, - - "provisioned_iops": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - Description: `Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle.`, - }, - - "provisioned_throughput": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - Computed: true, - ForceNew: true, - Description: `Indicates how much throughput to provision for the disk. This sets the number of throughput mb per second that the disk can handle.`, - }, - - "enable_confidential_compute": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - ForceNew: true, - Description: `A flag to enable confidential compute mode on boot disk`, - }, - - "storage_pool": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: initializeParamsKeys, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareResourceNames, - Description: `The URL of the storage pool in which the new disk is created`, - }, - - "architecture": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - AtLeastOneOf: initializeParamsKeys, - ValidateFunc: validation.StringInSlice([]string{"X86_64", "ARM64"}, false), - Description: `The architecture of the disk. 
One of "X86_64" or "ARM64".`, - }, - }, - }, - }, - - "mode": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - ForceNew: true, - Default: "READ_WRITE", - ValidateFunc: validation.StringInSlice([]string{"READ_WRITE", "READ_ONLY"}, false), - Description: `Read/write mode for the disk. One of "READ_ONLY" or "READ_WRITE".`, - }, - - "source": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: bootDiskKeys, - Computed: true, - ForceNew: true, - ConflictsWith: []string{"boot_disk.initialize_params"}, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The name or self_link of the disk attached to this instance.`, - }, - }, - }, - }, - - "machine_type": { - Type: schema.TypeString, - Required: true, - Description: `The machine type to create.`, - DiffSuppressFunc: tpgresource.CompareResourceNames, - }, - - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: verify.ValidateRFC1035Name(1, 63), - Description: `The name of the instance. 
One of name or self_link must be provided.`, - }, - - "network_interface": { - Type: schema.TypeList, - Required: true, - ForceNew: true, - Description: `The networks attached to the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "network": { - Type: schema.TypeString, - Optional: true, - Computed: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The name or self_link of the network attached to this interface.`, - }, - - "subnetwork": { - Type: schema.TypeString, - Optional: true, - Computed: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The name or self_link of the subnetwork attached to this interface.`, - }, - - "network_attachment": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The URL of the network attachment that this interface should connect to in the following format: projects/{projectNumber}/regions/{region_name}/networkAttachments/{network_attachment_name}.`, - }, - - "subnetwork_project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The project in which the subnetwork belongs.`, - }, - - "network_ip": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The private IP address assigned to the instance.`, - }, - - "name": { - Type: schema.TypeString, - Computed: true, - Description: `The name of the interface`, - }, - "nic_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"GVNIC", "VIRTIO_NET", "IDPF", "MRDMA", "IRDMA"}, false), - Description: `The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF, MRDMA, and IRDMA`, - }, - "access_config": { - Type: schema.TypeList, - Optional: true, - Description: `Access configurations, i.e. 
IPs via which this instance can be accessed via the Internet.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "nat_ip": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The IP address that is be 1:1 mapped to the instance's network ip.`, - }, - - "network_tier": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The networking tier used for configuring this instance. One of PREMIUM or STANDARD.`, - }, - - "public_ptr_domain_name": { - Type: schema.TypeString, - Optional: true, - Description: `The DNS domain name for the public PTR record.`, - }, - "security_policy": { - Type: schema.TypeString, - Computed: true, - Description: `A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.`, - }, - }, - }, - }, - - "alias_ip_range": { - Type: schema.TypeList, - Optional: true, - Description: `An array of alias IP ranges for this network interface.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "ip_cidr_range": { - Type: schema.TypeString, - Required: true, - Description: `The IP CIDR range represented by this alias IP range.`, - }, - "subnetwork_range_name": { - Type: schema.TypeString, - Optional: true, - Description: `The subnetwork secondary range name specifying the secondary range from which to allocate the IP CIDR range for this alias IP range.`, - }, - }, - }, - }, - - "stack_type": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: validation.StringInSlice([]string{"IPV4_ONLY", "IPV4_IPV6", "IPV6_ONLY", ""}, false), - Description: `The stack type for this network interface to identify whether the IPv6 feature is enabled or not. 
If not specified, IPV4_ONLY will be used.`, - }, - - "ipv6_access_type": { - Type: schema.TypeString, - Computed: true, - Description: `One of EXTERNAL, INTERNAL to indicate whether the IP can be accessed from the Internet. This field is always inherited from its subnetwork.`, - }, - - "ipv6_access_config": { - Type: schema.TypeList, - Optional: true, - Description: `An array of IPv6 access configurations for this interface. Currently, only one IPv6 access config, DIRECT_IPV6, is supported. If there is no ipv6AccessConfig specified, then this instance will have no external IPv6 Internet access.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "network_tier": { - Type: schema.TypeString, - Required: true, - Description: `The service-level to be provided for IPv6 traffic when the subnet has an external subnet. Only PREMIUM tier is valid for IPv6`, - }, - "public_ptr_domain_name": { - Type: schema.TypeString, - Optional: true, - Description: `The domain name to be used when creating DNSv6 records for the external IPv6 ranges.`, - }, - "external_ipv6": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Description: `The first IPv6 address of the external IPv6 range associated with this instance, prefix length is stored in externalIpv6PrefixLength in ipv6AccessConfig. To use a static external IP address, it must be unused and in the same region as the instance's zone. If not specified, Google Cloud will automatically assign an external IPv6 address from the instance's subnetwork.`, - }, - "external_ipv6_prefix_length": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Description: `The prefix length of the external IPv6 range.`, - }, - "name": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Description: `The name of this access configuration. 
In ipv6AccessConfigs, the recommended name is External IPv6.`, - }, - "security_policy": { - Type: schema.TypeString, - Computed: true, - Description: `A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.`, - }, - }, - }, - }, - - "internal_ipv6_prefix_length": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - Description: `The prefix length of the primary internal IPv6 range.`, - }, - - "ipv6_address": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `An IPv6 internal network address for this network interface. If not specified, Google Cloud will automatically assign an internal IPv6 address from the instance's subnetwork.`, - }, - - "queue_count": { - Type: schema.TypeInt, - Optional: true, - ForceNew: true, - Description: `The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It will be empty if not specified.`, - }, - - "security_policy": { - Type: schema.TypeString, - Optional: true, - Description: `A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.`, - }, - }, - }, - }, - "network_performance_config": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ForceNew: true, - Description: `Configures network performance settings for the instance. If not specified, the instance will be created with its default network performance configuration.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "total_egress_bandwidth_tier": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"TIER_1", "DEFAULT"}, false), - Description: `The egress bandwidth tier to enable. 
Possible values:TIER_1, DEFAULT`, - }, - }, - }, - }, - "allow_stopping_for_update": { - Type: schema.TypeBool, - Optional: true, - Description: `If true, allows Terraform to stop the instance to update its properties. If you try to update a property that requires stopping the instance without setting this field, the update will fail.`, - }, - - "attached_disk": { - Type: schema.TypeList, - Optional: true, - Description: `List of disks attached to the instance`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "source": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The name or self_link of the disk attached to this instance.`, - }, - - "device_name": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Name with which the attached disk is accessible under /dev/disk/by-id/`, - }, - - "mode": { - Type: schema.TypeString, - Optional: true, - Default: "READ_WRITE", - ValidateFunc: validation.StringInSlice([]string{"READ_WRITE", "READ_ONLY"}, false), - Description: `Read/write mode for the disk. One of "READ_ONLY" or "READ_WRITE".`, - }, - - "disk_encryption_key_raw": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: `A 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to encrypt this disk. Only one of kms_key_self_link, disk_encryption_key_rsa and disk_encryption_key_raw may be set.`, - }, - - "disk_encryption_key_rsa": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: `Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied encryption key to either encrypt or decrypt this resource. 
Only one of kms_key_self_link, disk_encryption_key_rsa and disk_encryption_key_raw may be set.`, - }, - - "kms_key_self_link": { - Type: schema.TypeString, - Optional: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, - Computed: true, - Description: `The self_link of the encryption key that is stored in Google Cloud KMS to encrypt this disk. Only one of kms_key_self_link, disk_encryption_key_rsa and disk_encryption_key_raw may be set.`, - }, - - "disk_encryption_service_account": { - Type: schema.TypeString, - Optional: true, - Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used`, - }, - - "disk_encryption_key_sha256": { - Type: schema.TypeString, - Computed: true, - Description: `The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource.`, - }, - }, - }, - }, - - "can_ip_forward": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: `Whether sending and receiving of packets with non-matching source or destination IPs is allowed.`, - }, - - "description": { - Type: schema.TypeString, - Optional: true, - Description: `A brief description of the resource.`, - }, - - "deletion_protection": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: `Whether deletion protection is enabled on this instance.`, - }, - - "enable_display": { - Type: schema.TypeBool, - Optional: true, - Description: `Whether the instance has virtual displays enabled.`, - }, - - "guest_accelerator": { - Type: schema.TypeList, - Optional: true, - Computed: true, - ForceNew: true, - Description: `List of the type and count of accelerator cards attached to the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "count": { - Type: schema.TypeInt, - Required: true, - ForceNew: true, - Description: `The number of the guest accelerator cards exposed to this 
instance.`, - }, - "type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The accelerator type resource exposed to this instance. E.g. nvidia-tesla-k80.`, - }, - }, - }, - }, - - "params": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ForceNew: true, - Description: `Stores additional params passed with the request, but not persisted as part of resource payload.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "resource_manager_tags": { - Type: schema.TypeMap, - Optional: true, - Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored (both PUT & PATCH) when empty.`, - }, - }, - }, - }, - - "labels": { - Type: schema.TypeMap, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `A set of key/value label pairs assigned to the instance. - - **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. 
- Please refer to the field 'effective_labels' for all of the labels present on the resource.`, - }, - - "terraform_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - - "effective_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - - "metadata": { - Type: schema.TypeMap, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `Metadata key/value pairs made available within the instance.`, - }, - - "partner_metadata": { - Type: schema.TypeMap, - Optional: true, - DiffSuppressFunc: ComparePartnerMetadataDiff, - DiffSuppressOnRefresh: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `Partner Metadata Map made available within the instance.`, - }, - - "metadata_startup_script": { - Type: schema.TypeString, - Optional: true, - Description: `Metadata startup scripts made available within the instance.`, - }, - - "min_cpu_platform": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The minimum CPU platform specified for the VM instance.`, - }, - - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Description: `The ID of the project in which the resource belongs. If self_link is provided, this value is ignored. If neither self_link nor project are provided, the provider project is used.`, - }, - - "scheduling": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Computed: true, - Description: `The scheduling strategy being used by the instance.`, - Elem: &schema.Resource{ - // !!! IMPORTANT !!! 
- // We have a custom diff function for the scheduling block due to issues with Terraform's - // diff on schema.Set. If changes are made to this block, they must be reflected in that - // method. See schedulingHasChangeWithoutReboot in compute_instance_helpers.go - Schema: map[string]*schema.Schema{ - "on_host_maintenance": { - Type: schema.TypeString, - Optional: true, - Computed: true, - AtLeastOneOf: schedulingKeys, - Description: `Describes maintenance behavior for the instance. One of MIGRATE or TERMINATE,`, - }, - - "automatic_restart": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: schedulingKeys, - Default: true, - Description: `Specifies if the instance should be restarted if it was terminated by Compute Engine (not a user).`, - }, - - "preemptible": { - Type: schema.TypeBool, - Optional: true, - Default: false, - AtLeastOneOf: schedulingKeys, - ForceNew: true, - Description: `Whether the instance is preemptible.`, - }, - - "node_affinities": { - Type: schema.TypeSet, - Optional: true, - AtLeastOneOf: schedulingKeys, - Elem: instanceSchedulingNodeAffinitiesElemSchema(), - DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress(""), - Description: `Specifies node affinities or anti-affinities to determine which sole-tenant nodes your instances and managed instance groups will use as host systems.`, - }, - - "min_node_cpus": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: schedulingKeys, - }, - - "provisioning_model": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - AtLeastOneOf: schedulingKeys, - Description: `Whether the instance is spot. 
If this is set as SPOT.`, - }, - - "instance_termination_action": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: schedulingKeys, - Description: `Specifies the action GCE should take when SPOT VM is preempted.`, - }, - "termination_time": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - AtLeastOneOf: schedulingKeys, - Description: `Specifies the timestamp, when the instance will be terminated, -in RFC3339 text format. If specified, the instance termination action -will be performed at the termination time.`, - }, - "availability_domain": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: schedulingKeys, - Description: `Specifies the availability domain, which this instance should be scheduled on.`, - }, - "max_run_duration": { - Type: schema.TypeList, - Optional: true, - Description: `The timeout for new network connections to hosts.`, - MaxItems: 1, - ForceNew: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "seconds": { - Type: schema.TypeInt, - Required: true, - ForceNew: true, - Description: `Span of time at a resolution of a second. -Must be from 0 to 315,576,000,000 inclusive.`, - }, - "nanos": { - Type: schema.TypeInt, - Optional: true, - ForceNew: true, - Description: `Span of time that's a fraction of a second at nanosecond -resolution. Durations less than one second are represented -with a 0 seconds field and a positive nanos field. 
Must -be from 0 to 999,999,999 inclusive.`, - }, - }, - }, - }, - "on_instance_stop_action": { - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - ForceNew: true, - Description: `Defines the behaviour for instances with the instance_termination_action.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "discard_local_ssd": { - Type: schema.TypeBool, - Optional: true, - Description: `If true, the contents of any attached Local SSD disks will be discarded.`, - Default: false, - ForceNew: true, - }, - }, - }, - }, - "host_error_timeout_seconds": { - Type: schema.TypeInt, - Optional: true, - Description: `Specify the time in seconds for host error detection, the value must be within the range of [90, 330] with the increment of 30, if unset, the default behavior of host error recovery will be used.`, - }, - - "maintenance_interval": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: schedulingKeys, - Description: `Specifies the frequency of planned maintenance events. The accepted values are: PERIODIC`, - }, - "local_ssd_recovery_timeout": { - Type: schema.TypeList, - Optional: true, - Description: `Specifies the maximum amount of time a Local Ssd Vm should wait while - recovery of the Local Ssd state is attempted. Its value should be in - between 0 and 168 hours with hour granularity and the default value being 1 - hour.`, - MaxItems: 1, - ForceNew: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "seconds": { - Type: schema.TypeInt, - Required: true, - ForceNew: true, - Description: `Span of time at a resolution of a second. -Must be from 0 to 315,576,000,000 inclusive.`, - }, - "nanos": { - Type: schema.TypeInt, - Optional: true, - ForceNew: true, - Description: `Span of time that's a fraction of a second at nanosecond -resolution. Durations less than one second are represented -with a 0 seconds field and a positive nanos field. 
Must -be from 0 to 999,999,999 inclusive.`, - }, - }, - }, - }, - "graceful_shutdown": { - Type: schema.TypeList, - Optional: true, - Description: `Settings for the instance to perform a graceful shutdown.`, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enabled": { - Type: schema.TypeBool, - Required: true, - Description: `Opts-in for graceful shutdown.`, - }, - "max_duration": { - Type: schema.TypeList, - Optional: true, - Description: `The time allotted for the instance to gracefully shut down. - If the graceful shutdown isn't complete after this time, then the instance - transitions to the STOPPING state.`, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "seconds": { - Type: schema.TypeInt, - Required: true, - Description: `Span of time at a resolution of a second. - The value must be between 1 and 3600, which is 3,600 seconds (one hour).`, - }, - "nanos": { - Type: schema.TypeInt, - Optional: true, - Description: `Span of time that's a fraction of a second at nanosecond - resolution. Durations less than one second are represented - with a 0 seconds field and a positive nanos field. Must - be from 0 to 999,999,999 inclusive.`, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - - "scratch_disk": { - Type: schema.TypeList, - Optional: true, - ForceNew: true, - Description: `The scratch disks attached to the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "device_name": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Name with which the attached disk is accessible under /dev/disk/by-id/`, - }, - "interface": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{"SCSI", "NVME"}, false), - Description: `The disk interface used for attaching this disk. 
One of SCSI or NVME.`, - }, - "size": { - Type: schema.TypeInt, - Optional: true, - ForceNew: true, - ValidateFunc: validation.IntAtLeast(375), - Default: 375, - Description: `The size of the disk in gigabytes. One of 375 or 3000.`, - }, - }, - }, - }, - - "service_account": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Description: `The service account to attach to the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "email": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `The service account e-mail address.`, - }, - - "scopes": { - Type: schema.TypeSet, - Required: true, - Description: `A list of service scopes.`, - Elem: &schema.Schema{ - Type: schema.TypeString, - StateFunc: func(v interface{}) string { - return tpgresource.CanonicalizeServiceScope(v.(string)) - }, - }, - Set: tpgresource.StringScopeHashcode, - }, - }, - }, - }, - - "shielded_instance_config": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - // Since this block is used by the API based on which - // image being used, the field needs to be marked as Computed. 
- Computed: true, - DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress(""), - Description: `The shielded vm config being used by the instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enable_secure_boot": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: shieldedInstanceConfigKeys, - Default: false, - Description: `Whether secure boot is enabled for the instance.`, - }, - - "enable_vtpm": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: shieldedInstanceConfigKeys, - Default: true, - Description: `Whether the instance uses vTPM.`, - }, - - "enable_integrity_monitoring": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: shieldedInstanceConfigKeys, - Default: true, - Description: `Whether integrity monitoring is enabled for the instance.`, - }, - }, - }, - }, - "advanced_machine_features": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - Description: `Controls for advanced machine-related behavior features.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enable_nested_virtualization": { - Type: schema.TypeBool, - Optional: true, - AtLeastOneOf: advancedMachineFeaturesKeys, - Description: `Whether to enable nested virtualization or not.`, - }, - "threads_per_core": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: advancedMachineFeaturesKeys, - Description: `The number of threads per physical core. To disable simultaneous multithreading (SMT) set this to 1. If unset, the maximum number of threads supported per core by the underlying processor is assumed.`, - }, - "turbo_mode": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: advancedMachineFeaturesKeys, - Description: `Turbo frequency mode to use for the instance. 
Currently supported modes is "ALL_CORE_MAX".`, - ValidateFunc: validation.StringInSlice([]string{"ALL_CORE_MAX"}, false), - }, - "visible_core_count": { - Type: schema.TypeInt, - Optional: true, - AtLeastOneOf: advancedMachineFeaturesKeys, - Description: `The number of physical cores to expose to an instance. Multiply by the number of threads per core to compute the total number of virtual CPUs to expose to the instance. If unset, the number of cores is inferred from the instance\'s nominal CPU count and the underlying platform\'s SMT width.`, - }, - "performance_monitoring_unit": { - Type: schema.TypeString, - Optional: true, - AtLeastOneOf: advancedMachineFeaturesKeys, - ValidateFunc: validation.StringInSlice([]string{"STANDARD", "ENHANCED", "ARCHITECTURAL"}, false), - Description: `The PMU is a hardware component within the CPU core that monitors how the processor runs code. Valid values for the level of PMU are "STANDARD", "ENHANCED", and "ARCHITECTURAL".`, - }, - "enable_uefi_networking": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - AtLeastOneOf: advancedMachineFeaturesKeys, - Description: `Whether to enable UEFI networking for the instance.`, - }, - }, - }, - }, - "confidential_instance_config": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ForceNew: true, - Computed: true, - Description: `The Confidential VM config being used by the instance. on_host_maintenance has to be set to TERMINATE or this will fail to create.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "enable_confidential_compute": { - Type: schema.TypeBool, - Optional: true, - Description: `Defines whether the instance should have confidential compute enabled. 
Field will be deprecated in a future release`, - AtLeastOneOf: []string{"confidential_instance_config.0.enable_confidential_compute", "confidential_instance_config.0.confidential_instance_type"}, - }, - "confidential_instance_type": { - Type: schema.TypeString, - Optional: true, - Description: ` - The confidential computing technology the instance uses. - SEV is an AMD feature. TDX is an Intel feature. One of the following - values is required: SEV, SEV_SNP, TDX. If SEV_SNP, min_cpu_platform = - "AMD Milan" is currently required.`, - AtLeastOneOf: []string{"confidential_instance_config.0.enable_confidential_compute", "confidential_instance_config.0.confidential_instance_type"}, - }, - }, - }, - }, - "desired_status": { - Type: schema.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice([]string{"RUNNING", "TERMINATED", "SUSPENDED"}, false), - Description: `Desired status of the instance. Either "RUNNING", "SUSPENDED" or "TERMINATED".`, - }, - "current_status": { - Type: schema.TypeString, - Computed: true, - Description: ` - Current status of the instance. - This could be one of the following values: PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. - For more information about the status of the instance, see [Instance life cycle](https://cloud.google.com/compute/docs/instances/instance-life-cycle).`, - }, - "tags": { - Type: schema.TypeSet, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Set: schema.HashString, - Description: `The list of tags attached to the instance.`, - }, - - "zone": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Description: `The zone of the instance. If self_link is provided, this value is ignored. 
If neither self_link nor zone are provided, the provider zone is used.`, - }, - - "cpu_platform": { - Type: schema.TypeString, - Computed: true, - Description: `The CPU platform used by this instance.`, - }, - - "instance_id": { - Type: schema.TypeString, - Computed: true, - Description: `The server-assigned unique identifier of this instance.`, - }, - - "creation_timestamp": { - Type: schema.TypeString, - Computed: true, - Description: `Creation timestamp in RFC3339 text format.`, - }, - - "label_fingerprint": { - Type: schema.TypeString, - Computed: true, - Description: `The unique fingerprint of the labels.`, - }, - - "metadata_fingerprint": { - Type: schema.TypeString, - Computed: true, - Description: `The unique fingerprint of the metadata.`, - }, - - "self_link": { - Type: schema.TypeString, - Computed: true, - Description: `The URI of the created resource.`, - }, - - "tags_fingerprint": { - Type: schema.TypeString, - Computed: true, - Description: `The unique fingerprint of the tags.`, - }, - - "hostname": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: `A custom hostname for the instance. Must be a fully qualified DNS name and RFC-1035-valid. Valid format is a series of labels 1-63 characters long matching the regular expression [a-z]([-a-z0-9]*[a-z0-9]), concatenated with periods. The entire hostname must not exceed 253 characters. Changing this forces a new resource to be created.`, - }, - - "resource_policies": { - Type: schema.TypeList, - Elem: &schema.Schema{Type: schema.TypeString}, - DiffSuppressFunc: tpgresource.CompareSelfLinkRelativePaths, - Optional: true, - MaxItems: 1, - Description: `A list of self_links of resource policies to attach to the instance. 
Currently a max of 1 resource policy is supported.`, - }, - - "reservation_affinity": { - Type: schema.TypeList, - MaxItems: 1, - Computed: true, - Optional: true, - ForceNew: true, - Description: `Specifies the reservations that this instance can consume from.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"ANY_RESERVATION", "SPECIFIC_RESERVATION", "NO_RESERVATION"}, false), - Description: `The type of reservation from which this instance can consume resources.`, - }, - - "specific_reservation": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ForceNew: true, - Description: `Specifies the label selector for the reservation to use.`, - - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `Corresponds to the label key of a reservation resource. To target a SPECIFIC_RESERVATION by name, specify compute.googleapis.com/reservation-name as the key and specify the name of your reservation as the only value.`, - }, - "values": { - Type: schema.TypeList, - Elem: &schema.Schema{Type: schema.TypeString}, - Required: true, - ForceNew: true, - Description: `Corresponds to the label values of a reservation resource.`, - }, - }, - }, - }, - }, - }, - }, - - "key_revocation_action_type": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"STOP", "NONE", ""}, false), - Description: `Action to be taken when a customer's encryption key is revoked. 
Supports "STOP" and "NONE", with "NONE" being the default.`, - }, - - "instance_encryption_key": { - Type: schema.TypeList, - MaxItems: 1, - Optional: true, - ForceNew: true, - Description: `Encryption key used to provide data encryption on the given instance.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "kms_key_self_link": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, - Computed: true, - Description: `The self link of the encryption key that is stored in Google Cloud KMS.`, - }, - - "kms_key_service_account": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: `The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used.`, - }, - - "sha256": { - Type: schema.TypeString, - Computed: true, - Description: `The SHA256 hash of the customer's encryption key.`, - }, - }, - }, - }, - }, - UseJSONNumber: true, - } -} - -func flattenAliasIpRangeTgc(ranges []*compute.AliasIpRange) []map[string]interface{} { - rangesSchema := make([]map[string]interface{}, 0, len(ranges)) - for _, ipRange := range ranges { - rangesSchema = append(rangesSchema, map[string]interface{}{ - "ip_cidr_range": ipRange.IpCidrRange, - "subnetwork_range_name": ipRange.SubnetworkRangeName, - }) - } - return rangesSchema -} - -func flattenSchedulingTgc(resp *compute.Scheduling) []map[string]interface{} { - schedulingMap := make(map[string]interface{}, 0) - - // gracefulShutdown is not in the cai asset, so graceful_shutdown is skipped. 
- - if resp.InstanceTerminationAction != "" { - schedulingMap["instance_termination_action"] = resp.InstanceTerminationAction - } - - if resp.MinNodeCpus != 0 { - schedulingMap["min_node_cpus"] = resp.MinNodeCpus - } - - schedulingMap["on_host_maintenance"] = resp.OnHostMaintenance - - if resp.AutomaticRestart != nil && !*resp.AutomaticRestart { - schedulingMap["automatic_restart"] = *resp.AutomaticRestart - } - - if resp.Preemptible { - schedulingMap["preemptible"] = resp.Preemptible - } - - if resp.NodeAffinities != nil && len(resp.NodeAffinities) > 0 { - nodeAffinities := []map[string]interface{}{} - for _, na := range resp.NodeAffinities { - nodeAffinities = append(nodeAffinities, map[string]interface{}{ - "key": na.Key, - "operator": na.Operator, - "values": tpgresource.ConvertStringArrToInterface(na.Values), - }) - } - schedulingMap["node_affinities"] = nodeAffinities - } - - schedulingMap["provisioning_model"] = resp.ProvisioningModel - - if resp.AvailabilityDomain != 0 { - schedulingMap["availability_domain"] = resp.AvailabilityDomain - } - - if resp.MaxRunDuration != nil { - schedulingMap["max_run_duration"] = flattenComputeMaxRunDuration(resp.MaxRunDuration) - } - - if resp.OnInstanceStopAction != nil { - schedulingMap["on_instance_stop_action"] = flattenOnInstanceStopAction(resp.OnInstanceStopAction) - } - - if resp.HostErrorTimeoutSeconds != 0 { - schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds - } - - if resp.MaintenanceInterval != "" { - schedulingMap["maintenance_interval"] = resp.MaintenanceInterval - } - - if resp.LocalSsdRecoveryTimeout != nil { - schedulingMap["local_ssd_recovery_timeout"] = flattenComputeLocalSsdRecoveryTimeout(resp.LocalSsdRecoveryTimeout) - } - - if len(schedulingMap) == 0 { - return nil - } - - return []map[string]interface{}{schedulingMap} -} - -func flattenNetworkInterfacesTgc(networkInterfaces []*compute.NetworkInterface, project string) ([]map[string]interface{}, string, string, error) { - 
flattened := make([]map[string]interface{}, len(networkInterfaces)) - var internalIP, externalIP string - - for i, iface := range networkInterfaces { - var ac []map[string]interface{} - ac, externalIP = flattenAccessConfigs(iface.AccessConfigs) - - flattened[i] = map[string]interface{}{ - "network_ip": iface.NetworkIP, - "access_config": ac, - "alias_ip_range": flattenAliasIpRangeTgc(iface.AliasIpRanges), - "nic_type": iface.NicType, - "stack_type": iface.StackType, - "ipv6_access_config": flattenIpv6AccessConfigs(iface.Ipv6AccessConfigs), - "ipv6_address": iface.Ipv6Address, - "network": tpgresource.ConvertSelfLinkToV1(iface.Network), - "subnetwork": tpgresource.ConvertSelfLinkToV1(iface.Subnetwork), - "internal_ipv6_prefix_length": iface.InternalIpv6PrefixLength, - } - - subnetProject := utils.ParseFieldValue(iface.Subnetwork, "projects") - if subnetProject != project { - flattened[i]["subnetwork_project"] = subnetProject - } - - // The field name is computed, no it is not converted. - - if iface.StackType != "IPV4_ONLY" { - flattened[i]["stack_type"] = iface.StackType - } - - if iface.QueueCount != 0 { - flattened[i]["queue_count"] = iface.QueueCount - } - - if internalIP == "" { - internalIP = iface.NetworkIP - } - - if iface.NetworkAttachment != "" { - networkAttachment, err := tpgresource.GetRelativePath(iface.NetworkAttachment) - if err != nil { - return nil, "", "", err - } - flattened[i]["network_attachment"] = networkAttachment - } - - // the security_policy for a network_interface is found in one of its accessConfigs. 
- if len(iface.AccessConfigs) > 0 && iface.AccessConfigs[0].SecurityPolicy != "" { - flattened[i]["security_policy"] = iface.AccessConfigs[0].SecurityPolicy - } else if len(iface.Ipv6AccessConfigs) > 0 && iface.Ipv6AccessConfigs[0].SecurityPolicy != "" { - flattened[i]["security_policy"] = iface.Ipv6AccessConfigs[0].SecurityPolicy - } - } - return flattened, internalIP, externalIP, nil -} - -func flattenServiceAccountsTgc(serviceAccounts []*compute.ServiceAccount) []map[string]interface{} { - result := make([]map[string]interface{}, len(serviceAccounts)) - for i, serviceAccount := range serviceAccounts { - scopes := serviceAccount.Scopes - if len(scopes) == 0 { - scopes = []string{} - } - result[i] = map[string]interface{}{ - "email": serviceAccount.Email, - "scopes": scopes, - } - } - return result -} - -func flattenGuestAcceleratorsTgc(accelerators []*compute.AcceleratorConfig) []map[string]interface{} { - acceleratorsSchema := make([]map[string]interface{}, len(accelerators)) - for i, accelerator := range accelerators { - acceleratorsSchema[i] = map[string]interface{}{ - "count": accelerator.AcceleratorCount, - "type": tpgresource.GetResourceNameFromSelfLink(accelerator.AcceleratorType), - } - } - return acceleratorsSchema -} - -func flattenReservationAffinityTgc(affinity *compute.ReservationAffinity) []map[string]interface{} { - if affinity == nil { - return nil - } - - // The values of ConsumeReservationType in cai assets are NO_ALLOCATION, SPECIFIC_ALLOCATION, ANY_ALLOCATION - crt := strings.ReplaceAll(affinity.ConsumeReservationType, "_ALLOCATION", "_RESERVATION") - flattened := map[string]interface{}{ - "type": crt, - } - - if crt == "SPECIFIC_RESERVATION" { - flattened["specific_reservation"] = []map[string]interface{}{{ - "key": affinity.Key, - "values": affinity.Values, - }} - } - - return []map[string]interface{}{flattened} -} diff --git a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go 
b/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go deleted file mode 100644 index c1ee60c4989f..000000000000 --- a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project.go +++ /dev/null @@ -1,117 +0,0 @@ -package resourcemanager - -import ( - "strings" - - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/verify" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -// ProjectAssetType is the CAI asset type name for project. -const ProjectAssetType string = "cloudresourcemanager.googleapis.com/Project" - -// ProjectSchemaName is the TF resource schema name for resourcemanager project. -const ProjectSchemaName string = "google_project" - -func ParseFolderId(v interface{}) string { - folderId := v.(string) - if strings.HasPrefix(folderId, "folders/") { - return folderId[8:] - } - return folderId -} - -// ResourceGoogleProject returns a *schema.Resource that allows a customer -// to declare a Google Cloud Project resource. -func ResourceGoogleProject() *schema.Resource { - return &schema.Resource{ - SchemaVersion: 1, - - Schema: map[string]*schema.Schema{ - "project_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: verify.ValidateProjectID(), - Description: `The project ID. Changing this forces a new project to be created.`, - }, - "deletion_policy": { - Type: schema.TypeString, - Optional: true, - Default: "PREVENT", - Description: `The deletion policy for the Project. Setting PREVENT will protect the project against any destroy actions caused by a terraform apply or terraform destroy. Setting ABANDON allows the resource - to be abandoned rather than deleted. 
Possible values are: "PREVENT", "ABANDON", "DELETE"`, - ValidateFunc: validation.StringInSlice([]string{"PREVENT", "ABANDON", "DELETE"}, false), - }, - "auto_create_network": { - Type: schema.TypeBool, - Optional: true, - Default: true, - Description: `Create the 'default' network automatically. Default true. If set to false, the default network will be deleted. Note that, for quota purposes, you will still need to have 1 network slot available to create the project successfully, even if you set auto_create_network to false, since the network will exist momentarily.`, - }, - "name": { - Type: schema.TypeString, - Required: true, - ValidateFunc: verify.ValidateProjectName(), - Description: `The display name of the project.`, - }, - "org_id": { - Type: schema.TypeString, - Optional: true, - ConflictsWith: []string{"folder_id"}, - Description: `The numeric ID of the organization this project belongs to. Changing this forces a new project to be created. Only one of org_id or folder_id may be specified. If the org_id is specified then the project is created at the top level. Changing this forces the project to be migrated to the newly specified organization.`, - }, - "folder_id": { - Type: schema.TypeString, - Optional: true, - StateFunc: ParseFolderId, - ConflictsWith: []string{"org_id"}, - Description: `The numeric ID of the folder this project should be created under. Only one of org_id or folder_id may be specified. If the folder_id is specified, then the project is created under the specified folder. Changing this forces the project to be migrated to the newly specified folder.`, - }, - "number": { - Type: schema.TypeString, - Computed: true, - Description: `The numeric identifier of the project.`, - }, - "billing_account": { - Type: schema.TypeString, - Optional: true, - Description: `The alphanumeric ID of the billing account this project belongs to. 
The user or service account performing this operation with Terraform must have Billing Account Administrator privileges (roles/billing.admin) in the organization. See Google Cloud Billing API Access Control for more details.`, - }, - "labels": { - Type: schema.TypeMap, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `A set of key/value label pairs to assign to the project. - - **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. - Please refer to the field 'effective_labels' for all of the labels present on the resource.`, - }, - - "terraform_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `(ReadOnly) The combination of labels configured directly on the resource and default labels configured on the provider.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - - "effective_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - - "tags": { - Type: schema.TypeMap, - Optional: true, - ForceNew: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. The field is ignored when empty. This field is only set at create time and modifying this field after creation will trigger recreation. 
To apply tags to an existing resource, see the google_tags_tag_value resource.`, - }, - }, - UseJSONNumber: true, - } -} diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go index 1c408a7ab36f..795c0a2059d0 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go @@ -13,8 +13,8 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "go.uber.org/zap" ) @@ -164,6 +164,15 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return nil, fmt.Errorf("organization id not found in terraform data") } key = orgKey + case "iam.googleapis.com/Role": + // google_organization_iam_custom_role or google_project_iam_custom_role + if orgOK { + key = orgKey + } else if projectKey != "" { + key = projectKey + } else { + return []string{unknownOrg}, nil + } case "cloudresourcemanager.googleapis.com/Project", "cloudbilling.googleapis.com/ProjectBillingInfo": // for google_project and google_project_iam resources var ancestors []string @@ -196,24 +205,12 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return []string{unknownOrg}, nil } key = projectKey - case "apigee.googleapis.com/Instance": - // Project is used to find the ancestors. - // org_id in resource `google_apigee_instance` is the apigee org id under a project. 
+ + default: if projectKey == "" { return []string{unknownOrg}, nil } key = projectKey - default: - switch { - case orgOK: - key = orgKey - case folderOK: - key = folderKey - case projectKey != "": - key = projectKey - default: - return []string{unknownOrg}, nil - } } return m.getAncestorsWithCache(key) } @@ -390,9 +387,7 @@ func (m *manager) SetAncestors(d tpgresource.TerraformResourceData, config *tran return fmt.Errorf("getting resource ancestry or parent failed: %w", err) } - if cai.Resource != nil { - cai.Resource.Parent = parent - } + cai.Resource.Parent = parent cai.Ancestors = ancestors return nil } diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go index 65e251c47e07..0483cb414263 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestryutil.go @@ -4,9 +4,9 @@ import ( "fmt" "strings" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "google.golang.org/api/googleapi" ) diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go index 7c17829f56c3..d777675eff86 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/cai.go @@ -3,8 +3,8 @@ package cai import ( "regexp" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + 
"github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" ) // AssetName templates an asset.name by looking up and replacing all instances diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go index 49759bdd7888..de0ce5d93edd 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/cai/resource_converter.go @@ -3,8 +3,8 @@ package cai import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" ) type ConvertFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) @@ -14,7 +14,7 @@ type ConvertFunc func(d tpgresource.TerraformResourceData, config *transport_tpg // by Terraform, like IAM policies managed with member/binding resources. 
type FetchFullResourceFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) -type Tfplan2caiConverter struct { +type ResourceConverter struct { Convert ConvertFunc FetchFullResource FetchFullResourceFunc } diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go index 5a10cf1710c5..d79ae0931fe2 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/convert_resource.go @@ -8,7 +8,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/models" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "github.com/pkg/errors" "go.uber.org/zap" diff --git a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go similarity index 59% rename from mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go rename to mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go index a135cf6cb332..79da2f1ed4ff 100644 --- a/mmv1/third_party/tgc_next/pkg/services/compute/compute_instance_tfplan2cai.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/compute/compute_instance.go @@ -5,20 +5,22 @@ import ( "fmt" "strings" - compute "google.golang.org/api/compute/v0.beta" "google.golang.org/api/googleapi" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + compute "google.golang.org/api/compute/v0.beta" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" ) -func ComputeInstanceTfplan2caiConverter() cai.Tfplan2caiConverter { - return cai.Tfplan2caiConverter{ +const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" +const ComputeDiskAssetType string = "compute.googleapis.com/Disk" + +func ResourceConverterComputeInstance() cai.ResourceConverter { + return cai.ResourceConverter{ Convert: GetComputeInstanceAndDisksCaiObjects, } } @@ -26,7 +28,7 @@ func ComputeInstanceTfplan2caiConverter() cai.Tfplan2caiConverter { func GetComputeInstanceAndDisksCaiObjects(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]caiasset.Asset, error) { if instanceAsset, err := GetComputeInstanceCaiObject(d, config); err == nil { assets := []caiasset.Asset{instanceAsset} - if diskAsset, err := GetComputeInstanceDiskCaiObject(d, config); err == nil { + if diskAsset, err := GetComputeDiskCaiObject(d, config); err == nil { assets = append(assets, diskAsset) return assets, nil } else { @@ -43,7 +45,6 @@ func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *tr return caiasset.Asset{}, err } if data, err := GetComputeInstanceData(d, config); err == nil { - location, _ := tpgresource.GetLocation(d, config) return caiasset.Asset{ Name: name, Type: ComputeInstanceAssetType, @@ -52,7 +53,6 @@ func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *tr DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Instance", Data: data, - Location: location, }, }, nil } else { @@ -88,6 +88,7 @@ func 
expandComputeInstance(project string, d tpgresource.TerraformResourceData, } // Build up the list of disks + disks := []*compute.AttachedDisk{} if _, hasBootDisk := d.GetOk("boot_disk"); hasBootDisk { bootDisk, err := expandBootDisk(d, config, project) @@ -117,9 +118,23 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, disks = append(disks, disk) } - scheduling, err := expandSchedulingTgc(d.Get("scheduling")) - if err != nil { - return nil, fmt.Errorf("error creating scheduling: %s", err) + sch := d.Get("scheduling").([]interface{}) + var scheduling *compute.Scheduling + if len(sch) == 0 { + // TF doesn't do anything about defaults inside of nested objects, so if + // scheduling hasn't been set, then send it with its default values. + scheduling = &compute.Scheduling{ + AutomaticRestart: googleapi.Bool(true), + } + } else { + prefix := "scheduling.0" + scheduling = &compute.Scheduling{ + AutomaticRestart: googleapi.Bool(d.Get(prefix + ".automatic_restart").(bool)), + Preemptible: d.Get(prefix + ".preemptible").(bool), + OnHostMaintenance: d.Get(prefix + ".on_host_maintenance").(string), + ProvisioningModel: d.Get(prefix + ".provisioning_model").(string), + ForceSendFields: []string{"AutomaticRestart", "Preemptible"}, + } } params, err := expandParams(d) @@ -132,12 +147,12 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, return nil, fmt.Errorf("Error creating metadata: %s", err) } - partnerMetadata, err := resourceInstancePartnerMetadata(d) + PartnerMetadata, err := resourceInstancePartnerMetadata(d) if err != nil { return nil, fmt.Errorf("Error creating partner metadata: %s", err) } - networkInterfaces, err := expandNetworkInterfacesTgc(d, config) + networkInterfaces, err := expandNetworkInterfaces(d, config) if err != nil { return nil, fmt.Errorf("Error creating network interfaces: %s", err) } @@ -164,7 +179,7 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, Disks: 
disks, MachineType: machineTypeUrl, Metadata: metadata, - PartnerMetadata: partnerMetadata, + PartnerMetadata: PartnerMetadata, Name: d.Get("name").(string), Zone: d.Get("zone").(string), NetworkInterfaces: networkInterfaces, @@ -178,6 +193,7 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, Scheduling: scheduling, DeletionProtection: d.Get("deletion_protection").(bool), Hostname: d.Get("hostname").(string), + ForceSendFields: []string{"CanIpForward", "DeletionProtection"}, ConfidentialInstanceConfig: expandConfidentialInstanceConfig(d), AdvancedMachineFeatures: expandAdvancedMachineFeatures(d), ShieldedInstanceConfig: expandShieldedVmConfigs(d), @@ -185,7 +201,6 @@ func expandComputeInstance(project string, d tpgresource.TerraformResourceData, ResourcePolicies: tpgresource.ConvertStringArr(d.Get("resource_policies").([]interface{})), ReservationAffinity: reservationAffinity, KeyRevocationActionType: d.Get("key_revocation_action_type").(string), - InstanceEncryptionKey: expandComputeInstanceEncryptionKey(d), }, nil } @@ -209,7 +224,7 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.Terrafo } disk := &compute.AttachedDisk{ - Source: fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", sourceLink), + Source: sourceLink, } if v, ok := diskConfig["mode"]; ok { @@ -229,15 +244,6 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.Terrafo } } - keyValue, keyOk = diskConfig["disk_encryption_key_rsa"] - if keyOk { - if keyValue != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - RsaEncryptedKey: keyValue.(string), - } - } - } - kmsValue, kmsOk := diskConfig["kms_key_self_link"] if kmsOk { if keyOk && keyValue != "" && kmsValue != "" { @@ -249,18 +255,6 @@ func expandAttachedDisk(diskConfig map[string]interface{}, d tpgresource.Terrafo } } } - - kmsServiceAccount, kmsServiceAccountOk := diskConfig["disk_encryption_service_account"] - if kmsServiceAccountOk { - if 
kmsServiceAccount != "" { - if disk.DiskEncryptionKey == nil { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - KmsKeyServiceAccount: kmsServiceAccount.(string), - } - } - disk.DiskEncryptionKey.KmsKeyServiceAccount = kmsServiceAccount.(string) - } - } return disk, nil } @@ -312,14 +306,6 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C disk.DeviceName = v.(string) } - if v, ok := d.GetOk("boot_disk.0.interface"); ok { - disk.Interface = v.(string) - } - - if v, ok := d.GetOk("boot_disk.0.guest_os_features"); ok { - disk.GuestOsFeatures = expandComputeInstanceGuestOsFeatures(v) - } - if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_raw"); ok { if v != "" { disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ @@ -328,14 +314,6 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C } } - if v, ok := d.GetOk("boot_disk.0.disk_encryption_key_rsa"); ok { - if v != "" { - disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ - RsaEncryptedKey: v.(string), - } - } - } - if v, ok := d.GetOk("boot_disk.0.kms_key_self_link"); ok { if v != "" { disk.DiskEncryptionKey = &compute.CustomerEncryptionKey{ @@ -344,28 +322,12 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C } } - if v, ok := d.GetOk("boot_disk.0.disk_encryption_service_account"); ok { - if v != "" { - disk.DiskEncryptionKey.KmsKeyServiceAccount = v.(string) - } - } - - // disk_encryption_key_sha256 is computed, so it is not converted. 
- if v, ok := d.GetOk("boot_disk.0.source"); ok { - var err error - var source interface { - RelativeLink() string - } - if strings.Contains(v.(string), "regions/") { - source, err = tpgresource.ParseRegionDiskFieldValue(v.(string), d, config) - } else { - source, err = tpgresource.ParseDiskFieldValue(v.(string), d, config) - } + source, err := tpgresource.ParseDiskFieldValue(v.(string), d, config) if err != nil { return nil, err } - disk.Source = fmt.Sprintf("https://www.googleapis.com/compute/v1/%s", source.RelativeLink()) + disk.Source = source.RelativeLink() } if _, ok := d.GetOk("boot_disk.0.initialize_params"); ok { @@ -374,10 +336,6 @@ func expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C } } - if v, ok := d.GetOk("boot_disk.0.initialize_params.0.architecture"); ok { - disk.Architecture = v.(string) - } - if v, ok := d.GetOk("boot_disk.0.mode"); ok { disk.Mode = v.(string) } @@ -398,8 +356,6 @@ func expandScratchDisks(d tpgresource.TerraformResourceData, config *transport_t AutoDelete: true, Type: "SCRATCH", Interface: d.Get(fmt.Sprintf("scratch_disk.%d.interface", i)).(string), - DeviceName: d.Get(fmt.Sprintf("scratch_disk.%d.device_name", i)).(string), - DiskSizeGb: int64(d.Get(fmt.Sprintf("scratch_disk.%d.size", i)).(int)), InitializeParams: &compute.AttachedDiskInitializeParams{ DiskType: diskType.RelativeLink(), }, @@ -415,13 +371,12 @@ func expandStoragePool(v interface{}, d tpgresource.TerraformResourceData, confi return nil, nil } -func GetComputeInstanceDiskCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { +func GetComputeDiskCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (caiasset.Asset, error) { name, err := cai.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/disks/{{name}}") if err != nil { return caiasset.Asset{}, err } if data, err := GetComputeDiskData(d, config); err == nil { - location, _ := 
tpgresource.GetLocation(d, config) return caiasset.Asset{ Name: name, Type: ComputeDiskAssetType, @@ -430,7 +385,6 @@ func GetComputeInstanceDiskCaiObject(d tpgresource.TerraformResourceData, config DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Disk", Data: data, - Location: location, }, }, nil } else { @@ -497,158 +451,3 @@ func GetComputeDiskData(d tpgresource.TerraformResourceData, config *transport_t return diskDetails, nil } - -func expandNetworkInterfacesTgc(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]*compute.NetworkInterface, error) { - configs := d.Get("network_interface").([]interface{}) - ifaces := make([]*compute.NetworkInterface, len(configs)) - for i, raw := range configs { - data := raw.(map[string]interface{}) - - var networkAttachment = "" - network := data["network"].(string) - subnetwork := data["subnetwork"].(string) - if networkAttachmentObj, ok := data["network_attachment"]; ok { - networkAttachment = networkAttachmentObj.(string) - } - // Checks if networkAttachment is not specified in resource, network or subnetwork have to be specified. 
- if networkAttachment == "" && network == "" && subnetwork == "" { - return nil, fmt.Errorf("exactly one of network, subnetwork, or network_attachment must be provided") - } - - ifaces[i] = &compute.NetworkInterface{ - NetworkIP: data["network_ip"].(string), - Network: network, - NetworkAttachment: networkAttachment, - Subnetwork: subnetwork, - AccessConfigs: expandAccessConfigs(data["access_config"].([]interface{})), - AliasIpRanges: expandAliasIpRanges(data["alias_ip_range"].([]interface{})), - NicType: data["nic_type"].(string), - StackType: data["stack_type"].(string), - QueueCount: int64(data["queue_count"].(int)), - Ipv6AccessConfigs: expandIpv6AccessConfigs(data["ipv6_access_config"].([]interface{})), - Ipv6Address: data["ipv6_address"].(string), - InternalIpv6PrefixLength: int64(data["internal_ipv6_prefix_length"].(int)), - } - } - return ifaces, nil -} - -func expandSchedulingTgc(v interface{}) (*compute.Scheduling, error) { - if v == nil { - // We can't set default values for lists. 
- return &compute.Scheduling{ - AutomaticRestart: googleapi.Bool(true), - }, nil - } - - ls := v.([]interface{}) - if len(ls) == 0 { - // We can't set default values for lists - return &compute.Scheduling{ - AutomaticRestart: googleapi.Bool(true), - }, nil - } - - if len(ls) > 1 || ls[0] == nil { - return nil, fmt.Errorf("expected exactly one scheduling block") - } - - original := ls[0].(map[string]interface{}) - scheduling := &compute.Scheduling{ - ForceSendFields: make([]string, 0, 4), - } - - if v, ok := original["automatic_restart"]; ok { - scheduling.AutomaticRestart = googleapi.Bool(v.(bool)) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "AutomaticRestart") - } - - if v, ok := original["preemptible"]; ok { - scheduling.Preemptible = v.(bool) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "Preemptible") - } - - if v, ok := original["on_host_maintenance"]; ok { - scheduling.OnHostMaintenance = v.(string) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnHostMaintenance") - } - - if v, ok := original["node_affinities"]; ok && v != nil { - naSet := v.(*schema.Set).List() - scheduling.NodeAffinities = make([]*compute.SchedulingNodeAffinity, 0) - for _, nodeAffRaw := range naSet { - if nodeAffRaw == nil { - continue - } - nodeAff := nodeAffRaw.(map[string]interface{}) - transformed := &compute.SchedulingNodeAffinity{ - Key: nodeAff["key"].(string), - Operator: nodeAff["operator"].(string), - Values: tpgresource.ConvertStringArr(nodeAff["values"].(*schema.Set).List()), - } - scheduling.NodeAffinities = append(scheduling.NodeAffinities, transformed) - } - } - - if v, ok := original["min_node_cpus"]; ok { - scheduling.MinNodeCpus = int64(v.(int)) - } - if v, ok := original["provisioning_model"]; ok { - scheduling.ProvisioningModel = v.(string) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "ProvisioningModel") - } - if v, ok := original["instance_termination_action"]; ok { - 
scheduling.InstanceTerminationAction = v.(string) - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "InstanceTerminationAction") - } - if v, ok := original["availability_domain"]; ok && v != nil { - scheduling.AvailabilityDomain = int64(v.(int)) - } - if v, ok := original["max_run_duration"]; ok { - transformedMaxRunDuration, err := expandComputeMaxRunDuration(v) - if err != nil { - return nil, err - } - scheduling.MaxRunDuration = transformedMaxRunDuration - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "MaxRunDuration") - } - - if v, ok := original["on_instance_stop_action"]; ok { - transformedOnInstanceStopAction, err := expandComputeOnInstanceStopAction(v) - if err != nil { - return nil, err - } - scheduling.OnInstanceStopAction = transformedOnInstanceStopAction - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "OnInstanceStopAction") - } - if v, ok := original["host_error_timeout_seconds"]; ok { - if v != nil && v != 0 { - scheduling.HostErrorTimeoutSeconds = int64(v.(int)) - } - } - - if v, ok := original["maintenance_interval"]; ok { - scheduling.MaintenanceInterval = v.(string) - } - - if v, ok := original["graceful_shutdown"]; ok { - transformedGracefulShutdown, err := expandGracefulShutdown(v) - if err != nil { - return nil, err - } - scheduling.GracefulShutdown = transformedGracefulShutdown - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "GracefulShutdown") - } - if v, ok := original["local_ssd_recovery_timeout"]; ok { - transformedLocalSsdRecoveryTimeout, err := expandComputeLocalSsdRecoveryTimeout(v) - if err != nil { - return nil, err - } - scheduling.LocalSsdRecoveryTimeout = transformedLocalSsdRecoveryTimeout - scheduling.ForceSendFields = append(scheduling.ForceSendFields, "LocalSsdRecoveryTimeout") - } - if v, ok := original["termination_time"]; ok { - scheduling.TerminationTime = v.(string) - } - return scheduling, nil -} diff --git 
a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go similarity index 93% rename from mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go rename to mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go index cbe380607eb8..a5eb07c5047a 100644 --- a/mmv1/third_party/tgc_next/pkg/services/resourcemanager/project_tfplan2cai.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/converters/services/resourcemanager/project.go @@ -8,15 +8,15 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters/cai" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tpgresource" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" "google.golang.org/api/cloudbilling/v1" "google.golang.org/api/cloudresourcemanager/v1" ) -func ProjectTfplan2caiConverter() cai.Tfplan2caiConverter { - return cai.Tfplan2caiConverter{ +func ResourceConverterProject() cai.ResourceConverter { + return cai.ResourceConverter{ Convert: GetProjectAndBillingInfoCaiObjects, } } diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go index d48e3e96abea..fc6bada7d120 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/models/fake_resource_data_with_meta_test.go @@ -16,7 +16,7 @@ package models import ( "testing" - provider 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" + provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" "github.com/stretchr/testify/assert" ) @@ -24,34 +24,38 @@ func TestFakeResourceDataWithMeta_kind(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, false, - "google_project.test-project", + "google_compute_disk.test-disk", ) - assert.Equal(t, "google_project", d.Kind()) + assert.Equal(t, "google_compute_disk", d.Kind()) } func TestFakeResourceDataWithMeta_id(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, false, - "google_project.test-project", + "google_compute_disk.test-disk", ) assert.Equal(t, d.Id(), "") } @@ -60,37 +64,41 @@ func TestFakeResourceDataWithMeta_get(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": 
"projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, false, - "google_project.test-project", + "google_compute_disk.test-disk", ) - assert.Equal(t, d.Get("name"), "test-project") + assert.Equal(t, d.Get("name"), "test-disk") } func TestFakeResourceDataWithMeta_getOkOk(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, false, - "google_project.test-project", + "google_compute_disk.test-disk", ) res, ok := d.GetOk("name") - assert.Equal(t, "test-project", res) + assert.Equal(t, "test-disk", res) assert.True(t, ok) } @@ -98,16 +106,18 @@ func TestFakeResourceDataWithMeta_getOkNonexistentField(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, false, - "google_project.test-project", + "google_compute_disk.test-disk", ) res, ok := d.GetOk("incorrect") assert.Nil(t, res) @@ -118,19 +128,20 
@@ func TestFakeResourceDataWithMeta_getOkEmptyString(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", - "billing_account": "", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, false, - "google_project.test-project", + "google_compute_disk.test-disk", ) - res, ok := d.GetOk("billing_account") + res, ok := d.GetOk("image") assert.Equal(t, "", res) assert.False(t, ok) } @@ -139,18 +150,28 @@ func TestFakeResourceDataWithMeta_getOkUnsetString(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "my-node-pool", + "location": "us-central1", + "cluster": "projects/my-project-id/global/clusters/my-gke-cluster", + "config": map[string]interface{}{ + "machineType": "n1-standard-1", + "metadata": map[string]string{ + "disable-legacy-endpoints": "true", + }, + "oauthScopes": []string{ + "https://www.googleapis.com/auth/cloud-platform", + }, + "preemptible": true, + }, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_container_cluster", + p.ResourcesMap["google_container_cluster"].Schema, values, false, - "google_project.test-project", + "google_container_cluster.my-node-pool", ) - res, ok := d.GetOk("billing_account") + res, ok := d.GetOk("subnetwork") assert.Equal(t, "", res) assert.False(t, ok) } @@ -332,16 +353,18 @@ func TestFakeResourceDataWithMeta_isDelelted(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": 
"tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, true, - "google_project.test-project", + "google_compute_disk.test-disk", ) assert.Equal(t, true, d.IsDeleted()) } @@ -350,16 +373,18 @@ func TestFakeResourceDataWithMeta_address(t *testing.T) { p := provider.Provider() values := map[string]interface{}{ - "name": "test-project", - "org_id": "529579013760", - "project_id": "tf-test-872899419570852129", + "name": "test-disk", + "type": "pd-ssd", + "zone": "us-central1-a", + "image": "projects/debian-cloud/global/images/debian-8-jessie-v20170523", + "physical_block_size_bytes": 4096, } d := NewFakeResourceDataWithMeta( - "google_project", - p.ResourcesMap["google_project"].Schema, + "google_compute_disk", + p.ResourcesMap["google_compute_disk"].Schema, values, - false, - "google_project.test-project", + true, + "google_compute_disk.test-disk", ) - assert.Equal(t, "google_project.test-project", d.Address()) + assert.Equal(t, "google_compute_disk.test-disk", d.Address()) } diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go index 047c3cd89154..4c2aab4f0937 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/resolvers/default_pre_resolver.go @@ -11,8 +11,8 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/models" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/tfplan" - provider "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/provider" 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + provider "github.com/hashicorp/terraform-provider-google-beta/google-beta/provider" ) var ErrDuplicateAsset = errors.New("duplicate asset") diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go index 87f29fa2a2f8..c1b938e64280 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/transport/getconfig.go @@ -5,7 +5,7 @@ import ( "github.com/pkg/errors" - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" ) func NewConfig(ctx context.Context, project, zone, region string, offline bool, userAgent string) (*transport_tpg.Config, error) { diff --git a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go b/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go deleted file mode 100644 index d0b3f9db3f64..000000000000 --- a/mmv1/third_party/tgc_next/pkg/tgcresource/utils.go +++ /dev/null @@ -1,78 +0,0 @@ -package tgcresource - -import ( - "fmt" - "strings" - - transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/transport" -) - -// Remove the Terraform attribution label "goog-terraform-provisioned" from labels -func RemoveTerraformAttributionLabel(raw interface{}) interface{} { - if raw == nil { - return nil - } - - if labels, ok := raw.(map[string]string); ok { - delete(labels, "goog-terraform-provisioned") - return labels - } - - if labels, ok := raw.(map[string]interface{}); ok { - delete(labels, "goog-terraform-provisioned") - return labels - } - - return nil -} - -// Gets the full url from relative url -func GetFullUrl(config *transport_tpg.Config, raw interface{}, baseUrl string) interface{} { - if raw == nil || baseUrl == "" { - return raw - } - - v := raw.(string) - if v != "" && 
!strings.HasPrefix(v, "https://") { - if config.UniverseDomain == "" || config.UniverseDomain == "googleapis.com" { - return fmt.Sprintf("%s%s", baseUrl, v) - } - } - - return v -} - -// Terraform must set the top level schema field, but since this object contains collapsed properties -// it's difficult to know what the top level should be. Instead we just loop over the map returned from flatten. -func MergeFlattenedProperties(hclData map[string]interface{}, flattenedProp interface{}) error { - if flattenedProp == nil { - return nil - } - flattenedPropSlice, ok := flattenedProp.([]interface{}) - if !ok || len(flattenedPropSlice) == 0 { - return fmt.Errorf("unexpected type returned from flattener: %T", flattenedProp) - } - flattedPropMap, ok := flattenedPropSlice[0].(map[string]interface{}) - if !ok || len(flattedPropMap) == 0 { - return fmt.Errorf("unexpected type returned from flattener: %T", flattenedPropSlice) - } - for k, v := range flattedPropMap { - hclData[k] = v - } - return nil -} - -// Checks if all values in the map are nil -func AllValuesAreNil(m map[string]interface{}) bool { - if len(m) == 0 { - return true - } - - for _, v := range m { - if v != nil { - return false - } - } - - return true -} diff --git a/mmv1/third_party/tgc_next/pkg/transport/config_tgc.go b/mmv1/third_party/tgc_next/pkg/transport/config_tgc.go deleted file mode 100644 index 27dd46309d2b..000000000000 --- a/mmv1/third_party/tgc_next/pkg/transport/config_tgc.go +++ /dev/null @@ -1,5 +0,0 @@ -package transport - -func NewConfig() *Config { - return &Config{} -} diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go deleted file mode 100644 index 9c278dc867b4..000000000000 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ /dev/null @@ -1,484 +0,0 @@ -package test - -import ( - "context" - "fmt" - "log" - "os" - "path/filepath" - "sort" - "strconv" - "strings" - "sync" - "testing" - "time" - - 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl" - cai2hclconverters "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/cai2hcl/converters/utils" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai" - tfplan2caiconverters "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/tfplan2cai/converters" - "github.com/sethvargo/go-retry" - - "go.uber.org/zap" - "go.uber.org/zap/zaptest" - - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -var ( - cacheMutex = sync.Mutex{} - tmpDir = os.TempDir() -) - -func BidirectionalConversion(t *testing.T, ignoredFields []string, ignoredAssetFields []string) { - retries := 0 - flakyAction := func(ctx context.Context) error { - log.Printf("Starting the retry %d", retries) - resourceTestData, primaryResource, err := prepareTestData(t.Name(), retries) - retries++ - if err != nil { - return fmt.Errorf("error preparing the input data: %v", err) - } - - if resourceTestData == nil { - return retry.RetryableError(fmt.Errorf("fail: test data is unavailable")) - } - - // Create a temporary directory for running terraform. - tfDir, err := os.MkdirTemp(tmpDir, "terraform") - if err != nil { - return err - } - defer os.RemoveAll(tfDir) - - logger := zaptest.NewLogger(t) - - // If the primary resource is specified, only test the primary resource. - // Otherwise, test all of the resources in the test. 
- if primaryResource != "" { - t.Logf("Test for the primary resource %s begins.", primaryResource) - err = testSingleResource(t, t.Name(), resourceTestData[primaryResource], tfDir, ignoredFields, ignoredAssetFields, logger, true) - if err != nil { - return err - } - } else { - for _, testData := range resourceTestData { - err = testSingleResource(t, t.Name(), testData, tfDir, ignoredFields, ignoredAssetFields, logger, false) - if err != nil { - return err - } - } - } - - return nil - } - - // Note maxAttempts-1 is retries, not attempts. - backoffPolicy := retry.WithMaxRetries(maxAttempts-1, retry.NewConstant(50*time.Millisecond)) - - t.Log("Starting test with retry logic.") - - if err := retry.Do(context.Background(), backoffPolicy, flakyAction); err != nil { - if strings.Contains(err.Error(), "test data is unavailable") { - t.Skipf("Test skipped because data was unavailable after all retries: %v", err) - } else { - t.Fatalf("Failed after all retries %d: %v", retries, err) - } - } -} - -// Tests a single resource -func testSingleResource(t *testing.T, testName string, testData ResourceTestData, tfDir string, ignoredFields []string, ignoredAssetFields []string, logger *zap.Logger, primaryResource bool) error { - resourceType := testData.ResourceType - var tfplan2caiSupported, cai2hclSupported bool - if _, tfplan2caiSupported = tfplan2caiconverters.ConverterMap[resourceType]; !tfplan2caiSupported { - log.Printf("%s is not supported in tfplan2cai conversion.", resourceType) - } - - if testData.Cai == nil { - log.Printf("SKIP: cai asset is unavailable for resource %s", testData.ResourceAddress) - return nil - } - - assets := make([]caiasset.Asset, 0) - for assetName, assetData := range testData.Cai { - assets = append(assets, assetData.CaiAsset) - assetType := assetData.CaiAsset.Type - if assetType == "" { - return fmt.Errorf("cai asset is unavailable for %s", assetName) - } - if _, cai2hclSupported = cai2hclconverters.ConverterMap[assetType]; !cai2hclSupported { - 
log.Printf("%s is not supported in cai2hcl conversion.", assetType) - } - } - - if !tfplan2caiSupported && !cai2hclSupported { - if primaryResource { - return fmt.Errorf("conversion of the primary resource %s is not supported in tgc", testData.ResourceAddress) - } else { - log.Printf("SKIP: conversion of the resource %s is not supported in tgc.", resourceType) - return nil - } - } - - if !(tfplan2caiSupported && cai2hclSupported) { - return fmt.Errorf("resource %s is supported in either tfplan2cai or cai2hcl within tgc, but not in both", resourceType) - } - - if os.Getenv("WRITE_FILES") != "" { - assetFile := fmt.Sprintf("%s.json", t.Name()) - writeJSONFile(assetFile, assets) - } - - // Step 1: Use cai2hcl to convert export assets into a Terraform configuration (export config). - // Compare all of the fields in raw config are in export config. - - exportConfigData, err := cai2hcl.Convert(assets, &cai2hcl.Options{ - ErrorLogger: logger, - }) - if err != nil { - return fmt.Errorf("error when converting the export assets into export config: %#v", err) - } - - if os.Getenv("WRITE_FILES") != "" { - exportTfFile := fmt.Sprintf("%s_export.tf", t.Name()) - err = os.WriteFile(exportTfFile, exportConfigData, 0644) - if err != nil { - return fmt.Errorf("error writing file %s", exportTfFile) - } - } - - exportTfFilePath := fmt.Sprintf("%s/%s_export.tf", tfDir, t.Name()) - err = os.WriteFile(exportTfFilePath, exportConfigData, 0644) - if err != nil { - return fmt.Errorf("error when writing the file %s", exportTfFilePath) - } - - exportResources, err := parseResourceConfigs(exportTfFilePath) - if err != nil { - return err - } - - if len(exportResources) == 0 { - return fmt.Errorf("missing hcl after cai2hcl conversion for resource %s", testData.ResourceType) - } - - ignoredFieldSet := make(map[string]struct{}, 0) - for _, f := range ignoredFields { - ignoredFieldSet[f] = struct{}{} - } - - parsedExportConfig := exportResources[0].Attributes - missingKeys := 
compareHCLFields(testData.ParsedRawConfig, parsedExportConfig, ignoredFieldSet) - - // Sometimes, the reason for missing fields could be CAI asset data issue. - if len(missingKeys) > 0 { - log.Printf("missing fields in resource %s after cai2hcl conversion:\n%s", testData.ResourceAddress, missingKeys) - return retry.RetryableError(fmt.Errorf("missing fields")) - } - log.Printf("Step 1 passes for resource %s. All of the fields in raw config are in export config", testData.ResourceAddress) - - // Step 2 - // Run a terraform plan using export_config. - // Use tfplan2cai to convert the generated plan into CAI assets (roundtrip_assets). - // Convert roundtrip_assets back into a Terraform configuration (roundtrip_config) using cai2hcl. - // Compare roundtrip_config with export_config to ensure they are identical. - - // Convert the export config to roundtrip assets and then convert the roundtrip assets back to roundtrip config - ancestryCache, defaultProject := getAncestryCache(assets) - roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, defaultProject, logger, ignoredAssetFields) - if err != nil { - return fmt.Errorf("error when converting the round-trip config: %#v", err) - } - - roundtripTfFilePath := fmt.Sprintf("%s_roundtrip.tf", testName) - err = os.WriteFile(roundtripTfFilePath, roundtripConfigData, 0644) - if err != nil { - return fmt.Errorf("error when writing the file %s", roundtripTfFilePath) - } - if os.Getenv("WRITE_FILES") == "" { - defer os.Remove(roundtripTfFilePath) - } - - if diff := cmp.Diff(string(roundtripConfigData), string(exportConfigData)); diff != "" { - log.Printf("Roundtrip config is different from the export config.\nroundtrip config:\n%s\nexport config:\n%s", string(roundtripConfigData), string(exportConfigData)) - return fmt.Errorf("test %s got diff (-want +got): %s", testName, diff) - } - log.Printf("Step 2 passes for resource %s. 
Roundtrip config and export config are identical", testData.ResourceAddress) - - // Step 3 - // Compare most fields between the exported asset and roundtrip asset, except for "data" field for resource - assetMap := convertToAssetMap(assets) - roundtripAssetMap := convertToAssetMap(roundtripAssets) - for assetType, asset := range assetMap { - if roundtripAsset, ok := roundtripAssetMap[assetType]; !ok { - return fmt.Errorf("roundtrip asset for type %s is missing", assetType) - } else { - if err := compareAssetName(asset.Name, roundtripAsset.Name); err != nil { - return err - } - if diff := cmp.Diff( - asset.Resource, - roundtripAsset.Resource, - cmpopts.IgnoreFields(caiasset.AssetResource{}, "Version", "Data", "Location", "DiscoveryDocumentURI"), - // Consider DiscoveryDocumentURI equal if they have the same number of path segments when split by "/". - cmp.FilterPath(func(p cmp.Path) bool { - return p.Last().String() == ".DiscoveryDocumentURI" - }, cmp.Comparer(func(x, y string) bool { - parts1 := strings.Split(x, "/") - parts2 := strings.Split(y, "/") - return len(parts1) == len(parts2) - })), - cmp.FilterPath(func(p cmp.Path) bool { - return p.Last().String() == ".DiscoveryName" - }, cmp.Comparer(func(x, y string) bool { - xParts := strings.Split(x, "/") - yParts := strings.Split(y, "/") - return xParts[len(xParts)-1] == yParts[len(yParts)-1] - })), - ); diff != "" { - return fmt.Errorf("differences found between exported asset and roundtrip asset (-want +got):\n%s", diff) - } - } - } - log.Printf("Step 3 passes for resource %s. 
Exported asset and roundtrip asset are identical", testData.ResourceAddress) - - return nil -} - -// Gets the ancestry cache for tfplan2cai conversion and the default project -func getAncestryCache(assets []caiasset.Asset) (map[string]string, string) { - ancestryCache := make(map[string]string, 0) - defaultProject := "" - - for _, asset := range assets { - ancestors := asset.Ancestors - if len(ancestors) != 0 { - var path string - for i := len(ancestors) - 1; i >= 0; i-- { - curr := ancestors[i] - if path == "" { - path = curr - } else { - path = fmt.Sprintf("%s/%s", path, curr) - } - } - - if _, ok := ancestryCache[ancestors[0]]; !ok { - ancestryCache[ancestors[0]] = path - if defaultProject == "" { - if s, hasPrefix := strings.CutPrefix(ancestors[0], "projects/"); hasPrefix { - defaultProject = s - } - } - } - - project := utils.ParseFieldValue(asset.Name, "projects") - if project != "" { - projectKey := fmt.Sprintf("projects/%s", project) - if strings.HasPrefix(ancestors[0], "projects") && ancestors[0] != projectKey { - if _, ok := ancestryCache[projectKey]; !ok { - ancestryCache[projectKey] = path - } - } - - if defaultProject == "" { - defaultProject = project - } - } - } - } - return ancestryCache, defaultProject -} - -// Compares HCL and finds all of the keys in map1 that are not in map2 -func compareHCLFields(map1, map2, ignoredFields map[string]struct{}) []string { - var missingKeys []string - for key := range map1 { - if isIgnored(key, ignoredFields) { - continue - } - - if _, ok := map2[key]; !ok { - missingKeys = append(missingKeys, key) - } - } - sort.Strings(missingKeys) - return missingKeys -} - -// Returns true if the given key should be ignored according to the given set of ignored fields. -func isIgnored(key string, ignoredFields map[string]struct{}) bool { - // Check for exact match first. - if _, ignored := ignoredFields[key]; ignored { - return true - } - - // Check for partial matches. 
- parts := strings.Split(key, ".") - if len(parts) < 2 { - return false - } - var nonIntegerParts []string - for _, part := range parts { - if _, err := strconv.Atoi(part); err != nil { - nonIntegerParts = append(nonIntegerParts, part) - } - } - var partialKey string - for _, part := range nonIntegerParts { - if partialKey == "" { - partialKey = part - } else { - partialKey += "." + part - } - if _, ignored := ignoredFields[partialKey]; ignored { - return true - } - } - return false -} - -// Converts a tfplan to CAI asset, and then converts the CAI asset into HCL -func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, defaultProject string, logger *zap.Logger, ignoredAssetFields []string) ([]caiasset.Asset, []byte, error) { - fileName := fmt.Sprintf("%s_export", testName) - - // Run terraform init and terraform apply to generate tfplan.json files - terraformWorkflow(t, tfDir, fileName) - - planFile := fmt.Sprintf("%s.tfplan.json", fileName) - planfilePath := filepath.Join(tfDir, planFile) - jsonPlan, err := os.ReadFile(planfilePath) - if err != nil { - return nil, nil, err - } - - ctx := context.Background() - roundtripAssets, err := tfplan2cai.Convert(ctx, jsonPlan, &tfplan2cai.Options{ - ErrorLogger: logger, - Offline: true, - DefaultProject: defaultProject, - DefaultRegion: "", - DefaultZone: "", - UserAgent: "", - AncestryCache: ancestryCache, - }) - - if err != nil { - return nil, nil, err - } - - deleteFieldsFromAssets(roundtripAssets, ignoredAssetFields) - - if os.Getenv("WRITE_FILES") != "" { - roundtripAssetFile := fmt.Sprintf("%s_roundtrip.json", t.Name()) - writeJSONFile(roundtripAssetFile, roundtripAssets) - } - - roundtripConfig, err := cai2hcl.Convert(roundtripAssets, &cai2hcl.Options{ - ErrorLogger: logger, - }) - if err != nil { - return nil, nil, err - } - - return roundtripAssets, roundtripConfig, nil -} - -// Example: -// -// data := map[string]interface{}{ -// "database": map[string]interface{}{ -// 
"host": "localhost", -// "user": "admin", -// }, -// } -// -// Path of "host" in "data" is ["database", "host"] -type Field struct { - Path []string -} - -// Deletes fields from the resource data of CAI assets -func deleteFieldsFromAssets(assets []caiasset.Asset, ignoredResourceDataFields []string) []caiasset.Asset { - // The key is the content type, such as "resource" - ignoredFieldsMap := make(map[string][]Field, 0) - for _, ignoredField := range ignoredResourceDataFields { - parts := strings.Split(ignoredField, ".") - if len(parts) <= 1 { - continue - } - if parts[0] == "RESOURCE" { - if _, ok := ignoredFieldsMap["RESOURCE"]; !ok { - ignoredFieldsMap["RESOURCE"] = make([]Field, 0) - } - f := Field{Path: parts[1:]} - ignoredFieldsMap["RESOURCE"] = append(ignoredFieldsMap["RESOURCE"], f) - } - } - - for _, asset := range assets { - if asset.Resource != nil && asset.Resource.Data != nil { - data := asset.Resource.Data - for _, ignoredField := range ignoredFieldsMap["RESOURCE"] { - path := ignoredField.Path - deleteMapFieldByPath(data, path) - } - } - } - return assets -} - -// Deletes a field from a map by its path. 
-// Example: -// -// data := map[string]interface{}{ -// "database": map[string]interface{}{ -// "host": "localhost", -// "user": "admin", -// }, -// } -// -// path := ["database", "host"] -func deleteMapFieldByPath(data map[string]interface{}, path []string) { - i := 0 - for i < len(path)-1 { - k := path[i] - if v, ok := data[k]; ok { - if data, ok = v.(map[string]interface{}); ok && data != nil { - i++ - } else { - break - } - } else { - break - } - } - if i == len(path)-1 { - delete(data, path[i]) - } -} - -// Compares the asset name in export asset and roundtrip asset and ignores "null" in the name -// Example: //cloudresourcemanager.googleapis.com/projects/123456 -func compareAssetName(want, got string) error { - parts1 := strings.Split(want, "/") - parts2 := strings.Split(got, "/") - if len(parts1) != len(parts2) { - return fmt.Errorf("differences found between two asset names: want %s, got %s", want, got) - } - - for i, part := range parts1 { - if parts2[i] == "null" { - continue - } - - if part != parts2[i] { - return fmt.Errorf("differences found between two asset names: want %s, got %s", want, got) - } - } - return nil -} diff --git a/mmv1/third_party/tgc_next/test/hcl.go b/mmv1/third_party/tgc_next/test/hcl.go deleted file mode 100644 index 8748e33ce1fd..000000000000 --- a/mmv1/third_party/tgc_next/test/hcl.go +++ /dev/null @@ -1,164 +0,0 @@ -package test - -import ( - "fmt" - "log" - "sort" - "strings" - - "github.com/hashicorp/hcl/v2" - "github.com/hashicorp/hcl/v2/hclparse" - "github.com/hashicorp/hcl/v2/hclsyntax" -) - -func parseHCLBytes(src []byte, filePath string) (map[string]map[string]struct{}, error) { - parser := hclparse.NewParser() - hclFile, diags := parser.ParseHCL(src, filePath) - if diags.HasErrors() { - return nil, fmt.Errorf("parse HCL: %w", diags) - } - - if hclFile == nil { - return nil, fmt.Errorf("parsed HCL file %s is nil cannot proceed", filePath) - } - - parsed := make(map[string]map[string]struct{}) - - for _, block := range 
hclFile.Body.(*hclsyntax.Body).Blocks { - if block.Type == "resource" { - if len(block.Labels) != 2 { - log.Printf("Skipping address block with unexpected number of labels: %v", block.Labels) - continue - } - - resType := block.Labels[0] - resName := block.Labels[1] - addr := fmt.Sprintf("%s.%s", resType, resName) - attrs, procDiags := parseHCLBody(block.Body) - - if procDiags.HasErrors() { - log.Printf("Diagnostics while processing address %s.%s body in %s:", resType, resName, filePath) - for _, diag := range procDiags { - log.Printf(" - %s (Severity)", diag.Error()) - } - } - - flattenedAttrs := make(map[string]struct{}) - flatten(attrs, "", flattenedAttrs) - parsed[addr] = flattenedAttrs - } - } - return parsed, nil -} - -// parseHCLBody recursively parses attributes and nested blocks from an HCL body. -func parseHCLBody(body hcl.Body) ( - attributes map[string]any, - diags hcl.Diagnostics, -) { - attributes = make(map[string]any) - var allDiags hcl.Diagnostics - - if syntaxBody, ok := body.(*hclsyntax.Body); ok { - for _, attr := range syntaxBody.Attributes { - insert(struct{}{}, attr.Name, attributes) - } - - for _, block := range syntaxBody.Blocks { - nestedAttr, diags := parseHCLBody(block.Body) - if diags.HasErrors() { - allDiags = append(allDiags, diags...) - } - - insert(nestedAttr, block.Type, attributes) - } - } else { - allDiags = append(allDiags, &hcl.Diagnostic{ - Severity: hcl.DiagWarning, - Summary: "Body type assertion to *hclsyntax.Body failed", - Detail: fmt.Sprintf("Cannot directly parse attributes for body of type %T. Attribute parsing may be incomplete.", body), - }) - } - - return attributes, allDiags -} - -func insert(data any, key string, parent map[string]any) { - if existing, ok := parent[key]; ok { - if existingSlice, ok := existing.([]any); ok { - parent[key] = append(existingSlice, data) - } else { - // Until we see a second instance of a repeated block or attribute, it will look non-repeated. 
- parent[key] = []any{existing, data} - } - } else { - parent[key] = data - } -} - -func flatten(data any, prefix string, result map[string]struct{}) { - switch v := data.(type) { - case map[string]any: - for key, value := range v { - newPrefix := key - if prefix != "" { - newPrefix = prefix + "." + key - } - flatten(value, newPrefix, result) - } - case []any: - flattenSlice(prefix, v, result) - default: - if prefix != "" { - result[prefix] = struct{}{} - } - } -} - -func flattenSlice(prefix string, v []any, result map[string]struct{}) { - if len(v) == 0 && prefix != "" { - result[prefix] = struct{}{} - return - } - - type sortableElement struct { - flatKeys string - flattened map[string]struct{} - } - - sortable := make([]sortableElement, len(v)) - for i, value := range v { - flattened := make(map[string]struct{}) - flatten(value, "", flattened) - keys := make([]string, 0, len(flattened)) - for k := range flattened { - keys = append(keys, k) - } - sort.Strings(keys) - sortable[i] = sortableElement{ - flatKeys: strings.Join(keys, ";"), - flattened: flattened, - } - } - - sort.Slice(sortable, func(i, j int) bool { - return sortable[i].flatKeys < sortable[j].flatKeys - }) - - for i, element := range sortable { - newPrefix := fmt.Sprintf("%s.%d", prefix, i) - if len(element.flattened) == 0 { - if newPrefix != "" { - result[newPrefix] = struct{}{} - } - } else { - for k := range element.flattened { - newKey := newPrefix - if k != "" { - newKey = newPrefix + "." 
+ k - } - result[newKey] = struct{}{} - } - } - } -} diff --git a/mmv1/third_party/tgc_next/test/hcl_test.go b/mmv1/third_party/tgc_next/test/hcl_test.go deleted file mode 100644 index b6d4b7f2fb18..000000000000 --- a/mmv1/third_party/tgc_next/test/hcl_test.go +++ /dev/null @@ -1,180 +0,0 @@ -package test - -import ( - "testing" - - "github.com/google/go-cmp/cmp" -) - -var ( - basicHCL = ` -resource "google_project_service" "project" { - service = "iam.googleapis.com" -} -` - nestedBlocksHCL = ` -resource "google_storage_bucket" "bucket" { - name = "my-bucket" - location = "US" - force_destroy = true - - lifecycle_rule { - action { - type = "Delete" - } - condition { - age = 30 - } - } -} -` - multipleResourcesHCL = ` -resource "google_project_service" "project" { - service = "iam.googleapis.com" -} - -resource "google_storage_bucket" "bucket" { - name = "my-bucket" -} -` - listOfNestedObjectsHCL = ` -resource "google_compute_firewall" "default" { - name = "test-firewall" - network = google_compute_network.default.name - - allow { - protocol = "icmp" - } - - allow { - protocol = "tcp" - ports = ["80", "8080", "1000-2000"] - } - - source_tags = ["web"] -} -` - listOfMultiLevelNestedObjectsHCL = ` -resource "google_compute_firewall" "default" { - name = "test-firewall" - network = google_compute_network.default.name - - allow { - protocol = "tcp" - ports = ["80", "8080", "1000-2000"] - } - - allow { - protocol = "icmp" - a_second_level { - b = true - } - a_second_level { - a = false - } - } - - source_tags = ["web"] -} -` -) - -func TestParseHCLBytes(t *testing.T) { - t.Parallel() - cases := []struct { - name string - hcl string - exp map[string]map[string]struct{} - expectErr bool - }{ - { - name: "basic", - hcl: basicHCL, - exp: map[string]map[string]struct{}{ - "google_project_service.project": { - "service": {}, - }, - }, - }, - { - name: "nested blocks", - hcl: nestedBlocksHCL, - exp: map[string]map[string]struct{}{ - "google_storage_bucket.bucket": { - "name": 
{}, - "location": {}, - "force_destroy": {}, - "lifecycle_rule.action.type": {}, - "lifecycle_rule.condition.age": {}, - }, - }, - }, - { - name: "multiple resources", - hcl: multipleResourcesHCL, - exp: map[string]map[string]struct{}{ - "google_project_service.project": { - "service": {}, - }, - "google_storage_bucket.bucket": { - "name": {}, - }, - }, - }, - { - name: "resource with a list of nested objects", - hcl: listOfNestedObjectsHCL, - exp: map[string]map[string]struct{}{ - "google_compute_firewall.default": { - "allow.0.ports": {}, // "ports" appears in first element due to sorting - "allow.0.protocol": {}, - "allow.1.protocol": {}, - "name": {}, - "network": {}, - "source_tags": {}, - }, - }, - }, - { - name: "resource with a list of multi-level nested objects", - hcl: listOfMultiLevelNestedObjectsHCL, - exp: map[string]map[string]struct{}{ - "google_compute_firewall.default": { - "allow.0.a_second_level.0.a": {}, - "allow.0.a_second_level.1.b": {}, - "allow.0.protocol": {}, - "allow.1.ports": {}, - "allow.1.protocol": {}, - "name": {}, - "network": {}, - "source_tags": {}, - }, - }, - }, - { - name: "invalid hcl", - hcl: `resource "google_project_service" "project" {`, - expectErr: true, - }, - } - - for _, tc := range cases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - got, err := parseHCLBytes([]byte(tc.hcl), "test.hcl") - if tc.expectErr { - if err == nil { - t.Fatal("expected error, got nil") - } - return - } - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - if diff := cmp.Diff(tc.exp, got); diff != "" { - t.Errorf("unexpected diff (-want +got): %s", diff) - } - }) - } -} diff --git a/mmv1/third_party/tgc_next/test/setup.go b/mmv1/third_party/tgc_next/test/setup.go deleted file mode 100644 index 50c2ecc5a9ed..000000000000 --- a/mmv1/third_party/tgc_next/test/setup.go +++ /dev/null @@ -1,249 +0,0 @@ -package test - -import ( - "context" - "encoding/json" - "fmt" - "io" - "log" - "os" - "strings" - "time" - - 
"cloud.google.com/go/storage" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/pkg/caiasset" -) - -type ResourceMetadata struct { - CaiAssetNames []string `json:"cai_asset_names"` - ResourceType string `json:"resource_type"` - ResourceAddress string `json:"resource_address"` - Service string `json:"service"` - Cai map[string]*CaiData `json:"cai_data,omitempty"` // Holds the fetched CAI assets data -} - -// CaiData holds the fetched CAI asset and related error information. -type CaiData struct { - CaiAsset caiasset.Asset `json:"cai_asset,omitempty"` -} - -type NightlyRun struct { - MetadataByTest map[string]TgcMetadataPayload - Date time.Time -} - -type TgcMetadataPayload struct { - TestName string `json:"test_name"` - RawConfig string `json:"raw_config"` - ResourceMetadata map[string]*ResourceMetadata `json:"resource_metadata"` - PrimaryResource string `json:"primary_resource"` -} - -type ResourceTestData struct { - ParsedRawConfig map[string]struct{} `json:"parsed_raw_config"` - ResourceMetadata `json:"resource_metadata"` -} - -type Resource struct { - Type string `json:"type"` - Name string `json:"name"` - Attributes map[string]struct{} `json:"attributes"` -} - -const ( - ymdFormat = "2006-01-02" - maxAttempts = 3 -) - -var ( - TestsMetadata = make([]NightlyRun, maxAttempts) - setupDone = false -) - -func ReadTestsDataFromGcs() ([]NightlyRun, error) { - if !setupDone { - bucketName := "cai_assets_metadata" - currentDate := time.Now() - ctx := context.Background() - client, err := storage.NewClient(ctx) - if err != nil { - return nil, fmt.Errorf("storage.NewClient: %v", err) - } - defer client.Close() - - bucket := client.Bucket(bucketName) - - var allErrs error - retries := 0 - for i := 0; i < len(TestsMetadata); i++ { - metadata, err := readTestsDataFromGCSForRun(ctx, currentDate, bucketName, bucket) - if err != nil { - if allErrs == nil { - allErrs = fmt.Errorf("reading tests data from gcs: %v", err) - } else { - allErrs = fmt.Errorf("%v, %v", 
allErrs, err) - } - } - if metadata == nil { - // Keep looking until we find a date with metadata. - i-- - retries++ - if retries > maxAttempts { - // Stop looking when we find maxAttempts dates with no metadata. - return nil, fmt.Errorf("too many retries, %v", allErrs) - } - } else { - TestsMetadata[i] = NightlyRun{ - MetadataByTest: metadata, - Date: currentDate, - } - } - currentDate = currentDate.AddDate(0, 0, -1) - } - - if allErrs != nil { - return nil, allErrs - } - - if os.Getenv("WRITE_FILES") != "" { - writeJSONFile("../../tests_metadata.json", TestsMetadata) - } - setupDone = true - } - return TestsMetadata, nil -} - -func readTestsDataFromGCSForRun(ctx context.Context, currentDate time.Time, bucketName string, bucket *storage.BucketHandle) (map[string]TgcMetadataPayload, error) { - metadata := make(map[string]TgcMetadataPayload) - objectName := fmt.Sprintf("nightly_tests/%s/nightly_tests_meta.json", currentDate.Format(ymdFormat)) - log.Printf("Read object %s from the bucket %s", objectName, bucketName) - - rc, err := bucket.Object(objectName).NewReader(ctx) - if err != nil { - if err == storage.ErrObjectNotExist { - log.Printf("Object '%s' in bucket '%s' does NOT exist.\n", objectName, bucketName) - return nil, nil - } else { - return nil, fmt.Errorf("Object(%q).NewReader: %v", objectName, err) - } - } - defer rc.Close() - - data, err := io.ReadAll(rc) - if err != nil { - return nil, fmt.Errorf("io.ReadAll: %v", err) - } - - err = json.Unmarshal(data, &metadata) - if err != nil { - return nil, fmt.Errorf("json.Unmarshal: %v", err) - } - - return metadata, nil -} - -func prepareTestData(testName string, retries int) (map[string]ResourceTestData, string, error) { - var err error - cacheMutex.Lock() - defer cacheMutex.Unlock() - TestsMetadata, err = ReadTestsDataFromGcs() - if err != nil { - return nil, "", err - } - - var testMetadata TgcMetadataPayload - var resourceMetadata map[string]*ResourceMetadata - - run := TestsMetadata[retries] - testMetadata, 
ok := run.MetadataByTest[testName] - if !ok { - log.Printf("Data of test is unavailable: %s", testName) - return nil, "", nil - } - resourceMetadata = testMetadata.ResourceMetadata - if len(resourceMetadata) == 0 { - log.Printf("Data of resource is unavailable: %s", testName) - return nil, "", nil - } - - log.Printf("Found metadata for %s from run on %s", testName, run.Date.Format(ymdFormat)) - - rawTfFile := fmt.Sprintf("%s.tf", testName) - err = os.WriteFile(rawTfFile, []byte(testMetadata.RawConfig), 0644) - if err != nil { - return nil, "", fmt.Errorf("error writing to file %s: %#v", rawTfFile, err) - } - if os.Getenv("WRITE_FILES") == "" { - defer os.Remove(rawTfFile) - } - - rawResourceConfigs, err := parseResourceConfigs(rawTfFile) - if err != nil { - return nil, "", fmt.Errorf("error parsing resource configs: %#v", err) - } - - if len(rawResourceConfigs) == 0 { - return nil, "", fmt.Errorf("test %s fails: raw config is unavailable", testName) - } - - rawConfigMap := convertToConfigMap(rawResourceConfigs) - - resourceTestData := make(map[string]ResourceTestData, 0) - for address, metadata := range resourceMetadata { - resourceTestData[address] = ResourceTestData{ - ParsedRawConfig: rawConfigMap[address], - ResourceMetadata: *metadata, - } - } - - return resourceTestData, testMetadata.PrimaryResource, nil -} - -// Parses a Terraform configuation file written with HCL -func parseResourceConfigs(filePath string) ([]Resource, error) { - src, err := os.ReadFile(filePath) - if err != nil { - return nil, fmt.Errorf("failed to read file %s: %s", filePath, err) - } - - topLevel, err := parseHCLBytes(src, filePath) - if err != nil { - return nil, fmt.Errorf("failed to parse hcl bytes: %s", err) - } - - var allParsedResources []Resource - for addr, attrs := range topLevel { - addrParts := strings.Split(addr, ".") - if len(addrParts) != 2 { - return nil, fmt.Errorf("invalid resource address %s", addr) - } - allParsedResources = append(allParsedResources, Resource{ - 
Type: addrParts[0], - Name: addrParts[1], - Attributes: attrs, - }) - } - return allParsedResources, nil -} - -// Converts the slice to map with resource address as the key -func convertToConfigMap(resources []Resource) map[string]map[string]struct{} { - configMap := make(map[string]map[string]struct{}, 0) - - for _, r := range resources { - addr := fmt.Sprintf("%s.%s", r.Type, r.Name) - configMap[addr] = r.Attributes - } - - return configMap -} - -// Converts the slice of assets to map with the asset name as the key -func convertToAssetMap(assets []caiasset.Asset) map[string]caiasset.Asset { - assetMap := make(map[string]caiasset.Asset) - - for _, asset := range assets { - assetMap[asset.Type] = asset - } - return assetMap -} diff --git a/mmv1/third_party/tgc_next/test/utils.go b/mmv1/third_party/tgc_next/test/utils.go deleted file mode 100644 index d11248f0ae7c..000000000000 --- a/mmv1/third_party/tgc_next/test/utils.go +++ /dev/null @@ -1,104 +0,0 @@ -package test - -import ( - "bytes" - "encoding/json" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "testing" -) - -// Writes the data into a JSON file -func writeJSONFile(filename string, data interface{}) error { - jsonData, err := json.MarshalIndent(data, "", " ") - if err != nil { - return fmt.Errorf("Error marshaling data for %s: %v\n", filename, err) - } - - err = os.WriteFile(filename, jsonData, 0644) - if err != nil { - return fmt.Errorf("Error writing to file %s: %v\n", filename, err) - } - return nil -} - -const ( - defaultOrganization = "529579013760" - defaultProject = "ci-test-project-nightly-beta" -) - -func terraformWorkflow(t *testing.T, dir, name string) { - terraformInit(t, "terraform", dir) - terraformPlan(t, "terraform", dir, name+".tfplan") - payload := terraformShow(t, "terraform", dir, name+".tfplan") - saveFile(t, dir, name+".tfplan.json", payload) -} - -func terraformInit(t *testing.T, executable, dir string) { - terraformExec(t, executable, dir, "init", "-input=false") -} - 
-func terraformPlan(t *testing.T, executable, dir, tfplan string) { - terraformExec(t, executable, dir, "plan", "-input=false", "-refresh=false", "-out", tfplan) -} - -func terraformShow(t *testing.T, executable, dir, tfplan string) []byte { - return terraformExec(t, executable, dir, "show", "--json", tfplan) -} - -func terraformExec(t *testing.T, executable, dir string, args ...string) []byte { - cmd := exec.Command(executable, args...) - cmd.Env = []string{ - "HOME=" + filepath.Join(dir, "fakehome"), - "GOOGLE_PROJECT=" + defaultProject, - "GOOGLE_FOLDER=" + "", - "GOOGLE_ORG=" + defaultOrganization, - "GOOGLE_OAUTH_ACCESS_TOKEN=fake-token", // GOOGLE_OAUTH_ACCESS_TOKEN is required so terraform plan does not require the google authentication cert - } - if os.Getenv("TF_CLI_CONFIG_FILE") != "" { - cmd.Env = append(cmd.Env, "TF_CLI_CONFIG_FILE="+os.Getenv("TF_CLI_CONFIG_FILE")) - } - cmd.Dir = dir - wantError := false - payload, _ := run(t, cmd, wantError) - return payload -} - -func saveFile(t *testing.T, dir, filename string, payload []byte) { - fullpath := filepath.Join(dir, filename) - f, err := os.Create(fullpath) - if err != nil { - t.Fatalf("error while creating file %s, error %v", fullpath, err) - } - _, err = f.Write(payload) - if err != nil { - t.Fatalf("error while writing to file %s, error %v", fullpath, err) - } -} - -// run a command and call t.Fatal on non-zero exit. -func run(t *testing.T, cmd *exec.Cmd, wantError bool) ([]byte, []byte) { - var stderr, stdout bytes.Buffer - cmd.Stderr, cmd.Stdout = &stderr, &stdout - err := cmd.Run() - if gotError := (err != nil); gotError != wantError { - t.Fatalf("running %s: \nerror=%v \nstderr=%s \nstdout=%s", cmd.String(), err, stderr.String(), stdout.String()) - } - // Print env, stdout and stderr if verbose flag is used. 
- if len(cmd.Env) != 0 { - t.Logf("=== Environment Variable of %s ===", cmd.String()) - t.Log(strings.Join(cmd.Env, "\n")) - } - if stdout.String() != "" { - t.Logf("=== STDOUT of %s ===", cmd.String()) - t.Log(stdout.String()) - } - if stderr.String() != "" { - t.Logf("=== STDERR of %s ===", cmd.String()) - t.Log(stderr.String()) - } - return stdout.Bytes(), stderr.Bytes() -} diff --git a/mmv1/validate_third_party_test.go b/mmv1/validate_third_party_test.go deleted file mode 100644 index 2c94ae01c32c..000000000000 --- a/mmv1/validate_third_party_test.go +++ /dev/null @@ -1,86 +0,0 @@ -package main - -import ( - "os" - "path/filepath" - "regexp" - "runtime" - "testing" -) - -func TestTemplatesStillNeedToBeTemplates(t *testing.T) { - // Get the directory where this test file is located - _, testFilePath, _, ok := runtime.Caller(0) - if !ok { - t.Fatal("Failed to get current test file path") - } - testDir := filepath.Dir(testFilePath) - - // Define the third_party directory relative to the test file - thirdPartyDir := filepath.Join(testDir, "third_party", "terraform") - - // Regular expression to match Go template syntax - templateSyntaxRegex := regexp.MustCompile(`\{\{.*?\}\}`) - - // Track files that no longer need to be templates - unnecessaryTemplates := []string{} - - // Walk through the third_party directory - err := filepath.Walk(thirdPartyDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - // Handle case where third_party directory doesn't exist - if os.IsNotExist(err) && path == thirdPartyDir { - t.Logf("Warning: third_party directory not found at %s", thirdPartyDir) - return nil - } - return err - } - - // Skip directories - if info.IsDir() { - return nil - } - - // Only check .tmpl files - if filepath.Ext(path) != ".tmpl" { - return nil - } - - // Read file content - content, err := os.ReadFile(path) - if err != nil { - t.Logf("Error reading file %s: %v", path, err) - return nil - } - - // Check if file contains any Go template 
syntax - hasTemplateSyntax := templateSyntaxRegex.Match(content) - - // If no template syntax found, add to the list - if !hasTemplateSyntax { - // Get relative path for cleaner output - relPath, _ := filepath.Rel(testDir, path) - unnecessaryTemplates = append(unnecessaryTemplates, relPath) - } - - return nil - }) - - if err != nil { - t.Fatalf("Error walking directory: %v", err) - } - - // Output results at the end - if len(unnecessaryTemplates) > 0 { - t.Errorf("\nThe following %d .tmpl files in third_party directory don't contain any template syntax "+ - "and no longer need to be templates:\n", len(unnecessaryTemplates)) - - for _, file := range unnecessaryTemplates { - t.Errorf(" - %s", file) - } - - t.Errorf("\nConsider removing the .tmpl extension from these files.") - } else { - t.Logf("All .tmpl files in third_party directory properly contain template syntax.") - } -} diff --git a/tools/diff-processor/breaking_changes/breaking_changes.go b/tools/diff-processor/breaking_changes/breaking_changes.go index 1131dd4ffb8b..633a3fd4e652 100644 --- a/tools/diff-processor/breaking_changes/breaking_changes.go +++ b/tools/diff-processor/breaking_changes/breaking_changes.go @@ -45,8 +45,7 @@ func ComputeBreakingChanges(schemaDiff diff.SchemaDiff) []BreakingChange { for field, fieldDiff := range resourceDiff.Fields { for _, rule := range FieldDiffRules { - rd := schemaDiff[resource] - for _, message := range rule.Messages(resource, field, fieldDiff, rd) { + for _, message := range rule.Messages(resource, field, fieldDiff) { breakingChanges = append(breakingChanges, NewBreakingChange(message, rule.Identifier)) } } diff --git a/tools/diff-processor/breaking_changes/field_diff.go b/tools/diff-processor/breaking_changes/field_diff.go index 68857a77b886..0f4536558809 100644 --- a/tools/diff-processor/breaking_changes/field_diff.go +++ b/tools/diff-processor/breaking_changes/field_diff.go @@ -13,15 +13,13 @@ import ( // regarding field attribute changes type FieldDiffRule 
struct { Identifier string - Messages func(resource, field string, fieldDiff diff.FieldDiff, resourceDiff diff.ResourceDiffInterface) []string + Messages func(resource, field string, fieldDiff diff.FieldDiff) []string } // FieldDiffRules is a list of FieldDiffRule // guarding against provider breaking changes var FieldDiffRules = []FieldDiffRule{ FieldChangingType, - FieldNewRequired, - FieldNewOptionalFieldWithDefault, FieldBecomingRequired, FieldBecomingComputedOnly, FieldOptionalComputedToOptional, @@ -36,7 +34,7 @@ var FieldChangingType = FieldDiffRule{ Messages: FieldChangingTypeMessages, } -func FieldChangingTypeMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldChangingTypeMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // Type change doesn't matter for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -64,7 +62,7 @@ var FieldBecomingRequired = FieldDiffRule{ Messages: FieldBecomingRequiredMessages, } -func FieldBecomingRequiredMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldBecomingRequiredMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // Ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -82,7 +80,7 @@ var FieldBecomingComputedOnly = FieldDiffRule{ Messages: FieldBecomingComputedOnlyMessages, } -func FieldBecomingComputedOnlyMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldBecomingComputedOnlyMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -105,7 +103,7 @@ var FieldOptionalComputedToOptional = FieldDiffRule{ Messages: FieldOptionalComputedToOptionalMessages, } -func FieldOptionalComputedToOptionalMessages(resource, field 
string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldOptionalComputedToOptionalMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -122,7 +120,7 @@ var FieldDefaultModification = FieldDiffRule{ Messages: FieldDefaultModificationMessages, } -func FieldDefaultModificationMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldDefaultModificationMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -157,7 +155,7 @@ var FieldGrowingMin = FieldDiffRule{ Messages: FieldGrowingMinMessages, } -func FieldGrowingMinMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldGrowingMinMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -179,7 +177,7 @@ var FieldShrinkingMax = FieldDiffRule{ Messages: FieldShrinkingMaxMessages, } -func FieldShrinkingMaxMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldShrinkingMaxMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -204,7 +202,7 @@ var FieldRemovingDiffSuppress = FieldDiffRule{ Messages: FieldRemovingDiffSuppressMessages, } -func FieldRemovingDiffSuppressMessages(resource, field string, fieldDiff diff.FieldDiff, _ diff.ResourceDiffInterface) []string { +func FieldRemovingDiffSuppressMessages(resource, field string, fieldDiff diff.FieldDiff) []string { // ignore for added / removed fields if fieldDiff.Old == nil || fieldDiff.New == nil { return nil @@ -216,44 +214,3 
@@ func FieldRemovingDiffSuppressMessages(resource, field string, fieldDiff diff.Fi } return nil } - -var FieldNewRequired = FieldDiffRule{ - Identifier: "no-new-required", - Messages: FieldNewRequiredMessages, -} - -func FieldNewRequiredMessages(resource, field string, fieldDiff diff.FieldDiff, resourceDiff diff.ResourceDiffInterface) []string { - if resourceDiff.IsNewResource() || resourceDiff.IsFieldInNewNestedStructure(field) { - return nil - } - - // This rule applies to newly added fields (Old == nil). - if fieldDiff.Old == nil { - if fieldDiff.New.Required { - tmpl := "Field `%s` added as required on pre-existing resource `%s`" - return []string{fmt.Sprintf(tmpl, field, resource)} - } - } - return nil -} - -var FieldNewOptionalFieldWithDefault = FieldDiffRule{ - Identifier: "no-new-optional-default", - Messages: FieldNewOptionalFieldWithDefaultMessages, -} - -func FieldNewOptionalFieldWithDefaultMessages(resource, field string, fieldDiff diff.FieldDiff, resourceDiff diff.ResourceDiffInterface) []string { - if resourceDiff.IsNewResource() || resourceDiff.IsFieldInNewNestedStructure(field) { - return nil - } - - // This rule applies to newly added fields (Old == nil). - if fieldDiff.Old == nil { - if fieldDiff.New.Optional && fieldDiff.New.Default != nil && fieldDiff.New.ForceNew { - tmpl := "Field `%s` added as optional with a default value and force new on pre-existing resource `%s`. 
" + - "This can be allowed if there is a confirmed API-level default that matches the schema default" - return []string{fmt.Sprintf(tmpl, field, resource)} - } - } - return nil -} diff --git a/tools/diff-processor/breaking_changes/field_diff_test.go b/tools/diff-processor/breaking_changes/field_diff_test.go index 1e7c7b092ac1..3e355aa9da91 100644 --- a/tools/diff-processor/breaking_changes/field_diff_test.go +++ b/tools/diff-processor/breaking_changes/field_diff_test.go @@ -12,7 +12,6 @@ type fieldTestCase struct { name string oldField *schema.Schema newField *schema.Schema - resourceDiff diff.ResourceDiffInterface expectedViolation bool messageRegex string // Optional regex to validate the message content } @@ -95,133 +94,8 @@ var FieldBecomingRequiredTestCases = []fieldTestCase{ }, } -func TestFieldNewRequired(t *testing.T) { - for _, tc := range FieldNewRequiredTestCases { - tc.check(FieldNewRequired, t) - } -} - -var FieldNewRequiredTestCases = []fieldTestCase{ - { - name: "existing resource - field added as required", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Required: true, - }, - resourceDiff: existingResourceSchemaDiff, - expectedViolation: true, - }, - { - name: "new resource - field added as required but is new resource", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Required: true, - }, - resourceDiff: newResourceSchemaDiff, - expectedViolation: false, - }, - { - name: "field in new nested structure - field added as required", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Required: true, - }, - resourceDiff: fieldInNewStructureSchemaDiff, - expectedViolation: false, - }, -} - -func TestFieldNewOptionalWithDefault(t *testing.T) { - for _, tc := range FieldNewOptionalWithDefaultTestCases { - tc.check(FieldNewOptionalFieldWithDefault, t) - } -} - -var FieldNewOptionalWithDefaultTestCases = []fieldTestCase{ - { - name: "existing resource - new field added as optional with default and 
forcenew", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: "abc", - ForceNew: true, - }, - resourceDiff: existingResourceSchemaDiff, - expectedViolation: true, - }, - { - name: "existing resource - new field added as optional with falsey default and forcenew", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: false, - ForceNew: true, - }, - resourceDiff: existingResourceSchemaDiff, - expectedViolation: true, - }, - { - name: "existing resource - new field added as optional with default", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: "abc", - }, - resourceDiff: existingResourceSchemaDiff, - expectedViolation: false, - }, - { - name: "existing resource - new field added as optional with falsey default", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: false, - }, - resourceDiff: existingResourceSchemaDiff, - expectedViolation: false, - }, - { - name: "new resource - new field added as optional with default", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: "abc", - }, - resourceDiff: newResourceSchemaDiff, - expectedViolation: false, - }, - { - name: "new resource - new field added as optional with falsey default", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: false, - }, - resourceDiff: newResourceSchemaDiff, - expectedViolation: false, - }, - { - name: "field in new nested structure - new field added as optional with default and forcenew", - oldField: nil, - newField: &schema.Schema{ - Description: "beep", - Optional: true, - Default: "abc", - ForceNew: true, - }, - resourceDiff: fieldInNewStructureSchemaDiff, - expectedViolation: false, - }, -} +// !! min max ? 
+// isRuleBreak: FieldOptionalComputedToOptional_func, func TestFieldChangingType(t *testing.T) { for _, tc := range FieldChangingTypeTestCases { @@ -720,7 +594,7 @@ var FieldShrinkingMaxTestCases = []fieldTestCase{ // Extended check method that also validates message content when expected func (tc *fieldTestCase) check(rule FieldDiffRule, t *testing.T) { - messages := rule.Messages("resource", "field", diff.FieldDiff{Old: tc.oldField, New: tc.newField}, tc.resourceDiff) + messages := rule.Messages("resource", "field", diff.FieldDiff{Old: tc.oldField, New: tc.newField}) violation := len(messages) > 0 // Check violation expectation diff --git a/tools/diff-processor/breaking_changes/mock_schema_diff_test.go b/tools/diff-processor/breaking_changes/mock_schema_diff_test.go deleted file mode 100644 index 9bc1f187de71..000000000000 --- a/tools/diff-processor/breaking_changes/mock_schema_diff_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package breaking_changes - -// MockSchemaDiff implements the diff.SchemaDiff interface for testing -type MockSchemaDiff struct { - isNewResource bool - fieldsInNewStructure map[string]bool // Maps field names to whether they're in a new structure -} - -func (sd MockSchemaDiff) IsNewResource() bool { - return sd.isNewResource -} - -func (sd MockSchemaDiff) IsFieldInNewNestedStructure(field string) bool { - return sd.fieldsInNewStructure[field] -} - -// Create mock schema diffs for testing -var ( - // Mock for existing resource (not new, field not in new structure) - existingResourceSchemaDiff = MockSchemaDiff{ - isNewResource: false, - fieldsInNewStructure: make(map[string]bool), - } - - // Mock for new resource - newResourceSchemaDiff = MockSchemaDiff{ - isNewResource: true, - fieldsInNewStructure: make(map[string]bool), - } - - // Mock for field in new nested structure - fieldInNewStructureSchemaDiff = MockSchemaDiff{ - isNewResource: false, - fieldsInNewStructure: map[string]bool{"field": true}, - } -) diff --git 
a/tools/diff-processor/breaking_changes/resource_diff.go b/tools/diff-processor/breaking_changes/resource_diff.go index 4aa1c7264de2..3b9a029493c4 100644 --- a/tools/diff-processor/breaking_changes/resource_diff.go +++ b/tools/diff-processor/breaking_changes/resource_diff.go @@ -2,7 +2,6 @@ package breaking_changes import ( "fmt" - "strings" "github.com/GoogleCloudPlatform/magic-modules/tools/diff-processor/diff" ) @@ -44,111 +43,38 @@ var AddingExactlyOneOf = ResourceDiffRule{ } func AddingExactlyOneOfMessages(resource string, resourceDiff diff.ResourceDiff) []string { - messages := []string{} - - for newKey, newSet := range resourceDiff.FieldSets.New.ExactlyOneOf { - if _, ok := resourceDiff.FieldSets.Old.ExactlyOneOf[newKey]; ok { - continue // Unchanged EOO. + var messages []string + newFieldSets := make(map[string]diff.FieldSet) // Set of field sets in new and not in old. + oldFieldSets := make(map[string]diff.FieldSet) // Set of field sets in old and not in new. + for key, fieldSet := range resourceDiff.FieldSets.New.ExactlyOneOf { + if _, ok := resourceDiff.FieldSets.Old.ExactlyOneOf[key]; !ok { + newFieldSets[key] = fieldSet } - - // Determine the type of change. - isSimpleModification := false - var simpleAddedFields diff.FieldSet - - for _, oldSet := range resourceDiff.FieldSets.Old.ExactlyOneOf { - if oldSet.IsSubsetOf(newSet) { - isSimpleModification = true - simpleAddedFields = newSet.Difference(oldSet) + } + for key, fieldSet := range resourceDiff.FieldSets.Old.ExactlyOneOf { + if _, ok := resourceDiff.FieldSets.New.ExactlyOneOf[key]; !ok { + oldFieldSets[key] = fieldSet + } + } + // Find old field sets which are subsets of new field sets. 
+ for _, newFieldSet := range newFieldSets { + var addedFields diff.FieldSet + found := false + for _, oldFieldSet := range oldFieldSets { + if oldFieldSet.IsSubsetOf(newFieldSet) { + addedFields = newFieldSet.Difference(oldFieldSet) + found = true break } } - - if isSimpleModification { - // Simple modification: only added fields to an existing EOO. - // Only added *existing* optional fields are breaking. - for field := range simpleAddedFields { - if !isNewField(field, resourceDiff) && !isExistingFieldRequired(field, resourceDiff) { - messages = append(messages, fmt.Sprintf("Field `%s` within resource `%s` was added to exactly one of", field, resource)) - } - } - } else if isComplexModification(newSet, resourceDiff) { - // Complex modification: e.g., add and remove. - // Any existing, optional field in the new set is breaking. New fields are not. - for field := range newSet { - if !isNewField(field, resourceDiff) && !isExistingFieldRequired(field, resourceDiff) { - messages = append(messages, fmt.Sprintf("Field `%s` within resource `%s` was added to exactly one of", field, resource)) - } - } - } else { - // Brand new EOO. - // Not breaking if it relaxes a previously required field. - isRelaxingRequired := false - for field := range newSet { - if isExistingFieldRequired(field, resourceDiff) { - isRelaxingRequired = true - break - } - } - if isRelaxingRequired { - continue - } - - // Not breaking if all fields are in a new optional ancestor. - isContained := true - if len(newSet) == 0 { - isContained = false - } - for field := range newSet { - if !isContainedInNewOptionalAncestor(field, resourceDiff) { - isContained = false - break - } - } - if isContained { - continue - } - - // Otherwise, all fields are breaking. 
- for field := range newSet { + if !found { + addedFields = newFieldSet + } + for field := range addedFields { + if fieldDiff, ok := resourceDiff.Fields[field]; ok && fieldDiff.Old != nil && !fieldDiff.Old.Required { messages = append(messages, fmt.Sprintf("Field `%s` within resource `%s` was added to exactly one of", field, resource)) } } } return messages } - -func isComplexModification(newSet diff.FieldSet, resourceDiff diff.ResourceDiff) bool { - for _, oldSet := range resourceDiff.FieldSets.Old.ExactlyOneOf { - if len(newSet.Intersection(oldSet)) > 0 { - return true - } - } - return false -} - -func isNewField(field string, diff diff.ResourceDiff) bool { - fieldDiff, ok := diff.Fields[field] - return !ok || fieldDiff.Old == nil -} - -func isExistingFieldRequired(field string, diff diff.ResourceDiff) bool { - fieldDiff, ok := diff.Fields[field] - return ok && fieldDiff.Old != nil && fieldDiff.Old.Required -} - -func isContainedInNewOptionalAncestor(field string, diff diff.ResourceDiff) bool { - parts := strings.Split(field, ".") - if len(parts) < 2 { - return false - } - ancestorName := strings.Join(parts[:len(parts)-1], ".") - ancestorDiff, ok := diff.Fields[ancestorName] - if !ok { - return false - } - - isAncestorNew := ancestorDiff.Old == nil && ancestorDiff.New != nil - isAncestorOptional := ancestorDiff.New != nil && ancestorDiff.New.Optional - - return isAncestorNew && isAncestorOptional -} diff --git a/tools/diff-processor/breaking_changes/resource_diff_test.go b/tools/diff-processor/breaking_changes/resource_diff_test.go index a614f346f426..8719423f79cf 100644 --- a/tools/diff-processor/breaking_changes/resource_diff_test.go +++ b/tools/diff-processor/breaking_changes/resource_diff_test.go @@ -168,30 +168,6 @@ var resourceSchemaRule_AddingExactlyOneOf_TestCases = []resourceSchemaTestCase{ }, expectedFields: []string{"field-c"}, }, - { - name: "adding new fields to new exactly-one-of", - resourceDiff: diff.ResourceDiff{ - FieldSets: 
diff.ResourceFieldSetsDiff{ - Old: diff.ResourceFieldSets{}, - New: diff.ResourceFieldSets{ - ExactlyOneOf: map[string]diff.FieldSet{ - "field-a,field-b": {"field-a": {}, "field-b": {}}, - }, - }, - }, - Fields: map[string]diff.FieldDiff{ - "field-a": { - Old: nil, - New: &schema.Schema{Description: "beep", Optional: true}, - }, - "field-b": { - Old: nil, - New: &schema.Schema{Description: "boop", Optional: true}, - }, - }, - }, - expectedFields: []string{"field-a", "field-b"}, - }, { name: "adding new exactly-one-of with an existing field", resourceDiff: diff.ResourceDiff{ diff --git a/tools/diff-processor/diff/diff.go b/tools/diff-processor/diff/diff.go index d1966b537ef5..0f938296b5bb 100644 --- a/tools/diff-processor/diff/diff.go +++ b/tools/diff-processor/diff/diff.go @@ -11,16 +11,10 @@ import ( // SchemaDiff is a nested map with resource names as top-level keys. type SchemaDiff map[string]ResourceDiff -type ResourceDiffInterface interface { - IsNewResource() bool - IsFieldInNewNestedStructure(fieldPath string) bool -} - type ResourceDiff struct { - ResourceConfig ResourceConfigDiff - FlattenedSchema FlattenedSchemaRaw - Fields map[string]FieldDiff - FieldSets ResourceFieldSetsDiff + ResourceConfig ResourceConfigDiff + Fields map[string]FieldDiff + FieldSets ResourceFieldSetsDiff } type ResourceFieldSetsDiff struct { @@ -35,6 +29,8 @@ type ResourceFieldSets struct { RequiredWith map[string]FieldSet } +type FieldSet map[string]struct{} + type ResourceConfigDiff struct { Old *schema.Resource New *schema.Resource @@ -45,11 +41,6 @@ type FieldDiff struct { New *schema.Schema } -type FlattenedSchemaRaw struct { - Old map[string]*schema.Schema - New map[string]*schema.Schema -} - func ComputeSchemaDiff(oldResourceMap, newResourceMap map[string]*schema.Resource) SchemaDiff { schemaDiff := make(SchemaDiff) for resource := range union(oldResourceMap, newResourceMap) { @@ -60,14 +51,12 @@ func ComputeSchemaDiff(oldResourceMap, newResourceMap map[string]*schema.Resourc 
var flattenedOldSchema map[string]*schema.Schema if oldResource, ok := oldResourceMap[resource]; ok { flattenedOldSchema = flattenSchema("", oldResource.Schema) - resourceDiff.FlattenedSchema.Old = flattenedOldSchema resourceDiff.ResourceConfig.Old = &schema.Resource{} } var flattenedNewSchema map[string]*schema.Schema if newResource, ok := newResourceMap[resource]; ok { flattenedNewSchema = flattenSchema("", newResource.Schema) - resourceDiff.FlattenedSchema.New = flattenedNewSchema resourceDiff.ResourceConfig.New = &schema.Resource{} } @@ -324,33 +313,3 @@ func setKey(set FieldSet) string { slice := setToSortedSlice(set) return strings.Join(slice, ",") } - -func (rd ResourceDiff) IsNewResource() bool { - rcd := rd.ResourceConfig - if rcd.Old == nil && rcd.New != nil { - return true - } - return false -} - -// IsFieldInNewNestedStructure determines if a field is part of a completely new nested structure -func (rd ResourceDiff) IsFieldInNewNestedStructure(fieldPath string) bool { - if rd.IsNewResource() { - return true - } - - // Get the parent path (everything before the last dot) - lastDotIndex := strings.LastIndex(fieldPath, ".") - if lastDotIndex == -1 { - // No parent path (top-level field) - return false - } - - parentPath := fieldPath[:lastDotIndex] - - // Check if parent exists in new schema but not in old schema - _, parentExistsInOld := rd.FlattenedSchema.Old[parentPath] - _, parentExistsInNew := rd.FlattenedSchema.New[parentPath] - - return !parentExistsInOld && parentExistsInNew -} diff --git a/tools/diff-processor/diff/diff_test.go b/tools/diff-processor/diff/diff_test.go index 7e449e05fe19..e3a37a32fe26 100644 --- a/tools/diff-processor/diff/diff_test.go +++ b/tools/diff-processor/diff/diff_test.go @@ -1,7 +1,6 @@ package diff import ( - "strings" "testing" newProvider "google/provider/new/google/provider" @@ -1169,46 +1168,6 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, - FlattenedSchema: 
FlattenedSchemaRaw{ - Old: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - "field_two": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field_three": {Type: schema.TypeString}, - }, - }, - }, - "field_two.field_three": {Type: schema.TypeString}, - }, - New: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - "field_two": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field_three": { - Type: schema.TypeString, - ConflictsWith: []string{"field_two.0.field_four"}, - }, - "field_four": { - Type: schema.TypeInt, - ConflictsWith: []string{"field_two.0.field_three"}, - }, - }, - }, - }, - "field_two.field_three": { - Type: schema.TypeString, - ConflictsWith: []string{"field_two.0.field_four"}, - }, - "field_two.field_four": { - Type: schema.TypeInt, - ConflictsWith: []string{"field_two.0.field_three"}, - }, - }, - }, Fields: map[string]FieldDiff{ "field_two.field_three": FieldDiff{ Old: &schema.Schema{ @@ -1330,34 +1289,6 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, - FlattenedSchema: FlattenedSchemaRaw{ - Old: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - "field_two": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field_three": {Type: schema.TypeString}, - }, - }, - }, - "field_two.field_three": {Type: schema.TypeString}, - }, - New: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - "field_two": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field_three": {Type: schema.TypeString}, - "field_four": {Type: schema.TypeInt}, - }, - }, - }, - "field_two.field_three": {Type: schema.TypeString}, - "field_two.field_four": {Type: schema.TypeInt}, - }, - }, Fields: map[string]FieldDiff{ "field_two.field_four": FieldDiff{ Old: nil, @@ -1370,34 +1301,6 @@ func 
TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, - FlattenedSchema: FlattenedSchemaRaw{ - Old: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - "field_two": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field_three": {Type: schema.TypeString}, - }, - }, - }, - "field_two.field_three": {Type: schema.TypeString}, - }, - New: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - "field_two": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field_three": {Type: schema.TypeString}, - "field_four": {Type: schema.TypeInt}, - }, - }, - }, - "field_two.field_three": {Type: schema.TypeString}, - "field_two.field_four": {Type: schema.TypeInt}, - }, - }, Fields: map[string]FieldDiff{ "field_two.field_four": FieldDiff{ Old: nil, @@ -1428,12 +1331,6 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: &schema.Resource{}, }, - FlattenedSchema: FlattenedSchemaRaw{ - Old: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - }, - New: map[string]*schema.Schema{}, - }, Fields: map[string]FieldDiff{ "field_one": FieldDiff{ Old: &schema.Schema{Type: schema.TypeString}, @@ -1459,12 +1356,6 @@ func TestComputeSchemaDiff(t *testing.T) { Old: &schema.Resource{}, New: nil, }, - FlattenedSchema: FlattenedSchemaRaw{ - Old: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - }, - New: nil, - }, Fields: map[string]FieldDiff{ "field_one": FieldDiff{ Old: &schema.Schema{Type: schema.TypeString}, @@ -1490,12 +1381,6 @@ func TestComputeSchemaDiff(t *testing.T) { Old: nil, New: &schema.Resource{}, }, - FlattenedSchema: FlattenedSchemaRaw{ - Old: nil, - New: map[string]*schema.Schema{ - "field_one": {Type: schema.TypeString}, - }, - }, Fields: map[string]FieldDiff{ "field_one": FieldDiff{ Old: nil, @@ -1517,252 +1402,3 @@ func TestComputeSchemaDiff(t *testing.T) { }) } } - 
-func TestIsNewResource(t *testing.T) { - cases := map[string]struct { - oldResourceMap map[string]*schema.Resource - newResourceMap map[string]*schema.Resource - resourceName string - expected bool - }{ - "resource exists in both maps": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": {Schema: map[string]*schema.Schema{}}, - }, - newResourceMap: map[string]*schema.Resource{ - "google_resource": {Schema: map[string]*schema.Schema{}}, - }, - resourceName: "google_resource", - expected: false, - }, - "resource only in new map": { - oldResourceMap: map[string]*schema.Resource{}, - newResourceMap: map[string]*schema.Resource{ - "google_resource": {Schema: map[string]*schema.Schema{}}, - }, - resourceName: "google_resource", - expected: true, - }, - "resource only in old map": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": {Schema: map[string]*schema.Schema{}}, - }, - newResourceMap: map[string]*schema.Resource{}, - resourceName: "google_resource", - expected: false, // ResourceConfig.New would be nil - }, - "resource not in diff because it has no changes": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": {Schema: map[string]*schema.Schema{}}, - }, - newResourceMap: map[string]*schema.Resource{ - "google_resource": {Schema: map[string]*schema.Schema{}}, - }, - resourceName: "non_existent_resource", - expected: false, // Resource isn't in the diff at all - }, - } - - for name, tc := range cases { - t.Run(name, func(t *testing.T) { - schemaDiff := ComputeSchemaDiff(tc.oldResourceMap, tc.newResourceMap) - resourceConfig, _ := schemaDiff[tc.resourceName] - result := resourceConfig.IsNewResource() - if result != tc.expected { - t.Errorf("IsNewResource(%q) = %v, want %v", tc.resourceName, result, tc.expected) - } - }) - } -} - -func TestIsFieldInNewNestedStructure(t *testing.T) { - cases := map[string]struct { - oldResourceMap map[string]*schema.Resource - newResourceMap map[string]*schema.Resource - 
resourceName string - fieldPath string - expected bool - }{ - "top-level field in existing resource": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "old_field": {Type: schema.TypeString}, - }, - }, - }, - newResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "old_field": {Type: schema.TypeString}, - "new_field": {Type: schema.TypeString}, - }, - }, - }, - resourceName: "google_resource", - fieldPath: "new_field", - expected: false, // Top-level field, not in a nested structure - }, - "field in existing nested structure": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "existing_field": {Type: schema.TypeString}, - }, - }, - }, - }, - }, - }, - newResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "existing_field": {Type: schema.TypeString}, - "new_field": {Type: schema.TypeString}, - }, - }, - }, - }, - }, - }, - resourceName: "google_resource", - fieldPath: "nested.new_field", - expected: false, // Parent "nested" exists in old schema - }, - "field in new nested structure": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "old_field": {Type: schema.TypeString}, - }, - }, - }, - newResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "old_field": {Type: schema.TypeString}, - "new_nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "new_field": {Type: schema.TypeString}, - }, - }, - }, - }, - }, - }, - resourceName: "google_resource", - fieldPath: "new_nested.new_field", - 
expected: true, // Parent "new_nested" doesn't exist in old schema - }, - "field in new deeply nested structure": { - oldResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "existing_nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "existing_field": {Type: schema.TypeString}, - }, - }, - }, - }, - }, - }, - newResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "existing_nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "existing_field": {Type: schema.TypeString}, - "new_nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "new_field": {Type: schema.TypeString}, - }, - }, - }, - }, - }, - }, - }, - }, - }, - resourceName: "google_resource", - fieldPath: "existing_nested.new_nested.new_field", - expected: true, // Parent "existing_nested.new_nested" doesn't exist in old schema - }, - "field in new resource": { - oldResourceMap: map[string]*schema.Resource{}, - newResourceMap: map[string]*schema.Resource{ - "google_resource": { - Schema: map[string]*schema.Schema{ - "nested": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "field": {Type: schema.TypeString}, - }, - }, - }, - }, - }, - }, - resourceName: "google_resource", - fieldPath: "nested.field", - expected: true, // New resource, so all fields are in new structures - }, - } - - for name, tc := range cases { - t.Run(name, func(t *testing.T) { - schemaDiff := ComputeSchemaDiff(tc.oldResourceMap, tc.newResourceMap) - - // Verify that FlattenedSchema was properly populated - if rd, ok := schemaDiff[tc.resourceName]; ok { - // Debug information for test verification - if tc.expected { - // If we expect the field to be in a new nested structure - // The parent path should not exist in the old schema but should exist in the 
new schema - lastDotIndex := strings.LastIndex(tc.fieldPath, ".") - if lastDotIndex != -1 { - parentPath := tc.fieldPath[:lastDotIndex] - _, parentInOld := rd.FlattenedSchema.Old[parentPath] - _, parentInNew := rd.FlattenedSchema.New[parentPath] - - // Log the verification for debugging - t.Logf("For %s: Parent path '%s' exists in old schema: %v, exists in new schema: %v", - tc.fieldPath, parentPath, parentInOld, parentInNew) - - // This should match our expectation - if parentInOld || !parentInNew { - t.Errorf("For field %s: Expected parent path %s to not exist in old schema and exist in new schema, but got old: %v, new: %v", - tc.fieldPath, parentPath, parentInOld, parentInNew) - } - } - } - } - - // Now test the actual method - resourceConfig := schemaDiff[tc.resourceName] - result := resourceConfig.IsFieldInNewNestedStructure(tc.fieldPath) - if result != tc.expected { - t.Errorf("IsFieldInNewNestedStructure(%q, %q) = %v, want %v", - tc.resourceName, tc.fieldPath, result, tc.expected) - } - }) - } -} diff --git a/tools/diff-processor/diff/sets.go b/tools/diff-processor/diff/sets.go index 4a8e73c96ad4..39e26f36c702 100644 --- a/tools/diff-processor/diff/sets.go +++ b/tools/diff-processor/diff/sets.go @@ -5,77 +5,70 @@ import ( "strings" ) -// FieldSet is a set of strings representing fields. -type FieldSet map[string]struct{} - -// Difference returns the fields in s that are not in other. -func (s FieldSet) Difference(other FieldSet) FieldSet { - diff := make(FieldSet) - for k := range s { - if _, ok := other[k]; !ok { - diff[k] = struct{}{} - } +// Return the union of two maps, overwriting any shared keys with the second map's values +func union[K comparable, V any](map1, map2 map[K]V) map[K]V { + if len(map1) == 0 { + return map2 } - return diff -} - -// IsSubsetOf returns true if s is a subset of other. 
-func (s FieldSet) IsSubsetOf(other FieldSet) bool { - for k := range s { - if _, ok := other[k]; !ok { - return false - } + if len(map2) == 0 { + return map1 } - return true -} - -// Intersection returns the fields that are in both s and other. -func (s FieldSet) Intersection(other FieldSet) FieldSet { - intersection := make(FieldSet) - for k := range s { - if _, ok := other[k]; ok { - intersection[k] = struct{}{} - } + merged := make(map[K]V, len(map1)+len(map2)) + for k, v := range map1 { + merged[k] = v } - return intersection + for k, v := range map2 { + merged[k] = v + } + return merged } -func sliceToSet(slice []string) FieldSet { - set := make(FieldSet) - for _, s := range slice { - if s != "" { - set[s] = struct{}{} - } +func sliceToSetRemoveZeroPadding(slice []string) map[string]struct{} { + set := make(map[string]struct{}) + for _, item := range slice { + set[removeZeroPadding(item)] = struct{}{} } return set } -func sliceToSetRemoveZeroPadding(slice []string) FieldSet { - set := make(FieldSet) - for _, s := range slice { - if s != "" { - set[strings.ReplaceAll(s, ".0", "")] = struct{}{} +// field1.0.field2 -> field1.field2 +func removeZeroPadding(zeroPadded string) string { + var trimmed string + for _, part := range strings.Split(zeroPadded, ".") { + if part != "0" { + trimmed += part + "." 
} } - return set + if trimmed == "" { + return "" + } + return trimmed[:len(trimmed)-1] } -func setToSortedSlice(set FieldSet) []string { +func setToSortedSlice(set map[string]struct{}) []string { slice := make([]string, 0, len(set)) - for k := range set { - slice = append(slice, k) + for item := range set { + slice = append(slice, item) } sort.Strings(slice) return slice } -func union[T any](a, b map[string]T) map[string]struct{} { - c := make(map[string]struct{}) - for k := range a { - c[k] = struct{}{} +func (fs FieldSet) IsSubsetOf(other FieldSet) bool { + for field := range fs { + if _, ok := other[field]; !ok { + return false + } } - for k := range b { - c[k] = struct{}{} + return true +} + +func (fs FieldSet) Difference(subset FieldSet) map[string]struct{} { + diff := make(map[string]struct{}) + for k := range fs { + if _, ok := subset[k]; !ok { + diff[k] = struct{}{} + } } - return c + return diff } diff --git a/tools/diff-processor/diff/sets_test.go b/tools/diff-processor/diff/sets_test.go index 84387798de03..4f58ec8319d4 100644 --- a/tools/diff-processor/diff/sets_test.go +++ b/tools/diff-processor/diff/sets_test.go @@ -1,16 +1,11 @@ package diff import ( - "strings" "testing" "github.com/google/go-cmp/cmp" ) -func removeZeroPadding(s string) string { - return strings.ReplaceAll(s, ".0", "") -} - func TestRemoveZeroPadding(t *testing.T) { for _, tc := range []struct { name string diff --git a/tools/issue-labeler/go.mod b/tools/issue-labeler/go.mod index c4fc7833d382..98c26dfa69fe 100644 --- a/tools/issue-labeler/go.mod +++ b/tools/issue-labeler/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( github.com/golang/glog v1.1.1 - github.com/google/go-github/v68 v68.0.0 + github.com/google/go-github/v61 v61.0.0 github.com/spf13/cobra v1.8.1 golang.org/x/exp v0.0.0-20230810033253-352e893a4cad golang.org/x/oauth2 v0.24.0 diff --git a/tools/issue-labeler/go.sum b/tools/issue-labeler/go.sum index 8175a6852808..6bf2d02dc507 100644 --- a/tools/issue-labeler/go.sum +++ 
b/tools/issue-labeler/go.sum @@ -4,8 +4,8 @@ github.com/golang/glog v1.1.1/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v68 v68.0.0 h1:ZW57zeNZiXTdQ16qrDiZ0k6XucrxZ2CGmoTvcCyQG6s= -github.com/google/go-github/v68 v68.0.0/go.mod h1:K9HAUBovM2sLwM408A18h+wd9vqdLOEqTUCbnRIcx68= +github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go= +github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= diff --git a/tools/issue-labeler/labeler/backfill.go b/tools/issue-labeler/labeler/backfill.go index f7689f0316b5..f97499299d1f 100644 --- a/tools/issue-labeler/labeler/backfill.go +++ b/tools/issue-labeler/labeler/backfill.go @@ -9,7 +9,7 @@ import ( "time" "github.com/golang/glog" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" ) type Label struct { diff --git a/tools/issue-labeler/labeler/backfill_test.go b/tools/issue-labeler/labeler/backfill_test.go index c06e0f1279a9..294a405ef915 100644 --- a/tools/issue-labeler/labeler/backfill_test.go +++ b/tools/issue-labeler/labeler/backfill_test.go @@ -7,11 +7,11 @@ import ( "strings" "testing" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" ) func testIssueBodyWithResources(resources []string) *string { - return github.Ptr(fmt.Sprintf(` + return github.String(fmt.Sprintf(` ### New or Affected Resource(s): %s @@ -54,7 +54,7 @@ func 
TestComputeIssueUpdates(t *testing.T) { description: "gracefully handle a nil issue body", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), }, }, regexpLabels: defaultRegexpLabels, @@ -75,8 +75,8 @@ func TestComputeIssueUpdates(t *testing.T) { name: "no listed resources", issues: []*github.Issue{ { - Number: github.Ptr(1), - Body: github.Ptr("Body with unusual structure"), + Number: github.Int(1), + Body: github.String("Body with unusual structure"), }, }, regexpLabels: defaultRegexpLabels, @@ -87,14 +87,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "issues with service/terraform shouldn't get new labels", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("service/terraform")}}, + Labels: []*github.Label{{Name: github.String("service/terraform")}}, }, { - Number: github.Ptr(2), + Number: github.Int(2), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("forward/exempt")}}, + Labels: []*github.Label{{Name: github.String("forward/exempt")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -105,11 +105,11 @@ func TestComputeIssueUpdates(t *testing.T) { description: "issues with affected resources should normally get new labels added", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), }, { - Number: github.Ptr(2), + Number: github.Int(2), Body: testIssueBodyWithResources([]string{"google_service2_resource1"}), }, }, @@ -130,14 +130,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "don't update issues if all expected service labels are already present", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: 
[]*github.Label{{Name: github.Ptr("service/service1")}}, + Labels: []*github.Label{{Name: github.String("service/service1")}}, }, { - Number: github.Ptr(2), + Number: github.Int(2), Body: testIssueBodyWithResources([]string{"google_service2_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("service/service2-subteam1")}}, + Labels: []*github.Label{{Name: github.String("service/service2-subteam1")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -148,14 +148,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "add missing service labels", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("service/service2-subteam1")}}, + Labels: []*github.Label{{Name: github.String("service/service2-subteam1")}}, }, { - Number: github.Ptr(2), + Number: github.Int(2), Body: testIssueBodyWithResources([]string{"google_service2_resource2"}), - Labels: []*github.Label{{Name: github.Ptr("service/service1")}}, + Labels: []*github.Label{{Name: github.String("service/service1")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -177,9 +177,9 @@ func TestComputeIssueUpdates(t *testing.T) { description: "don't add missing service labels if already linked", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), Body: testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("service/service2-subteam1")}, {Name: github.Ptr("forward/linked")}}, + Labels: []*github.Label{{Name: github.String("service/service2-subteam1")}, {Name: github.String("forward/linked")}}, }, }, regexpLabels: defaultRegexpLabels, @@ -190,14 +190,14 @@ func TestComputeIssueUpdates(t *testing.T) { description: "add service labels if missed but don't add forward/review label for test failure ticket", issues: []*github.Issue{ { - Number: github.Ptr(1), + Number: github.Int(1), Body: 
testIssueBodyWithResources([]string{"google_service1_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("test-failure")}, {Name: github.Ptr("test-failure-100")}}, + Labels: []*github.Label{{Name: github.String("test-failure")}, {Name: github.String("test-failure-100")}}, }, { - Number: github.Ptr(2), + Number: github.Int(2), Body: testIssueBodyWithResources([]string{"google_service2_resource1"}), - Labels: []*github.Label{{Name: github.Ptr("test-failure")}, {Name: github.Ptr("test-failure-50")}, {Name: github.Ptr("service/service2-subteam1")}}, + Labels: []*github.Label{{Name: github.String("test-failure")}, {Name: github.String("test-failure-50")}, {Name: github.String("service/service2-subteam1")}}, }, }, regexpLabels: defaultRegexpLabels, diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index d7aaa4a2fca2..480d1e3eb292 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -29,10 +29,6 @@ service/aiplatform-prediction: - google_vertex_ai_endpoint - google_vertex_ai_deployment_resource_pool - google_ml_engine_model - - google_vertex_ai_endpoint_with_model_garden_deployment -service/aiplatform-rag-engine: - resources: - - google_vertex_ai_rag_engine_config service/aiplatform-tensorboard: resources: - google_vertex_ai_tensorboard @@ -188,8 +184,6 @@ service/compute-instances: service/compute-interconnect: resources: - google_compute_interconnect.* - - google_compute_wire_group - - google_compute_cross_site_network service/compute-ipam: resources: - google_compute_address @@ -332,7 +326,6 @@ service/dataproc: - google_dataproc_job.* - google_dataproc_workflow_template - google_dataproc_batch - - google_dataproc_session_template service/dataprocgdc: resources: - google_dataproc_gdc_.* @@ -393,8 +386,6 @@ service/firebase: - google_firebase_storage.* - google_firebase_web.* - google_firebaserules_.* -service/firebaseapphosting: - resources: - 
google_firebase_app_hosting.* service/firebasedataconnect: resources: @@ -449,7 +440,8 @@ service/iam-serviceaccount: service/iam-wlid: resources: - google_iam_access_boundary_policy - - google_iam_workload_identity_pool.* + - google_iam_workload_identity_pool + - google_iam_workload_identity_pool_provider service/iam-workforce: resources: - google_iam_workforce_pool.* @@ -496,9 +488,6 @@ service/metastore: service/migrationcenter: resources: - google_migration_center_.* -service/modelarmor: - resources: - - google_model_armor_.* service/monitoring-alerting: resources: - google_monitoring_notification_channel @@ -649,9 +638,6 @@ service/run: team: cloud-run-control-plane resources: - google_cloud_run_.* -service/saasservicemgmt: - resources: - - google_saas_runtime_.* service/secretmanager: resources: - google_secret_manager_.* diff --git a/tools/issue-labeler/labeler/github.go b/tools/issue-labeler/labeler/github.go index 62d6b6bf6cf4..a71beb90ea8d 100644 --- a/tools/issue-labeler/labeler/github.go +++ b/tools/issue-labeler/labeler/github.go @@ -6,7 +6,7 @@ import ( "os" "strings" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" "golang.org/x/oauth2" ) diff --git a/tools/issue-labeler/labeler/labels.go b/tools/issue-labeler/labeler/labels.go index e10f9f3775b7..9aa6ce98b4d0 100644 --- a/tools/issue-labeler/labeler/labels.go +++ b/tools/issue-labeler/labeler/labels.go @@ -10,11 +10,11 @@ import ( _ "embed" "github.com/golang/glog" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" "gopkg.in/yaml.v2" ) -var sectionRegexp = regexp.MustCompile(`#+ (New or )?Affected Resource\(s\)[^#]+`) +var sectionRegexp = regexp.MustCompile(`### (New or )?Affected Resource\(s\)[^#]+`) var commentRegexp = regexp.MustCompile(``) var resourceRegexp = regexp.MustCompile(`google_[\w*.]+`) diff --git a/tools/issue-labeler/labeler/labels_test.go b/tools/issue-labeler/labeler/labels_test.go index 
612193dbdba9..07658c9260b0 100644 --- a/tools/issue-labeler/labeler/labels_test.go +++ b/tools/issue-labeler/labeler/labels_test.go @@ -5,56 +5,36 @@ import ( "regexp" "testing" - "github.com/google/go-github/v68/github" + "github.com/google/go-github/v61/github" "golang.org/x/exp/slices" ) func TestExtractAffectedResources(t *testing.T) { - cases := []struct { - name string + cases := map[string]struct { body string expectedResources []string }{ - { - name: "2023 bug", + "2023 bug": { body: "\r\n\r\n### Community Note\r\n\r\n* Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request.\r\n* Please do not leave _+1_ or _me too_ comments, they generate extra noise for issue followers and do not help prioritize the request.\r\n* If you are interested in working on this issue or have submitted a pull request, please leave a comment.\r\n* If an issue is assigned to the `modular-magician` user, it is either in the process of being autogenerated, or is planned to be autogenerated soon. If an issue is assigned to a user, that user is claiming responsibility for the issue. 
If an issue is assigned to `hashibot`, a community member has claimed the issue already.\r\n\r\n\r\n\r\n### Terraform Version\r\n\r\n\r\nTerraform v1.3.7\r\non linux_amd64\r\nprovider registry.terraform.io/hashicorp/google v4.48.0\r\n\r\n### Affected Resource(s)\r\n\r\n\r\n\r\n* google_container_node_pool\r\n* google_container_cluster\r\n\r\n### Terraform Configuration Files\r\n\r\n\r\n\r\n```tf\r\nnode_config {\r\n tags = null\r\n}\r\n```\r\n### Expected Behavior\r\n\r\nIn above code, if there already exists a list of tags defined in the `node_config` block then I would expect TF to ignore this tags field and leave them as they are\r\n\r\n### Actual Behavior\r\n\r\nTF sets the tags to an empty list, [], thus removing existing tags\r\n\r\n### Steps to Reproduce\r\n\r\n1. Create google nodepool TF code with node_config block and set `tags` within the block to a list of strings, i.e. `tags=[ \"one\", \"two\" ]`\r\n2. Terraform apply to create the nodepool with this node config\r\n3. Now update code to say `tags=null`\r\n4. Terraform apply and see the tags removed rather than ignored\r\n", expectedResources: []string{"google_container_node_pool", "google_container_cluster"}, }, - { - name: "2023 enhancement", + "2023 enhancement": { body: "\r\n\r\n### Community Note\r\n\r\n* Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request\r\n* Please do not leave \"+1\" or \"me too\" comments, they generate extra noise for issue followers and do not help prioritize the request\r\n* If you are interested in working on this issue or have submitted a pull request, please leave a comment. If the issue is assigned to the \"modular-magician\" user, it is either in the process of being autogenerated, or is planned to be autogenerated soon. 
If the issue is assigned to a user, that user is claiming responsibility for the issue. If the issue is assigned to \"hashibot\", a community member has claimed the issue already.\r\n\r\n\r\n\r\n### Description\r\n\r\n\r\n\r\nSupport for creating mute configs in SCC:\r\nhttps://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.muteConfigs/create\r\n\r\n### New or Affected Resource(s)\r\n\r\n\r\n\r\n* google_scc_mute_config\r\n\r\n### Potential Terraform Configuration\r\n\r\n\r\n\r\n```tf\r\nresource \"google_scc_mute_config\" \"my_config\" {\r\n config_id = \"my-config\"\r\n organisation = \"12345678\"\r\n description = \"My Awesome Mute Config\"\r\n filter = \"severity=LOW\"\r\n}\r\n```\r\n\r\nCurious as to why the current notification config is only supported at the org level? Even though the parent config can exist at folder or project level? (Same applies here)\r\n\r\n### References\r\n\r\n\r\n\r\n* #0000\r\n\r\n\r\n", expectedResources: []string{"google_scc_mute_config"}, }, - { - name: "google_* comment ignored", + "google_* comment ignored": { body: "\r\n\r\n### Community Note\r\n\r\n* Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request\r\n* Please do not leave \"+1\" or \"me too\" comments, they generate extra noise for issue followers and do not help prioritize the request\r\n* If you are interested in working on this issue or have submitted a pull request, please leave a comment. If the issue is assigned to the \"modular-magician\" user, it is either in the process of being autogenerated, or is planned to be autogenerated soon. If the issue is assigned to a user, that user is claiming responsibility for the issue. 
If the issue is assigned to \"hashibot\", a community member has claimed the issue already.\r\n\r\n\r\n\r\n### Description\r\n\r\n\r\n\r\nSupport for creating mute configs in SCC:\r\nhttps://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.muteConfigs/create\r\n\r\n### New or Affected Resource(s)\r\n\r\n\r\n\r\n* google_scc_mute_config\r\n\r\n### Potential Terraform Configuration\r\n\r\n\r\n\r\n```tf\r\nresource \"google_scc_mute_config\" \"my_config\" {\r\n config_id = \"my-config\"\r\n organisation = \"12345678\"\r\n description = \"My Awesome Mute Config\"\r\n filter = \"severity=LOW\"\r\n}\r\n```\r\n\r\nCurious as to why the current notification config is only supported at the org level? Even though the parent config can exist at folder or project level? (Same applies here)\r\n\r\n### References\r\n\r\n\r\n\r\n* #0000\r\n\r\n\r\n", expectedResources: []string{"google_scc_mute_config"}, }, - { - name: "no resources returns empty slice", + "no resources returns empty slice": { body: "### New or Affected Resource(s):\r\n#", expectedResources: []string{}, }, - { - name: "h1", - body: "\n# New or Affected Resource(s):\r\ngoogle_scc_mute_config", - expectedResources: []string{"google_scc_mute_config"}, - }, - { - name: "h2", - body: "\n## New or Affected Resource(s):\r\ngoogle_scc_mute_config", - expectedResources: []string{"google_scc_mute_config"}, - }, - { - name: "h4", - body: "\n#### New or Affected Resource(s):\r\ngoogle_scc_mute_config", - expectedResources: []string{"google_scc_mute_config"}, - }, } - for _, tc := range cases { + for tn, tc := range cases { tc := tc - t.Run(tc.name, func(t *testing.T) { + t.Run(tn, func(t *testing.T) { t.Parallel() resources := ExtractAffectedResources(tc.body) if !slices.Equal(resources, tc.expectedResources) { @@ -245,9 +225,9 @@ func TestComputeLabelChanges(t *testing.T) { { name: "existing labels with correct color", existingLabels: []*github.Label{ - {Name: github.Ptr("xyz"), Color: 
github.Ptr("FF0000")}, - {Name: github.Ptr("bug"), Color: github.Ptr("FF0000")}, - {Name: github.Ptr("enhancement"), Color: github.Ptr("FF0000")}, + {Name: github.String("xyz"), Color: github.String("FF0000")}, + {Name: github.String("bug"), Color: github.String("FF0000")}, + {Name: github.String("enhancement"), Color: github.String("FF0000")}, }, desiredLabels: []string{"bug", "enhancement"}, desiredColor: "FF0000", @@ -259,8 +239,8 @@ func TestComputeLabelChanges(t *testing.T) { { name: "existing labels with wrong color", existingLabels: []*github.Label{ - {Name: github.Ptr("bug"), Color: github.Ptr("00FF00")}, - {Name: github.Ptr("enhancement"), Color: github.Ptr("00FF00")}, + {Name: github.String("bug"), Color: github.String("00FF00")}, + {Name: github.String("enhancement"), Color: github.String("00FF00")}, }, desiredLabels: []string{"bug", "enhancement"}, desiredColor: "FF0000", @@ -272,7 +252,7 @@ func TestComputeLabelChanges(t *testing.T) { { name: "mixed existing and new labels", existingLabels: []*github.Label{ - {Name: github.Ptr("bug"), Color: github.Ptr("FF0000")}, + {Name: github.String("bug"), Color: github.String("FF0000")}, }, desiredLabels: []string{"bug", "enhancement"}, desiredColor: "FF0000", @@ -284,7 +264,7 @@ func TestComputeLabelChanges(t *testing.T) { { name: "case insensitive color comparison", existingLabels: []*github.Label{ - {Name: github.Ptr("bug"), Color: github.Ptr("ff0000")}, + {Name: github.String("bug"), Color: github.String("ff0000")}, }, desiredLabels: []string{"bug"}, desiredColor: "FF0000", diff --git a/tools/template-check/cmd/root.go b/tools/template-check/cmd/root.go deleted file mode 100644 index 90b71bdc71c5..000000000000 --- a/tools/template-check/cmd/root.go +++ /dev/null @@ -1,43 +0,0 @@ -package cmd - -import ( - "fmt" - "os" - - "github.com/spf13/cobra" -) - -const rootCmdDesc = "Utilities for template checks." 
- -type rootOptions struct { -} - -func newRootCmd() (*cobra.Command, *rootOptions, error) { - o := &rootOptions{} - cmd := &cobra.Command{ - Use: "template-check", - Short: rootCmdDesc, - Long: rootCmdDesc, - SilenceUsage: true, - SilenceErrors: true, - } - cmd.AddCommand(newversionGuardCmd(o)) - return cmd, o, nil -} - -// Execute is the entry-point for all commands. -// This lets us keep all new command functions private. -func Execute() { - rootCmd, _, err := newRootCmd() - if err != nil { - fmt.Printf("Error creating root logger: %s", err) - os.Exit(1) - } - err = rootCmd.Execute() - if err == nil { - os.Exit(0) - } else { - fmt.Println(err.Error()) - os.Exit(1) - } -} diff --git a/tools/template-check/cmd/versionguard.go b/tools/template-check/cmd/versionguard.go deleted file mode 100644 index f6885e3beefc..000000000000 --- a/tools/template-check/cmd/versionguard.go +++ /dev/null @@ -1,61 +0,0 @@ -package cmd - -import ( - "fmt" - - "io" - "os" - - "github.com/GoogleCloudPlatform/magic-modules/tools/template-check/gotemplate" - "github.com/spf13/cobra" -) - -const versionGuardDesc = `Check the files for version guards` - -type versionGuardOptions struct { - rootOptions *rootOptions - stdout io.Writer - fileList []string -} - -func newversionGuardCmd(rootOptions *rootOptions) *cobra.Command { - o := &versionGuardOptions{ - rootOptions: rootOptions, - stdout: os.Stdout, - } - command := &cobra.Command{ - Use: "version-guard", - Short: versionGuardDesc, - Long: versionGuardDesc, - RunE: func(c *cobra.Command, args []string) error { - return o.run() - }, - } - - command.Flags().StringSliceVar(&o.fileList, "file-list", []string{}, "file list to check") - return command - -} -func (o *versionGuardOptions) run() error { - if len(o.fileList) == 0 { - return nil - } - foundInvalidGuards := false - for _, fileName := range o.fileList { - results, err := gotemplate.CheckVersionGuardsForFile(fileName) - if err != nil { - return err - } - if len(results) > 0 { - 
fmt.Fprintf(os.Stderr, "%s:\n", fileName) - foundInvalidGuards = true - for _, result := range results { - fmt.Fprintf(os.Stderr, " %s\n", result) - } - } - } - if foundInvalidGuards { - return fmt.Errorf("found invalid version guards") - } - return nil -} diff --git a/tools/template-check/go.mod b/tools/template-check/go.mod index a52e908e7994..9aac9498761d 100644 --- a/tools/template-check/go.mod +++ b/tools/template-check/go.mod @@ -1,16 +1,3 @@ module github.com/GoogleCloudPlatform/magic-modules/tools/template-check -go 1.23.0 - -toolchain go1.23.1 - -require ( - github.com/google/go-cmp v0.7.0 - github.com/spf13/cobra v1.9.1 - gopkg.in/yaml.v2 v2.4.0 -) - -require ( - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/spf13/pflag v1.0.6 // indirect -) +go 1.23 diff --git a/tools/template-check/go.sum b/tools/template-check/go.sum deleted file mode 100644 index 69e02c70c16e..000000000000 --- a/tools/template-check/go.sum +++ /dev/null @@ -1,15 +0,0 @@ -github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= -github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= -github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= -github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/tools/template-check/main.go b/tools/template-check/main.go index f785150aed3d..ac44d3b3a30a 100644 --- a/tools/template-check/main.go +++ b/tools/template-check/main.go @@ -1,7 +1,66 @@ package main -import "github.com/GoogleCloudPlatform/magic-modules/tools/template-check/cmd" +import ( + "bufio" + "flag" + "fmt" + "os" + + "github.com/GoogleCloudPlatform/magic-modules/tools/template-check/gotemplate" +) + +func isValidTemplate(filename string) (bool, error) { + results, err := gotemplate.CheckVersionGuardsForFile(filename) + if err != nil { + return false, err + } + + if len(results) > 0 { + fmt.Fprintf(os.Stderr, "error: invalid version checks found in %s:\n", filename) + for _, result := range results { + fmt.Fprintf(os.Stderr, " %s\n", result) + } + return false, nil + } + + return true, nil +} + +func checkTemplate(filename string) bool { + valid, err := isValidTemplate(filename) + if err != nil { + fmt.Fprintln(os.Stderr, err.Error()) + return false + } + return valid +} func main() { - cmd.Execute() + flag.Usage = func() { + fmt.Fprintf(flag.CommandLine.Output(), "template-check - check that a template file is valid\n template-check [file]\n") + } + + flag.Parse() + + // Handle file as a positional argument + if flag.Arg(0) != "" { + if !checkTemplate(flag.Arg(0)) { + os.Exit(1) + } + os.Exit(0) + } + + // Handle files coming from a linux pipe + fileInfo, _ := os.Stdin.Stat() + if fileInfo.Mode()&os.ModeCharDevice == 0 { + exitStatus := 0 + scanner := bufio.NewScanner(bufio.NewReader(os.Stdin)) + for scanner.Scan() { + if !checkTemplate(scanner.Text()) { + exitStatus = 1 + } + } + + os.Exit(exitStatus) + } } diff --git 
a/tpgtools/go.mod b/tpgtools/go.mod index f80b39ae5daa..4d9c2785f262 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index 058e41114a23..7f702b3eb310 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -6,8 +6,8 @@ cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdi cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 h1:pvSYcI7HKOtqHTr4E9cRqVbgnh0+qnJZCrnmozltFVg= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0 h1:vaebDVboAZ2tbAoMKRsprO3zAdZnQegYFhkgAwjJC8g= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.79.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= diff --git a/tpgtools/ignored_handwritten/custom_import.go b/tpgtools/ignored_handwritten/custom_import.go index b50236160e00..be5cfbc9c300 100644 --- a/tpgtools/ignored_handwritten/custom_import.go +++ 
b/tpgtools/ignored_handwritten/custom_import.go @@ -10,8 +10,8 @@ import ( func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/repos/(?P.+)$", - "^(?P.+)$", + "projects/(?P[^/]+)/repos/(?P.+)", + "(?P.+)", }, d, config); err != nil { return err } @@ -28,8 +28,8 @@ func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) erro func runtimeconfigVariableImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "^projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)$", - "^(?P[^/]+)/(?P.+)$", + "projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)", + "(?P[^/]+)/(?P.+)", }, d, config); err != nil { return err } diff --git a/tpgtools/main.go b/tpgtools/main.go index 67f2cd81de17..19ff83fa5e4e 100644 --- a/tpgtools/main.go +++ b/tpgtools/main.go @@ -110,6 +110,7 @@ func main() { } // product specific generation + generateProductsFile("provider_dcl_endpoints", productsForVersion) generateProductsFile("provider_dcl_client_creation", productsForVersion) if oPath == nil || *oPath == "" { diff --git a/tpgtools/overrides/apikeys/beta/tpgtools_product.yaml b/tpgtools/overrides/apikeys/beta/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/apikeys/beta/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl b/tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl deleted file mode 100644 index a0b9a45815dd..000000000000 --- a/tpgtools/overrides/apikeys/samples/key/service_account_key.tf.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -resource "google_apikeys_key" "primary" { - name = "{{key}}" - display_name = "sample-key" - project = google_project.project.project_id - service_account_email = 
google_service_account.key_service_account.email -} - -resource "google_project" "project" { - project_id = "{{app}}" - name = "{{app}}" - org_id = "{{org_id}}" - deletion_policy = "DELETE" -} - -resource "google_service_account" "key_service_account" { - account_id = "{{app}}" - project = google_project.project.project_id - display_name = "Test Service Account" -} \ No newline at end of file diff --git a/tpgtools/overrides/apikeys/samples/key/service_account_key.yaml b/tpgtools/overrides/apikeys/samples/key/service_account_key.yaml deleted file mode 100755 index 6b60761db48e..000000000000 --- a/tpgtools/overrides/apikeys/samples/key/service_account_key.yaml +++ /dev/null @@ -1,11 +0,0 @@ -variables: - - name: "app" - type: "resource_name" - - name: "project" - type: "project" - - name: "key" - type: "resource_name" - - name: "org_id" - type: "org_id" - - name: "billing_account" - type: "billing_account" diff --git a/tpgtools/overrides/apikeys/tpgtools_product.yaml b/tpgtools/overrides/apikeys/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/apikeys/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml b/tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/assuredworkloads/beta/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/assuredworkloads/tpgtools_product.yaml b/tpgtools/overrides/assuredworkloads/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/assuredworkloads/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml 
b/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml index b260c549bce5..c14db0746cb2 100644 --- a/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml +++ b/tpgtools/overrides/cloudbuild/beta/tpgtools_product.yaml @@ -1,7 +1,6 @@ - type: PRODUCT_BASE_PATH details: - skip: true - basepathidentifier: cloud_build + basepathidentifier: cloud_build_worker_pool - type: PRODUCT_TITLE # used to align with mmv1 product details: title: "cloudbuild" diff --git a/tpgtools/overrides/cloudbuild/tpgtools_product.yaml b/tpgtools/overrides/cloudbuild/tpgtools_product.yaml index b260c549bce5..c14db0746cb2 100644 --- a/tpgtools/overrides/cloudbuild/tpgtools_product.yaml +++ b/tpgtools/overrides/cloudbuild/tpgtools_product.yaml @@ -1,7 +1,6 @@ - type: PRODUCT_BASE_PATH details: - skip: true - basepathidentifier: cloud_build + basepathidentifier: cloud_build_worker_pool - type: PRODUCT_TITLE # used to align with mmv1 product details: title: "cloudbuild" diff --git a/tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml b/tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/cloudresourcemanager/beta/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml b/tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/cloudresourcemanager/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml b/tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/firebaserules/beta/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: 
true diff --git a/tpgtools/overrides/firebaserules/tpgtools_product.yaml b/tpgtools/overrides/firebaserules/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/firebaserules/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml b/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml index 589844700c05..cbded10b5146 100644 --- a/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml +++ b/tpgtools/overrides/gkehub/beta/tpgtools_product.yaml @@ -3,5 +3,4 @@ ## Skip base path generation... needs to not share a name with GKEHub Membership - type: PRODUCT_BASE_PATH details: - skip: true - basepathidentifier: gkehub + basepathidentifier: gkehub_feature diff --git a/tpgtools/overrides/gkehub/tpgtools_product.yaml b/tpgtools/overrides/gkehub/tpgtools_product.yaml index 589844700c05..cbded10b5146 100644 --- a/tpgtools/overrides/gkehub/tpgtools_product.yaml +++ b/tpgtools/overrides/gkehub/tpgtools_product.yaml @@ -3,5 +3,4 @@ ## Skip base path generation... 
needs to not share a name with GKEHub Membership - type: PRODUCT_BASE_PATH details: - skip: true - basepathidentifier: gkehub + basepathidentifier: gkehub_feature diff --git a/tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml b/tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/recaptchaenterprise/beta/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml b/tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml deleted file mode 100644 index 862ff73cb130..000000000000 --- a/tpgtools/overrides/recaptchaenterprise/tpgtools_product.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- type: PRODUCT_BASE_PATH - details: - skip: true diff --git a/tpgtools/property.go b/tpgtools/property.go index aca248781c04..21b4341b7965 100644 --- a/tpgtools/property.go +++ b/tpgtools/property.go @@ -603,7 +603,7 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher if v, ok := v.Extension["x-dcl-conflicts"].([]interface{}); ok { // NOTE: DCL not label x-dcl-conflicts for reused types - // TODO: handle nested field when b/213503595 got fixed + // TODO(shuya): handle nested field when b/213503595 got fixed if parent == nil { for _, ci := range v { diff --git a/tpgtools/sample.go b/tpgtools/sample.go index 561963db0867..4bf0c058c509 100644 --- a/tpgtools/sample.go +++ b/tpgtools/sample.go @@ -337,7 +337,7 @@ func (s *Sample) EnumerateWithUpdateSamples() []Sample { for i, update := range s.Updates { newSample := *s primaryResource := update.Resource - // TODO: Consume new dependency list. + // TODO(magic-modules-eng): Consume new dependency list. 
newSample.PrimaryResource = &primaryResource if !newSample.isNativeHCL() { var newDeps []Dependency diff --git a/tpgtools/templates/provider_dcl_endpoints.go.tmpl b/tpgtools/templates/provider_dcl_endpoints.go.tmpl new file mode 100644 index 000000000000..59bbbc4df714 --- /dev/null +++ b/tpgtools/templates/provider_dcl_endpoints.go.tmpl @@ -0,0 +1,105 @@ +{{/* Copyright 2021 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: DCL *** +// +// ---------------------------------------------------------------------------- +// +// This file is managed by Magic Modules (https://github.com/GoogleCloudPlatform/magic-modules) +// and is based on the DCL (https://github.com/GoogleCloudPlatform/declarative-resource-client-library). +// Changes will need to be made to the DCL or Magic Modules instead of here. +// +// We are not currently able to accept contributions to this file. 
If changes +// are required, please file an issue at https://github.com/hashicorp/terraform-provider-google/issues/new/choose +// +// ---------------------------------------------------------------------------- + +package transport + +import ( + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + framework_schema "github.com/hashicorp/terraform-plugin-framework/provider/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// empty string is passed for dcl default since dcl +// [hardcodes the values](https://github.com/GoogleCloudPlatform/declarative-resource-client-library/blob/main/services/google/eventarc/beta/trigger_internal.go#L96-L103) +{{range $index, $pkg := .}} +{{- if $pkg.ShouldWriteProductBasePath }} +var {{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey = "{{$pkg.BasePathIdentifier}}_custom_endpoint" +var {{$pkg.BasePathIdentifier.ToTitle}}EndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, +} +{{- end}} +{{end}} + +type DCLConfig struct { +{{- range $index, $pkg := . }} +{{- if $pkg.ShouldWriteProductBasePath }} + {{$pkg.BasePathIdentifier.ToTitle}}BasePath string +{{- end}} +{{- end}} +} + +func ConfigureDCLProvider(provider *schema.Provider) { +{{- range $index, $pkg := . }} +{{- if $pkg.ShouldWriteProductBasePath }} + provider.Schema[{{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey] = {{$pkg.BasePathIdentifier.ToTitle}}EndpointEntry +{{- end}} +{{- end}} +} + +func HandleDCLCustomEndpointDefaults(d *schema.ResourceData) { +{{- range $index, $pkg := . 
}} +{{- if $pkg.ShouldWriteProductBasePath }} + if d.Get({{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey) == "" { + d.Set({{$pkg.BasePathIdentifier.ToTitle}}EndpointEntryKey, MultiEnvDefault([]string{ + "GOOGLE_{{$pkg.BasePathIdentifier.ToUpper}}_CUSTOM_ENDPOINT", + }, "")) + } +{{- end}} +{{- end}} +} + +// plugin-framework provider set-up +func ConfigureDCLCustomEndpointAttributesFramework(frameworkSchema *framework_schema.Schema) { +{{- range $index, $pkg := . }} +{{- if $pkg.ShouldWriteProductBasePath }} + frameworkSchema.Attributes["{{$pkg.BasePathIdentifier}}_custom_endpoint"] = framework_schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + CustomEndpointValidator(), + }, + } +{{- end}} +{{- end}} +} + +func ProviderDCLConfigure(d *schema.ResourceData, config *Config) interface{} { + // networkConnectivity uses mmv1 basePath, assuredworkloads has a location variable in the basepath, can't be defined here. + config.ApikeysBasePath = "https://apikeys.googleapis.com/v2/" + config.AssuredWorkloadsBasePath = d.Get(AssuredWorkloadsEndpointEntryKey).(string) + config.CloudBuildWorkerPoolBasePath = "https://cloudbuild.googleapis.com/v1/" + config.CloudResourceManagerBasePath = "https://cloudresourcemanager.googleapis.com/" + config.EventarcBasePath = "https://eventarc.googleapis.com/v1/" + config.FirebaserulesBasePath = "https://firebaserules.googleapis.com/v1/" + config.RecaptchaEnterpriseBasePath = "https://recaptchaenterprise.googleapis.com/v1/" + + return config +} diff --git a/tpgtools/templates/resource.go.tmpl b/tpgtools/templates/resource.go.tmpl index ddced3024eea..e508882710dc 100644 --- a/tpgtools/templates/resource.go.tmpl +++ b/tpgtools/templates/resource.go.tmpl @@ -264,7 +264,7 @@ func resource{{$.PathType}}Create(d *schema.ResourceData, meta interface{}) erro {{ end }} {{- if $.UseTerraformID }} -{{/* TODO: When the DCL can correctly handle IDs for regional/global splits, all resources +{{/* TODO(magic-modules-eng): When 
the DCL can correctly handle IDs for regional/global splits, all resources should be converted to use the DCL's ID method, so normalization can be uniform. */}} id, err := {{ $.IDFunction }}(d, config, "{{$.ID}}") {{- else }} diff --git a/tpgtools/templates/serialization.go.tmpl b/tpgtools/templates/serialization.go.tmpl index f903d1d8ff2c..c1c46b76f030 100644 --- a/tpgtools/templates/serialization.go.tmpl +++ b/tpgtools/templates/serialization.go.tmpl @@ -172,7 +172,7 @@ func {{ $res.TitleCaseFullName }}{{$version.SerializationSuffix}}AsHCL(r {{$res. {{- end }} } {{- else if eq $field.Type.String "TypeMap" }} - {{- /* TODO: Implement maps with non-string values */}} + {{- /* TODO(magic-modules-eng): Implement maps with non-string values */}} outputConfig += "{{ if not $field.Collapsed }}\t{{end}}{{$field.Name}} = {" keys{{$field.PackageName}} := []string{} // golang range goes over maps in an arbitrary order- we've gotta order the @@ -238,7 +238,7 @@ func convert{{$res.TitleCaseFullName}}{{$version.SerializationSuffix}}{{$v.Packa {{- end }} } {{- else if eq $field.Type.String "TypeMap" }} - {{- /* TODO: Implement maps with non-string values */}} + {{- /* TODO(magic-modules-eng): Implement maps with non-string values */}} outputConfig += "{{ if not $field.Collapsed }}\t{{end}}{{$field.Name}} = {" keys{{$field.PackageName}} := []string{} // golang range goes over maps in an arbitrary order- we've gotta order the